Dataset schema for the code-file preview below; each row carries these columns (⌀ marks nullable columns):

| column | dtype | summary |
|---|---|---|
| hexsha | string | lengths 40–40 |
| size | int64 | 4 – 1.02M |
| ext | string | 8 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | lengths 4–209 |
| max_stars_repo_name | string | lengths 5–121 |
| max_stars_repo_head_hexsha | string | lengths 40–40 |
| max_stars_repo_licenses | list | lengths 1–10 |
| max_stars_count | int64 | 1 – 191k ⌀ |
| max_stars_repo_stars_event_min_datetime | string | lengths 24–24 ⌀ |
| max_stars_repo_stars_event_max_datetime | string | lengths 24–24 ⌀ |
| max_issues_repo_path | string | lengths 4–209 |
| max_issues_repo_name | string | lengths 5–121 |
| max_issues_repo_head_hexsha | string | lengths 40–40 |
| max_issues_repo_licenses | list | lengths 1–10 |
| max_issues_count | int64 | 1 – 67k ⌀ |
| max_issues_repo_issues_event_min_datetime | string | lengths 24–24 ⌀ |
| max_issues_repo_issues_event_max_datetime | string | lengths 24–24 ⌀ |
| max_forks_repo_path | string | lengths 4–209 |
| max_forks_repo_name | string | lengths 5–121 |
| max_forks_repo_head_hexsha | string | lengths 40–40 |
| max_forks_repo_licenses | list | lengths 1–10 |
| max_forks_count | int64 | 1 – 105k ⌀ |
| max_forks_repo_forks_event_min_datetime | string | lengths 24–24 ⌀ |
| max_forks_repo_forks_event_max_datetime | string | lengths 24–24 ⌀ |
| content | string | lengths 4 – 1.02M |
| avg_line_length | float64 | 1.07 – 66.1k |
| max_line_length | int64 | 4 – 266k |
| alphanum_fraction | float64 | 0.01 – 1 |
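For orientation, a minimal sketch of how a dump with this schema could be loaded and filtered; the dataset path below is a placeholder, since this preview does not name the dataset itself.

```python
# Hypothetical loading sketch: "org/code-dataset" is a placeholder path,
# not the real dataset name, which this preview does not state.
from datasets import load_dataset

ds = load_dataset("org/code-dataset", split="train", streaming=True)

# The summary columns above allow cheap filtering before touching `content`.
for row in ds:
    if row["ext"] == "py" and row["max_line_length"] < 100:
        print(row["max_stars_repo_name"], row["max_stars_repo_path"])
        break
```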
| hexsha | size | ext | lang |
|---|---|---|---|
| c537c0d9b2156872fb4bde973350ec623c843c65 | 715 | py | Python |

| event | repo_path | repo_name | head_hexsha | licenses | count | min_datetime | max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | matplotlib/gallery_python/lines_bars_and_markers/barh.py | gottaegbert/penter | 8cbb6be3c4bf67c7c69fa70e597bfbc3be4f0a2d | ["MIT"] | 13 | 2020-01-04T07:37:38.000Z | 2021-08-31T05:19:58.000Z |
| max_issues | matplotlib/gallery_python/lines_bars_and_markers/barh.py | gottaegbert/penter | 8cbb6be3c4bf67c7c69fa70e597bfbc3be4f0a2d | ["MIT"] | 5 | 2021-11-06T10:59:26.000Z | 2022-03-11T14:26:38.000Z |
| max_forks | matplotlib/examples/barh.py | yy0931/vscode-mplstyle | cd338874402a7128844d26850569e3e14eb18c08 | ["MIT"] | 9 | 2020-10-19T04:53:06.000Z | 2021-08-31T05:20:01.000Z |

content:

```python
"""
====================
Horizontal bar chart
====================
This example showcases a simple horizontal bar chart.
"""
import matplotlib.pyplot as plt
import numpy as np
# Fixing random state for reproducibility
np.random.seed(19680801)
plt.rcdefaults()
fig, ax = plt.subplots()
# Example data
people = ('Tom', 'Dick', 'Harry', 'Slim', 'Jim')
y_pos = np.arange(len(people))
performance = 3 + 10 * np.random.rand(len(people))
error = np.random.rand(len(people))
ax.barh(y_pos, performance, xerr=error, align='center')
ax.set_yticks(y_pos)
ax.set_yticklabels(people)
ax.invert_yaxis() # labels read top-to-bottom
ax.set_xlabel('Performance')
ax.set_title('How fast do you want to go today?')
plt.show()
```

| avg_line_length | max_line_length | alphanum_fraction |
|---|---|---|
| 22.34375 | 55 | 0.686713 |
| hexsha | size | ext | lang |
|---|---|---|---|
| 88b4f0a2574fdbb618c4134f69c7514b4ce8f6e2 | 68 | py | Python |

| event | repo_path | repo_name | head_hexsha | licenses | count | min_datetime | max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | Courses/HSEPython/7 week/4.py | searayeah/sublime-snippets | deff53a06948691cd5e5d7dcfa85515ddd8fab0b | ["MIT"] | null | null | null |
| max_issues | Courses/HSEPython/7 week/4.py | searayeah/sublime-snippets | deff53a06948691cd5e5d7dcfa85515ddd8fab0b | ["MIT"] | null | null | null |
| max_forks | Courses/HSEPython/7 week/4.py | searayeah/sublime-snippets | deff53a06948691cd5e5d7dcfa85515ddd8fab0b | ["MIT"] | null | null | null |

content:

```python
import sys
f = sys.stdin.read()
f = f.split()
print(len(set(f)))
```

| avg_line_length | max_line_length | alphanum_fraction |
|---|---|---|
| 8.5 | 20 | 0.602941 |
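The snippet counts distinct whitespace-separated tokens on standard input; a self-contained equivalent, with the stdin read replaced by a literal string for illustration:

```python
# Same logic as the snippet above, minus the stdin dependency.
text = "a rose is a rose is a rose"
tokens = text.split()      # split on any whitespace
print(len(set(tokens)))    # distinct tokens -> 3
```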
| hexsha | size | ext | lang |
|---|---|---|---|
| 01435d12e3280bd92e18551139797e58f3d38bfe | 2,240 | py | Python |

| event | repo_path | repo_name | head_hexsha | licenses | count | min_datetime | max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | rl/environment/mdp/MDPGrid.py | AliKhalili/girdword | 3f31a961847599b84a11f76846d2d361caeb7e5b | ["MIT"] | null | null | null |
| max_issues | rl/environment/mdp/MDPGrid.py | AliKhalili/girdword | 3f31a961847599b84a11f76846d2d361caeb7e5b | ["MIT"] | null | null | null |
| max_forks | rl/environment/mdp/MDPGrid.py | AliKhalili/girdword | 3f31a961847599b84a11f76846d2d361caeb7e5b | ["MIT"] | null | null | null |

content:

```python
import json
import numpy as np
from rl.environment.mdp.State import State, states_parser
from rl.environment.mdp.MDP import MDP
from rl.environment.mdp.Frame import Frame
import rl.common.Constant as CNSTNT
from rl.environment.mdp.Trajectory import Trajectory
class MDPGrid(MDP, Frame, Trajectory):
def __init__(self, json_path: str):
with open(json_path, 'r', encoding='utf-8') as json_file:
configuration = json.load(json_file)
self.width = configuration.get('width', 0)
self.height = configuration.get('height', 0)
save_frame = configuration.get('save_frame', False)
        self.save_trajectory = configuration.get('save_trajectory', False)
super(MDPGrid, self).__init__(number_of_state=self.height * self.width, save_frame=save_frame, save_trajectory=self.save_trajectory)
reward = configuration.get("reward", -1)
self._states, self.start, self.grid_move = states_parser(configuration.get("states", None), reward, self.height, self.width)
super().state(self.start)
super().add_frame(super().time(), self._get_frame())
def _get_frame(self):
frame = np.array([CNSTNT.COLORS[state.get_color()] for index, state in self._states.items()]).reshape(
(self.height, self.width))
frame[self.grid_move.get_position(super().state())] = CNSTNT.COLORS[CNSTNT.ACTION_COLORS["start"]]
return frame
def step(self, action_name):
next_state, reward, is_terminal, time_step = super().step(action_name)
super().add_frame(time_step, self._get_frame())
if self.save_trajectory:
super().add_trajectory(time_step, reward, super().state(), action_name, next_state)
return next_state, reward, is_terminal, time_step
def get_position(self, state):
return self.grid_move.get_position(state)
def render(self, time_step=None):
if time_step is None:
super().render(self.time())
else:
super().render(time_step)
def reset(self):
MDP.reset(self)
Frame.reset(self)
Trajectory.reset(self)
state = self.state(self.start)
super().add_frame(self.time(), self._get_frame())
return state
```

| avg_line_length | max_line_length | alphanum_fraction |
|---|---|---|
| 40 | 140 | 0.674107 |
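A sketch of the interaction loop the class implies, assuming a valid configuration file; the JSON path and action name are placeholders, and the `states` schema expected by `states_parser` is not shown in this file.

```python
# Placeholder path and action name; the real action set comes from the
# MDP base class and the configuration, neither of which is shown here.
env = MDPGrid("grid_config.json")
state = env.reset()
done = False
while not done:
    # step() returns (next_state, reward, is_terminal, time_step)
    state, reward, done, t = env.step("up")
env.render()  # renders the most recent saved frame
```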
| hexsha | size | ext | lang |
|---|---|---|---|
| 3821914a163a1a6ee668e1290cb02e488819feb6 | 3,429 | py | Python |

| event | repo_path | repo_name | head_hexsha | licenses | count | min_datetime | max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | azure/mgmt/compute/v2016_03_30/models/virtual_machine_scale_set.py | EnjoyLifeFund/macHighSierra-py36-pkgs | 5668b5785296b314ea1321057420bcd077dba9ea | ["BSD-3-Clause", "BSD-2-Clause", "MIT"] | 1 | 2022-01-25T22:52:58.000Z | 2022-01-25T22:52:58.000Z |
| max_issues | azure/mgmt/compute/v2016_03_30/models/virtual_machine_scale_set.py | EnjoyLifeFund/Debian_py36_packages | 1985d4c73fabd5f08f54b922e73a9306e09c77a5 | ["BSD-3-Clause", "BSD-2-Clause", "MIT"] | null | null | null |
| max_forks | azure/mgmt/compute/v2016_03_30/models/virtual_machine_scale_set.py | EnjoyLifeFund/Debian_py36_packages | 1985d4c73fabd5f08f54b922e73a9306e09c77a5 | ["BSD-3-Clause", "BSD-2-Clause", "MIT"] | null | null | null |

content:

```python
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .resource import Resource
class VirtualMachineScaleSet(Resource):
"""Describes a Virtual Machine Scale Set.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Resource Id
:vartype id: str
:ivar name: Resource name
:vartype name: str
:ivar type: Resource type
:vartype type: str
:param location: Resource location
:type location: str
:param tags: Resource tags
:type tags: dict
:param sku: The virtual machine scale set sku.
:type sku: :class:`Sku <azure.mgmt.compute.v2016_03_30.models.Sku>`
:param upgrade_policy: The upgrade policy.
:type upgrade_policy: :class:`UpgradePolicy
<azure.mgmt.compute.v2016_03_30.models.UpgradePolicy>`
:param virtual_machine_profile: The virtual machine profile.
:type virtual_machine_profile: :class:`VirtualMachineScaleSetVMProfile
<azure.mgmt.compute.v2016_03_30.models.VirtualMachineScaleSetVMProfile>`
:ivar provisioning_state: The provisioning state, which only appears in
the response.
:vartype provisioning_state: str
:param overprovision: Specifies whether the Virtual Machine Scale Set
should be overprovisioned.
:type overprovision: bool
:param identity: The identity of the virtual machine scale set, if
configured.
:type identity: :class:`VirtualMachineScaleSetIdentity
<azure.mgmt.compute.v2016_03_30.models.VirtualMachineScaleSetIdentity>`
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'location': {'required': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'sku': {'key': 'sku', 'type': 'Sku'},
'upgrade_policy': {'key': 'properties.upgradePolicy', 'type': 'UpgradePolicy'},
'virtual_machine_profile': {'key': 'properties.virtualMachineProfile', 'type': 'VirtualMachineScaleSetVMProfile'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'overprovision': {'key': 'properties.overprovision', 'type': 'bool'},
'identity': {'key': 'identity', 'type': 'VirtualMachineScaleSetIdentity'},
}
def __init__(self, location, tags=None, sku=None, upgrade_policy=None, virtual_machine_profile=None, overprovision=None, identity=None):
super(VirtualMachineScaleSet, self).__init__(location=location, tags=tags)
self.sku = sku
self.upgrade_policy = upgrade_policy
self.virtual_machine_profile = virtual_machine_profile
self.provisioning_state = None
self.overprovision = overprovision
self.identity = identity
```

| avg_line_length | max_line_length | alphanum_fraction |
|---|---|---|
| 42.333333 | 140 | 0.647127 |
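A minimal construction sketch for the model above; `Sku` is assumed to be exposed by the same generated models package, and all values are placeholders rather than recommendations.

```python
# Values are illustrative only; location is the one required argument.
from azure.mgmt.compute.v2016_03_30.models import Sku  # assumed sibling model

vmss = VirtualMachineScaleSet(
    location="westus",
    tags={"env": "dev"},
    sku=Sku(name="Standard_A1", tier="Standard", capacity=2),
    overprovision=True,
)
# provisioning_state stays None client-side; the server populates it.
```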
| hexsha | size | ext | lang |
|---|---|---|---|
| 61217b64eb7bde1b13d5b9d7d10a2652108aae50 | 14,031 | py | Python |

| event | repo_path | repo_name | head_hexsha | licenses | count | min_datetime | max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | pythran/tests/test_itertools.py | PierreBlancfat/pythran2 | 37869bc73aae1054253c2b1643aee5c63f11b7e8 | ["BSD-3-Clause"] | null | null | null |
| max_issues | pythran/tests/test_itertools.py | PierreBlancfat/pythran2 | 37869bc73aae1054253c2b1643aee5c63f11b7e8 | ["BSD-3-Clause"] | null | null | null |
| max_forks | pythran/tests/test_itertools.py | PierreBlancfat/pythran2 | 37869bc73aae1054253c2b1643aee5c63f11b7e8 | ["BSD-3-Clause"] | null | null | null |

content:

```python
from pythran.tests import TestEnv
import sys
import unittest
from pythran.typing import List
@TestEnv.module
class TestItertools(TestEnv):
@unittest.skipIf(sys.version_info.major == 3, "not supported in pythran3")
def test_imap(self):
self.run_test("def imap_(l0,v): from itertools import imap; return sum(imap(lambda x:x*v, l0))", [0,1,2], 2, imap_=[List[int], int])
@unittest.skipIf(sys.version_info.major == 3, "not supported in pythran3")
def test_imap_on_generator(self):
self.run_test("def imap_on_generator(l,v): from itertools import imap; return sum(imap(lambda x:x*v, (y for x in l for y in xrange(x))))", [2,3,5], 1, imap_on_generator=[List[int], int])
@unittest.skipIf(sys.version_info.major == 3, "not supported in pythran3")
def test_imap2(self):
self.run_test("def imap2_(l0, l1,v): from itertools import imap; return sum(imap(lambda x,y:x*v+y, l0, l1))", [0,1,2], [0.,1.1,2.2], 1, imap2_=[List[int], List[float], int])
@unittest.skipIf(sys.version_info.major == 3, "not supported in pythran3")
def test_imap2_ineq_size(self):
""" Check imap with different size for the two list operand. """
self.run_test("""
def imap2_ineq_size(l0, l1, v):
from itertools import imap
return sum(imap(lambda x, y : x * v + y, l0, l1))""",
[0, 1, 2, 3], [0., 1.1, 2.2], 1,
imap2_ineq_size=[List[int], List[float], int])
@unittest.skipIf(sys.version_info.major == 3, "not supported in pythran3")
def test_imap2_on_generator(self):
self.run_test("def imap2_on_generator(l0,l1,v): from itertools import imap; return sum(imap(lambda x,y:x*v+y, (z*z for x in l0 for z in xrange(x)), (z*2 for y in l1 for z in xrange(y))))", [0,1,2,3], [3,2,1,0], 2, imap2_on_generator=[List[int], List[int], int])
@unittest.skipIf(sys.version_info.major == 3, "not supported in pythran3")
def test_imap_none(self):
self.run_test("""
def imap_none(l0):
from itertools import imap
t= 0
for a in imap(None, l0) :
t += a[0]
return t
""", [0,1,2], imap_none=[List[int]])
@unittest.skipIf(sys.version_info.major == 3, "not supported in pythran3")
def test_imap_none2(self):
self.run_test("""
def imap_none2(l0):
from itertools import imap
t=0
for a in imap(None, l0, l0) :
t += sum(a)
return t
""", [0,1,2], imap_none2=[List[int]])
@unittest.skipIf(sys.version_info.major == 3, "not supported in pythran3")
def test_imap_none_on_generators(self):
self.run_test("""
def imap_none_g(l0):
from itertools import imap
t= 0
for a in imap(None, (y for x in l0 for y in xrange(x))) :
t += a[0]
return t
""", [0,1,2], imap_none_g=[List[int]])
@unittest.skipIf(sys.version_info.major == 3, "not supported in pythran3")
def test_imap_none2_on_generators(self):
self.run_test("""
def imap_none2_g(l0):
from itertools import imap
t=0
for a in imap(None, (z for x in l0 for z in xrange(x)), (z for y in l0 for z in xrange(y))) :
t += sum(a)
return t
""", [0,1,2], imap_none2_g=[List[int]])
@unittest.skipIf(sys.version_info.major == 3, "not supported in pythran3")
def test_ifilter_init(self):
self.run_test("def ifilter_init(l0): from itertools import ifilter; return list(ifilter(lambda x: x > 2 , l0))", [0,1,2,3,4,5], ifilter_init=[List[int]])
@unittest.skipIf(sys.version_info.major == 3, "not supported in pythran3")
def test_ifilter_final(self):
self.run_test("def ifilter_final(l0): from itertools import ifilter; return list(ifilter(lambda x: x < 2, l0))", [0,1,2,3,4,5], ifilter_final=[List[int]])
@unittest.skipIf(sys.version_info.major == 3, "not supported in pythran3")
def test_ifilter_on_generator(self):
self.run_test("def ifilterg_(l0): from itertools import ifilter; return list(ifilter(lambda x: (x % 2) == 1, (y for x in l0 for y in xrange(x))))", [0,1,2,3,4,5], ifilterg_=[List[int]])
@unittest.skipIf(sys.version_info.major == 3, "not supported in pythran3")
def test_ifilter_none(self):
self.run_test("""
def ifiltern_(l0):
from itertools import ifilter;
s = 0
for b in (ifilter(None, l0)):
s += 1
return b,s
""", [True,False,True,True], ifiltern_=[List[bool]])
def test_product(self):
self.run_test("def product_(l0,l1): from itertools import product; return sum(map(lambda (x,y) : x*y, product(l0,l1)))", [0,1,2,3,4,5], [10,11], product_=[List[int],List[int]])
def test_product_on_generator(self):
self.run_test("def product_g(l0,l1): from itertools import product; return sum(map(lambda (x,y) : x*y, product((y for x in l0 for y in xrange(x)),(y for x in l1 for y in xrange(x)))))", [0,1,2,3,4], [4,3,2,1,0], product_g=[List[int],List[int]])
@unittest.skipIf(sys.version_info.major == 3, "not supported in pythran3")
def test_itertools(self):
self.run_test("def test_it(l0,l1): import itertools; return sum(itertools.imap(lambda (x,y) : x*y, itertools.product(itertools.ifilter(lambda x : x > 2, l0), itertools.ifilter(lambda x : x < 12, l1))))", [0,1,2,3,4,5], [10,11,12,13,14,15], test_it=[List[int],List[int]])
@unittest.skipIf(sys.version_info.major == 3, "not supported in pythran3")
def test_izip(self):
self.run_test("def izip_(l0,l1): from itertools import izip; return sum(map(lambda (x,y) : x*y, izip(l0,l1)))", [0,1,2], [10,11,12], izip_=[List[int],List[int]])
@unittest.skipIf(sys.version_info.major == 3, "not supported in pythran3")
def test_izip_on_generator(self):
self.run_test("def izipg_(l0,l1): from itertools import izip; return sum(map(lambda (x,y) : x*y, izip((z for x in l0 for z in xrange(x)),(z for x in l1 for z in xrange(x)))))", [0,1,2,3], [3,2,1,0], izipg_=[List[int],List[int]])
def test_zip_iter(self):
self.run_test("""
def zip_iter(l0):
from itertools import izip
s = 0
for x in izip(l0, l0):
for y in x:
s += y
return s""",
[0,1,2],
zip_iter=[List[int]])
def test_islice0(self):
self.run_test("def islice0(l): from itertools import islice ; return [x for x in islice(l, 1,30,3)]", list(range(100)), islice0=[List[int]])
def test_islice1(self):
self.run_test("def islice1(l): from itertools import islice ; return [x for x in islice(l, 16)]", list(range(100)), islice1=[List[int]])
def test_count0(self):
self.run_test("def count0(): from itertools import count ; c = count() ; next(c); next(c); return next(c)", count0=[])
def test_count1(self):
self.run_test("def count1(n): from itertools import count ; c = count(n) ; next(c); next(c); return next(c)", 100, count1=[int])
def test_count2(self):
self.run_test("def count2(n): from itertools import count ; c = count(n,3.2) ; next(c); next(c); return next(c)", 100, count2=[int])
def test_count3(self):
self.run_test("def count3(n):\n from itertools import count\n j = 1\n for i in count(n):\n if i == 10: return j\n else: j +=1", 1, count3=[int])
def test_next_enumerate(self):
self.run_test("def next_enumerate(n): x = enumerate(n) ; next(x) ; return map(None, x)", list(range(5)), next_enumerate=[List[int]])
def test_next_generator(self):
self.run_test("def next_generator(n): x = (i for i in xrange(n) for j in xrange(i)) ; next(x) ; return map(None, x)", 5, next_generator=[int])
@unittest.skipIf(sys.version_info.major == 3, "not supported in pythran3")
def test_next_imap(self):
self.run_test("def next_imap(n): from itertools import imap ; x = imap(abs,n) ; next(x) ; return map(None, x)", range(-5,5), next_imap=[List[int]])
@unittest.skipIf(sys.version_info.major == 3, "not supported in pythran3")
def test_next_imap_none(self):
self.run_test("def next_imap_none(n): from itertools import imap ; x = imap(None,n) ; next(x) ; return map(None, x)", range(-5,5), next_imap_none=[List[int]])
@unittest.skipIf(sys.version_info.major == 3, "not supported in pythran3")
def test_next_ifilter(self):
self.run_test("def next_ifilter(n): from itertools import ifilter ; x = ifilter(abs,n) ; next(x) ; return map(None, x)", range(-5,5), next_ifilter=[List[int]])
@unittest.skipIf(sys.version_info.major == 3, "not supported in pythran3")
def test_next_ifilter_none(self):
self.run_test("def next_ifilter_none(n): from itertools import ifilter ; x = ifilter(None,n) ; next(x) ; return map(None, x)", range(-5,5), next_ifilter_none=[List[int]])
def test_product_next(self):
self.run_test("def next_product(n): from itertools import product ; x = product(n,n) ; next(x) ; return map(None, x)", list(range(-5,5)), next_product=[List[int]])
def test_product_iter(self):
self.run_test("""
def product_iter(n):
from itertools import product
s = 0
for x in product(n,n):
for y in x:
s += y
return s""",
list(range(-5,5)), product_iter=[List[int]])
@unittest.skipIf(sys.version_info.major == 3, "not supported in pythran3")
def test_next_izip(self):
self.run_test("def next_izip(n): from itertools import izip ; x = izip(n,n) ; next(x) ; return map(None, x)", range(-5,5), next_izip=[List[int]])
def test_next_islice(self):
self.run_test("def next_islice(n): from itertools import islice ; x = islice(n,8) ; next(x) ; return map(None, x)", list(range(-5,5)), next_islice=[List[int]])
def test_next_count(self):
self.run_test("def next_count(n): from itertools import count ; x = count(n) ; next(x) ; return next(x)", 5, next_count=[int])
def test_iter(self):
self.run_test("def iter_(n): r = iter(range(5,n)) ; next(r) ; return next(r)", 12, iter_=[int])
@unittest.skipIf(sys.version_info.major == 3, "not supported in pythran3")
def test_ifilter_with_nested_lambdas(self):
code = '''
def ifilter_with_nested_lambdas(N):
perf = lambda n: n == sum(i for i in xrange(1, n) if n % i == 0)
return map(perf, xrange(20))'''
self.run_test(code, 10, ifilter_with_nested_lambdas=[int])
def test_combinations_on_generator(self):
self.run_test("def combinations_g(l0,a): from itertools import combinations; return sum(map(lambda (x,y) : x*y, combinations((y for x in l0 for y in xrange(x)),a)))", [0,1,2], 2, combinations_g=[List[int],int])
def test_next_combinations(self):
self.run_test("def next_combinations(n): from itertools import combinations ; x = combinations(n,2) ; next(x) ; return map(None, x)", list(range(5)), next_combinations=[List[int]])
def test_combinations(self):
self.run_test("def combinations_(l0,a): from itertools import combinations; return sum(map(lambda (x,y) : x*y, combinations(l0,a)))", [0,1,2,3,4,5], 2, combinations_=[List[int],int])
def test_permutations_on_generator(self):
self.run_test("def permutations_g(l0,a): from itertools import permutations; return sum(map(lambda (x,y) : x*y, permutations((y for x in l0 for y in xrange(x)),a)))", [0,1,2], 2, permutations_g=[List[int],int])
def test_next_permutations(self):
self.run_test("def next_permutations(n):"
" from itertools import permutations ;"
" x = permutations(n,2) ;"
" next(x) ;"
" return map(None, x)",
list(range(5)),
next_permutations=[List[int]])
def test_permutations(self):
'''Test permutation without second arg'''
self.run_test("def permutations_2_(l0): "
" from itertools import permutations;"
" return list(permutations(l0))",
[0, 1, 2, 3],
permutations_2_=[List[int]])
def test_permutations_with_prefix(self):
self.run_test("def permutations_(l0,a):"
" from itertools import permutations;"
" return list(permutations(l0,a))",
[0,1,2,3,4,5], 2,
permutations_=[List[int],int])
@unittest.skipIf(sys.version_info.major == 3, "not supported in pythran3")
def test_imap_over_array(self):
self.run_test("def imap_over_array(l):"
" from itertools import imap ;"
" from numpy import arange ;"
" t = tuple(imap(lambda x: 1, (l,l))) ;"
" return arange(10).reshape((5,2))[t]",
3,
imap_over_array=[int])
@unittest.skipIf(sys.version_info.major == 3, "not supported in pythran3")
def test_imap_over_several_arrays(self):
self.run_test("def imap_over_several_arrays(l):"
" from itertools import imap ;"
" from numpy import arange ;"
" t = tuple(imap(lambda x,y: 1, (l,l), (l, l, l))) ;"
" return arange(10).reshape((5,2))[t]",
3,
imap_over_several_arrays=[int])
def test_itertools_repeat0(self):
code = 'def itertools_repeat0(n): import itertools; return list(itertools.repeat(n, n))'
self.run_test(code, 3, itertools_repeat0=[int])
def test_itertools_repeat1(self):
code = '''
def itertools_repeat1(n):
import itertools
s = []
i = 0
for l in itertools.repeat([n]):
s.append(l)
i += 1
if i < n:
break
return s'''
self.run_test(code, 3, itertools_repeat1=[int])
```

| avg_line_length | max_line_length | alphanum_fraction |
|---|---|---|
| 49.40493 | 278 | 0.606514 |
| hexsha | size | ext | lang |
|---|---|---|---|
| 75f0b53d07346ca530abf1309953748a5dd36458 | 1,377 | py | Python |

| event | repo_path | repo_name | head_hexsha | licenses | count | min_datetime | max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | lightbus/utilities/frozendict.py | gcollard/lightbus | d04deeda8ccef5a582b79255725ca2025a085c02 | ["Apache-2.0"] | 178 | 2017-07-22T12:35:00.000Z | 2022-03-28T07:53:13.000Z |
| max_issues | lightbus/utilities/frozendict.py | adamcharnock/warren | 5e7069da06cd37a8131e8c592ee957ccb73603d5 | ["Apache-2.0"] | 26 | 2017-08-03T12:09:29.000Z | 2021-10-19T16:47:18.000Z |
| max_forks | lightbus/utilities/frozendict.py | adamcharnock/warren | 5e7069da06cd37a8131e8c592ee957ccb73603d5 | ["Apache-2.0"] | 19 | 2017-09-15T17:51:24.000Z | 2022-02-28T13:00:16.000Z |

content:

```python
""" An immutable dictionary
This has been vendored from [python-frozendict](https://github.com/slezica/python-frozendict)
and subsequently modified.
"""
import collections.abc
class frozendict(collections.abc.Mapping):
"""
    An immutable wrapper around dictionaries that implements the complete :py:class:`collections.abc.Mapping`
interface. It can be used as a drop-in replacement for dictionaries where immutability is desired.
"""
dict_cls = dict
def __init__(self, *args, **kwargs):
self._dict = self.dict_cls(*args, **kwargs)
self._hash = None
def __getitem__(self, key):
return self._dict[key]
def __contains__(self, key):
return key in self._dict
def copy(self, **add_or_replace):
return self.__class__(self, **add_or_replace)
def __iter__(self):
return iter(self._dict)
def __len__(self):
return len(self._dict)
def __repr__(self):
return "<%s %r>" % (self.__class__.__name__, self._dict)
def __hash__(self):
if self._hash is None:
h = 0
for key, value in self._dict.items():
h ^= hash((key, value))
self._hash = h
return self._hash
class FrozenOrderedDict(frozendict):
"""
A frozendict subclass that maintains key order
"""
dict_cls = collections.OrderedDict
```

| avg_line_length | max_line_length | alphanum_fraction |
|---|---|---|
| 25.5 | 105 | 0.644154 |
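A short usage sketch for the mapping above; note that hashing only succeeds when every value is itself hashable.

```python
fd = frozendict({"a": 1, "b": 2})
assert fd["a"] == 1 and "b" in fd and len(fd) == 2
fd2 = fd.copy(b=3)        # copy() builds a new frozendict with overrides
assert fd2["b"] == 3 and fd["b"] == 2
index = {fd: "cached"}    # usable as a dict key because frozendict is hashable
```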
| hexsha | size | ext | lang |
|---|---|---|---|
| 57a3bb280cf55551a02585208691b5c895c36573 | 3,241 | py | Python |

| event | repo_path | repo_name | head_hexsha | licenses | count | min_datetime | max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | aws_xray_sdk/core/sampling/default_sampler.py | lukaasp/libs | 2865fcfa6a13bae5ce16d2df4a119d96e7b4d514 | ["Unlicense"] | null | null | null |
| max_issues | aws_xray_sdk/core/sampling/default_sampler.py | lukaasp/libs | 2865fcfa6a13bae5ce16d2df4a119d96e7b4d514 | ["Unlicense"] | null | null | null |
| max_forks | aws_xray_sdk/core/sampling/default_sampler.py | lukaasp/libs | 2865fcfa6a13bae5ce16d2df4a119d96e7b4d514 | ["Unlicense"] | null | null | null |

content:

```python
import os
import json
from random import Random
from .sampling_rule import SamplingRule
from ..exceptions.exceptions import InvalidSamplingManifestError
__location__ = os.path.realpath(
os.path.join(os.getcwd(), os.path.dirname(__file__)))
with open(os.path.join(__location__, 'default_sampling_rule.json')) as f:
default_sampling_rule = json.load(f)
class DefaultSampler(object):
"""
The default sampler that holds either custom sampling rules
or default sampling rules. Every time before the X-Ray recorder
generates a segment, it calculates if this segment is sampled
or not.
"""
def __init__(self, rules=default_sampling_rule):
"""
:param dict rules: a dict that defines custom sampling rules.
An example configuration:
{
"version": 1,
"rules": [
{
"description": "Player moves.",
"service_name": "*",
"http_method": "*",
"url_path": "/api/move/*",
"fixed_target": 0,
"rate": 0.05
}
],
"default": {
"fixed_target": 1,
"rate": 0.1
}
}
This example defines one custom rule and a default rule.
The custom rule applies a five-percent sampling rate with no minimum
number of requests to trace for paths under /api/move/. The default
rule traces the first request each second and 10 percent of additional requests.
The SDK applies custom rules in the order in which they are defined.
If a request matches multiple custom rules, the SDK applies only the first rule.
"""
version = rules.get('version', None)
if version != 1:
raise InvalidSamplingManifestError('Manifest version: %s is not supported.', version)
if 'default' not in rules:
raise InvalidSamplingManifestError('A default rule must be provided.')
self._default_rule = SamplingRule(rule_dict=rules['default'],
default=True)
self._rules = []
if 'rules' in rules:
for rule in rules['rules']:
self._rules.append(SamplingRule(rule))
self._random = Random()
def should_trace(self, service_name=None, method=None, path=None):
"""
        Return True if the sampler decides to sample, based on the input
        information and the sampling rules. It first checks whether any
        custom rule applies; if not, it falls back to the
        default sampling rule.
        All optional arguments are extracted from incoming requests by
        X-Ray middleware to perform path-based sampling.
"""
if service_name and method and path:
for rule in self._rules:
if rule.applies(service_name, method, path):
return self._should_trace(rule)
return self._should_trace(self._default_rule)
def _should_trace(self, sampling_rule):
if sampling_rule.reservoir.take():
return True
else:
return self._random.random() < sampling_rule.rate
```

| avg_line_length | max_line_length | alphanum_fraction |
|---|---|---|
| 36.011111 | 97 | 0.603209 |
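To illustrate the manifest format the constructor validates, a sketch reusing the rules from the class's own docstring; the service, method, and path values are arbitrary.

```python
# Manifest mirrors the docstring example; values are illustrative.
rules = {
    "version": 1,
    "rules": [{
        "description": "Player moves.",
        "service_name": "*",
        "http_method": "*",
        "url_path": "/api/move/*",
        "fixed_target": 0,
        "rate": 0.05,
    }],
    "default": {"fixed_target": 1, "rate": 0.1},
}
sampler = DefaultSampler(rules)
# Matches the custom rule: the reservoir is empty (fixed_target=0), so
# roughly 5% of these calls return True.
sampler.should_trace("game", "GET", "/api/move/left")
```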
| hexsha | size | ext | lang |
|---|---|---|---|
| db4d2c169c687d1a419a9cb9f6a2d305ee66b986 | 2,980 | py | Python |

| event | repo_path | repo_name | head_hexsha | licenses | count | min_datetime | max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | esmvalcore/preprocessor/_derive/__init__.py | ssmithClimate/ESMValCore | 36b625400768493e593d38298e825c84625e1a59 | ["Apache-2.0"] | null | null | null |
| max_issues | esmvalcore/preprocessor/_derive/__init__.py | ssmithClimate/ESMValCore | 36b625400768493e593d38298e825c84625e1a59 | ["Apache-2.0"] | null | null | null |
| max_forks | esmvalcore/preprocessor/_derive/__init__.py | ssmithClimate/ESMValCore | 36b625400768493e593d38298e825c84625e1a59 | ["Apache-2.0"] | null | null | null |

content:

```python
"""Automatically derive variables."""
import importlib
import logging
from copy import deepcopy
from pathlib import Path
import iris
logger = logging.getLogger(__name__)
def _get_all_derived_variables():
"""Get all possible derived variables.
Returns
-------
dict
All derived variables with `short_name` (keys) and the associated
python classes (values).
"""
derivers = {}
for path in Path(__file__).parent.glob('[a-z]*.py'):
short_name = path.stem
module = importlib.import_module(
f'esmvalcore.preprocessor._derive.{short_name}')
derivers[short_name] = getattr(module, 'DerivedVariable')
return derivers
ALL_DERIVED_VARIABLES = _get_all_derived_variables()
__all__ = list(ALL_DERIVED_VARIABLES)
def get_required(short_name, project):
"""Return all required variables for derivation.
Get all information (at least `short_name`) required for derivation.
Parameters
----------
short_name : str
`short_name` of the variable to derive.
project : str
`project` of the variable to derive.
Returns
-------
list
List of dictionaries (including at least the key `short_name`).
"""
if short_name not in ALL_DERIVED_VARIABLES:
raise NotImplementedError(
f"Cannot derive variable '{short_name}', no derivation script "
f"available")
DerivedVariable = ALL_DERIVED_VARIABLES[short_name] # noqa: N806
variables = deepcopy(DerivedVariable().required(project))
return variables
def derive(cubes, short_name, long_name, units, standard_name=None):
"""Derive variable.
Parameters
----------
cubes: iris.cube.CubeList
Includes all the needed variables for derivation defined in
:func:`get_required`.
short_name: str
short_name
long_name: str
long_name
units: str
units
standard_name: str, optional
standard_name
Returns
-------
iris.cube.Cube
The new derived variable.
"""
if short_name == cubes[0].var_name:
return cubes[0]
cubes = iris.cube.CubeList(cubes)
# Derive variable
DerivedVariable = ALL_DERIVED_VARIABLES[short_name.lower()] # noqa: N806
try:
cube = DerivedVariable().calculate(cubes)
except Exception as exc:
msg = (f"Derivation of variable '{short_name}' failed. If you used "
f"the option '--skip-nonexistent' for running your recipe, "
f"this might be caused by missing input data for derivation")
raise ValueError(msg) from exc
# Set standard attributes
cube.var_name = short_name
cube.standard_name = standard_name if standard_name else None
cube.long_name = long_name
cube.units = units
for temp in cubes:
if 'source_file' in temp.attributes:
cube.attributes['source_file'] = temp.attributes['source_file']
return cube
```

| avg_line_length | max_line_length | alphanum_fraction |
|---|---|---|
| 26.607143 | 77 | 0.658389 |
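A sketch of the call pattern the two public functions define; `'swcre'` is used here as an example derived short_name, and its availability depends on the ESMValCore version.

```python
from esmvalcore.preprocessor._derive import get_required, derive

# 1) Ask which input variables the derivation needs.
required = get_required("swcre", project="CMIP5")  # list of dicts with at least 'short_name'

# 2) Load those variables into an iris CubeList (not shown), then derive:
# cube = derive(cubes, "swcre", "Shortwave Cloud Radiative Effect", "W m-2")
```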
| hexsha | size | ext | lang |
|---|---|---|---|
| 18a5be70d3e273f4b8bd774b8f292e3b67df41d0 | 7,536 | py | Python |

| event | repo_path | repo_name | head_hexsha | licenses | count | min_datetime | max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | Prototype/python/biopredyn/algorithm.py | Cosmo-Tech/biopredyn | 0f5bcd4cb1f723bfdea07d4973e46e676f4175e8 | ["BSD-3-Clause"] | null | null | null |
| max_issues | Prototype/python/biopredyn/algorithm.py | Cosmo-Tech/biopredyn | 0f5bcd4cb1f723bfdea07d4973e46e676f4175e8 | ["BSD-3-Clause"] | 95 | 2015-03-06T12:14:06.000Z | 2015-03-20T11:15:54.000Z |
| max_forks | Prototype/python/biopredyn/algorithm.py | Cosmo-Tech/biopredyn | 0f5bcd4cb1f723bfdea07d4973e46e676f4175e8 | ["BSD-3-Clause"] | null | null | null |

content:

```python
#!/usr/bin/env python
# coding=utf-8
## @package biopredyn
## Copyright: [2012-2019] Cosmo Tech, All Rights Reserved
## License: BSD 3-Clause
import sys
import libsedml
## Representation of an algorithm in SED-ML workflows; an algorithm is defined
## using a KiSAO id along with several optional algorithm parameters.
class Algorithm:
## @var id
# A unique identifier for this object.
## @var kisao_id
# A KiSAO identifier (syntax KISAO:0000XYZ) for the algorithm encoded by this.
## @var name
# Name of this object.
## @var parameters
# A list of AlgorithmParameter objects.
## Constructor; either 'algo' or 'idf' and 'kid' must be passed as keyword
## argument(s).
# @param self The object pointer.
# @param algo A libsedml.SedAlgorithm element; optional (default: None).
# @param idf A unique identifier; optional (default: None).
# @param name A name for 'self'; optional (default: None).
# @param kid A valid KiSAO identifier; optional (default: None).
def __init__(self, algo=None, idf=None, name=None, kid=None):
if algo is None and (idf is None or kid is None):
raise RuntimeError("Either 'algo' or 'idf' and 'kid' must be " +
"passed as keyword argument(s).")
else:
self.parameters = []
if algo is not None:
self.id = algo.getId()
self.name = algo.getName()
self.kisao_id = algo.getKisaoID()
for p in algo.getListOfAlgorithmParameters():
self.add_parameter(AlgorithmParameter(parameter=p))
else:
self.id = idf
self.name = name
self.kisao_id = kid
## Appends the input biopredyn.parameter.AlgorithmParameter object to
## self.parameters.
# @param self The object pointer.
# @param par A biopredyn.parameter.AlgorithmParameter object.
def add_parameter(self, par):
self.parameters.append(par)
## Getter. Returns self.id.
# @param self The object pointer.
# @return self.id
def get_id(self):
return self.id
## Getter. Returns self.kisao_id.
# @param self The object pointer.
# @return self.kisao_id
def get_kisao_id(self):
return self.kisao_id
## Getter. Returns self.name.
# @param self The object pointer.
# @return self.name
def get_name(self):
return self.name
## Getter. Returns the first AlgorithmParameter object with the input id in
## self.parameters.
# @param self The object pointer.
# @param id ID of the object to be returned in self.parameters.
# @return An AlgorithmParameter object.
def get_parameter_by_id(self, id):
res = None
for p in self.parameters:
if (p.get_id() == id):
res = p
return res
## Getter. Returns the first AlgorithmParameter object with the input name in
## self.parameters.
# @param self The object pointer.
# @param name Name of the object to be returned in self.parameters.
# @return An AlgorithmParameter object.
def get_parameter_by_name(self, name):
res = None
for p in self.parameters:
if (p.get_name() == name):
res = p
return res
## Getter. Returns self.parameters.
# @param self The object pointer.
# @return self.parameters
def get_parameters(self):
return self.parameters
## Setter for self.id.
# @param self The object pointer.
# @param id New value for self.id
def set_id(self, id):
self.id = id
## Setter for self.kisao_id.
# @param self The object pointer.
# @param kisao_id New value for self.kisao_id
def set_kisao_id(self, kisao_id):
self.kisao_id = kisao_id
## Setter for self.name.
# @param self The object pointer.
# @param name New value for self.name
def set_name(self, name):
self.name = name
## Returns the libsedml.SedAlgorithm representation of this.
# @param self The object pointer.
# @param level Level of SED-ML language to be used.
# @param version Version of SED-ML language to be used.
# @return A libsedml.SedAlgorithm object.
def to_sedml(self, level, version):
alg = libsedml.SedAlgorithm(level, version)
if self.get_name() is not None:
alg.setName(str(self.get_name()))
alg.setKisaoID(self.get_kisao_id())
for p in self.get_parameters():
alg.addAlgorithmParameter(p.to_sedml(level, version))
return alg
## Representation of an algorithm parameter in SED-ML workflows; an algorithm
## parameter is defined using a KiSAO id, and has a value.
class AlgorithmParameter:
## @var id
# A unique identifier for this object.
## @var kisao_id
# A KiSAO identifier (syntax KISAO:0000XYZ) for the parameter encoded by this.
## @var name
# Name of this object.
## @var value
# A string value for this parameter.
## Constructor; either 'parameter' or 'idf', 'kid' and 'value' must be passed
## as keyword argument(s).
# @param self the object pointer.
# @param parameter A libsedml.SedAlgorithmParameter object; optional (default:
# None).
# @param idf A unique identifier; optional (default: None).
# @param name A name for 'self'; optional (default: None).
# @param kid A valid KiSAO identifier; optional (default: None).
# @param value A string value for this parameter; optional (default: None).
def __init__(self, parameter=None, idf=None, name=None, kid=None, value=None):
if parameter is None and (idf is None or kid is None or value is None):
raise RuntimeError("Either 'parameter' or 'idf', 'kid' and " +
"'value' must be passed as keyword argument(s).")
else:
if parameter is not None:
self.id = parameter.getId()
self.name = parameter.getName()
self.kisao_id = parameter.getKisaoID()
self.value = parameter.getValue()
else:
self.id = idf
self.name = name
self.kisao_id = kid
self.value = value
## Getter. Returns self.id.
# @param self The object pointer.
# @return self.id
def get_id(self):
return self.id
## Getter. Returns self.kisao_id.
# @param self The object pointer.
# @return self.kisao_id
def get_kisao_id(self):
return self.kisao_id
## Getter. Returns self.name.
# @param self The object pointer.
# @return self.name
def get_name(self):
return self.name
## Getter. Returns self.value.
# @param self The object pointer.
# @return self.value
def get_value(self):
return self.value
## Setter for self.id.
# @param self The object pointer.
# @param id New value for self.id
def set_id(self, id):
self.id = id
## Setter for self.kisao_id.
# @param self The object pointer.
# @param kisao_id New value for self.kisao_id
def set_kisao_id(self, kisao_id):
self.kisao_id = kisao_id
## Setter for self.name.
# @param self The object pointer.
# @param name New value for self.name
def set_name(self, name):
self.name = name
## Setter for self.value.
# @param self The object pointer.
# @param value New value for self.value
def set_value(self, value):
self.value = value
## Returns the libsedml.SedAlgorithmParameter representation of this.
# @param self The object pointer.
# @param level Level of SED-ML language to be used.
# @param version Version of SED-ML language to be used.
# @return A libsedml.SedAlgorithmParameter object.
def to_sedml(self, level, version):
par = libsedml.SedAlgorithmParameter(level, version)
par.setId(self.get_id())
if self.get_name() is not None:
par.setName(str(self.get_name()))
par.setKisaoID(self.get_kisao_id())
par.setValue(str(self.get_value()))
return par
```

| avg_line_length | max_line_length | alphanum_fraction |
|---|---|---|
| 32.482759 | 80 | 0.680732 |
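Building the objects above by identifier rather than from a SED-ML element; the KiSAO ids are real entries (CVODE and absolute tolerance) but serve only as example values, and `to_sedml()` needs libsedml installed.

```python
# Example ids/values; serialization requires libsedml.
algo = Algorithm(idf="algo_1", name="CVODE", kid="KISAO:0000019")
algo.add_parameter(AlgorithmParameter(
    idf="p_1", name="absolute_tolerance",
    kid="KISAO:0000211", value="1e-9"))
sed_algo = algo.to_sedml(1, 2)  # SED-ML level 1, version 2
```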
| hexsha | size | ext | lang |
|---|---|---|---|
| eb5dbb8de2b39e12506e8dc4fc72853ba7053c60 | 35,690 | py | Python |

| event | repo_path | repo_name | head_hexsha | licenses | count | min_datetime | max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | python/pyspark/cloudpickle.py | kalpit/spark | b2ebf429e24566c29850c570f8d76943151ad78c | ["Apache-2.0"] | 24 | 2015-01-14T10:55:42.000Z | 2020-01-14T20:42:45.000Z |
| max_issues | python/pyspark/cloudpickle.py | kalpit/spark | b2ebf429e24566c29850c570f8d76943151ad78c | ["Apache-2.0"] | 8 | 2021-04-14T14:44:23.000Z | 2021-04-18T02:44:19.000Z |
| max_forks | python/pyspark/cloudpickle.py | kalpit/spark | b2ebf429e24566c29850c570f8d76943151ad78c | ["Apache-2.0"] | 22 | 2015-01-05T04:51:57.000Z | 2020-06-08T12:01:25.000Z |

content:

```python
"""
This class is defined to override standard pickle functionality
The goals of it follow:
-Serialize lambdas and nested functions to compiled byte code
-Deal with main module correctly
-Deal with other non-serializable objects
It does not include an unpickler, as standard python unpickling suffices.
This module was extracted from the `cloud` package, developed by `PiCloud, Inc.
<http://www.picloud.com>`_.
Copyright (c) 2012, Regents of the University of California.
Copyright (c) 2009 `PiCloud, Inc. <http://www.picloud.com>`_.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the University of California, Berkeley nor the
names of its contributors may be used to endorse or promote
products derived from this software without specific prior written
permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import operator
import os
import pickle
import struct
import sys
import types
from functools import partial
import itertools
from copy_reg import _extension_registry, _inverted_registry, _extension_cache
import new
import dis
import traceback
#relevant opcodes
STORE_GLOBAL = chr(dis.opname.index('STORE_GLOBAL'))
DELETE_GLOBAL = chr(dis.opname.index('DELETE_GLOBAL'))
LOAD_GLOBAL = chr(dis.opname.index('LOAD_GLOBAL'))
GLOBAL_OPS = [STORE_GLOBAL, DELETE_GLOBAL, LOAD_GLOBAL]
HAVE_ARGUMENT = chr(dis.HAVE_ARGUMENT)
EXTENDED_ARG = chr(dis.EXTENDED_ARG)
import logging
cloudLog = logging.getLogger("Cloud.Transport")
try:
import ctypes
except (MemoryError, ImportError):
logging.warning('Exception raised on importing ctypes. Likely python bug.. some functionality will be disabled', exc_info = True)
ctypes = None
PyObject_HEAD = None
else:
# for reading internal structures
PyObject_HEAD = [
('ob_refcnt', ctypes.c_size_t),
('ob_type', ctypes.c_void_p),
]
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
# These helper functions were copied from PiCloud's util module.
def islambda(func):
return getattr(func,'func_name') == '<lambda>'
def xrange_params(xrangeobj):
"""Returns a 3 element tuple describing the xrange start, step, and len
respectively
    Note: Only guarantees that elements of xrange are the same. Parameters may
    be different.
    e.g. xrange(1,1) is interpreted as xrange(0,0); both behave the same
though w/ iteration
"""
xrange_len = len(xrangeobj)
if not xrange_len: #empty
return (0,1,0)
start = xrangeobj[0]
if xrange_len == 1: #one element
return start, 1, 1
return (start, xrangeobj[1] - xrangeobj[0], xrange_len)
#debug variables intended for developer use:
printSerialization = False
printMemoization = False
useForcedImports = True #Should I use forced imports for tracking?
class CloudPickler(pickle.Pickler):
dispatch = pickle.Pickler.dispatch.copy()
savedForceImports = False
    savedDjangoEnv = False #hack to transport django environment
def __init__(self, file, protocol=None, min_size_to_save= 0):
pickle.Pickler.__init__(self,file,protocol)
self.modules = set() #set of modules needed to depickle
self.globals_ref = {} # map ids to dictionary. used to ensure that functions can share global env
def dump(self, obj):
# note: not thread safe
# minimal side-effects, so not fixing
recurse_limit = 3000
base_recurse = sys.getrecursionlimit()
if base_recurse < recurse_limit:
sys.setrecursionlimit(recurse_limit)
self.inject_addons()
try:
return pickle.Pickler.dump(self, obj)
except RuntimeError, e:
if 'recursion' in e.args[0]:
msg = """Could not pickle object as excessively deep recursion required.
Try _fast_serialization=2 or contact PiCloud support"""
raise pickle.PicklingError(msg)
finally:
new_recurse = sys.getrecursionlimit()
if new_recurse == recurse_limit:
sys.setrecursionlimit(base_recurse)
def save_buffer(self, obj):
"""Fallback to save_string"""
pickle.Pickler.save_string(self,str(obj))
dispatch[buffer] = save_buffer
#block broken objects
def save_unsupported(self, obj, pack=None):
raise pickle.PicklingError("Cannot pickle objects of type %s" % type(obj))
dispatch[types.GeneratorType] = save_unsupported
#python2.6+ supports slice pickling. some py2.5 extensions might as well. We just test it
try:
slice(0,1).__reduce__()
except TypeError: #can't pickle -
dispatch[slice] = save_unsupported
#itertools objects do not pickle!
for v in itertools.__dict__.values():
if type(v) is type:
dispatch[v] = save_unsupported
def save_dict(self, obj):
"""hack fix
If the dict is a global, deal with it in a special way
"""
#print 'saving', obj
if obj is __builtins__:
self.save_reduce(_get_module_builtins, (), obj=obj)
else:
pickle.Pickler.save_dict(self, obj)
dispatch[pickle.DictionaryType] = save_dict
def save_module(self, obj, pack=struct.pack):
"""
Save a module as an import
"""
#print 'try save import', obj.__name__
self.modules.add(obj)
self.save_reduce(subimport,(obj.__name__,), obj=obj)
dispatch[types.ModuleType] = save_module #new type
def save_codeobject(self, obj, pack=struct.pack):
"""
Save a code object
"""
#print 'try to save codeobj: ', obj
args = (
obj.co_argcount, obj.co_nlocals, obj.co_stacksize, obj.co_flags, obj.co_code,
obj.co_consts, obj.co_names, obj.co_varnames, obj.co_filename, obj.co_name,
obj.co_firstlineno, obj.co_lnotab, obj.co_freevars, obj.co_cellvars
)
self.save_reduce(types.CodeType, args, obj=obj)
dispatch[types.CodeType] = save_codeobject #new type
def save_function(self, obj, name=None, pack=struct.pack):
""" Registered with the dispatch to handle all function types.
Determines what kind of function obj is (e.g. lambda, defined at
interactive prompt, etc) and handles the pickling appropriately.
"""
write = self.write
name = obj.__name__
modname = pickle.whichmodule(obj, name)
#print 'which gives %s %s %s' % (modname, obj, name)
try:
themodule = sys.modules[modname]
except KeyError: # eval'd items such as namedtuple give invalid items for their function __module__
modname = '__main__'
if modname == '__main__':
themodule = None
if themodule:
self.modules.add(themodule)
if not self.savedDjangoEnv:
#hack for django - if we detect the settings module, we transport it
django_settings = os.environ.get('DJANGO_SETTINGS_MODULE', '')
if django_settings:
django_mod = sys.modules.get(django_settings)
if django_mod:
cloudLog.debug('Transporting django settings %s during save of %s', django_mod, name)
self.savedDjangoEnv = True
self.modules.add(django_mod)
write(pickle.MARK)
self.save_reduce(django_settings_load, (django_mod.__name__,), obj=django_mod)
write(pickle.POP_MARK)
# if func is lambda, def'ed at prompt, is in main, or is nested, then
# we'll pickle the actual function object rather than simply saving a
# reference (as is done in default pickler), via save_function_tuple.
if islambda(obj) or obj.func_code.co_filename == '<stdin>' or themodule == None:
#Force server to import modules that have been imported in main
modList = None
if themodule == None and not self.savedForceImports:
mainmod = sys.modules['__main__']
if useForcedImports and hasattr(mainmod,'___pyc_forcedImports__'):
modList = list(mainmod.___pyc_forcedImports__)
self.savedForceImports = True
self.save_function_tuple(obj, modList)
return
else: # func is nested
klass = getattr(themodule, name, None)
if klass is None or klass is not obj:
self.save_function_tuple(obj, [themodule])
return
if obj.__dict__:
# essentially save_reduce, but workaround needed to avoid recursion
self.save(_restore_attr)
write(pickle.MARK + pickle.GLOBAL + modname + '\n' + name + '\n')
self.memoize(obj)
self.save(obj.__dict__)
write(pickle.TUPLE + pickle.REDUCE)
else:
write(pickle.GLOBAL + modname + '\n' + name + '\n')
self.memoize(obj)
dispatch[types.FunctionType] = save_function
def save_function_tuple(self, func, forced_imports):
""" Pickles an actual func object.
A func comprises: code, globals, defaults, closure, and dict. We
extract and save these, injecting reducing functions at certain points
to recreate the func object. Keep in mind that some of these pieces
can contain a ref to the func itself. Thus, a naive save on these
pieces could trigger an infinite loop of save's. To get around that,
we first create a skeleton func object using just the code (this is
safe, since this won't contain a ref to the func), and memoize it as
soon as it's created. The other stuff can then be filled in later.
"""
save = self.save
write = self.write
# save the modules (if any)
if forced_imports:
write(pickle.MARK)
save(_modules_to_main)
#print 'forced imports are', forced_imports
forced_names = map(lambda m: m.__name__, forced_imports)
save((forced_names,))
#save((forced_imports,))
write(pickle.REDUCE)
write(pickle.POP_MARK)
code, f_globals, defaults, closure, dct, base_globals = self.extract_func_data(func)
save(_fill_function) # skeleton function updater
write(pickle.MARK) # beginning of tuple that _fill_function expects
# create a skeleton function object and memoize it
save(_make_skel_func)
save((code, len(closure), base_globals))
write(pickle.REDUCE)
self.memoize(func)
# save the rest of the func data needed by _fill_function
save(f_globals)
save(defaults)
save(closure)
save(dct)
write(pickle.TUPLE)
write(pickle.REDUCE) # applies _fill_function on the tuple
@staticmethod
def extract_code_globals(co):
"""
Find all globals names read or written to by codeblock co
"""
code = co.co_code
names = co.co_names
out_names = set()
n = len(code)
i = 0
extended_arg = 0
while i < n:
op = code[i]
i = i+1
if op >= HAVE_ARGUMENT:
oparg = ord(code[i]) + ord(code[i+1])*256 + extended_arg
extended_arg = 0
i = i+2
if op == EXTENDED_ARG:
extended_arg = oparg*65536L
if op in GLOBAL_OPS:
out_names.add(names[oparg])
#print 'extracted', out_names, ' from ', names
return out_names
def extract_func_data(self, func):
"""
Turn the function into a tuple of data necessary to recreate it:
code, globals, defaults, closure, dict
"""
code = func.func_code
# extract all global ref's
func_global_refs = CloudPickler.extract_code_globals(code)
if code.co_consts: # see if nested function have any global refs
for const in code.co_consts:
if type(const) is types.CodeType and const.co_names:
func_global_refs = func_global_refs.union( CloudPickler.extract_code_globals(const))
# process all variables referenced by global environment
f_globals = {}
for var in func_global_refs:
#Some names, such as class functions are not global - we don't need them
if func.func_globals.has_key(var):
f_globals[var] = func.func_globals[var]
# defaults requires no processing
defaults = func.func_defaults
def get_contents(cell):
try:
return cell.cell_contents
except ValueError, e: #cell is empty error on not yet assigned
raise pickle.PicklingError('Function to be pickled has free variables that are referenced before assignment in enclosing scope')
# process closure
if func.func_closure:
closure = map(get_contents, func.func_closure)
else:
closure = []
# save the dict
dct = func.func_dict
if printSerialization:
outvars = ['code: ' + str(code) ]
outvars.append('globals: ' + str(f_globals))
outvars.append('defaults: ' + str(defaults))
outvars.append('closure: ' + str(closure))
print 'function ', func, 'is extracted to: ', ', '.join(outvars)
base_globals = self.globals_ref.get(id(func.func_globals), {})
self.globals_ref[id(func.func_globals)] = base_globals
return (code, f_globals, defaults, closure, dct, base_globals)
def save_global(self, obj, name=None, pack=struct.pack):
write = self.write
memo = self.memo
if name is None:
name = obj.__name__
modname = getattr(obj, "__module__", None)
if modname is None:
modname = pickle.whichmodule(obj, name)
try:
__import__(modname)
themodule = sys.modules[modname]
except (ImportError, KeyError, AttributeError): #should never occur
raise pickle.PicklingError(
"Can't pickle %r: Module %s cannot be found" %
(obj, modname))
if modname == '__main__':
themodule = None
if themodule:
self.modules.add(themodule)
sendRef = True
typ = type(obj)
#print 'saving', obj, typ
try:
try: #Deal with case when getattribute fails with exceptions
klass = getattr(themodule, name)
except (AttributeError):
                if modname == '__builtin__': #new.* are misreported
modname = 'new'
__import__(modname)
themodule = sys.modules[modname]
try:
klass = getattr(themodule, name)
except AttributeError, a:
#print themodule, name, obj, type(obj)
raise pickle.PicklingError("Can't pickle builtin %s" % obj)
else:
raise
except (ImportError, KeyError, AttributeError):
if typ == types.TypeType or typ == types.ClassType:
sendRef = False
else: #we can't deal with this
raise
else:
if klass is not obj and (typ == types.TypeType or typ == types.ClassType):
sendRef = False
if not sendRef:
#note: Third party types might crash this - add better checks!
d = dict(obj.__dict__) #copy dict proxy to a dict
if not isinstance(d.get('__dict__', None), property): # don't extract dict that are properties
d.pop('__dict__',None)
d.pop('__weakref__',None)
# hack as __new__ is stored differently in the __dict__
new_override = d.get('__new__', None)
if new_override:
d['__new__'] = obj.__new__
self.save_reduce(type(obj),(obj.__name__,obj.__bases__,
d),obj=obj)
#print 'internal reduce dask %s %s' % (obj, d)
return
if self.proto >= 2:
code = _extension_registry.get((modname, name))
if code:
assert code > 0
if code <= 0xff:
write(pickle.EXT1 + chr(code))
elif code <= 0xffff:
write("%c%c%c" % (pickle.EXT2, code&0xff, code>>8))
else:
write(pickle.EXT4 + pack("<i", code))
return
write(pickle.GLOBAL + modname + '\n' + name + '\n')
self.memoize(obj)
dispatch[types.ClassType] = save_global
dispatch[types.BuiltinFunctionType] = save_global
dispatch[types.TypeType] = save_global
def save_instancemethod(self, obj):
        #Memoization is rarely ever useful due to python method binding
self.save_reduce(types.MethodType, (obj.im_func, obj.im_self,obj.im_class), obj=obj)
dispatch[types.MethodType] = save_instancemethod
def save_inst_logic(self, obj):
"""Inner logic to save instance. Based off pickle.save_inst
Supports __transient__"""
cls = obj.__class__
memo = self.memo
write = self.write
save = self.save
if hasattr(obj, '__getinitargs__'):
args = obj.__getinitargs__()
len(args) # XXX Assert it's a sequence
pickle._keep_alive(args, memo)
else:
args = ()
write(pickle.MARK)
if self.bin:
save(cls)
for arg in args:
save(arg)
write(pickle.OBJ)
else:
for arg in args:
save(arg)
write(pickle.INST + cls.__module__ + '\n' + cls.__name__ + '\n')
self.memoize(obj)
try:
getstate = obj.__getstate__
except AttributeError:
stuff = obj.__dict__
#remove items if transient
if hasattr(obj, '__transient__'):
transient = obj.__transient__
stuff = stuff.copy()
for k in list(stuff.keys()):
if k in transient:
del stuff[k]
else:
stuff = getstate()
pickle._keep_alive(stuff, memo)
save(stuff)
write(pickle.BUILD)
def save_inst(self, obj):
# Hack to detect PIL Image instances without importing Imaging
# PIL can be loaded with multiple names, so we don't check sys.modules for it
if hasattr(obj,'im') and hasattr(obj,'palette') and 'Image' in obj.__module__:
self.save_image(obj)
else:
self.save_inst_logic(obj)
dispatch[types.InstanceType] = save_inst
def save_property(self, obj):
# properties not correctly saved in python
self.save_reduce(property, (obj.fget, obj.fset, obj.fdel, obj.__doc__), obj=obj)
dispatch[property] = save_property
def save_itemgetter(self, obj):
"""itemgetter serializer (needed for namedtuple support)
a bit of a pain as we need to read ctypes internals"""
class ItemGetterType(ctypes.Structure):
_fields_ = PyObject_HEAD + [
('nitems', ctypes.c_size_t),
('item', ctypes.py_object)
]
itemgetter_obj = ctypes.cast(ctypes.c_void_p(id(obj)), ctypes.POINTER(ItemGetterType)).contents
return self.save_reduce(operator.itemgetter, (itemgetter_obj.item,))
if PyObject_HEAD:
dispatch[operator.itemgetter] = save_itemgetter
def save_reduce(self, func, args, state=None,
listitems=None, dictitems=None, obj=None):
"""Modified to support __transient__ on new objects
        Change only affects protocol level 2 (which is always used by PiCloud)"""
# Assert that args is a tuple or None
if not isinstance(args, types.TupleType):
raise pickle.PicklingError("args from reduce() should be a tuple")
# Assert that func is callable
if not hasattr(func, '__call__'):
raise pickle.PicklingError("func from reduce should be callable")
save = self.save
write = self.write
# Protocol 2 special case: if func's name is __newobj__, use NEWOBJ
if self.proto >= 2 and getattr(func, "__name__", "") == "__newobj__":
#Added fix to allow transient
cls = args[0]
if not hasattr(cls, "__new__"):
raise pickle.PicklingError(
"args[0] from __newobj__ args has no __new__")
if obj is not None and cls is not obj.__class__:
raise pickle.PicklingError(
"args[0] from __newobj__ args has the wrong class")
args = args[1:]
save(cls)
#Don't pickle transient entries
if hasattr(obj, '__transient__'):
transient = obj.__transient__
state = state.copy()
for k in list(state.keys()):
if k in transient:
del state[k]
save(args)
write(pickle.NEWOBJ)
else:
save(func)
save(args)
write(pickle.REDUCE)
if obj is not None:
self.memoize(obj)
# More new special cases (that work with older protocols as
# well): when __reduce__ returns a tuple with 4 or 5 items,
# the 4th and 5th item should be iterators that provide list
# items and dict items (as (key, value) tuples), or None.
if listitems is not None:
self._batch_appends(listitems)
if dictitems is not None:
self._batch_setitems(dictitems)
if state is not None:
#print 'obj %s has state %s' % (obj, state)
save(state)
write(pickle.BUILD)
def save_xrange(self, obj):
"""Save an xrange object in python 2.5
Python 2.6 supports this natively
"""
range_params = xrange_params(obj)
self.save_reduce(_build_xrange,range_params)
#python2.6+ supports xrange pickling. some py2.5 extensions might as well. We just test it
try:
xrange(0).__reduce__()
except TypeError: #can't pickle -- use PiCloud pickler
dispatch[xrange] = save_xrange
def save_partial(self, obj):
"""Partial objects do not serialize correctly in python2.x -- this fixes the bugs"""
self.save_reduce(_genpartial, (obj.func, obj.args, obj.keywords))
if sys.version_info < (2,7): #2.7 supports partial pickling
dispatch[partial] = save_partial
def save_file(self, obj):
"""Save a file"""
import StringIO as pystringIO #we can't use cStringIO as it lacks the name attribute
from ..transport.adapter import SerializingAdapter
if not hasattr(obj, 'name') or not hasattr(obj, 'mode'):
raise pickle.PicklingError("Cannot pickle files that do not map to an actual file")
if obj.name == '<stdout>':
return self.save_reduce(getattr, (sys,'stdout'), obj=obj)
if obj.name == '<stderr>':
return self.save_reduce(getattr, (sys,'stderr'), obj=obj)
if obj.name == '<stdin>':
raise pickle.PicklingError("Cannot pickle standard input")
if hasattr(obj, 'isatty') and obj.isatty():
raise pickle.PicklingError("Cannot pickle files that map to tty objects")
if 'r' not in obj.mode:
raise pickle.PicklingError("Cannot pickle files that are not opened for reading")
name = obj.name
try:
fsize = os.stat(name).st_size
except OSError:
raise pickle.PicklingError("Cannot pickle file %s as it cannot be stat" % name)
if obj.closed:
#create an empty closed string io
retval = pystringIO.StringIO("")
retval.close()
elif not fsize: #empty file
retval = pystringIO.StringIO("")
try:
tmpfile = file(name)
tst = tmpfile.read(1)
except IOError:
raise pickle.PicklingError("Cannot pickle file %s as it cannot be read" % name)
tmpfile.close()
if tst != '':
raise pickle.PicklingError("Cannot pickle file %s as it does not appear to map to a physical, real file" % name)
elif fsize > SerializingAdapter.max_transmit_data:
raise pickle.PicklingError("Cannot pickle file %s as it exceeds cloudconf.py's max_transmit_data of %d" %
(name,SerializingAdapter.max_transmit_data))
else:
try:
tmpfile = file(name)
contents = tmpfile.read(SerializingAdapter.max_transmit_data)
tmpfile.close()
except IOError:
raise pickle.PicklingError("Cannot pickle file %s as it cannot be read" % name)
retval = pystringIO.StringIO(contents)
curloc = obj.tell()
retval.seek(curloc)
retval.name = name
self.save(retval) #save stringIO
self.memoize(obj)
dispatch[file] = save_file
"""Special functions for Add-on libraries"""
def inject_numpy(self):
numpy = sys.modules.get('numpy')
if not numpy or not hasattr(numpy, 'ufunc'):
return
self.dispatch[numpy.ufunc] = self.__class__.save_ufunc
numpy_tst_mods = ['numpy', 'scipy.special']
def save_ufunc(self, obj):
"""Hack function for saving numpy ufunc objects"""
name = obj.__name__
for tst_mod_name in self.numpy_tst_mods:
tst_mod = sys.modules.get(tst_mod_name, None)
if tst_mod:
if name in tst_mod.__dict__:
self.save_reduce(_getobject, (tst_mod_name, name))
return
raise pickle.PicklingError('cannot save %s. Cannot resolve what module it is defined in' % str(obj))
def inject_timeseries(self):
"""Handle bugs with pickling scikits timeseries"""
tseries = sys.modules.get('scikits.timeseries.tseries')
if not tseries or not hasattr(tseries, 'Timeseries'):
return
self.dispatch[tseries.Timeseries] = self.__class__.save_timeseries
def save_timeseries(self, obj):
import scikits.timeseries.tseries as ts
func, reduce_args, state = obj.__reduce__()
if func != ts._tsreconstruct:
raise pickle.PicklingError('timeseries using unexpected reconstruction function %s' % str(func))
state = (1,
obj.shape,
obj.dtype,
obj.flags.fnc,
obj._data.tostring(),
ts.getmaskarray(obj).tostring(),
obj._fill_value,
obj._dates.shape,
obj._dates.__array__().tostring(),
obj._dates.dtype, #added -- preserve type
obj.freq,
obj._optinfo,
)
return self.save_reduce(_genTimeSeries, (reduce_args, state))
def inject_email(self):
"""Block email LazyImporters from being saved"""
email = sys.modules.get('email')
if not email:
return
self.dispatch[email.LazyImporter] = self.__class__.save_unsupported
def inject_addons(self):
"""Plug in system. Register additional pickling functions if modules already loaded"""
self.inject_numpy()
self.inject_timeseries()
self.inject_email()
"""Python Imaging Library"""
def save_image(self, obj):
if not obj.im and obj.fp and 'r' in obj.fp.mode and obj.fp.name \
and not obj.fp.closed and (not hasattr(obj, 'isatty') or not obj.isatty()):
#if image not loaded yet -- lazy load
self.save_reduce(_lazyloadImage,(obj.fp,), obj=obj)
else:
#image is loaded - just transmit it over
self.save_reduce(_generateImage, (obj.size, obj.mode, obj.tostring()), obj=obj)
"""
def memoize(self, obj):
pickle.Pickler.memoize(self, obj)
if printMemoization:
print 'memoizing ' + str(obj)
"""
# Shorthands for legacy support
def dump(obj, file, protocol=2):
CloudPickler(file, protocol).dump(obj)
def dumps(obj, protocol=2):
file = StringIO()
cp = CloudPickler(file,protocol)
cp.dump(obj)
#print 'cloud dumped', str(obj), str(cp.modules)
return file.getvalue()
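
# Editor's illustrative sketch (hedged): round-trip a closure through dumps()
# above and load it back with the stdlib unpickler. Assumes a Python 2 runtime
# and that this module is importable under its original path on the loading
# side, since the pickle stream references its reconstruction helpers by name.
def _example_dumps_roundtrip():
    import pickle as _stdlib_pickle

    def make_adder(n):
        def add(x):
            return x + n
        return add

    payload = dumps(make_adder(5))            # serialize function + closure cell
    restored = _stdlib_pickle.loads(payload)  # plain pickle can load it back
    assert restored(2) == 7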
#hack for __import__ not working as desired
def subimport(name):
__import__(name)
return sys.modules[name]
#hack to load django settings:
def django_settings_load(name):
modified_env = False
if 'DJANGO_SETTINGS_MODULE' not in os.environ:
os.environ['DJANGO_SETTINGS_MODULE'] = name # must set name first due to circular deps
modified_env = True
try:
module = subimport(name)
except Exception, i:
        print >> sys.stderr, 'Could not import django settings %s:' % (name)
print_exec(sys.stderr)
if modified_env:
del os.environ['DJANGO_SETTINGS_MODULE']
else:
        #add project directory to sys.path:
if hasattr(module,'__file__'):
dirname = os.path.split(module.__file__)[0] + '/'
sys.path.append(dirname)
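
# Editor's illustrative sketch (the module name below is hypothetical):
#   django_settings_load('myproject.settings')
# sets DJANGO_SETTINGS_MODULE if it is unset, imports the module, and appends
# the project directory to sys.path so intra-project imports resolve.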
# restores function attributes
def _restore_attr(obj, attr):
for key, val in attr.items():
setattr(obj, key, val)
return obj
def _get_module_builtins():
return pickle.__builtins__
def print_exec(stream):
ei = sys.exc_info()
traceback.print_exception(ei[0], ei[1], ei[2], None, stream)
def _modules_to_main(modList):
"""Force every module in modList to be placed into main"""
if not modList:
return
main = sys.modules['__main__']
for modname in modList:
if type(modname) is str:
try:
mod = __import__(modname)
except Exception, i: #catch all...
                sys.stderr.write('warning: could not import %s.\nYour function may unexpectedly error due to this import failing; \
a version mismatch is likely. Specific error was:\n' % modname)
print_exec(sys.stderr)
else:
setattr(main,mod.__name__, mod)
else:
#REVERSE COMPATIBILITY FOR CLOUD CLIENT 1.5 (WITH EPD)
#In old version actual module was sent
setattr(main,modname.__name__, modname)
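
# Editor's illustrative sketch: _modules_to_main(['math']) re-imports math and
# binds it on __main__, mirroring a top-level import from the sender's
# session; a failed import only emits the warning above instead of raising.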
#object generators:
def _build_xrange(start, step, len):
"""Built xrange explicitly"""
return xrange(start, start + step*len, step)
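
def _example_build_xrange():
    # Editor's illustrative sketch: _build_xrange(2, 3, 4) rebuilds
    # xrange(2, 14, 3) -- the values 2, 5, 8, 11 -- from the same
    # (start, step, len) triple that xrange_params() extracts when pickling.
    assert list(_build_xrange(2, 3, 4)) == [2, 5, 8, 11]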
def _genpartial(func, args, kwds):
if not args:
args = ()
if not kwds:
kwds = {}
return partial(func, *args, **kwds)
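
def _example_genpartial():
    # Editor's illustrative sketch: _genpartial rebuilds the functools.partial
    # that save_partial (above) deconstructed into (func, args, keywords).
    inc = _genpartial(lambda x, y: x + y, (1,), None)
    assert inc(2) == 3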
def _fill_function(func, globals, defaults, closure, dict):
""" Fills in the rest of function data into the skeleton function object
that were created via _make_skel_func().
"""
func.func_globals.update(globals)
func.func_defaults = defaults
func.func_dict = dict
if len(closure) != len(func.func_closure):
raise pickle.UnpicklingError("closure lengths don't match up")
for i in range(len(closure)):
_change_cell_value(func.func_closure[i], closure[i])
return func
def _make_skel_func(code, num_closures, base_globals = None):
""" Creates a skeleton function object that contains just the provided
code and the correct number of cells in func_closure. All other
func attributes (e.g. func_globals) are empty.
"""
#build closure (cells):
if not ctypes:
raise Exception('ctypes failed to import; cannot build function')
cellnew = ctypes.pythonapi.PyCell_New
cellnew.restype = ctypes.py_object
cellnew.argtypes = (ctypes.py_object,)
dummy_closure = tuple(map(lambda i: cellnew(None), range(num_closures)))
if base_globals is None:
base_globals = {}
base_globals['__builtins__'] = __builtins__
return types.FunctionType(code, base_globals,
None, None, dummy_closure)
# this piece of opaque code is needed below to modify 'cell' contents
cell_changer_code = new.code(
1, 1, 2, 0,
''.join([
chr(dis.opmap['LOAD_FAST']), '\x00\x00',
chr(dis.opmap['DUP_TOP']),
chr(dis.opmap['STORE_DEREF']), '\x00\x00',
chr(dis.opmap['RETURN_VALUE'])
]),
(), (), ('newval',), '<nowhere>', 'cell_changer', 1, '', ('c',), ()
)
def _change_cell_value(cell, newval):
""" Changes the contents of 'cell' object to newval """
return new.function(cell_changer_code, {}, None, (), (cell,))(newval)
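
def _example_rebuild_closure():
    # Editor's illustrative sketch (CPython 2.x only; requires ctypes): rebuild
    # a one-cell closure by hand with the helpers above -- the same steps the
    # unpickler performs: build a skeleton, then fill it in, which pours the
    # captured value into the dummy cell via _change_cell_value.
    def outer():
        n = 5
        def add(x):
            return x + n
        return add
    original = outer()
    skel = _make_skel_func(original.func_code, 1)
    rebuilt = _fill_function(skel, {}, None, [5], {})
    assert rebuilt(2) == 7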
"""Constructors for 3rd party libraries
Note: These can never be renamed due to client compatibility issues"""
def _getobject(modname, attribute):
mod = __import__(modname, fromlist=[attribute])
return mod.__dict__[attribute]
def _generateImage(size, mode, str_rep):
"""Generate image from string representation"""
import Image
i = Image.new(mode, size)
i.fromstring(str_rep)
return i
def _lazyloadImage(fp):
import Image
fp.seek(0) #works in almost any case
return Image.open(fp)
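
# Editor's illustrative sketch (assumes PIL is installed; values are made up):
#   img = _generateImage((2, 2), 'L', '\x00\x01\x02\x03')  # 2x2 greyscale
#   img.size  ->  (2, 2)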
"""Timeseries"""
def _genTimeSeries(reduce_args, state):
import scikits.timeseries.tseries as ts
from numpy import ndarray
from numpy.ma import MaskedArray
time_series = ts._tsreconstruct(*reduce_args)
#from setstate modified
(ver, shp, typ, isf, raw, msk, flv, dsh, dtm, dtyp, frq, infodict) = state
#print 'regenerating %s' % dtyp
MaskedArray.__setstate__(time_series, (ver, shp, typ, isf, raw, msk, flv))
_dates = time_series._dates
#_dates.__setstate__((ver, dsh, typ, isf, dtm, frq)) #use remote typ
ndarray.__setstate__(_dates,(dsh,dtyp, isf, dtm))
_dates.freq = frq
_dates._cachedinfo.update(dict(full=None, hasdups=None, steps=None,
toobj=None, toord=None, tostr=None))
# Update the _optinfo dictionary
time_series._optinfo.update(infodict)
return time_series
| 36.605128
| 144
| 0.614934
|
0d382c25e9f1d99c7e735f65c9d5a0b9b34361fe
| 590
|
py
|
Python
|
grayskull/cli/parser.py
|
BastianZim/grayskull
|
96eb516d1d87217de01a13a4993d1d85e97e8151
|
[
"Apache-2.0"
] | 86
|
2020-10-14T10:42:12.000Z
|
2022-03-27T08:01:53.000Z
|
grayskull/cli/parser.py
|
BastianZim/grayskull
|
96eb516d1d87217de01a13a4993d1d85e97e8151
|
[
"Apache-2.0"
] | 118
|
2020-10-14T13:35:52.000Z
|
2022-03-28T07:40:16.000Z
|
grayskull/cli/parser.py
|
BastianZim/grayskull
|
96eb516d1d87217de01a13a4993d1d85e97e8151
|
[
"Apache-2.0"
] | 19
|
2020-10-15T10:02:36.000Z
|
2022-03-27T21:17:59.000Z
|
import re
from typing import Optional, Tuple
from grayskull.utils import origin_is_github
def parse_pkg_name_version(pkg_name: str) -> Tuple[str, Optional[str]]:
origin = ""
if origin_is_github(pkg_name):
origin, pkg_name = pkg_name.rsplit("/", 1)
origin += "/"
if pkg_name.endswith(".git"):
pkg_name = pkg_name[:-4]
pkg = re.match(r"([a-zA-Z0-9\-_\.]+)=+([a-zA-Z0-9\-_\.]+)", pkg_name)
if pkg:
pkg_name = origin + pkg.group(1)
version = pkg.group(2)
return pkg_name, version
return origin + pkg_name, None
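
if __name__ == "__main__":
    # Editor's illustrative sketch, not part of grayskull itself: quick manual
    # checks of the parser above. The GitHub case assumes origin_is_github()
    # recognises full github.com URLs.
    assert parse_pkg_name_version("pytest==7.1.2") == ("pytest", "7.1.2")
    assert parse_pkg_name_version("requests") == ("requests", None)
    assert parse_pkg_name_version(
        "https://github.com/conda/grayskull.git"
    ) == ("https://github.com/conda/grayskull", None)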
| 29.5
| 73
| 0.608475
|
4af0bac5c5dc76498c6acd9693607ba1d0246fee
| 359
|
py
|
Python
|
services/connectors/fronius_solar_api_connector/source/connector/tests/test_main.py
|
fzi-forschungszentrum-informatik/BEMCom
|
0a0c359d889c6d5975e4d4d3b17c24adb5bf883b
|
[
"MIT"
] | 4
|
2021-09-10T09:46:18.000Z
|
2021-12-05T17:55:14.000Z
|
services/connectors/fronius_solar_api_connector/source/connector/tests/test_main.py
|
fzi-forschungszentrum-informatik/BEMCom
|
0a0c359d889c6d5975e4d4d3b17c24adb5bf883b
|
[
"MIT"
] | null | null | null |
services/connectors/fronius_solar_api_connector/source/connector/tests/test_main.py
|
fzi-forschungszentrum-informatik/BEMCom
|
0a0c359d889c6d5975e4d4d3b17c24adb5bf883b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Place tests for the connector-specific methods here.
"""
import unittest
import pytest
from ..main import Connector
class TestReceiveRawMsg(unittest.TestCase):
    def test_dummy(self):
pass
class TestParseRawMsg(unittest.TestCase):
pass
class TestSendCommand(unittest.TestCase):
pass
| 14.958333
| 52
| 0.715877
|
e7f3418fb110b65ed7149dbba541754e5441bf77
| 722
|
py
|
Python
|
python/lib/Lib/site-packages/django/conf/locale/is/formats.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 790
|
2015-01-03T02:13:39.000Z
|
2020-05-10T19:53:57.000Z
|
django/conf/locale/is/formats.py
|
mradziej/django
|
5d38965743a369981c9a738a298f467f854a2919
|
[
"BSD-3-Clause"
] | 1,361
|
2015-01-08T23:09:40.000Z
|
2020-04-14T00:03:04.000Z
|
django/conf/locale/is/formats.py
|
mradziej/django
|
5d38965743a369981c9a738a298f467f854a2919
|
[
"BSD-3-Clause"
] | 155
|
2015-01-08T22:59:31.000Z
|
2020-04-08T08:01:53.000Z
|
# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j. F Y'
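# e.g. "1. janúar 2017" (editor's note: an illustrative rendering with
# Icelandic month names)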
TIME_FORMAT = 'H:i:s'
# DATETIME_FORMAT =
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'j.n.Y'
# SHORT_DATETIME_FORMAT =
# FIRST_DAY_OF_WEEK =
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# DATE_INPUT_FORMATS =
# TIME_INPUT_FORMATS =
# DATETIME_INPUT_FORMATS =
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
# NUMBER_GROUPING =
| 30.083333
| 77
| 0.742382
|
9d3682a49f2d80511e50c5f1d407a1da6fbd1b09
| 627
|
py
|
Python
|
influxdb_metrics/tasks.py
|
realdanurbano/django-influxdb-metrics
|
ec30609394b81b80ad95025dac84419a0110f8df
|
[
"MIT"
] | 54
|
2016-11-25T10:00:23.000Z
|
2022-03-17T09:27:49.000Z
|
influxdb_metrics/tasks.py
|
realdanurbano/django-influxdb-metrics
|
ec30609394b81b80ad95025dac84419a0110f8df
|
[
"MIT"
] | 27
|
2016-12-01T17:35:37.000Z
|
2021-03-30T16:37:49.000Z
|
influxdb_metrics/tasks.py
|
realdanurbano/django-influxdb-metrics
|
ec30609394b81b80ad95025dac84419a0110f8df
|
[
"MIT"
] | 23
|
2016-11-22T09:26:28.000Z
|
2022-03-14T11:34:33.000Z
|
"""Celery tasks for the influxdb_metrics app."""
from __future__ import absolute_import
try:
from celery import shared_task
except ImportError:
    # No-op fallback so the module stays importable without Celery installed.
    # It must accept the @shared_task(ignore_result=True) call form used below
    # as well as the bare @shared_task form.
    def shared_task(*args, **kwargs):
        if len(args) == 1 and callable(args[0]) and not kwargs:
            return args[0]

        def decorator(func):
            return func

        return decorator
from .utils import write_points as write_points_normal
@shared_task(ignore_result=True)
def write_points(data, name='influxdb_metrics.tasks.write_points'):
"""
Wrapper around `utils.write_points`.
If you use this, make sure to set `INFLUXDB_USE_THREADING = False`
"""
write_points_normal(data, force_disable_threading=True)
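
# Editor's illustrative sketch (the measurement and field names below are made
# up, not part of this package):
#
#   write_points.delay([{
#       "measurement": "cpu_load",
#       "fields": {"value": 0.64},
#   }])
#
# With Celery installed this enqueues the write asynchronously; the payload
# format is whatever `utils.write_points` / the InfluxDB client expects.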
| 26.125
| 70
| 0.722488
|
8b3a7f77ca65767ce6ab8803e219796695a413ed
| 382
|
py
|
Python
|
view.py
|
Jerohlee/OOP-1-2
|
541bd7279b98013352e7b1c02ed0f86e7591669f
|
[
"Apache-2.0"
] | null | null | null |
view.py
|
Jerohlee/OOP-1-2
|
541bd7279b98013352e7b1c02ed0f86e7591669f
|
[
"Apache-2.0"
] | null | null | null |
view.py
|
Jerohlee/OOP-1-2
|
541bd7279b98013352e7b1c02ed0f86e7591669f
|
[
"Apache-2.0"
] | null | null | null |
import pyodbc
try:
    # Connect to the local Access database through the Microsoft Access ODBC driver.
    connect = pyodbc.connect(r'Driver={Microsoft Access Driver (*.mdb, *.accdb)};DBQ=C:\Users\lee\Documents\Database01.accdb;')
    print("Database is Connected")
    database = connect.cursor()
    database.execute('SELECT * FROM Table1')
    for x in database.fetchall():
        print(x)
    database.close()
    connect.close()
except pyodbc.Error as error:
    print("Error in Connection:", error)
| 25.466667
| 128
| 0.65445
|
6e0009e2a01e21e02a39b922aab0a23389fddb3c
| 7,700
|
py
|
Python
|
test/terra/extensions/test_snapshot_expectation_value.py
|
jakelishman/qiskit-aer
|
7512ecede820e0d2bc7ad7b6704bcf06a861ca3a
|
[
"Apache-2.0"
] | null | null | null |
test/terra/extensions/test_snapshot_expectation_value.py
|
jakelishman/qiskit-aer
|
7512ecede820e0d2bc7ad7b6704bcf06a861ca3a
|
[
"Apache-2.0"
] | null | null | null |
test/terra/extensions/test_snapshot_expectation_value.py
|
jakelishman/qiskit-aer
|
7512ecede820e0d2bc7ad7b6704bcf06a861ca3a
|
[
"Apache-2.0"
] | null | null | null |
# This code is part of Qiskit.
#
# (C) Copyright IBM 2018, 2019.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
import unittest
import numpy
from qiskit import QuantumCircuit, assemble
from qiskit.extensions.exceptions import ExtensionError
from qiskit.providers.aer.extensions.snapshot_expectation_value import SnapshotExpectationValue
from qiskit.quantum_info.operators import Pauli, Operator
from ..common import QiskitAerTestCase
class TestSnapshotExpectationValueExtension(QiskitAerTestCase):
"""SnapshotExpectationValue extension tests"""
@staticmethod
def snapshot_circuit_instr(circ_qubits, label, op, qubits, single_shot=False, variance=False):
"""Return QobjInstruction for circuit monkey patch method."""
circuit = QuantumCircuit(circ_qubits)
circuit.snapshot_expectation_value(label, op, qubits,
single_shot=single_shot,
variance=variance)
qobj = assemble(circuit)
instr = qobj.experiments[0].instructions[0]
return instr
def test_snapshot_label_raises(self):
"""Test snapshot label must be str"""
self.assertRaises(ExtensionError, lambda: SnapshotExpectationValue(1, [[1, 'X']]))
def test_snapshot_name(self):
"""Test snapshot instruction has correct name"""
for op in [Pauli('X'), Operator([[0, 1], [1, 0]])]:
instrs = [
SnapshotExpectationValue('snap', op).assemble(),
self.snapshot_circuit_instr(1, 'snap', op, [0])
]
for instr in instrs:
self.assertTrue(hasattr(instr, 'name'))
self.assertEqual(instr.name, 'snapshot')
def test_snapshot_label(self):
"""Test snapshot instruction has correct label"""
for op in [Pauli('X'), Operator([[0, 1], [1, 0]])]:
for label in ['snap0', 'snap1']:
instrs = [
SnapshotExpectationValue(label, op).assemble(),
self.snapshot_circuit_instr(1, label, op, [0])
]
for instr in instrs:
self.assertTrue(hasattr(instr, 'label'))
self.assertEqual(instr.label, label)
def test_snapshot_pauli_type(self):
"""Test snapshot instruction has correct type."""
pauli_ops = [
[[1, 'I'], [0.5, 'X'], [0.25, 'Y'], [-3, 'Z']],
[[1j, 'I'], [0.5j, 'X'], [0.25j, 'Y'], [-3j, 'Z']],
[[0.5j, Pauli('X')], [-0.5j, Pauli('Z')]]
]
for op in pauli_ops:
# standard
instrs = [
SnapshotExpectationValue('snap', op,
single_shot=False,
variance=False).assemble(),
self.snapshot_circuit_instr(1, 'snap', op, [0],
single_shot=False,
variance=False)
]
for instr in instrs:
self.assertTrue(hasattr(instr, 'snapshot_type'))
self.assertEqual(instr.snapshot_type, 'expectation_value_pauli')
# Single shot
instrs = [
SnapshotExpectationValue('snap', op,
single_shot=True,
variance=False).assemble(),
self.snapshot_circuit_instr(1, 'snap', op, [0],
single_shot=True,
variance=False)
]
for instr in instrs:
self.assertTrue(hasattr(instr, 'snapshot_type'))
self.assertEqual(instr.snapshot_type, 'expectation_value_pauli_single_shot')
# Variance
with self.assertWarns(DeprecationWarning):
instrs = [
SnapshotExpectationValue('snap', op,
single_shot=False,
variance=True).assemble(),
self.snapshot_circuit_instr(1, 'snap', op, [0],
single_shot=False,
variance=True)
]
for instr in instrs:
self.assertTrue(hasattr(instr, 'snapshot_type'))
self.assertEqual(instr.snapshot_type, 'expectation_value_pauli_with_variance')
def test_snapshot_matrix_type(self):
"""Test snapshot instruction has correct type."""
matrix_ops = [
numpy.eye(2),
numpy.array([[0, 1j], [-1j, 0]]),
Operator(Pauli('Z'))
]
for op in matrix_ops:
# standard
instrs = [
SnapshotExpectationValue('snap', op,
single_shot=False,
variance=False).assemble(),
self.snapshot_circuit_instr(1, 'snap', op, [0],
single_shot=False,
variance=False)
]
for instr in instrs:
self.assertTrue(hasattr(instr, 'snapshot_type'))
self.assertEqual(instr.snapshot_type, 'expectation_value_matrix')
# Single shot
instrs = [
SnapshotExpectationValue('snap', op,
single_shot=True,
variance=False).assemble(),
self.snapshot_circuit_instr(1, 'snap', op, [0],
single_shot=True,
variance=False)
]
for instr in instrs:
self.assertTrue(hasattr(instr, 'snapshot_type'))
self.assertEqual(instr.snapshot_type, 'expectation_value_matrix_single_shot')
# Variance
with self.assertWarns(DeprecationWarning):
instrs = [
SnapshotExpectationValue('snap', op,
single_shot=False,
variance=True).assemble(),
self.snapshot_circuit_instr(1, 'snap', op, [0],
single_shot=False,
variance=True)
]
for instr in instrs:
self.assertTrue(hasattr(instr, 'snapshot_type'))
self.assertEqual(instr.snapshot_type, 'expectation_value_matrix_with_variance')
def test_snapshot_specific_qubits(self):
"""Test snapshot instruction has correct qubits."""
for qubits in [[0], [0, 2], [1, 3, 0]]:
pauli = Pauli(len(qubits) * 'X')
instrs = [
self.snapshot_circuit_instr(5, 'snap', pauli, qubits),
self.snapshot_circuit_instr(5, 'snap', Operator(pauli), qubits)
]
for instr in instrs:
self.assertTrue(hasattr(instr, 'qubits'))
self.assertEqual(instr.qubits, qubits)
if __name__ == '__main__':
unittest.main()
| 44.252874
| 98
| 0.512078
|
8ddaea067c2b82e63db129584819412e882af2c2
| 2,954
|
py
|
Python
|
tests/libs/embeds/test_display.py
|
izm51/obniz-python-sdk
|
40a738b5fe2c0a415cdc09f46d28c143982bfb07
|
[
"MIT"
] | 11
|
2019-03-22T12:02:11.000Z
|
2021-01-21T04:57:18.000Z
|
tests/libs/embeds/test_display.py
|
izm51/obniz-python-sdk
|
40a738b5fe2c0a415cdc09f46d28c143982bfb07
|
[
"MIT"
] | 5
|
2019-03-02T08:28:25.000Z
|
2021-02-02T22:06:37.000Z
|
tests/libs/embeds/test_display.py
|
izm51/obniz-python-sdk
|
40a738b5fe2c0a415cdc09f46d28c143982bfb07
|
[
"MIT"
] | 3
|
2019-07-20T06:55:09.000Z
|
2019-12-04T05:05:00.000Z
|
import pytest
from ...utils import assert_finished, assert_obniz, assert_send, receive_json
class TestDisplay:
def test_clear(self, obniz):
obniz.display.clear()
assert_obniz(obniz)
assert_send(obniz, [{
"display": {
"clear": True
}
}])
assert_finished(obniz)
@pytest.mark.parametrize('text', ["Hello World!"])
def test_print(self, obniz, text):
obniz.display.print(text)
assert_obniz(obniz)
assert_send(obniz, [{
"display": {
"text": text
}
}])
assert_finished(obniz)
@pytest.mark.parametrize('text', ["Hello World!"])
def test_qr_without_correction(self, obniz, text):
obniz.display.qr(text)
assert_obniz(obniz)
assert_send(obniz, [{
"display": {
"qr": {
"text": text,
"correction": "M"
}
}
}])
assert_finished(obniz)
@pytest.mark.parametrize(
'text,correction',
[
("Hello World!", "L"),
("Hello World!", "M"),
("Hello World!", "Q"),
("Hello World!", "H")
]
)
def test_qr(self, obniz, text, correction):
obniz.display.qr(text, correction)
assert_obniz(obniz)
assert_send(obniz, [{
"display": {
"qr": {
"text": text,
"correction": correction
}
}
}])
assert_finished(obniz)
@pytest.mark.parametrize('data', [[1] * 1024])
def test_raw(self, obniz, data):
obniz.display.raw(data)
assert_obniz(obniz)
assert_send(obniz, [{
"display": {
"raw": data
}
}])
assert_finished(obniz)
def test_set_pin_name(self, obniz):
obniz.display.set_pin_name(0, "io", "input")
assert_obniz(obniz)
assert_send(obniz, [{
"display": {
"pin_assign": {
"0": {
"module_name": "io",
"pin_name": "input"
}
}
}
}])
assert_finished(obniz)
def test_set_pin_names(self, obniz):
obniz.display.set_pin_names("io", {
0: "input",
1: "output"
})
assert_obniz(obniz)
assert_send(obniz, [{
"display": {
"pin_assign": {
"0": {
"module_name": 'io',
"pin_name": 'input',
},
"1": {
"module_name": 'io',
"pin_name": 'output',
},
},
}
}])
assert_finished(obniz)
| 27.100917
| 77
| 0.423155
|
98cebbfa6d729d6f2e6af6f65a7b69c75f2e5429
| 2,740
|
py
|
Python
|
examples/plot_offline_tracks.py
|
KM3NeT/km3io
|
4a4b1fbcb3eb3368e1b839f6f83c6335fbeae7f9
|
[
"MIT"
] | 2
|
2021-01-06T08:08:23.000Z
|
2022-02-28T09:12:25.000Z
|
examples/plot_offline_tracks.py
|
KM3NeT/km3io
|
4a4b1fbcb3eb3368e1b839f6f83c6335fbeae7f9
|
[
"MIT"
] | null | null | null |
examples/plot_offline_tracks.py
|
KM3NeT/km3io
|
4a4b1fbcb3eb3368e1b839f6f83c6335fbeae7f9
|
[
"MIT"
] | null | null | null |
"""
Reading Offline tracks
======================
The following example shows how to access tracks data in an offline ROOT file.
Note: the offline files used here were intentionally reduced to 10 events.
"""
import km3io as ki
from km3net_testdata import data_path
#####################################################
# We open the file using the OfflineReader:
f = ki.OfflineReader(data_path("offline/numucc.root"))
#####################################################
# To access offline tracks/mc_tracks data:
f.tracks
f.mc_tracks
#####################################################
# Note that no data is loaded in memory at this point, so printing
# tracks will only return how many sub-branches (corresponding to
# events) were found.
f.tracks
#####################################################
# same for MC tracks
f.mc_tracks
#####################################################
# Accessing the tracks/mc_tracks keys
# -----------------------------------
# to explore the reconstructed tracks fields:
f.tracks.fields
#####################################################
# the same for MC tracks
f.mc_tracks.fields
#####################################################
# Accessing tracks data
# ---------------------
# each field will return a nested `awkward.Array` and load everything into
# memory, so be careful if you are working with larger files.
f.tracks.E
######################################################
# The z direction of all reconstructed tracks
f.tracks.dir_z
######################################################
# The likelihoods
f.tracks.lik
#####################################################
# To select just a single event or a subset of events, use the indices or slices.
# The following will access all tracks and their fields
# of the third event (0 is the first):
f[2].tracks
######################################################
# The z direction of all tracks in the third event:
f[2].tracks.dir_z
#####################################################
# while here, we select the first 3 events. Notice that all fields will return
# nested arrays, as we have seen above where all events were selected.
f[:3]
######################################################
# All tracks for the first three events
f[:3].tracks
######################################################
# The z directions of all tracks of the first three events
f[:3].tracks.dir_z
#####################################################
# or the third to fifth events (indices 2 to 4; again, 0-indexed):
f[2:5]
######################################################
# the tracks of those events
f[2:5].tracks
######################################################
# and just the z directions of those
f[2:5].tracks.dir_z
| 24.909091
| 81
| 0.471898
|
f767e6b18a5cc361d2c359165e458599d83bc8e4
| 64
|
py
|
Python
|
ytmdl/__version__.py
|
jrejaud/ytmdl
|
39af7c6565795da2a8cc9ab969b38a3b384d4e58
|
[
"MIT"
] | null | null | null |
ytmdl/__version__.py
|
jrejaud/ytmdl
|
39af7c6565795da2a8cc9ab969b38a3b384d4e58
|
[
"MIT"
] | null | null | null |
ytmdl/__version__.py
|
jrejaud/ytmdl
|
39af7c6565795da2a8cc9ab969b38a3b384d4e58
|
[
"MIT"
] | null | null | null |
# Store the version of the package
__version__ = "2020.11.20-1"
| 21.333333
| 34
| 0.734375
|
0ab7e047fa7f3a6dcd867e5cbe10a32011765653
| 12,870
|
py
|
Python
|
klasses/models.py
|
mitodl/bootcamp-ecommerce
|
ba7d6aefe56c6481ae2a5afc84cdd644538b6d50
|
[
"BSD-3-Clause"
] | 2
|
2018-06-20T19:37:03.000Z
|
2021-01-06T09:51:40.000Z
|
klasses/models.py
|
mitodl/bootcamp-ecommerce
|
ba7d6aefe56c6481ae2a5afc84cdd644538b6d50
|
[
"BSD-3-Clause"
] | 1,226
|
2017-02-23T14:52:28.000Z
|
2022-03-29T13:19:54.000Z
|
klasses/models.py
|
mitodl/bootcamp-ecommerce
|
ba7d6aefe56c6481ae2a5afc84cdd644538b6d50
|
[
"BSD-3-Clause"
] | 3
|
2017-03-20T03:51:27.000Z
|
2021-03-19T15:54:31.000Z
|
"""Models for bootcamps"""
import datetime
import uuid
from functools import partial
import pytz
from django.conf import settings
from django.contrib.auth.models import User
from django.db import models
from mitol.common.models import TimestampedModel
from mitol.common.utils import now_in_utc
from main.models import AuditModel, AuditableModel
from main.utils import format_month_day, serialize_model_object
from klasses.constants import (
ApplicationSource,
INTEGRATION_PREFIX_PRODUCT,
ENROLL_CHANGE_STATUS_CHOICES,
DATE_RANGE_MONTH_FMT,
)
class ActiveCertificates(models.Manager):
"""
Return the active certificates only
"""
def get_queryset(self):
"""
Returns:
QuerySet: queryset for un-revoked certificates
"""
return super().get_queryset().filter(is_revoked=False)
class Bootcamp(models.Model):
"""
A bootcamp
"""
title = models.TextField()
legacy = models.BooleanField(default=False)
def __str__(self):
return "Bootcamp {title}".format(title=self.title)
class BootcampRun(models.Model):
"""
A class within a bootcamp
"""
bootcamp = models.ForeignKey(Bootcamp, on_delete=models.CASCADE)
title = models.TextField(blank=True)
source = models.CharField(
null=True,
blank=True,
choices=[(source, source) for source in ApplicationSource.SOURCE_CHOICES],
default=None,
max_length=10,
)
run_key = models.IntegerField(unique=True, db_index=True)
start_date = models.DateTimeField(null=True)
end_date = models.DateTimeField(null=True)
bootcamp_run_id = models.CharField(
null=True, unique=True, blank=True, max_length=255
)
novoed_course_stub = models.CharField(null=True, blank=True, max_length=100)
allows_skipped_steps = models.BooleanField(default=False)
@property
def page(self):
"""Gets the associated BootcampRunPage"""
return getattr(self, "bootcamprunpage", None)
@property
def price(self):
"""
Get price, the sum of all installments
"""
return self.installment_set.aggregate(price=models.Sum("amount"))["price"]
@property
def formatted_date_range(self):
"""
Returns a formatted date range.
Example return values:
- Start/end in same month: "May 5 - 10, 2017"
- Start/end in different months: "May 5 - Sep 10, 2017"
- Start/end in different years: "May 5, 2017 - May 5, 2018"
- No end date: "May 5, 2017"
"""
_format_month_day = partial(format_month_day, month_fmt=DATE_RANGE_MONTH_FMT)
if self.start_date and self.end_date:
start_month_day = _format_month_day(self.start_date)
if self.start_date.year == self.end_date.year:
if self.start_date.month == self.end_date.month:
formatted_end_date = self.end_date.day
else:
formatted_end_date = _format_month_day(self.end_date)
return "{} - {}, {}".format(
start_month_day, formatted_end_date, self.end_date.year
)
else:
return "{}, {} - {}, {}".format(
start_month_day,
self.start_date.year,
_format_month_day(self.end_date),
self.end_date.year,
)
elif self.start_date:
return "{}, {}".format(
_format_month_day(self.start_date), self.start_date.year
)
else:
return ""
@property
def display_title(self):
"""
Returns a string that will be used to represent the bootcamp/run in the app
"""
title_parts = [self.bootcamp.title]
formatted_date_range = self.formatted_date_range
if formatted_date_range:
title_parts.append(formatted_date_range)
return ", ".join(title_parts)
@property
def payment_deadline(self):
"""
Get the overall payment deadline
"""
return self.installment_set.aggregate(payment_deadline=models.Max("deadline"))[
"payment_deadline"
]
@property
def next_installment(self):
"""
Get the next installment
"""
return self.installment_set.filter(
deadline__gte=datetime.datetime.now(tz=pytz.UTC)
).first()
@property
def next_payment_deadline_days(self):
"""
Returns the number of days until the next payment is due
"""
next_installment = self.next_installment
if next_installment is None:
return
due_in = next_installment.deadline - datetime.datetime.now(tz=pytz.UTC)
return due_in.days
@property
def total_due_by_next_deadline(self):
"""
Returns the total amount due by the next deadline
"""
next_installment = self.next_installment
if next_installment is None:
return self.price
return self.installment_set.filter(
deadline__lte=next_installment.deadline
).aggregate(price=models.Sum("amount"))["price"]
@property
def integration_id(self):
"""
Return an integration id to be used by Hubspot as the unique product id.
This is necessary because the integration id used to be based on Bootcamp.id,
and is now based on BootcampRun (formerly Klass). This requires that there be no
overlap in integration ids between new and old products.
Returns:
str: the integration id
"""
return f"{INTEGRATION_PREFIX_PRODUCT}{self.id}"
@property
def is_payable(self):
"""
Returns True if the start date is set and is in the future
Returns:
bool: True if the start date is set and is in the future
"""
# NOTE: We have an Installment model with a 'deadline' property. Those installments are meant to
# specify increments when a user should pay for the bootcamp run. Practically, those deadlines are just
# "suggestions". For now, we're making a conscious decision to prevent a user from making payments based on
# the bootcamp run start date rather than the last installment deadline date.
return self.start_date is not None and now_in_utc() < self.start_date
def personal_price(self, user):
"""
Returns the personal price (if any) or standard price for a bootcamp run
Args:
user(User): the user to get a price for
Returns:
Decimal: the price for the bootcamp run
"""
personal_price = self.personal_prices.filter(user=user).first()
if personal_price is not None:
return personal_price.price
return self.price
def __str__(self):
return self.display_title
@property
def is_not_beyond_enrollment(self):
"""
Checks if the course is not beyond its enrollment period
Returns:
boolean: True if enrollment period has begun but not ended
"""
now = now_in_utc()
return self.end_date is None or self.end_date > now
class Installment(models.Model):
"""
A payment installment
"""
bootcamp_run = models.ForeignKey(BootcampRun, on_delete=models.CASCADE)
deadline = models.DateTimeField(null=False)
amount = models.DecimalField(max_digits=20, decimal_places=2)
class Meta:
index_together = ["bootcamp_run", "deadline"]
unique_together = ("bootcamp_run", "deadline")
ordering = ["bootcamp_run", "deadline"]
def __str__(self):
return (
"Installment for '{bootcamp_run}'; ${amount}; deadline {deadline}".format(
bootcamp_run=self.bootcamp_run.title,
amount=self.amount,
deadline=self.deadline.strftime("%b %d %Y"),
)
)
class PersonalPrice(models.Model):
"""Personal price for a bootcamp run"""
bootcamp_run = models.ForeignKey(
BootcampRun, on_delete=models.CASCADE, related_name="personal_prices"
)
user = models.ForeignKey(User, on_delete=models.CASCADE, related_name="run_prices")
price = models.DecimalField(max_digits=20, decimal_places=2)
application_stage = models.TextField(blank=True)
class Meta:
unique_together = ("bootcamp_run", "user")
def __str__(self):
return f"user='{self.user.email}', run='{self.bootcamp_run.title}', price={self.price}"
class BootcampRunEnrollment(TimestampedModel, AuditableModel):
"""An enrollment in a bootcamp run by a user"""
user = models.ForeignKey(
settings.AUTH_USER_MODEL, on_delete=models.CASCADE, related_name="enrollments"
)
bootcamp_run = models.ForeignKey(
BootcampRun, on_delete=models.CASCADE, related_name="enrollments"
)
change_status = models.CharField(
choices=ENROLL_CHANGE_STATUS_CHOICES, max_length=20, null=True, blank=True
)
novoed_sync_date = models.DateTimeField(null=True, blank=True)
active = models.BooleanField(
default=True,
help_text="Indicates whether or not this enrollment should be considered active",
)
user_certificate_is_blocked = models.BooleanField(
default=False,
help_text="Indicates whether or not this user enrollment will get certificate.",
)
class Meta:
unique_together = ("user", "bootcamp_run")
def __str__(self):
return f"Enrollment for {self.bootcamp_run}"
def to_dict(self):
return {
**serialize_model_object(self),
"username": self.user.username,
"full_name": self.user.profile.name.strip(),
"email": self.user.email,
}
@classmethod
def get_audit_class(cls):
return BootcampRunEnrollmentAudit
def deactivate_and_save(self, change_status, no_user=False):
"""Sets an enrollment to inactive, sets the status, and saves"""
self.active = False
self.change_status = change_status
return self.save_and_log(None if no_user else self.user)
def reactivate_and_save(self, no_user=False):
"""Sets an enrollment to be active again and saves"""
self.active = True
self.change_status = None
return self.save_and_log(None if no_user else self.user)
class BootcampRunEnrollmentAudit(AuditModel):
"""Audit table for BootcampRunEnrollmentAudit"""
enrollment = models.ForeignKey(
BootcampRunEnrollment, null=True, on_delete=models.PROTECT
)
@classmethod
def get_related_field_name(cls):
return "enrollment"
class BaseCertificate(models.Model):
"""
Common properties for certificate models
"""
user = models.ForeignKey(
settings.AUTH_USER_MODEL, null=False, on_delete=models.CASCADE
)
uuid = models.UUIDField(default=uuid.uuid4, editable=False, unique=True)
is_revoked = models.BooleanField(
default=False,
help_text="Indicates whether or not the certificate is revoked",
verbose_name="revoked",
)
class Meta:
abstract = True
def revoke(self):
"""Revokes certificate"""
self.is_revoked = True
self.save()
return self
def unrevoke(self):
"""Unrevokes certificate"""
self.is_revoked = False
self.save()
return self
def get_certified_object_id(self):
"""Gets the id of the certificate's bootcamp program/run"""
raise NotImplementedError
class BootcampRunCertificate(TimestampedModel, BaseCertificate):
"""
Model for storing bootcamp run certificates
"""
bootcamp_run = models.ForeignKey(
BootcampRun, null=False, on_delete=models.CASCADE, related_name="certificates"
)
objects = ActiveCertificates()
all_objects = models.Manager()
class Meta:
unique_together = ("user", "bootcamp_run")
def get_certified_object_id(self):
return self.bootcamp_run_id
@property
def link(self):
"""
Get the link at which this certificate will be served
Format: /certificate/<uuid>/
Example: /certificate/93ebd74e-5f88-4b47-bb09-30a6d575328f/
"""
return "/certificate/{}/".format(str(self.uuid))
@property
def start_end_dates(self):
"""Returns the start and end date for bootcamp object duration"""
return self.bootcamp_run.start_date, self.bootcamp_run.end_date
def __str__(self):
return "BootcampRunCertificate for user={user}, run={bootcamp_run} ({uuid})".format(
user=self.user.username, bootcamp_run=self.bootcamp_run.id, uuid=self.uuid
)
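
# Editor's illustrative sketch (hypothetical objects, not fixtures from this
# codebase): resolving what a user owes for a run with the models above.
#
#   run = BootcampRun.objects.get(run_key=123)
#   run.personal_price(user)  # PersonalPrice.price if one exists, else run.price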
| 31.699507
| 115
| 0.643901
|
d6c7d332acfccb65688d98e5779f06fa79e0a51d
| 2,621
|
py
|
Python
|
nipype/interfaces/camino/tests/test_auto_TrackBallStick.py
|
vferat/nipype
|
536c57da150d157dcb5c121af43aaeab71cdbd5f
|
[
"Apache-2.0"
] | null | null | null |
nipype/interfaces/camino/tests/test_auto_TrackBallStick.py
|
vferat/nipype
|
536c57da150d157dcb5c121af43aaeab71cdbd5f
|
[
"Apache-2.0"
] | 2
|
2018-04-17T19:18:16.000Z
|
2020-03-04T22:05:02.000Z
|
nipype/interfaces/camino/tests/test_auto_TrackBallStick.py
|
oesteban/nipype
|
c14f24eba1da08711bbb894e049ee858ed740096
|
[
"Apache-2.0"
] | null | null | null |
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from __future__ import unicode_literals
from ..dti import TrackBallStick
def test_TrackBallStick_inputs():
input_map = dict(
anisfile=dict(
argstr='-anisfile %s',
extensions=None,
),
anisthresh=dict(argstr='-anisthresh %f', ),
args=dict(argstr='%s', ),
curveinterval=dict(
argstr='-curveinterval %f',
requires=['curvethresh'],
),
curvethresh=dict(argstr='-curvethresh %f', ),
data_dims=dict(
argstr='-datadims %s',
units='voxels',
),
environ=dict(
nohash=True,
usedefault=True,
),
gzip=dict(argstr='-gzip', ),
in_file=dict(
argstr='-inputfile %s',
extensions=None,
position=1,
),
inputdatatype=dict(argstr='-inputdatatype %s', ),
inputmodel=dict(
argstr='-inputmodel %s',
usedefault=True,
),
interpolator=dict(argstr='-interpolator %s', ),
ipthresh=dict(argstr='-ipthresh %f', ),
maxcomponents=dict(
argstr='-maxcomponents %d',
units='NA',
),
numpds=dict(
argstr='-numpds %d',
units='NA',
),
out_file=dict(
argstr='-outputfile %s',
extensions=None,
genfile=True,
position=-1,
),
output_root=dict(
argstr='-outputroot %s',
extensions=None,
position=-1,
),
outputtracts=dict(argstr='-outputtracts %s', ),
seed_file=dict(
argstr='-seedfile %s',
extensions=None,
position=2,
),
stepsize=dict(
argstr='-stepsize %f',
requires=['tracker'],
),
tracker=dict(
argstr='-tracker %s',
usedefault=True,
),
voxel_dims=dict(
argstr='-voxeldims %s',
units='mm',
),
)
inputs = TrackBallStick.input_spec()
for key, metadata in list(input_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(inputs.traits()[key], metakey) == value
def test_TrackBallStick_outputs():
output_map = dict(tracked=dict(extensions=None, ), )
outputs = TrackBallStick.output_spec()
for key, metadata in list(output_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(outputs.traits()[key], metakey) == value
| 29.122222
| 67
| 0.516215
|
cf4928816bd77b5c178c618b452f4f9aadd75900
| 8,268
|
py
|
Python
|
tests/transitfeed/testfrequency.py
|
aalekhpatel07/transitfeed
|
490c2342c53885da23bdd18f428073e60f4d8728
|
[
"Apache-2.0"
] | null | null | null |
tests/transitfeed/testfrequency.py
|
aalekhpatel07/transitfeed
|
490c2342c53885da23bdd18f428073e60f4d8728
|
[
"Apache-2.0"
] | null | null | null |
tests/transitfeed/testfrequency.py
|
aalekhpatel07/transitfeed
|
490c2342c53885da23bdd18f428073e60f4d8728
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (C) 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Unit tests for the frequency module.
from tests import util
import transitfeed
class FrequencyValidationTestCase(util.ValidationTestCase):
def setUp(self):
util.ValidationTestCase.setUp(self)
self.schedule = self.SimpleSchedule()
trip = transitfeed.Trip()
trip.route_id = "054C"
trip.service_id = "WEEK"
trip.trip_id = "054C-00"
trip.trip_headsign = "via Polish Hill"
trip.direction_id = "0"
trip.block_id = None
trip.shape_id = None
self.schedule.AddTripObject(trip, self.problems, True)
self.trip = trip
def testNonOverlappingPeriods(self):
headway_period1 = transitfeed.Frequency(
{
"trip_id": "054C-00",
"start_time": "06:00:00",
"end_time": "12:00:00",
"headway_secs": 600,
}
)
headway_period2 = transitfeed.Frequency(
{
"trip_id": "054C-00",
"start_time": "01:00:00",
"end_time": "02:00:00",
"headway_secs": 1200,
}
)
headway_period3 = transitfeed.Frequency(
{
"trip_id": "054C-00",
"start_time": "04:00:00",
"end_time": "05:00:00",
"headway_secs": 1000,
}
)
headway_period4 = transitfeed.Frequency(
{
"trip_id": "054C-00",
"start_time": "12:00:00",
"end_time": "19:00:00",
"headway_secs": 700,
}
)
# expect no problems for non-overlapping periods
headway_period1.AddToSchedule(self.schedule, self.problems)
headway_period2.AddToSchedule(self.schedule, self.problems)
headway_period3.AddToSchedule(self.schedule, self.problems)
headway_period4.AddToSchedule(self.schedule, self.problems)
self.trip.Validate(self.problems)
self.accumulator.AssertNoMoreExceptions()
self.trip.ClearFrequencies()
def testOverlappingPeriods(self):
# overlapping headway periods
headway_period1 = transitfeed.Frequency(
{
"trip_id": "054C-00",
"start_time": "00:00:00",
"end_time": "12:00:00",
"headway_secs": 600,
}
)
headway_period2 = transitfeed.Frequency(
{
"trip_id": "054C-00",
"start_time": "06:00:00",
"end_time": "18:00:00",
"headway_secs": 1200,
}
)
headway_period1.AddToSchedule(self.schedule, self.problems)
headway_period2.AddToSchedule(self.schedule, self.problems)
self.ValidateAndExpectOtherProblem(self.trip)
self.trip.ClearFrequencies()
self.accumulator.AssertNoMoreExceptions()
def testPeriodWithInvalidTripId(self):
headway_period1 = transitfeed.Frequency(
{
"trip_id": "foo",
"start_time": "00:00:00",
"end_time": "12:00:00",
"headway_secs": 600,
}
)
headway_period1.AddToSchedule(self.schedule, self.problems)
e = self.accumulator.PopException("InvalidValue")
self.assertEqual("trip_id", e.column_name)
self.trip.ClearFrequencies()
def testExactTimesStringValueConversion(self):
# Test that no exact_times converts to 0
frequency = transitfeed.Frequency(
field_dict={
"trip_id": "AB1,10",
"start_time": "10:00:00",
"end_time": "23:01:00",
"headway_secs": "1800",
}
)
frequency.ValidateBeforeAdd(self.problems)
self.assertEqual(frequency.ExactTimes(), 0)
# Test that empty exact_times converts to 0
frequency = transitfeed.Frequency(
field_dict={
"trip_id": "AB1,10",
"start_time": "10:00:00",
"end_time": "23:01:00",
"headway_secs": "1800",
"exact_times": "",
}
)
frequency.ValidateBeforeAdd(self.problems)
self.assertEqual(frequency.ExactTimes(), 0)
# Test that exact_times "0" converts to 0
frequency = transitfeed.Frequency(
field_dict={
"trip_id": "AB1,10",
"start_time": "10:00:00",
"end_time": "23:01:00",
"headway_secs": "1800",
"exact_times": "0",
}
)
frequency.ValidateBeforeAdd(self.problems)
self.assertEqual(frequency.ExactTimes(), 0)
# Test that exact_times "1" converts to 1
frequency = transitfeed.Frequency(
field_dict={
"trip_id": "AB1,10",
"start_time": "10:00:00",
"end_time": "23:01:00",
"headway_secs": "1800",
"exact_times": "1",
}
)
frequency.ValidateBeforeAdd(self.problems)
self.assertEqual(frequency.ExactTimes(), 1)
self.accumulator.AssertNoMoreExceptions()
def testExactTimesAsIntValue(self):
# Test that exact_times None converts to 0
frequency = transitfeed.Frequency(
field_dict={
"trip_id": "AB1,10",
"start_time": "10:00:00",
"end_time": "23:01:00",
"headway_secs": "1800",
"exact_times": None,
}
)
frequency.ValidateBeforeAdd(self.problems)
self.assertEqual(frequency.ExactTimes(), 0)
# Test that exact_times 0 remains 0
frequency = transitfeed.Frequency(
field_dict={
"trip_id": "AB1,10",
"start_time": "10:00:00",
"end_time": "23:01:00",
"headway_secs": "1800",
"exact_times": 0,
}
)
frequency.ValidateBeforeAdd(self.problems)
self.assertEqual(frequency.ExactTimes(), 0)
# Test that exact_times 1 remains 1
frequency = transitfeed.Frequency(
field_dict={
"trip_id": "AB1,10",
"start_time": "10:00:00",
"end_time": "23:01:00",
"headway_secs": "1800",
"exact_times": 1,
}
)
frequency.ValidateBeforeAdd(self.problems)
self.assertEqual(frequency.ExactTimes(), 1)
self.accumulator.AssertNoMoreExceptions()
def testExactTimesInvalidValues(self):
# Test that exact_times 15 raises error
frequency = transitfeed.Frequency(
field_dict={
"trip_id": "AB1,10",
"start_time": "10:00:00",
"end_time": "23:01:00",
"headway_secs": "1800",
"exact_times": 15,
}
)
frequency.ValidateBeforeAdd(self.problems)
self.accumulator.PopInvalidValue("exact_times")
self.accumulator.AssertNoMoreExceptions()
# Test that exact_times "yes" raises error
frequency = transitfeed.Frequency(
field_dict={
"trip_id": "AB1,10",
"start_time": "10:00:00",
"end_time": "23:01:00",
"headway_secs": "1800",
"exact_times": "yes",
}
)
frequency.ValidateBeforeAdd(self.problems)
self.accumulator.PopInvalidValue("exact_times")
self.accumulator.AssertNoMoreExceptions()
| 35.333333
| 74
| 0.542332
|
685ae8716aa078d50a5901127973d6f3ad6bab6a
| 1,795
|
py
|
Python
|
rsna_heme/process.py
|
johncolby/rsna_heme
|
14e3ecafa0587ebdce2a04b239edecb32dbaa6d0
|
[
"MIT"
] | 1
|
2020-05-30T13:59:47.000Z
|
2020-05-30T13:59:47.000Z
|
rsna_heme/process.py
|
johncolby/rsna_heme
|
14e3ecafa0587ebdce2a04b239edecb32dbaa6d0
|
[
"MIT"
] | null | null | null |
rsna_heme/process.py
|
johncolby/rsna_heme
|
14e3ecafa0587ebdce2a04b239edecb32dbaa6d0
|
[
"MIT"
] | null | null | null |
import argparse
import logging
import mxnet as mx
import os
import pandas as pd
import pickle
import re
import requests
from radstudy import RadStudy
from . import dicom
from . import labels
from . import transforms
class HemeStudy(RadStudy):
def __init__(self, wl=[(40, 80), (80, 200), (40, 380)], **kwargs):
super().__init__(**kwargs)
self.app_name = 'heme'
self.wl = wl
self.channels = 'axial CT'
self.series_picks = pd.DataFrame({'class': ['axial CT'], 'prob': '', 'SeriesNumber': 2, 'series': ''})
def process(self, save=True):
self.series_picks.series = self.series_to_path(2)
dir_series = self.series_picks.series[0]
dcm_names = os.listdir(dir_series)
        dcm_names = sorted(dcm_names, key=lambda x: int(re.sub(r".*\.(.*)\.dcm", r"\1", x)))
probs_all = []
for dcm_name in dcm_names:
dcm_path = os.path.join(dir_series, dcm_name)
data = self._load_dcm(dcm_path)
data_str = pickle.dumps(data[0][0])
prob = requests.post(self.process_url, files = {'data': data_str}).json()
prob = mx.nd.array(prob).sigmoid().asnumpy()
probs_all.append(prob)
probs = pd.DataFrame(probs_all, columns=labels.heme_types)
if save is True:
probs.to_csv(os.path.join(self.dir_tmp, 'output', 'probs.csv'), index=False)
else:
return probs
def _load_dcm(self, dcm_path):
dcm = dicom.Dicom(dcm_path)
img = dcm.img_for_plot3(self.wl)
img, _ = transforms.common_transform(mx.nd.array(img), 0)
img = img.flip(axis=2)
data = mx.gluon.data.SimpleDataset([(img, 0)])
data = data.transform_first(transforms.train_transform)
return data
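
# Editor's illustrative sketch (hypothetical arguments; the constructor
# signature beyond `wl` comes from RadStudy, which is not shown here, and the
# URL is a placeholder):
#
#   study = HemeStudy(process_url='http://localhost:5000/predict')
#   probs = study.process(save=False)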
| 34.519231
| 110
| 0.612256
|
4f55ad28c493dc1409bfcbb245ab077e4495edb1
| 10,387
|
py
|
Python
|
pybind/slxos/v16r_1_00b/interface/ethernet/interface_eth_isis_conf/intf_isis/interface_isis/interface_auth_mode/interface_md5/__init__.py
|
shivharis/pybind
|
4e1c6d54b9fd722ccec25546ba2413d79ce337e6
|
[
"Apache-2.0"
] | null | null | null |
pybind/slxos/v16r_1_00b/interface/ethernet/interface_eth_isis_conf/intf_isis/interface_isis/interface_auth_mode/interface_md5/__init__.py
|
shivharis/pybind
|
4e1c6d54b9fd722ccec25546ba2413d79ce337e6
|
[
"Apache-2.0"
] | null | null | null |
pybind/slxos/v16r_1_00b/interface/ethernet/interface_eth_isis_conf/intf_isis/interface_isis/interface_auth_mode/interface_md5/__init__.py
|
shivharis/pybind
|
4e1c6d54b9fd722ccec25546ba2413d79ce337e6
|
[
"Apache-2.0"
] | 1
|
2021-11-05T22:15:42.000Z
|
2021-11-05T22:15:42.000Z
|
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
class interface_md5(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module brocade-interface - based on the path /interface/ethernet/interface-eth-isis-conf/intf-isis/interface-isis/interface-auth-mode/interface-md5. Each member element of
the container is represented as a class variable - with a specific
YANG type.
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__interface_auth_mode_md5_level1','__interface_auth_mode_md5_level2',)
_yang_name = 'interface-md5'
_rest_name = 'md5'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
path_helper_ = kwargs.pop("path_helper", None)
if path_helper_ is False:
self._path_helper = False
elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
self._path_helper = path_helper_
elif hasattr(self, "_parent"):
path_helper_ = getattr(self._parent, "_path_helper", False)
self._path_helper = path_helper_
else:
self._path_helper = False
extmethods = kwargs.pop("extmethods", None)
if extmethods is False:
self._extmethods = False
elif extmethods is not None and isinstance(extmethods, dict):
self._extmethods = extmethods
elif hasattr(self, "_parent"):
extmethods = getattr(self._parent, "_extmethods", None)
self._extmethods = extmethods
else:
self._extmethods = False
self.__interface_auth_mode_md5_level1 = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="interface-auth-mode-md5-level1", rest_name="level-1", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Authentication mode for Level-1 LSPs, CSNP, PSNP', u'cli-full-no': None, u'alt-name': u'level-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis', defining_module='brocade-isis', yang_type='empty', is_config=True)
self.__interface_auth_mode_md5_level2 = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="interface-auth-mode-md5-level2", rest_name="level-2", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Authentication mode for Level-2 LSPs, CSNP, PSNP', u'cli-full-no': None, u'alt-name': u'level-2'}}, namespace='urn:brocade.com:mgmt:brocade-isis', defining_module='brocade-isis', yang_type='empty', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'interface', u'ethernet', u'interface-eth-isis-conf', u'intf-isis', u'interface-isis', u'interface-auth-mode', u'interface-md5']
def _rest_path(self):
if hasattr(self, "_parent"):
if self._rest_name:
return self._parent._rest_path()+[self._rest_name]
else:
return self._parent._rest_path()
else:
return [u'interface', u'Ethernet', u'isis', u'auth-mode', u'md5']
def _get_interface_auth_mode_md5_level1(self):
"""
Getter method for interface_auth_mode_md5_level1, mapped from YANG variable /interface/ethernet/interface_eth_isis_conf/intf_isis/interface_isis/interface_auth_mode/interface_md5/interface_auth_mode_md5_level1 (empty)
"""
return self.__interface_auth_mode_md5_level1
def _set_interface_auth_mode_md5_level1(self, v, load=False):
"""
Setter method for interface_auth_mode_md5_level1, mapped from YANG variable /interface/ethernet/interface_eth_isis_conf/intf_isis/interface_isis/interface_auth_mode/interface_md5/interface_auth_mode_md5_level1 (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_interface_auth_mode_md5_level1 is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_interface_auth_mode_md5_level1() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="interface-auth-mode-md5-level1", rest_name="level-1", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Authentication mode for Level-1 LSPs, CSNP, PSNP', u'cli-full-no': None, u'alt-name': u'level-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis', defining_module='brocade-isis', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """interface_auth_mode_md5_level1 must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="interface-auth-mode-md5-level1", rest_name="level-1", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Authentication mode for Level-1 LSPs, CSNP, PSNP', u'cli-full-no': None, u'alt-name': u'level-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis', defining_module='brocade-isis', yang_type='empty', is_config=True)""",
})
self.__interface_auth_mode_md5_level1 = t
if hasattr(self, '_set'):
self._set()
def _unset_interface_auth_mode_md5_level1(self):
self.__interface_auth_mode_md5_level1 = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="interface-auth-mode-md5-level1", rest_name="level-1", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Authentication mode for Level-1 LSPs, CSNP, PSNP', u'cli-full-no': None, u'alt-name': u'level-1'}}, namespace='urn:brocade.com:mgmt:brocade-isis', defining_module='brocade-isis', yang_type='empty', is_config=True)
def _get_interface_auth_mode_md5_level2(self):
"""
Getter method for interface_auth_mode_md5_level2, mapped from YANG variable /interface/ethernet/interface_eth_isis_conf/intf_isis/interface_isis/interface_auth_mode/interface_md5/interface_auth_mode_md5_level2 (empty)
"""
return self.__interface_auth_mode_md5_level2
def _set_interface_auth_mode_md5_level2(self, v, load=False):
"""
Setter method for interface_auth_mode_md5_level2, mapped from YANG variable /interface/ethernet/interface_eth_isis_conf/intf_isis/interface_isis/interface_auth_mode/interface_md5/interface_auth_mode_md5_level2 (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_interface_auth_mode_md5_level2 is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_interface_auth_mode_md5_level2() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="interface-auth-mode-md5-level2", rest_name="level-2", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Authentication mode for Level-2 LSPs, CSNP, PSNP', u'cli-full-no': None, u'alt-name': u'level-2'}}, namespace='urn:brocade.com:mgmt:brocade-isis', defining_module='brocade-isis', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """interface_auth_mode_md5_level2 must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="interface-auth-mode-md5-level2", rest_name="level-2", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Authentication mode for Level-2 LSPs, CSNP, PSNP', u'cli-full-no': None, u'alt-name': u'level-2'}}, namespace='urn:brocade.com:mgmt:brocade-isis', defining_module='brocade-isis', yang_type='empty', is_config=True)""",
})
self.__interface_auth_mode_md5_level2 = t
if hasattr(self, '_set'):
self._set()
def _unset_interface_auth_mode_md5_level2(self):
self.__interface_auth_mode_md5_level2 = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="interface-auth-mode-md5-level2", rest_name="level-2", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Authentication mode for Level-2 LSPs, CSNP, PSNP', u'cli-full-no': None, u'alt-name': u'level-2'}}, namespace='urn:brocade.com:mgmt:brocade-isis', defining_module='brocade-isis', yang_type='empty', is_config=True)
interface_auth_mode_md5_level1 = __builtin__.property(_get_interface_auth_mode_md5_level1, _set_interface_auth_mode_md5_level1)
interface_auth_mode_md5_level2 = __builtin__.property(_get_interface_auth_mode_md5_level2, _set_interface_auth_mode_md5_level2)
_pyangbind_elements = {'interface_auth_mode_md5_level1': interface_auth_mode_md5_level1, 'interface_auth_mode_md5_level2': interface_auth_mode_md5_level2, }
| 65.740506
| 526
| 0.744392
|
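The generated class above follows pyangbind's standard leaf pattern: a name-mangled private attribute, a validating setter that wraps values in YANGDynClass, and a property exposing both. A rough, dependency-free sketch of the same pattern (all names invented for illustration; this is not the generated code itself):

class Md5AuthSketch(object):
    """Simplified stand-in for the generated interface_md5 class."""
    def __init__(self):
        self.__level1 = False  # a YANG 'empty' leaf behaves like a boolean

    def _get_level1(self):
        return self.__level1

    def _set_level1(self, v):
        # the generated setter raises ValueError with a structured dict on bad input
        if not isinstance(v, bool):
            raise ValueError({
                'error-string': 'level1 must be of a type compatible with empty',
                'defined-type': 'empty',
            })
        self.__level1 = v

    level1 = property(_get_level1, _set_level1)

cfg = Md5AuthSketch()
cfg.level1 = True       # accepted by the validating setter
try:
    cfg.level1 = 'yes'  # rejected, mirroring the generated behaviour
except ValueError as err:
    print(err)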
42d767d06a80921088239dc5f77edd0244dfffc7
| 6,928
|
py
|
Python
|
azure-mgmt-compute/azure/mgmt/compute/v2018_06_01/models/os_profile_py3.py
|
JonathanGailliez/azure-sdk-for-python
|
f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b
|
[
"MIT"
] | 1
|
2018-07-23T08:59:24.000Z
|
2018-07-23T08:59:24.000Z
|
azure-mgmt-compute/azure/mgmt/compute/v2018_06_01/models/os_profile_py3.py
|
JonathanGailliez/azure-sdk-for-python
|
f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b
|
[
"MIT"
] | 1
|
2018-11-29T14:46:42.000Z
|
2018-11-29T14:46:42.000Z
|
azure-mgmt-compute/azure/mgmt/compute/v2018_06_01/models/os_profile_py3.py
|
JonathanGailliez/azure-sdk-for-python
|
f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b
|
[
"MIT"
] | 1
|
2018-08-28T14:36:47.000Z
|
2018-08-28T14:36:47.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class OSProfile(Model):
"""Specifies the operating system settings for the virtual machine.
:param computer_name: Specifies the host OS name of the virtual machine.
<br><br> **Max-length (Windows):** 15 characters <br><br> **Max-length
(Linux):** 64 characters. <br><br> For naming conventions and restrictions
see [Azure infrastructure services implementation
guidelines](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-linux-infrastructure-subscription-accounts-guidelines?toc=%2fazure%2fvirtual-machines%2flinux%2ftoc.json#1-naming-conventions).
:type computer_name: str
:param admin_username: Specifies the name of the administrator account.
<br><br> **Windows-only restriction:** Cannot end in "." <br><br>
**Disallowed values:** "administrator", "admin", "user", "user1", "test",
"user2", "test1", "user3", "admin1", "1", "123", "a", "actuser", "adm",
"admin2", "aspnet", "backup", "console", "david", "guest", "john",
"owner", "root", "server", "sql", "support", "support_388945a0", "sys",
"test2", "test3", "user4", "user5". <br><br> **Minimum-length (Linux):** 1
character <br><br> **Max-length (Linux):** 64 characters <br><br>
**Max-length (Windows):** 20 characters <br><br><li> For root access to
the Linux VM, see [Using root privileges on Linux virtual machines in
Azure](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-linux-use-root-privileges?toc=%2fazure%2fvirtual-machines%2flinux%2ftoc.json)<br><li>
For a list of built-in system users on Linux that should not be used in
this field, see [Selecting User Names for Linux on
Azure](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-linux-usernames?toc=%2fazure%2fvirtual-machines%2flinux%2ftoc.json)
:type admin_username: str
:param admin_password: Specifies the password of the administrator
account. <br><br> **Minimum-length (Windows):** 8 characters <br><br>
**Minimum-length (Linux):** 6 characters <br><br> **Max-length
(Windows):** 123 characters <br><br> **Max-length (Linux):** 72 characters
<br><br> **Complexity requirements:** 3 out of 4 conditions below need to
be fulfilled <br> Has lower characters <br>Has upper characters <br> Has a
digit <br> Has a special character (Regex match [\\W_]) <br><br>
**Disallowed values:** "abc@123", "P@$$w0rd", "P@ssw0rd", "P@ssword123",
"Pa$$word", "pass@word1", "Password!", "Password1", "Password22",
"iloveyou!" <br><br> For resetting the password, see [How to reset the
Remote Desktop service or its login password in a Windows
VM](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-reset-rdp?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json)
<br><br> For resetting root password, see [Manage users, SSH, and check or
repair disks on Azure Linux VMs using the VMAccess
Extension](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-linux-using-vmaccess-extension?toc=%2fazure%2fvirtual-machines%2flinux%2ftoc.json#reset-root-password)
:type admin_password: str
:param custom_data: Specifies a base-64 encoded string of custom data. The
base-64 encoded string is decoded to a binary array that is saved as a
file on the Virtual Machine. The maximum length of the binary array is
65535 bytes. <br><br> For using cloud-init for your VM, see [Using
cloud-init to customize a Linux VM during
creation](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-linux-using-cloud-init?toc=%2fazure%2fvirtual-machines%2flinux%2ftoc.json)
:type custom_data: str
:param windows_configuration: Specifies Windows operating system settings
on the virtual machine.
:type windows_configuration:
~azure.mgmt.compute.v2018_06_01.models.WindowsConfiguration
:param linux_configuration: Specifies the Linux operating system settings
on the virtual machine. <br><br>For a list of supported Linux
distributions, see [Linux on Azure-Endorsed
Distributions](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-linux-endorsed-distros?toc=%2fazure%2fvirtual-machines%2flinux%2ftoc.json)
<br><br> For running non-endorsed distributions, see [Information for
Non-Endorsed
Distributions](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-linux-create-upload-generic?toc=%2fazure%2fvirtual-machines%2flinux%2ftoc.json).
:type linux_configuration:
~azure.mgmt.compute.v2018_06_01.models.LinuxConfiguration
:param secrets: Specifies set of certificates that should be installed
onto the virtual machine.
:type secrets:
list[~azure.mgmt.compute.v2018_06_01.models.VaultSecretGroup]
:param allow_extension_operations: Specifies whether extension operations
should be allowed on the virtual machine. <br><br>This may only be set to
False when no extensions are present on the virtual machine.
:type allow_extension_operations: bool
"""
_attribute_map = {
'computer_name': {'key': 'computerName', 'type': 'str'},
'admin_username': {'key': 'adminUsername', 'type': 'str'},
'admin_password': {'key': 'adminPassword', 'type': 'str'},
'custom_data': {'key': 'customData', 'type': 'str'},
'windows_configuration': {'key': 'windowsConfiguration', 'type': 'WindowsConfiguration'},
'linux_configuration': {'key': 'linuxConfiguration', 'type': 'LinuxConfiguration'},
'secrets': {'key': 'secrets', 'type': '[VaultSecretGroup]'},
'allow_extension_operations': {'key': 'allowExtensionOperations', 'type': 'bool'},
}
def __init__(self, *, computer_name: str=None, admin_username: str=None, admin_password: str=None, custom_data: str=None, windows_configuration=None, linux_configuration=None, secrets=None, allow_extension_operations: bool=None, **kwargs) -> None:
super(OSProfile, self).__init__(**kwargs)
self.computer_name = computer_name
self.admin_username = admin_username
self.admin_password = admin_password
self.custom_data = custom_data
self.windows_configuration = windows_configuration
self.linux_configuration = linux_configuration
self.secrets = secrets
self.allow_extension_operations = allow_extension_operations
| 65.358491
| 251
| 0.701357
|
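A minimal usage sketch for the OSProfile model above, assuming the azure-mgmt-compute package is installed; the field values are illustrative only:

from azure.mgmt.compute.v2018_06_01.models import OSProfile

profile = OSProfile(
    computer_name='testvm01',           # Windows allows at most 15 characters
    admin_username='azureuser',         # must avoid the disallowed names listed above
    admin_password='Str0ng-Passw0rd!',  # satisfies 3 of the 4 complexity rules
    allow_extension_operations=True,
)
print(profile.computer_name)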
9ac5d9cbcc916929792c876305dba4cb772a0cba
| 2,045
|
py
|
Python
|
readinputfiles.py
|
virtualsociety/simulation-py
|
89ba16cfcb0743aedc4eb669ed9853989aeebee1
|
[
"MIT"
] | null | null | null |
readinputfiles.py
|
virtualsociety/simulation-py
|
89ba16cfcb0743aedc4eb669ed9853989aeebee1
|
[
"MIT"
] | null | null | null |
readinputfiles.py
|
virtualsociety/simulation-py
|
89ba16cfcb0743aedc4eb669ed9853989aeebee1
|
[
"MIT"
] | null | null | null |
'''
Function to read the required input files
By Dr. Raymond Hoogendoorn
Copyright 2020
'''
import pandas as pd
def readInputFiles():
df_gender = pd.read_csv('./Input/Bevolking__kerncijfers_07122020_100024.csv', delimiter = ';')
df_age = pd.read_csv('./Input/Bevolking__kerncijfers_07122020_112736.csv', delimiter = ';')
df_lifeexpectancy = pd.read_csv('./Input/Levensverwachting__geslacht__leeftijd__per_jaar_en_periode_van_vijf_jaren__06012021_105805.csv', delimiter = ';')
df_maritalstatus = pd.read_csv('./Input/Bevolking__geslacht__leeftijd_en_burgerlijke_staat__1_januari_08122020_110015.csv', delimiter = ';')
df_marriageduration = pd.read_csv('./Input/Bestaande_huwelijken_en_partnerschappen__relatieduur__1_januari_08122020_121148.csv', delimiter = ';')
df_employmentstatus = pd.read_csv('./Input/Arbeidsdeelname__kerncijfers__08122020_130106.csv', delimiter = ';')
df_income = pd.read_csv('./Input/Inkomen_van_personen__inkomensklassen__persoonskenmerken_09122020_094158.csv', delimiter = ';')
df_marriage = pd.read_csv('./Input/Huwen_en_huwelijksontbinding__geslacht__leeftijd__31_december___regio_11122020_100116.csv', delimiter = ';')
df_marriage2 = pd.read_csv('./Input/Bevolking__geslacht__leeftijd_en_burgerlijke_staat__1_januari_11122020_105220.csv', delimiter = ';')
df_withchildren = pd.read_csv('./Input/Particuliere_huishoudens_naar_samenstelling_en_grootte__1_januari_14122020_114929.csv', delimiter = ';')
df_nrchildren = pd.read_csv('./Input/Huishoudens__kindertal__leeftijdsklasse_kind__regio__1_januari_14122020_114332.csv', delimiter = ';')
df_birthage = pd.read_csv('./Input/Levend_geboren_kinderen__migratieachtergrond_moeder_en_leeftijd_moeder_14122020_112329.csv', delimiter = ';')
df_capital = pd.read_csv('./Input/vermogensklassen.csv', delimiter = ';')
return df_gender, df_age, df_lifeexpectancy, df_maritalstatus, df_marriageduration, df_employmentstatus, df_income, df_marriage, df_marriage2, df_withchildren, df_nrchildren, df_birthage, df_capital
| 85.208333
| 202
| 0.80978
|
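A usage sketch for readInputFiles(): it returns thirteen frames positionally, so callers must unpack them in the same order (assumes the ./Input CSVs exist as listed above). Returning a dict keyed by dataset name would make call sites less order-sensitive:

(df_gender, df_age, df_lifeexpectancy, df_maritalstatus, df_marriageduration,
 df_employmentstatus, df_income, df_marriage, df_marriage2, df_withchildren,
 df_nrchildren, df_birthage, df_capital) = readInputFiles()
print(df_gender.head())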
6d5dd353eab0a1172f28a56e281829c2ea204da1
| 4,361
|
py
|
Python
|
exercises/chapter15/tracks.py
|
SravaniDash/Coursera-Python-For-Everyone
|
17849538bf052f38c33fb630f161bf20a9a3ca46
|
[
"CC0-1.0"
] | null | null | null |
exercises/chapter15/tracks.py
|
SravaniDash/Coursera-Python-For-Everyone
|
17849538bf052f38c33fb630f161bf20a9a3ca46
|
[
"CC0-1.0"
] | null | null | null |
exercises/chapter15/tracks.py
|
SravaniDash/Coursera-Python-For-Everyone
|
17849538bf052f38c33fb630f161bf20a9a3ca46
|
[
"CC0-1.0"
] | null | null | null |
# This application will read an iTunes export file in XML and produce a properly normalized database with this structure:
# CREATE TABLE Artist (
# id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
# name TEXT UNIQUE
# );
# CREATE TABLE Genre (
# id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
# name TEXT UNIQUE
# );
# CREATE TABLE Album (
# id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
# artist_id INTEGER,
# title TEXT UNIQUE
# );
# CREATE TABLE Track (
# id INTEGER NOT NULL PRIMARY KEY
# AUTOINCREMENT UNIQUE,
# title TEXT UNIQUE,
# album_id INTEGER,
# genre_id INTEGER,
# len INTEGER, rating INTEGER, count INTEGER
# );
# If you run the program multiple times in testing or with different files, make sure to empty out the data before each run.
# You can use this code as a starting point for your application: http://www.py4e.com/code3/tracks.zip. The ZIP file contains the Library.xml file to be used for this assignment. You can export your own tracks from iTunes and create a database, but for the database that you turn in for this assignment, only use the Library.xml data that is provided.
# To grade this assignment, the program will run a query like this on your uploaded database and look for the data it expects to see:
#SELECT Track.title, Artist.name, Album.title, Genre.name
# FROM Track JOIN Genre JOIN Album JOIN Artist
# ON Track.genre_id = Genre.ID and Track.album_id = Album.id
# AND Album.artist_id = Artist.id
# ORDER BY Artist.name LIMIT 3
import xml.etree.ElementTree as ET
import sqlite3
conn = sqlite3.connect('trackdb.sqlite')
cur = conn.cursor()
# Make some fresh tables using executescript()
cur.executescript('''
DROP TABLE IF EXISTS Artist;
DROP TABLE IF EXISTS Genre;
DROP TABLE IF EXISTS Album;
DROP TABLE IF EXISTS Track;
CREATE TABLE Artist (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
name TEXT UNIQUE
);
CREATE TABLE Genre(
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
name TEXT UNIQUE
);
CREATE TABLE Album (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
artist_id INTEGER,
title TEXT UNIQUE
);
CREATE TABLE Track (
id INTEGER NOT NULL PRIMARY KEY
AUTOINCREMENT UNIQUE,
title TEXT UNIQUE,
album_id INTEGER,
genre_id INTEGER,
len INTEGER, rating INTEGER, count INTEGER
);
''')
fname = input('Enter file name: ')
if ( len(fname) < 1 ) : fname = 'Library.xml'
# <key>Track ID</key><integer>369</integer>
# <key>Name</key><string>Another One Bites The Dust</string>
# <key>Artist</key><string>Queen</string>
# <key>Genre</key><string>Industrial</string>
def lookup(d, key):
found = False
for child in d:
if found : return child.text
if child.tag == 'key' and child.text == key :
found = True
return None
# create element trees and find branch
stuff = ET.parse(fname)
entries = stuff.findall('dict/dict/dict')  # add one dict/; avoid shadowing built-in all()
print('Dict count:', len(entries))
for entry in entries:
if ( lookup(entry, 'Track ID') is None ) : continue
name = lookup(entry, 'Name')
artist = lookup(entry, 'Artist')
genre = lookup(entry, 'Genre')#add
album = lookup(entry, 'Album')
count = lookup(entry, 'Play Count')
rating = lookup(entry, 'Rating')
length = lookup(entry, 'Total Time')
if name is None or artist is None or album is None or genre is None:
continue
print(name, artist, genre, album, count, rating, length)
cur.execute('''INSERT OR IGNORE INTO Artist (name)
VALUES ( ? )''', ( artist, ) )
cur.execute('SELECT id FROM Artist WHERE name = ? ', (artist, ))
artist_id = cur.fetchone()[0]
cur.execute('''INSERT OR IGNORE INTO Genre (name)
VALUES ( ? )''', ( genre, ) )
cur.execute('SELECT id FROM Genre WHERE name = ? ', (genre, ))
genre_id = cur.fetchone()[0]
cur.execute('''INSERT OR IGNORE INTO Album (title, artist_id)
VALUES ( ?, ? )''', ( album, artist_id ) )
cur.execute('SELECT id FROM Album WHERE title = ? ', (album, ))
album_id = cur.fetchone()[0]
cur.execute('''INSERT OR REPLACE INTO Track
(title, album_id, genre_id, len, rating, count)
VALUES ( ?, ?, ?, ?, ?, ? )''',
( name, album_id, genre_id, length, rating, count ) )
conn.commit()
| 32.544776
| 351
| 0.671406
|
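A sketch of verifying the populated database with the grading query quoted in the comments above; it assumes trackdb.sqlite has just been written by the script:

import sqlite3

conn = sqlite3.connect('trackdb.sqlite')
cur = conn.cursor()
cur.execute('''SELECT Track.title, Artist.name, Album.title, Genre.name
    FROM Track JOIN Genre JOIN Album JOIN Artist
        ON Track.genre_id = Genre.id AND Track.album_id = Album.id
        AND Album.artist_id = Artist.id
    ORDER BY Artist.name LIMIT 3''')
for row in cur.fetchall():
    print(row)
conn.close()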
aab4f541c902ba3d99a6fe5be18324949c4a6be0
| 458
|
py
|
Python
|
Week_3 (Communications Systems)- 3b Robotic Operating System (ROS)/electronics/Scripts/Question 2/s2.py
|
offjangir/Kratos-QSTP-Course
|
6c883f17867362305212171851b40db7b9f238f1
|
[
"MIT"
] | 2
|
2021-09-10T06:24:33.000Z
|
2021-09-10T06:25:02.000Z
|
Week_3 (Communications Systems)- 3b Robotic Operating System (ROS)/electronics/Scripts/Question 2/s2.py
|
offjangir/Kratos_QSTP-Work
|
6c883f17867362305212171851b40db7b9f238f1
|
[
"MIT"
] | null | null | null |
Week_3 (Communications Systems)- 3b Robotic Operating System (ROS)/electronics/Scripts/Question 2/s2.py
|
offjangir/Kratos_QSTP-Work
|
6c883f17867362305212171851b40db7b9f238f1
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# SUBSCRIBES TO THE TOPIC AND ALTERNATELY PUBLISHES ON THE OTHER TOPIC
import rospy
from std_msgs.msg import String
def callback(msg):
    signal = String()
    if msg.data == "green":
        signal.data = "red"
    else:
        signal.data = "green"
    pub.publish(signal)
if __name__ == '__main__':
rospy.init_node('s2')
    sub = rospy.Subscriber('s1', String, callback)  # SUBSCRIBE /s1
    pub = rospy.Publisher('s2', String, queue_size=10)  # PUBLISH ON /s2
rospy.spin()
| 28.625
| 67
| 0.707424
|
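For context, a sketch of the peer node implied by the comments above: it seeds the exchange by publishing alternating colours on /s1. The node name and publish rate are assumptions, not taken from the repository:

import rospy
from std_msgs.msg import String

if __name__ == '__main__':
    rospy.init_node('s1')
    pub = rospy.Publisher('s1', String, queue_size=10)
    rate = rospy.Rate(1)  # 1 Hz, arbitrary for the sketch
    colour = 'green'
    while not rospy.is_shutdown():
        pub.publish(String(data=colour))
        colour = 'red' if colour == 'green' else 'green'
        rate.sleep()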
6ac3ca747d68e48518a8f2d54d749ff9529ebf06
| 443
|
py
|
Python
|
workflow/migrations/0011_auto_20190527_2331.py
|
tanmayagarwal/Activity-CE
|
a49c47053b191ffa5aee9a06e66a7c9644804434
|
[
"Apache-2.0"
] | 1
|
2021-07-07T14:39:23.000Z
|
2021-07-07T14:39:23.000Z
|
workflow/migrations/0011_auto_20190527_2331.py
|
michaelbukachi/Activity
|
f3d4f4da88ae9539c341ca73cc559b850693d669
|
[
"Apache-2.0"
] | null | null | null |
workflow/migrations/0011_auto_20190527_2331.py
|
michaelbukachi/Activity
|
f3d4f4da88ae9539c341ca73cc559b850693d669
|
[
"Apache-2.0"
] | null | null | null |
# Generated by Django 2.2.1 on 2019-05-28 06:31
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workflow', '0010_auto_20190527_2330'),
]
operations = [
migrations.AlterField(
model_name='program',
name='name',
field=models.CharField(default='Default Level 1', max_length=255, verbose_name='Program Name'),
),
]
| 23.315789
| 107
| 0.620767
|
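For context, a sketch of the model state this migration converges on, read off the AlterField operation above; the project's actual workflow/models.py is not shown in this dump:

from django.db import models

class Program(models.Model):
    name = models.CharField(
        default='Default Level 1',
        max_length=255,
        verbose_name='Program Name',
    )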
7cb831c219629a869711a6ffdedd4271976cd1a0
| 3,422
|
py
|
Python
|
test/functional/wallet_keypool.py
|
fujicoin/fujicoin-v0.16.3
|
015eeb9c7a50f3dabfad8905d75468893cc90e22
|
[
"MIT"
] | 1
|
2018-09-23T23:54:15.000Z
|
2018-09-23T23:54:15.000Z
|
test/functional/wallet_keypool.py
|
david30000/fujicoin
|
7d8eacd96cc6da30cf6a2deaaec4a97eeecbbfb2
|
[
"MIT"
] | null | null | null |
test/functional/wallet_keypool.py
|
david30000/fujicoin
|
7d8eacd96cc6da30cf6a2deaaec4a97eeecbbfb2
|
[
"MIT"
] | 1
|
2021-07-18T11:22:27.000Z
|
2021-07-18T11:22:27.000Z
|
#!/usr/bin/env python3
# Copyright (c) 2014-2017 The Fujicoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the wallet keypool and interaction with wallet encryption/locking."""
from test_framework.test_framework import FujicoinTestFramework
from test_framework.util import *
class KeyPoolTest(FujicoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
def run_test(self):
nodes = self.nodes
addr_before_encrypting = nodes[0].getnewaddress()
addr_before_encrypting_data = nodes[0].validateaddress(addr_before_encrypting)
wallet_info_old = nodes[0].getwalletinfo()
assert(addr_before_encrypting_data['hdmasterkeyid'] == wallet_info_old['hdmasterkeyid'])
# Encrypt wallet and wait to terminate
nodes[0].node_encrypt_wallet('test')
# Restart node 0
self.start_node(0)
# Keep creating keys
addr = nodes[0].getnewaddress()
addr_data = nodes[0].validateaddress(addr)
wallet_info = nodes[0].getwalletinfo()
assert(addr_before_encrypting_data['hdmasterkeyid'] != wallet_info['hdmasterkeyid'])
assert(addr_data['hdmasterkeyid'] == wallet_info['hdmasterkeyid'])
assert_raises_rpc_error(-12, "Error: Keypool ran out, please call keypoolrefill first", nodes[0].getnewaddress)
        # refill the keypool: six new external keys plus six new internal keys
nodes[0].walletpassphrase('test', 12000)
nodes[0].keypoolrefill(6)
nodes[0].walletlock()
wi = nodes[0].getwalletinfo()
assert_equal(wi['keypoolsize_hd_internal'], 6)
assert_equal(wi['keypoolsize'], 6)
# drain the internal keys
nodes[0].getrawchangeaddress()
nodes[0].getrawchangeaddress()
nodes[0].getrawchangeaddress()
nodes[0].getrawchangeaddress()
nodes[0].getrawchangeaddress()
nodes[0].getrawchangeaddress()
addr = set()
# the next one should fail
assert_raises_rpc_error(-12, "Keypool ran out", nodes[0].getrawchangeaddress)
# drain the external keys
addr.add(nodes[0].getnewaddress())
addr.add(nodes[0].getnewaddress())
addr.add(nodes[0].getnewaddress())
addr.add(nodes[0].getnewaddress())
addr.add(nodes[0].getnewaddress())
addr.add(nodes[0].getnewaddress())
assert(len(addr) == 6)
# the next one should fail
assert_raises_rpc_error(-12, "Error: Keypool ran out, please call keypoolrefill first", nodes[0].getnewaddress)
# refill keypool with three new addresses
nodes[0].walletpassphrase('test', 1)
nodes[0].keypoolrefill(3)
# test walletpassphrase timeout
time.sleep(1.1)
assert_equal(nodes[0].getwalletinfo()["unlocked_until"], 0)
# drain them by mining
nodes[0].generate(1)
nodes[0].generate(1)
nodes[0].generate(1)
assert_raises_rpc_error(-12, "Keypool ran out", nodes[0].generate, 1)
nodes[0].walletpassphrase('test', 100)
nodes[0].keypoolrefill(100)
wi = nodes[0].getwalletinfo()
assert_equal(wi['keypoolsize_hd_internal'], 100)
assert_equal(wi['keypoolsize'], 100)
if __name__ == '__main__':
KeyPoolTest().main()
| 40.258824
| 119
| 0.662186
|
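The unlock/refill/lock cycle that the test repeats could be factored into a helper. A sketch using only RPC calls that appear in the test above; `node` stands for a test-framework node object:

def refill_keypool(node, passphrase, size):
    # unlock just long enough to refill, then re-lock and verify both pools
    node.walletpassphrase(passphrase, 12000)
    node.keypoolrefill(size)
    node.walletlock()
    wi = node.getwalletinfo()
    assert wi['keypoolsize'] == size
    assert wi['keypoolsize_hd_internal'] == size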
a23e88493f6bd4b5351a2449a4a7b88bc6a02b66
| 1,549
|
py
|
Python
|
days_28-30_regex/test_regex.py
|
vkoprivica/100_Days_of_Python
|
736b8a41a03673db44cdbd095433254e60d3316b
|
[
"MIT"
] | null | null | null |
days_28-30_regex/test_regex.py
|
vkoprivica/100_Days_of_Python
|
736b8a41a03673db44cdbd095433254e60d3316b
|
[
"MIT"
] | null | null | null |
days_28-30_regex/test_regex.py
|
vkoprivica/100_Days_of_Python
|
736b8a41a03673db44cdbd095433254e60d3316b
|
[
"MIT"
] | null | null | null |
from regex import (extract_course_times, split_on_multiple_chars,
get_all_hashtags_and_links, match_first_paragraph,
find_double_words, match_ip_v4_address)
def test_extract_course_times():
expected = ['01:47', '32:03', '41:51', '27:48', '05:02']
assert extract_course_times() == expected
def test_split_on_multiple_chars():
expected = ['2017-11-03T01:00:02', 'challenge time',
'regex!', 'hope you join ... soon']
assert split_on_multiple_chars() == expected
def test_get_all_hashtags_and_links():
expected = ['http://pybit.es/requests-cache.html', '#python', '#APIs']
assert get_all_hashtags_and_links() == expected
def test_match_first_paragraph():
expected = 'pybites != greedy'
assert match_first_paragraph() == expected
def test_find_double_words():
expected = 'the the'
assert find_double_words() == expected
def test_match_ip_address():
valid_ips = ['1.1.1.1', '255.255.255.255', '192.168.1.1',
'10.10.1.1', '132.254.111.10', '26.10.2.10',
'127.0.0.1']
bad_ips = ['10.10.10', '10.10', '10', 'a.a.a.a', '10.0.0.a']
for valid_ip in valid_ips:
assert match_ip_v4_address(valid_ip)
for bad_ip in bad_ips:
assert match_ip_v4_address(bad_ip) is None
if __name__ == "__main__":
test_extract_course_times()
test_split_on_multiple_chars()
test_get_all_hashtags_and_links()
test_match_first_paragraph()
test_find_double_words()
test_match_ip_address()
| 31.612245
| 74
| 0.662363
|
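The module under test (regex.py) is not included in this dump; one plausible implementation of match_ip_v4_address consistent with these tests, shown as a sketch only:

import re

def match_ip_v4_address(ip):
    octet = r'(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)'      # one octet, 0-255
    return re.fullmatch(r'\.'.join([octet] * 4), ip)  # a match object, or None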
79e2fa126f96009a480076fbed9e9842f92cbb28
| 17,792
|
py
|
Python
|
dx_refresh_vdb.py
|
arunskurian/delphixpy-examples
|
c4716edbd22fb238ceed23e989b6e6abd82ac8fc
|
[
"Apache-2.0"
] | null | null | null |
dx_refresh_vdb.py
|
arunskurian/delphixpy-examples
|
c4716edbd22fb238ceed23e989b6e6abd82ac8fc
|
[
"Apache-2.0"
] | null | null | null |
dx_refresh_vdb.py
|
arunskurian/delphixpy-examples
|
c4716edbd22fb238ceed23e989b6e6abd82ac8fc
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# Adam Bowen - Apr 2016
# This script refreshes a vdb
# Updated by Corey Brune Oct 2016
# requirements
# pip install --upgrade setuptools pip docopt delphixpy.v1_8_0
# The below doc follows the POSIX compliant standards and allows us to use
# this doc to also define our arguments for the script. This thing is brilliant.
"""Refresh a vdb
Usage:
dx_refresh_vdb.py (--vdb <name> | --dsource <name> | --all_vdbs [--group_name <name>]| --host <name> | --list_timeflows | --list_snapshots)
[--timestamp_type <type>]
[--timestamp <timepoint_semantic> --timeflow <timeflow>]
[-d <identifier> | --engine <identifier> | --all]
[--debug] [--parallel <n>] [--poll <n>]
[--config <path_to_file>] [--logdir <path_to_file>]
dx_refresh_vdb.py -h | --help | -v | --version
Refresh a Delphix VDB
Examples:
dx_refresh_vdb.py --vdb "aseTest" --group_name "Analytics"
dx_refresh_vdb.py --dsource "dlpxdb1"
dx_refresh_vdb.py --all_vdbs --host LINUXSOURCE --parallel 4 --debug -d landsharkengine
dx_refresh_vdb.py --all_vdbs --group_name "Analytics" --all
Options:
--vdb <name> Name of the object you are refreshing.
--all_vdbs Refresh all VDBs that meet the filter criteria.
--dsource <name> Name of dsource in Delphix to execute against.
--group_name <name> Name of the group to execute against.
--list_timeflows List all timeflows
--list_snapshots List all snapshots
--host <name> Name of environment in Delphix to execute against.
--timestamp_type <type> The type of timestamp you are specifying.
Acceptable Values: TIME, SNAPSHOT
[default: SNAPSHOT]
--timestamp <timepoint_semantic>
The Delphix semantic for the point in time on
the source from which you want to refresh your VDB.
Formats:
latest point in time or snapshot: LATEST
point in time: "YYYY-MM-DD HH24:MI:SS"
snapshot name: "@YYYY-MM-DDTHH24:MI:SS.ZZZ"
snapshot time from GUI: "YYYY-MM-DD HH24:MI"
[default: LATEST]
--timeflow <name> Name of the timeflow to refresh a VDB
-d <identifier> Identifier of Delphix engine in dxtools.conf.
--engine <type> Alt Identifier of Delphix engine in dxtools.conf.
--all Run against all engines.
--debug Enable debug logging
--parallel <n> Limit number of jobs to maxjob
--poll <n> The number of seconds to wait between job polls
[default: 10]
--config <path_to_file> The path to the dxtools.conf file
[default: ./dxtools.conf]
--logdir <path_to_file> The path to the logfile you want to use.
[default: ./dx_refresh_db.log]
-h --help Show this screen.
-v --version Show version.
"""
from __future__ import print_function
VERSION = "v.0.3.004"
import sys
import traceback
from os.path import basename
from time import sleep
from time import time
from docopt import docopt
from delphixpy.v1_8_0.exceptions import HttpError
from delphixpy.v1_8_0.exceptions import JobError
from delphixpy.v1_8_0.exceptions import RequestError
from delphixpy.v1_8_0.web import database
from delphixpy.v1_8_0.web import environment
from delphixpy.v1_8_0.web import group
from delphixpy.v1_8_0.web import job
from delphixpy.v1_8_0.web import source
from delphixpy.v1_8_0.web import timeflow
from delphixpy.v1_8_0.web.snapshot import snapshot
from delphixpy.v1_8_0.web.vo import OracleRefreshParameters
from delphixpy.v1_8_0.web.vo import RefreshParameters
from delphixpy.v1_8_0.web.vo import TimeflowPointLocation
from delphixpy.v1_8_0.web.vo import TimeflowPointSemantic
from delphixpy.v1_8_0.web.vo import TimeflowPointTimestamp
from lib.DlpxException import DlpxException
from lib.DxLogging import logging_est
from lib.DxLogging import print_debug
from lib.DxLogging import print_exception
from lib.DxLogging import print_info
from lib.DxTimeflow import DxTimeflow
from lib.GetReferences import find_obj_by_name
from lib.GetReferences import find_source_by_dbname
from lib.GetSession import GetSession
def refresh_database(vdb_name, timestamp, timestamp_type="SNAPSHOT"):
"""
This function actually performs the refresh
engine:
dlpx_obj: Virtualization Engine session object
vdb_name: VDB to be refreshed
"""
# Sanity check to make sure our source object has a reference
dx_timeflow_obj = DxTimeflow(dx_session_obj.server_session)
container_obj = find_obj_by_name(dx_session_obj.server_session, database, vdb_name)
source_obj = find_source_by_dbname(
dx_session_obj.server_session, database, vdb_name
)
# Sanity check to make sure our container object has a reference
if container_obj.reference:
try:
if container_obj.virtual is not True:
raise DlpxException(
"{} is not a virtual object. "
"Skipping.\n".format(container_obj.name)
)
elif container_obj.staging is True:
raise DlpxException(
"{} is a virtual object. " "Skipping.\n".format(container_obj.name)
)
elif container_obj.runtime.enabled == "ENABLED":
print_info(
"\nINFO: Refrshing {} to {}\n".format(container_obj.name, timestamp)
)
# This exception is raised if rewinding a vFiles VDB
# since AppDataContainer does not have virtual, staging or
# enabled attributes.
except AttributeError:
pass
if source_obj.reference:
# We can only refresh VDB's
        if source_obj.virtual is not True:
print_info(
"\nINFO: {} is not a virtual object. Skipping.\n".format(
container_obj.name
)
)
# Ensure this source is not a staging database. We can't act upon those.
        elif source_obj.staging is True:
print_info(
"\nINFO: {} is a staging database. Skipping.\n".format(
container_obj.name
)
)
# Ensure the source is enabled. We can't refresh disabled databases.
elif source_obj.runtime.enabled == "ENABLED":
source_db = database.get(
dx_session_obj.server_session, container_obj.provision_container
)
if not source_db:
print_error(
"\nERROR: Was unable to retrieve the source container for {} \n".format(
container_obj.name
)
)
print_info(
"\nINFO: Refreshing {} from {}\n".format(
container_obj.name, source_db.name
)
)
# If the vdb is a Oracle type, we need to use a
# OracleRefreshParameters
"""
rewind_params = RollbackParameters()
rewind_params.timeflow_point_parameters = dx_timeflow_obj.set_timeflow_point(
container_obj, timestamp_type, timestamp
)
print_debug('{}: {}'.format(engine_name, str(rewind_params)))
"""
if str(container_obj.reference).startswith("ORACLE"):
refresh_params = OracleRefreshParameters()
else:
refresh_params = RefreshParameters()
try:
refresh_params.timeflow_point_parameters = (
dx_timeflow_obj.set_timeflow_point(
source_db, timestamp_type, timestamp
)
)
print_info("\nINFO: Refresh prams {}\n".format(refresh_params))
# Sync it
database.refresh(
dx_session_obj.server_session,
container_obj.reference,
refresh_params,
)
dx_session_obj.jobs[
dx_session_obj.server_session.address
] = dx_session_obj.server_session.last_job
except RequestError as e:
print("\nERROR: Could not set timeflow point:\n%s\n" % (
e.message.action
))
sys.exit(1)
except DlpxException as e:
print("ERROR: Could not set timeflow point:\n%s\n" % (e.message))
sys.exit(1)
# Don't do anything if the database is disabled
else:
print_info(
"\nINFO: {} is not enabled. Skipping sync.\n".format(container_obj.name)
)
def run_async(func):
"""
http://code.activestate.com/recipes/576684-simple-threading-decorator/
run_async(func)
function decorator, intended to make "func" run in a separate
thread (asynchronously).
Returns the created Thread object
E.g.:
@run_async
def task1():
do_something
@run_async
def task2():
do_something_too
t1 = task1()
t2 = task2()
...
t1.join()
t2.join()
"""
from threading import Thread
from functools import wraps
@wraps(func)
def async_func(*args, **kwargs):
func_hl = Thread(target=func, args=args, kwargs=kwargs)
func_hl.start()
return func_hl
return async_func
@run_async
def main_workflow(engine):
"""
This function actually runs the jobs.
Use the @run_async decorator to run this function asynchronously.
This allows us to run against multiple Delphix Engine simultaneously
engine: Dictionary of engines
"""
jobs = {}
try:
# Setup the connection to the Delphix Engine
dx_session_obj.serversess(
engine["ip_address"], engine["username"], engine["password"]
)
except DlpxException as e:
print_exception(
"\nERROR: Engine {} encountered an error while"
"{}:\n{}\n".format(engine["hostname"], arguments["--target"], e)
)
sys.exit(1)
thingstodo = ["thingtodo"]
with dx_session_obj.job_mode(single_thread):
while len(dx_session_obj.jobs) > 0 or len(thingstodo) > 0:
if len(thingstodo) > 0:
refresh_database(
arguments["--vdb"],
arguments["--timestamp"],
arguments["--timestamp_type"],
)
thingstodo.pop()
# get all the jobs, then inspect them
i = 0
for j in dx_session_obj.jobs.keys():
job_obj = job.get(dx_session_obj.server_session, dx_session_obj.jobs[j])
print_debug(job_obj)
print_info(
"{}: Operations: {}".format(engine["hostname"], job_obj.job_state)
)
if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]:
# If the job is in a non-running state, remove it from the
# running jobs list.
del dx_session_obj.jobs[j]
elif job_obj.job_state in "RUNNING":
# If the job is in a running state, increment the running
# job count.
i += 1
print_info("{}: {:d} jobs running.".format(engine["hostname"], i))
# If we have running jobs, pause before repeating the checks.
if len(dx_session_obj.jobs) > 0:
sleep(float(arguments["--poll"]))
def run_job():
"""
    This function runs the main_workflow asynchronously against all the servers
specified
"""
# Create an empty list to store threads we create.
threads = []
engine = None
# If the --all argument was given, run against every engine in dxtools.conf
if arguments["--all"]:
print_info("Executing against all Delphix Engines in the dxtools.conf")
try:
# For each server in the dxtools.conf...
for delphix_engine in dx_session_obj.dlpx_engines:
engine = dx_session_obj[delphix_engine]
# Create a new thread and add it to the list.
threads.append(main_workflow(engine))
except DlpxException as e:
print("Error encountered in run_job():\n{}".format(e))
sys.exit(1)
else:
# Else if the --engine argument was given, test to see if the engine
# exists in dxtools.conf
if arguments["--engine"]:
try:
engine = dx_session_obj.dlpx_engines[arguments["--engine"]]
print_info(
"Executing against Delphix Engine: {}\n".format(
(arguments["--engine"])
)
)
except (DlpxException, RequestError, KeyError) as e:
raise DlpxException(
"\nERROR: Delphix Engine {} cannot be "
"found in {}. Please check your value "
"and try again. Exiting.\n".format(
arguments["--engine"], config_file_path
)
)
else:
# Else search for a default engine in the dxtools.conf
for delphix_engine in dx_session_obj.dlpx_engines:
if dx_session_obj.dlpx_engines[delphix_engine]["default"] == "true":
engine = dx_session_obj.dlpx_engines[delphix_engine]
print_info(
"Executing against the default Delphix Engine "
"in the dxtools.conf: {}".format(
dx_session_obj.dlpx_engines[delphix_engine]["hostname"]
)
)
break
    if engine is None:
raise DlpxException("\nERROR: No default engine found. Exiting")
# run the job against the engine
threads.append(main_workflow(engine))
# For each thread in the list...
for each in threads:
# join them back together so that we wait for all threads to complete
# before moving on
each.join()
def time_elapsed():
"""
This function calculates the time elapsed since the beginning of the script.
Call this anywhere you want to note the progress in terms of time
"""
# elapsed_minutes = round((time() - time_start)/60, +1)
# return elapsed_minutes
return round((time() - time_start) / 60, +1)
def main(arguments):
# We want to be able to call on these variables anywhere in the script.
global single_thread
global usebackup
global time_start
global config_file_path
global dx_session_obj
global debug
if arguments["--debug"]:
debug = True
try:
dx_session_obj = GetSession()
logging_est(arguments["--logdir"])
print_debug(arguments)
time_start = time()
engine = None
single_thread = False
config_file_path = arguments["--config"]
# Parse the dxtools.conf and put it into a dictionary
dx_session_obj.get_config(config_file_path)
# This is the function that will handle processing main_workflow for
# all the servers.
run_job()
# elapsed_minutes = time_elapsed()
print_info("script took {:.2f} minutes to get this far.".format(time_elapsed()))
# Here we handle what we do when the unexpected happens
except SystemExit as e:
"""
This is what we use to handle our sys.exit(#)
"""
sys.exit(e)
except HttpError as e:
"""
We use this exception handler when our connection to Delphix fails
"""
        print_exception(
            "Connection failed to the Delphix Engine. "
            "Please check the ERROR message:\n{}\n".format(e)
        )
sys.exit(1)
except JobError as e:
"""
We use this exception handler when a job fails in Delphix so that
we have actionable data
"""
elapsed_minutes = time_elapsed()
print_exception("A job failed in the Delphix Engine")
print_info(
"{} took {:.2f} minutes to get this far:\n{}\n".format(
basename(__file__), elapsed_minutes, e
)
)
sys.exit(3)
except KeyboardInterrupt:
"""
We use this exception handler to gracefully handle ctrl+c exits
"""
print_debug("You sent a CTRL+C to interrupt the process")
elapsed_minutes = time_elapsed()
print_info(
"{} took {:.2f} minutes to get this far\n".format(
basename(__file__), elapsed_minutes
)
)
except:
"""
Everything else gets caught here
"""
print_exception(sys.exc_info()[0])
elapsed_minutes = time_elapsed()
print_info(
"{} took {:.2f} minutes to get this far\n".format(
basename(__file__), elapsed_minutes
)
)
sys.exit(1)
if __name__ == "__main__":
# Grab our arguments from the doc at the top of the script
arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION)
# Feed our arguments to the main function, and off we go!
main(arguments)
| 36.684536
| 141
| 0.58279
|
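The @run_async decorator above is the script's core concurrency device. Isolated into a runnable example (task names and sleep durations are invented for the sketch):

import time
from functools import wraps
from threading import Thread

def run_async(func):
    @wraps(func)
    def async_func(*args, **kwargs):
        func_hl = Thread(target=func, args=args, kwargs=kwargs)
        func_hl.start()
        return func_hl
    return async_func

@run_async
def task(name, delay):
    time.sleep(delay)
    print(name, 'done')

t1 = task('engine-a', 0.2)  # both tasks run concurrently
t2 = task('engine-b', 0.1)
t1.join()
t2.join()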
0240ecbdb6c78392b7cb492cb094cbd723c8d4eb
| 8,519
|
py
|
Python
|
raven/contrib/django/models.py
|
mgedmin/raven-python
|
6d487a8298dd5340c701b7195eb65ce4ed113f1f
|
[
"BSD-3-Clause"
] | null | null | null |
raven/contrib/django/models.py
|
mgedmin/raven-python
|
6d487a8298dd5340c701b7195eb65ce4ed113f1f
|
[
"BSD-3-Clause"
] | null | null | null |
raven/contrib/django/models.py
|
mgedmin/raven-python
|
6d487a8298dd5340c701b7195eb65ce4ed113f1f
|
[
"BSD-3-Clause"
] | null | null | null |
"""
raven.contrib.django.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Acts as an implicit hook for Django installs.
:copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from __future__ import unicode_literals
from hashlib import md5
import logging
import sys
import warnings
from raven.utils import six
from django.conf import settings as django_settings
logger = logging.getLogger('sentry.errors.client')
def get_installed_apps():
"""
Modules in settings.INSTALLED_APPS as a set.
"""
return set(django_settings.INSTALLED_APPS)
_client = (None, None)
class ProxyClient(object):
"""
    A proxy which represents the current client at all times.
"""
# introspection support:
__members__ = property(lambda x: x.__dir__())
# Need to pretend to be the wrapped class, for the sake of objects that care
# about this (especially in equality tests)
__class__ = property(lambda x: get_client().__class__)
__dict__ = property(lambda o: get_client().__dict__)
__repr__ = lambda x: repr(get_client())
__getattr__ = lambda x, o: getattr(get_client(), o)
__setattr__ = lambda x, o, v: setattr(get_client(), o, v)
__delattr__ = lambda x, o: delattr(get_client(), o)
__lt__ = lambda x, o: get_client() < o
__le__ = lambda x, o: get_client() <= o
__eq__ = lambda x, o: get_client() == o
__ne__ = lambda x, o: get_client() != o
__gt__ = lambda x, o: get_client() > o
__ge__ = lambda x, o: get_client() >= o
if not six.PY3:
__cmp__ = lambda x, o: cmp(get_client(), o) # NOQA
__hash__ = lambda x: hash(get_client())
# attributes are currently not callable
# __call__ = lambda x, *a, **kw: get_client()(*a, **kw)
__nonzero__ = lambda x: bool(get_client())
__len__ = lambda x: len(get_client())
__getitem__ = lambda x, i: get_client()[i]
__iter__ = lambda x: iter(get_client())
__contains__ = lambda x, i: i in get_client()
__getslice__ = lambda x, i, j: get_client()[i:j]
__add__ = lambda x, o: get_client() + o
__sub__ = lambda x, o: get_client() - o
__mul__ = lambda x, o: get_client() * o
__floordiv__ = lambda x, o: get_client() // o
__mod__ = lambda x, o: get_client() % o
__divmod__ = lambda x, o: get_client().__divmod__(o)
__pow__ = lambda x, o: get_client() ** o
__lshift__ = lambda x, o: get_client() << o
__rshift__ = lambda x, o: get_client() >> o
__and__ = lambda x, o: get_client() & o
__xor__ = lambda x, o: get_client() ^ o
__or__ = lambda x, o: get_client() | o
__div__ = lambda x, o: get_client().__div__(o)
__truediv__ = lambda x, o: get_client().__truediv__(o)
__neg__ = lambda x: -(get_client())
__pos__ = lambda x: +(get_client())
__abs__ = lambda x: abs(get_client())
__invert__ = lambda x: ~(get_client())
__complex__ = lambda x: complex(get_client())
__int__ = lambda x: int(get_client())
if not six.PY3:
__long__ = lambda x: long(get_client()) # NOQA
__float__ = lambda x: float(get_client())
__str__ = lambda x: six.binary_type(get_client())
__unicode__ = lambda x: six.text_type(get_client())
__oct__ = lambda x: oct(get_client())
__hex__ = lambda x: hex(get_client())
__index__ = lambda x: get_client().__index__()
    __coerce__ = lambda x, o: get_client().__coerce__(o)
    __enter__ = lambda x: get_client().__enter__()
    __exit__ = lambda x, *a, **kw: get_client().__exit__(*a, **kw)
client = ProxyClient()
def get_option(x, d=None):
options = getattr(django_settings, 'RAVEN_CONFIG', {})
return getattr(django_settings, 'SENTRY_%s' % x, options.get(x, d))
def get_client(client=None):
global _client
tmp_client = client is not None
if not tmp_client:
client = getattr(django_settings, 'SENTRY_CLIENT', 'raven.contrib.django.DjangoClient')
if _client[0] != client:
module, class_name = client.rsplit('.', 1)
ga = lambda x, d=None: getattr(django_settings, 'SENTRY_%s' % x, d)
options = getattr(django_settings, 'RAVEN_CONFIG', {})
options.setdefault('servers', ga('SERVERS'))
options.setdefault('include_paths', ga('INCLUDE_PATHS', []))
options['include_paths'] = set(options['include_paths']) | get_installed_apps()
options.setdefault('exclude_paths', ga('EXCLUDE_PATHS'))
options.setdefault('timeout', ga('TIMEOUT'))
options.setdefault('name', ga('NAME'))
options.setdefault('auto_log_stacks', ga('AUTO_LOG_STACKS'))
options.setdefault('key', ga('KEY', md5(django_settings.SECRET_KEY.encode('utf8')).hexdigest()))
options.setdefault('string_max_length', ga('MAX_LENGTH_STRING'))
options.setdefault('list_max_length', ga('MAX_LENGTH_LIST'))
options.setdefault('site', ga('SITE'))
options.setdefault('public_key', ga('PUBLIC_KEY'))
options.setdefault('secret_key', ga('SECRET_KEY'))
options.setdefault('project', ga('PROJECT'))
options.setdefault('processors', ga('PROCESSORS'))
options.setdefault('dsn', ga('DSN'))
options.setdefault('context', ga('CONTEXT'))
class_name = str(class_name)
try:
instance = getattr(__import__(module, {}, {}, class_name), class_name)(**options)
except ImportError:
logger.exception('Failed to import client: %s', client)
if not _client[1]:
# If there is no previous client, set the default one.
client = 'raven.contrib.django.DjangoClient'
_client = (client, get_client(client))
else:
if not tmp_client:
_client = (client, instance)
return instance
return _client[1]
def sentry_exception_handler(request=None, **kwargs):
exc_type = sys.exc_info()[0]
exclusions = set(get_option('IGNORE_EXCEPTIONS', ()))
exc_name = '%s.%s' % (exc_type.__module__, exc_type.__name__)
if exc_type.__name__ in exclusions or exc_name in exclusions or any(exc_name.startswith(e[:-1]) for e in exclusions if e.endswith('*')):
logger.info(
'Not capturing exception due to filters: %s', exc_type,
exc_info=sys.exc_info())
return
try:
client.captureException(exc_info=sys.exc_info(), request=request)
except Exception as exc:
try:
logger.exception('Unable to process log entry: %s' % (exc,))
except Exception as exc:
warnings.warn('Unable to process log entry: %s' % (exc,))
def register_handlers():
from django.core.signals import got_request_exception
# HACK: support Sentry's internal communication
if 'sentry' in django_settings.INSTALLED_APPS:
from django.db import transaction
# Django 1.6
if hasattr(transaction, 'atomic'):
commit_on_success = transaction.atomic
else:
commit_on_success = transaction.commit_on_success
@commit_on_success
def wrap_sentry(request, **kwargs):
if transaction.is_dirty():
transaction.rollback()
return sentry_exception_handler(request, **kwargs)
exception_handler = wrap_sentry
else:
exception_handler = sentry_exception_handler
# Connect to Django's internal signal handler
got_request_exception.connect(exception_handler, weak=False)
# If Celery is installed, register a signal handler
if 'djcelery' in django_settings.INSTALLED_APPS:
try:
# Celery < 2.5? is not supported
from raven.contrib.celery import (
register_signal, register_logger_signal)
except ImportError:
logger.exception('Failed to install Celery error handler')
else:
try:
register_signal(client)
except Exception:
logger.exception('Failed to install Celery error handler')
try:
register_logger_signal(client)
except Exception:
logger.exception('Failed to install Celery error handler')
def register_serializers():
# force import so serializers can call register
import raven.contrib.django.serializers # NOQA
if ('raven.contrib.django' in django_settings.INSTALLED_APPS
or 'raven.contrib.django.raven_compat' in django_settings.INSTALLED_APPS):
register_handlers()
register_serializers()
| 36.562232
| 140
| 0.647846
|
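The ProxyClient above is a lazy proxy: every attribute access re-resolves the current client, so reconfiguration takes effect without rebinding module-level references. A stripped-down sketch of the same idea (all names invented):

class LazyProxy(object):
    def __init__(self, resolve):
        object.__setattr__(self, '_resolve', resolve)

    def __getattr__(self, name):
        # invoked for every attribute missing on the proxy itself,
        # so each access goes through the resolver
        return getattr(self._resolve(), name)

_state = {'client': None}

class DummyClient(object):
    def captureMessage(self, msg):
        return 'captured: %s' % msg

client = LazyProxy(lambda: _state['client'])
_state['client'] = DummyClient()
print(client.captureMessage('boom'))  # resolved at call time, not at import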
a87896e5386c70170acd94b3e982528373860900
| 8,293
|
py
|
Python
|
synapse/groups/attestations.py
|
dsonck92/synapse
|
2560b1b6b2f74b5724253396c0e3665fa1f7968c
|
[
"Apache-2.0"
] | 9,945
|
2015-01-02T07:41:06.000Z
|
2022-03-31T23:22:42.000Z
|
synapse/groups/attestations.py
|
t2bot/synapse
|
62ca554ef09330cb88d46fca8296a859d0adc143
|
[
"Apache-2.0"
] | 9,320
|
2015-01-08T14:09:03.000Z
|
2022-03-31T21:11:24.000Z
|
synapse/groups/attestations.py
|
t2bot/synapse
|
62ca554ef09330cb88d46fca8296a859d0adc143
|
[
"Apache-2.0"
] | 2,299
|
2015-01-31T22:16:29.000Z
|
2022-03-31T06:08:26.000Z
|
# Copyright 2017 Vector Creations Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Attestations ensure that users and groups can't lie about their memberships.
When a user joins a group the HS and GS swap attestations, which allow them
both to independently prove their membership to third parties. These
attestations have a validity period so need to be periodically renewed.
If a user leaves (or gets kicked out of) a group, either side can still use
their attestation to "prove" their membership, until the attestation expires.
Therefore attestations shouldn't be relied on to prove membership in important
cases, but can for less important situations, e.g. showing a user's membership
of groups on their profile, showing flairs, etc.
An attestation is a signed blob of json that looks like:
{
"user_id": "@foo:a.example.com",
"group_id": "+bar:b.example.com",
"valid_until_ms": 1507994728530,
"signatures":{"matrix.org":{"ed25519:auto":"..."}}
}
"""
import logging
import random
from typing import TYPE_CHECKING, Optional, Tuple
from signedjson.sign import sign_json
from twisted.internet.defer import Deferred
from synapse.api.errors import HttpResponseException, RequestSendFailed, SynapseError
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.types import JsonDict, get_domain_from_id
if TYPE_CHECKING:
from synapse.server import HomeServer
logger = logging.getLogger(__name__)
# Default validity duration for new attestations we create
DEFAULT_ATTESTATION_LENGTH_MS = 3 * 24 * 60 * 60 * 1000
# We add some jitter to the validity duration of attestations so that if we
# add lots of users at once we don't need to renew them all at once.
# The jitter is a multiplier picked randomly between the first and second number
DEFAULT_ATTESTATION_JITTER = (0.9, 1.3)
# Start trying to update our attestations when they come this close to expiring
UPDATE_ATTESTATION_TIME_MS = 1 * 24 * 60 * 60 * 1000
class GroupAttestationSigning:
"""Creates and verifies group attestations."""
def __init__(self, hs: "HomeServer"):
self.keyring = hs.get_keyring()
self.clock = hs.get_clock()
self.server_name = hs.hostname
self.signing_key = hs.signing_key
async def verify_attestation(
self,
attestation: JsonDict,
group_id: str,
user_id: str,
server_name: Optional[str] = None,
) -> None:
"""Verifies that the given attestation matches the given parameters.
An optional server_name can be supplied to explicitly set which server's
signature is expected. Otherwise assumes that either the group_id or user_id
is local and uses the other's server as the one to check.
"""
if not server_name:
if get_domain_from_id(group_id) == self.server_name:
server_name = get_domain_from_id(user_id)
elif get_domain_from_id(user_id) == self.server_name:
server_name = get_domain_from_id(group_id)
else:
raise Exception("Expected either group_id or user_id to be local")
if user_id != attestation["user_id"]:
raise SynapseError(400, "Attestation has incorrect user_id")
if group_id != attestation["group_id"]:
raise SynapseError(400, "Attestation has incorrect group_id")
valid_until_ms = attestation["valid_until_ms"]
# TODO: We also want to check that *new* attestations that people give
# us to store are valid for at least a little while.
now = self.clock.time_msec()
if valid_until_ms < now:
raise SynapseError(400, "Attestation expired")
assert server_name is not None
await self.keyring.verify_json_for_server(
server_name,
attestation,
now,
)
def create_attestation(self, group_id: str, user_id: str) -> JsonDict:
"""Create an attestation for the group_id and user_id with default
validity length.
"""
validity_period = DEFAULT_ATTESTATION_LENGTH_MS * random.uniform(
*DEFAULT_ATTESTATION_JITTER
)
valid_until_ms = int(self.clock.time_msec() + validity_period)
return sign_json(
{
"group_id": group_id,
"user_id": user_id,
"valid_until_ms": valid_until_ms,
},
self.server_name,
self.signing_key,
)
class GroupAttestionRenewer:
"""Responsible for sending and receiving attestation updates."""
def __init__(self, hs: "HomeServer"):
self.clock = hs.get_clock()
self.store = hs.get_datastore()
self.transport_client = hs.get_federation_transport_client()
self.is_mine_id = hs.is_mine_id
self.attestations = hs.get_groups_attestation_signing()
if not hs.config.worker.worker_app:
self._renew_attestations_loop = self.clock.looping_call(
self._start_renew_attestations, 30 * 60 * 1000
)
async def on_renew_attestation(
self, group_id: str, user_id: str, content: JsonDict
) -> JsonDict:
"""When a remote updates an attestation"""
attestation = content["attestation"]
if not self.is_mine_id(group_id) and not self.is_mine_id(user_id):
raise SynapseError(400, "Neither user not group are on this server")
await self.attestations.verify_attestation(
attestation, user_id=user_id, group_id=group_id
)
await self.store.update_remote_attestion(group_id, user_id, attestation)
return {}
def _start_renew_attestations(self) -> "Deferred[None]":
return run_as_background_process("renew_attestations", self._renew_attestations)
async def _renew_attestations(self) -> None:
"""Called periodically to check if we need to update any of our attestations"""
now = self.clock.time_msec()
rows = await self.store.get_attestations_need_renewals(
now + UPDATE_ATTESTATION_TIME_MS
)
async def _renew_attestation(group_user: Tuple[str, str]) -> None:
group_id, user_id = group_user
try:
if not self.is_mine_id(group_id):
destination = get_domain_from_id(group_id)
elif not self.is_mine_id(user_id):
destination = get_domain_from_id(user_id)
else:
logger.warning(
"Incorrectly trying to do attestations for user: %r in %r",
user_id,
group_id,
)
await self.store.remove_attestation_renewal(group_id, user_id)
return
attestation = self.attestations.create_attestation(group_id, user_id)
await self.transport_client.renew_group_attestation(
destination, group_id, user_id, content={"attestation": attestation}
)
await self.store.update_attestation_renewal(
group_id, user_id, attestation
)
except (RequestSendFailed, HttpResponseException) as e:
logger.warning(
"Failed to renew attestation of %r in %r: %s", user_id, group_id, e
)
except Exception:
logger.exception(
"Error renewing attestation of %r in %r", user_id, group_id
)
for row in rows:
await _renew_attestation((row["group_id"], row["user_id"]))
| 37.86758
| 88
| 0.656819
|
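How the validity window above is computed: the base length is multiplied by a random jitter factor so that attestations created in bulk do not all expire at once. A runnable restatement using the constants from the file:

import random

DEFAULT_ATTESTATION_LENGTH_MS = 3 * 24 * 60 * 60 * 1000
DEFAULT_ATTESTATION_JITTER = (0.9, 1.3)

def valid_until_ms(now_ms):
    validity_period = DEFAULT_ATTESTATION_LENGTH_MS * random.uniform(
        *DEFAULT_ATTESTATION_JITTER
    )
    return int(now_ms + validity_period)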
c47739874e06f42c7eb96ea82d6382fed8af2e9d
| 2,035
|
py
|
Python
|
Z_ALL_FILE/Py/code_qry.py
|
omikabir/omEngin
|
b8c04a5c2c12ffc3d0b67c2ceba9e5741d3f9195
|
[
"Apache-2.0"
] | null | null | null |
Z_ALL_FILE/Py/code_qry.py
|
omikabir/omEngin
|
b8c04a5c2c12ffc3d0b67c2ceba9e5741d3f9195
|
[
"Apache-2.0"
] | null | null | null |
Z_ALL_FILE/Py/code_qry.py
|
omikabir/omEngin
|
b8c04a5c2c12ffc3d0b67c2ceba9e5741d3f9195
|
[
"Apache-2.0"
] | 1
|
2021-04-29T21:46:02.000Z
|
2021-04-29T21:46:02.000Z
|
import os

import pandas as pd
import pyodbc  # needed by codechk(); the connection string `soc` must be defined elsewhere

def parsecode(txt):
    df = pd.read_csv(os.getcwd() + '\\OMDB.csv')
    ls = df['Code'].to_list()
    code = []
    for i in range(len(ls)):
        text = txt
        if ls[i] in text:
            n = text.find(ls[i])
            st = text[n:n + 7]
            code.append(st)
            txt = txt.replace(ls[i], '')
    # decide only after every code has been checked; the original returned ''
    # as soon as the first code failed to match, skipping the rest of the list
    return code if code else ''
def qry_by_code(code, tbl = None, col = None):
if tbl is None and col is None:
a1 = "select Incident_Notification,Down_Time,Up_Time,Major_Cause,Action_Taken,Link_ID_Site_ID,Incident_ID from incident_tracker_v2 where ("
a2 = " No_of_2G_Impacted_sites Like '%" + code + "%' or No_of_3G_Impacted_sites like '%" + code + "%' or No_of_4G_Impacted_Sites like '%" + code + "%' or Incident_Notification Like '%" + code
a3 = "%') order by Down_Time desc"
aa = a1 + a2 + a3
return aa
else:
return ""
def codechk(txt):
    rs = parsecode(txt.upper())
    st = 0
    print('ret val', rs)
    if len(rs) == 1:
        code = rs[0]
        try:
            int(code[6:7])  # a valid code ends in a digit; raises ValueError otherwise
            qry = qry_by_code(code)
            conn = pyodbc.connect(soc)
            df = pd.read_sql(qry, con=conn)  # pandas has read_sql, not read()
            if df.shape[0] != 0:
                if df.shape[0] > 3:
                    st = "last 3 incident out of " + str(df.shape[0])
                    rn = 3
                else:
                    st = "incident found " + str(df.shape[0]) + chr(10)
                    rn = df.shape[0]
                for i in range(rn):
                    tmp = chr(10)
                    for j in df:
                        tmp = tmp + chr(10) + str(df.loc[i, j])
                    st = st + chr(10) + str(i) + tmp
        except Exception:
            print('not code')
        return st
    else:
        return st
| 28.263889
| 200
| 0.456511
|
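A usage sketch for the helpers above. The code string is hypothetical, and `soc` (the pyodbc connection string used by codechk) must be defined before the database path is reachable:

codes = parsecode('LINK DOWN AT ABC1234 SINCE 10:20')  # 'ABC1234' is invented
if codes:
    print(qry_by_code(codes[0]))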
cbd96f198a524ce01faafc3d077cf8b422a701d4
| 6,784
|
py
|
Python
|
booking/views.py
|
eedf/becours
|
95ac62b3d102ddbb16b01fe27fc32c98d1c40410
|
[
"MIT"
] | null | null | null |
booking/views.py
|
eedf/becours
|
95ac62b3d102ddbb16b01fe27fc32c98d1c40410
|
[
"MIT"
] | null | null | null |
booking/views.py
|
eedf/becours
|
95ac62b3d102ddbb16b01fe27fc32c98d1c40410
|
[
"MIT"
] | null | null | null |
from datetime import date, timedelta
from django.contrib.auth.mixins import LoginRequiredMixin
from django.core.files import File
from django.db.models import Min, Sum
from django.http import HttpResponseRedirect
from django.urls import reverse
from django.utils.text import slugify
from django.utils.timezone import now
from django.views.generic import ListView, TemplateView, DetailView
from os import unlink
from templated_docs import fill_template
from .models import Booking, BookingItem, Agreement
class HomeView(TemplateView):
template_name = 'booking/home.html'
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
potential_incomes = [item.amount - item.amount_cot for item in BookingItem.objects.filter(booking__state__income=1)]
potential_overnights = [item.overnights for item in BookingItem.objects.filter(booking__state__income=1)]
context['potential_income'] = sum(filter(bool, potential_incomes))
context['potential_overnights'] = sum(filter(bool, potential_overnights))
confirmed_incomes = [item.amount - item.amount_cot for item in BookingItem.objects.filter(booking__state__income__in=(2, 3))]
confirmed_overnights = [item.overnights for item in BookingItem.objects.filter(booking__state__income__in=(2, 3))]
context['confirmed_income'] = sum(filter(bool, confirmed_incomes))
context['confirmed_overnights'] = sum(filter(bool, confirmed_overnights))
context['total_income'] = context['potential_income'] + context['confirmed_income']
context['total_overnights'] = context['potential_overnights'] + context['confirmed_overnights']
return context
class BookingListView(ListView):
queryset = Booking.objects.order_by('begin')
class BookingDetailView(DetailView):
model = Booking
class CreateAgreementView(LoginRequiredMixin, DetailView):
model = Booking
def render_to_response(self, context, **response_kwargs):
year = self.object.items.earliest('begin').begin.year
try:
order = Agreement.objects.filter(date__year=year).latest('order').order + 1
except Agreement.DoesNotExist:
order = 1
agreement = Agreement.objects.create(date=now().date(), order=order, booking=self.object)
context['agreement'] = agreement
for ext in ('odt', 'pdf'):
filename = fill_template('booking/agreement.odt', context, output_format=ext)
visible_filename = "Convention_{number}_{title}.{ext}".format(number=agreement.number(), ext=ext,
title=slugify(self.object.title))
f = open(filename, 'rb')
getattr(agreement, ext).save(visible_filename, File(f))
f.close()
unlink(filename)
return HttpResponseRedirect(reverse('booking:booking_detail', kwargs={'pk': self.object.pk}))
class OccupancyView(TemplateView):
template_name = 'booking/occupancy.html'
def occupancy_for(self, day, product):
items = BookingItem.objects.filter(begin__lte=day, end__gt=day, product=product)
items = items.filter(booking__state__income__in=(1, 2, 3), headcount__isnull=False)
items = items.order_by('booking__title')
items = items.values('booking__title', 'booking__state__color')
items = items.annotate(headcount=Sum('headcount'))
return (sum([item['headcount'] for item in items]), items)
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
occupancy = []
for i in range(365):
day = date(2017, 1, 1) + timedelta(days=i)
occupancy.append((day, ) + self.occupancy_for(day, 2) + self.occupancy_for(day, 1))
context['occupancy'] = occupancy
return context
class StatsView(TemplateView):
template_name = 'booking/stats.html'
def get_context_data(self, **kwargs):
items = BookingItem.objects.all()
kwargs['stats'] = {
'headcount': sum([item.headcount for item in items if item.headcount]),
'overnights': sum([item.overnights for item in items if item.overnights]),
'amount_hosting': sum([item.amount - item.amount_cot for item in items if item.product in (1, 2, 5)]),
'amount_cot': sum([item.overnights for item in items if item.overnights]),
'amount_other': sum([item.amount for item in items if item.product in (3, 4)]),
'amount': sum([item.amount for item in items]),
}
kwargs['stats']['overnight_cost'] = kwargs['stats']['amount_hosting'] / kwargs['stats']['overnights']
STATS = (
('stats_eedf', BookingItem.objects.filter(booking__org_type=1)),
('stats_ext', BookingItem.objects.exclude(booking__org_type=1)),
('stats_village', BookingItem.objects.filter(product__in=(2, 5))),
('stats_terrain', BookingItem.objects.filter(product=1)),
('stats_village_eedf', BookingItem.objects.filter(booking__org_type=1, product__in=(2, 5))),
('stats_village_ext', BookingItem.objects.exclude(booking__org_type=1).filter(product__in=(2, 5))),
('stats_terrain_eedf', BookingItem.objects.filter(booking__org_type=1, product=1)),
('stats_terrain_ext', BookingItem.objects.exclude(booking__org_type=1).filter(product=1)),
('stats_ete', BookingItem.objects.filter(end__gte='2017-07-01', begin__lte='2017-08-31')),
('stats_avr', BookingItem.objects.filter(end__gte='2017-04-16', begin__lte='2017-05-01')),
('stats_oct', BookingItem.objects.filter(end__gte='2017-10-20', begin__lte='2017-11-02')),
)
for (name, items) in STATS:
kwargs[name] = {
'headcount': sum([item.headcount for item in items if item.headcount]),
'overnights': sum([item.overnights for item in items if item.overnights]),
'amount_hosting': sum([item.amount - item.amount_cot for item in items if item.product in (1, 2, 5)]),
'amount_cot': sum([item.overnights for item in items if item.overnights]),
'amount_other': sum([item.amount for item in items if item.product in (3, 4)]),
'amount': sum([item.amount for item in items]),
}
kwargs[name]['overnights_rate'] = (100 * kwargs[name]['overnights'] / kwargs['stats']['overnights'])
kwargs[name]['amount_hosting_rate'] = (100 * kwargs[name]['amount_hosting'] / kwargs['stats']['amount_hosting'])
kwargs[name]['overnight_cost'] = kwargs[name]['overnights'] and kwargs[name]['amount_hosting'] / kwargs[name]['overnights']
return kwargs
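# A hypothetical booking/urls.py wiring for the views above (not part of this
# file). The 'booking:booking_detail' name is taken from the reverse() call in
# CreateAgreementView; the other routes are illustrative.
#
#   from django.urls import path
#   from . import views
#   app_name = 'booking'
#   urlpatterns = [
#       path('', views.HomeView.as_view(), name='home'),
#       path('bookings/', views.BookingListView.as_view(), name='booking_list'),
#       path('bookings/<int:pk>/', views.BookingDetailView.as_view(), name='booking_detail'),
#       path('bookings/<int:pk>/agreement/', views.CreateAgreementView.as_view(), name='create_agreement'),
#       path('occupancy/', views.OccupancyView.as_view(), name='occupancy'),
#       path('stats/', views.StatsView.as_view(), name='stats'),
#   ]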
| 53.417323
| 135
| 0.666421
|
873ee428b52cc951b4b3557ece31a5d13dd69a75
| 4,277
|
py
|
Python
|
lale/lib/aif360/eq_odds_postprocessing.py
|
szymonkucharczyk/lale
|
bc956e56f9e23c78049699faf428c2f7fdd4565d
|
[
"Apache-2.0"
] | 1
|
2021-07-24T20:35:18.000Z
|
2021-07-24T20:35:18.000Z
|
lale/lib/aif360/eq_odds_postprocessing.py
|
szymonkucharczyk/lale
|
bc956e56f9e23c78049699faf428c2f7fdd4565d
|
[
"Apache-2.0"
] | null | null | null |
lale/lib/aif360/eq_odds_postprocessing.py
|
szymonkucharczyk/lale
|
bc956e56f9e23c78049699faf428c2f7fdd4565d
|
[
"Apache-2.0"
] | 1
|
2021-03-16T08:20:30.000Z
|
2021-03-16T08:20:30.000Z
|
# Copyright 2019 IBM Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import aif360.algorithms.postprocessing
import lale.docstrings
import lale.operators
from .util import (
_BasePostEstimatorImpl,
_categorical_fairness_properties,
_categorical_input_predict_schema,
_categorical_output_predict_schema,
_categorical_supervised_input_fit_schema,
)
class _EqOddsPostprocessingImpl(_BasePostEstimatorImpl):
def __init__(
self,
favorable_labels,
protected_attributes,
estimator,
redact=True,
seed=None,
):
prot_attr_names = [pa["feature"] for pa in protected_attributes]
unprivileged_groups = [{name: 0 for name in prot_attr_names}]
privileged_groups = [{name: 1 for name in prot_attr_names}]
mitigator = aif360.algorithms.postprocessing.EqOddsPostprocessing(
unprivileged_groups=unprivileged_groups,
privileged_groups=privileged_groups,
seed=seed,
)
super(_EqOddsPostprocessingImpl, self).__init__(
favorable_labels=favorable_labels,
protected_attributes=protected_attributes,
estimator=estimator,
redact=redact,
mitigator=mitigator,
)
_input_fit_schema = _categorical_supervised_input_fit_schema
_input_predict_schema = _categorical_input_predict_schema
_output_predict_schema = _categorical_output_predict_schema
_hyperparams_schema = {
"description": "Hyperparameter schema.",
"allOf": [
{
"description": "This first sub-object lists all constructor arguments with their "
"types, one at a time, omitting cross-argument constraints.",
"type": "object",
"additionalProperties": False,
"required": [
*_categorical_fairness_properties.keys(),
"estimator",
"redact",
"seed",
],
"relevantToOptimizer": [],
"properties": {
**_categorical_fairness_properties,
"estimator": {
"description": "Nested supervised learning operator for which to mitigate fairness.",
"laleType": "operator",
},
"redact": {
"description": "Whether to redact protected attributes before data preparation (recommended) or not.",
"type": "boolean",
"default": True,
},
"seed": {
"description": "Seed to make `predict` repeatable.",
"anyOf": [{"type": "integer"}, {"enum": [None]}],
"default": None,
},
},
}
],
}
_combined_schemas = {
"$schema": "http://json-schema.org/draft-04/schema#",
"description": """`Equalized odds postprocessing`_ post-estimator fairness mitigator.
.. _`Equalized odds postprocessing`: https://aif360.readthedocs.io/en/latest/modules/generated/aif360.algorithms.postprocessing.EqOddsPostprocessing.html
""",
"documentation_url": "https://lale.readthedocs.io/en/latest/modules/lale.lib.aif360.eq_odds_postprocessing.html",
"import_from": "aif360.algorithms.postprocessing",
"type": "object",
"tags": {"pre": [], "op": ["estimator", "classifier", "interpretable"], "post": []},
"properties": {
"hyperparams": _hyperparams_schema,
"input_fit": _input_fit_schema,
"input_predict": _input_predict_schema,
"output_predict": _output_predict_schema,
},
}
EqOddsPostprocessing = lale.operators.make_operator(
_EqOddsPostprocessingImpl, _combined_schemas
)
lale.docstrings.set_docstrings(EqOddsPostprocessing)
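# A minimal, hypothetical usage sketch (not part of the original module). The
# hyperparameter names follow the schema above; "sex" and label value 1 are
# illustrative, and _categorical_fairness_properties may require further keys
# in each protected-attribute dict (e.g. the privileged group values).
if __name__ == "__main__":
    from lale.lib.sklearn import LogisticRegression
    trainable = EqOddsPostprocessing(
        favorable_labels=[1],
        protected_attributes=[{"feature": "sex"}],
        estimator=LogisticRegression(),
    )
    # trained = trainable.fit(train_X, train_y); preds = trained.predict(test_X)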
| 36.245763
| 153
| 0.644611
|
488431213c954037571d4c4549039ad9bf245ecc
| 31,233
|
py
|
Python
|
kubernetes/test/test_core_v1_api.py
|
anemerovsky-essextec/python
|
6e40b9169b27c3f1f9422c0f6dd1cd9caef8d57c
|
[
"Apache-2.0"
] | null | null | null |
kubernetes/test/test_core_v1_api.py
|
anemerovsky-essextec/python
|
6e40b9169b27c3f1f9422c0f6dd1cd9caef8d57c
|
[
"Apache-2.0"
] | null | null | null |
kubernetes/test/test_core_v1_api.py
|
anemerovsky-essextec/python
|
6e40b9169b27c3f1f9422c0f6dd1cd9caef8d57c
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.12.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kubernetes.client
from kubernetes.client.rest import ApiException
from kubernetes.client.apis.core_v1_api import CoreV1Api
class TestCoreV1Api(unittest.TestCase):
""" CoreV1Api unit test stubs """
def setUp(self):
self.api = kubernetes.client.apis.core_v1_api.CoreV1Api()
def tearDown(self):
pass
def test_connect_delete_namespaced_pod_proxy(self):
"""
Test case for connect_delete_namespaced_pod_proxy
"""
pass
def test_connect_delete_namespaced_pod_proxy_with_path(self):
"""
Test case for connect_delete_namespaced_pod_proxy_with_path
"""
pass
def test_connect_delete_namespaced_service_proxy(self):
"""
Test case for connect_delete_namespaced_service_proxy
"""
pass
def test_connect_delete_namespaced_service_proxy_with_path(self):
"""
Test case for connect_delete_namespaced_service_proxy_with_path
"""
pass
def test_connect_delete_node_proxy(self):
"""
Test case for connect_delete_node_proxy
"""
pass
def test_connect_delete_node_proxy_with_path(self):
"""
Test case for connect_delete_node_proxy_with_path
"""
pass
def test_connect_get_namespaced_pod_attach(self):
"""
Test case for connect_get_namespaced_pod_attach
"""
pass
def test_connect_get_namespaced_pod_exec(self):
"""
Test case for connect_get_namespaced_pod_exec
"""
pass
def test_connect_get_namespaced_pod_portforward(self):
"""
Test case for connect_get_namespaced_pod_portforward
"""
pass
def test_connect_get_namespaced_pod_proxy(self):
"""
Test case for connect_get_namespaced_pod_proxy
"""
pass
def test_connect_get_namespaced_pod_proxy_with_path(self):
"""
Test case for connect_get_namespaced_pod_proxy_with_path
"""
pass
def test_connect_get_namespaced_service_proxy(self):
"""
Test case for connect_get_namespaced_service_proxy
"""
pass
def test_connect_get_namespaced_service_proxy_with_path(self):
"""
Test case for connect_get_namespaced_service_proxy_with_path
"""
pass
def test_connect_get_node_proxy(self):
"""
Test case for connect_get_node_proxy
"""
pass
def test_connect_get_node_proxy_with_path(self):
"""
Test case for connect_get_node_proxy_with_path
"""
pass
def test_connect_head_namespaced_pod_proxy(self):
"""
Test case for connect_head_namespaced_pod_proxy
"""
pass
def test_connect_head_namespaced_pod_proxy_with_path(self):
"""
Test case for connect_head_namespaced_pod_proxy_with_path
"""
pass
def test_connect_head_namespaced_service_proxy(self):
"""
Test case for connect_head_namespaced_service_proxy
"""
pass
def test_connect_head_namespaced_service_proxy_with_path(self):
"""
Test case for connect_head_namespaced_service_proxy_with_path
"""
pass
def test_connect_head_node_proxy(self):
"""
Test case for connect_head_node_proxy
"""
pass
def test_connect_head_node_proxy_with_path(self):
"""
Test case for connect_head_node_proxy_with_path
"""
pass
def test_connect_options_namespaced_pod_proxy(self):
"""
Test case for connect_options_namespaced_pod_proxy
"""
pass
def test_connect_options_namespaced_pod_proxy_with_path(self):
"""
Test case for connect_options_namespaced_pod_proxy_with_path
"""
pass
def test_connect_options_namespaced_service_proxy(self):
"""
Test case for connect_options_namespaced_service_proxy
"""
pass
def test_connect_options_namespaced_service_proxy_with_path(self):
"""
Test case for connect_options_namespaced_service_proxy_with_path
"""
pass
def test_connect_options_node_proxy(self):
"""
Test case for connect_options_node_proxy
"""
pass
def test_connect_options_node_proxy_with_path(self):
"""
Test case for connect_options_node_proxy_with_path
"""
pass
def test_connect_patch_namespaced_pod_proxy(self):
"""
Test case for connect_patch_namespaced_pod_proxy
"""
pass
def test_connect_patch_namespaced_pod_proxy_with_path(self):
"""
Test case for connect_patch_namespaced_pod_proxy_with_path
"""
pass
def test_connect_patch_namespaced_service_proxy(self):
"""
Test case for connect_patch_namespaced_service_proxy
"""
pass
def test_connect_patch_namespaced_service_proxy_with_path(self):
"""
Test case for connect_patch_namespaced_service_proxy_with_path
"""
pass
def test_connect_patch_node_proxy(self):
"""
Test case for connect_patch_node_proxy
"""
pass
def test_connect_patch_node_proxy_with_path(self):
"""
Test case for connect_patch_node_proxy_with_path
"""
pass
def test_connect_post_namespaced_pod_attach(self):
"""
Test case for connect_post_namespaced_pod_attach
"""
pass
def test_connect_post_namespaced_pod_exec(self):
"""
Test case for connect_post_namespaced_pod_exec
"""
pass
def test_connect_post_namespaced_pod_portforward(self):
"""
Test case for connect_post_namespaced_pod_portforward
"""
pass
def test_connect_post_namespaced_pod_proxy(self):
"""
Test case for connect_post_namespaced_pod_proxy
"""
pass
def test_connect_post_namespaced_pod_proxy_with_path(self):
"""
Test case for connect_post_namespaced_pod_proxy_with_path
"""
pass
def test_connect_post_namespaced_service_proxy(self):
"""
Test case for connect_post_namespaced_service_proxy
"""
pass
def test_connect_post_namespaced_service_proxy_with_path(self):
"""
Test case for connect_post_namespaced_service_proxy_with_path
"""
pass
def test_connect_post_node_proxy(self):
"""
Test case for connect_post_node_proxy
"""
pass
def test_connect_post_node_proxy_with_path(self):
"""
Test case for connect_post_node_proxy_with_path
"""
pass
def test_connect_put_namespaced_pod_proxy(self):
"""
Test case for connect_put_namespaced_pod_proxy
"""
pass
def test_connect_put_namespaced_pod_proxy_with_path(self):
"""
Test case for connect_put_namespaced_pod_proxy_with_path
"""
pass
def test_connect_put_namespaced_service_proxy(self):
"""
Test case for connect_put_namespaced_service_proxy
"""
pass
def test_connect_put_namespaced_service_proxy_with_path(self):
"""
Test case for connect_put_namespaced_service_proxy_with_path
"""
pass
def test_connect_put_node_proxy(self):
"""
Test case for connect_put_node_proxy
"""
pass
def test_connect_put_node_proxy_with_path(self):
"""
Test case for connect_put_node_proxy_with_path
"""
pass
def test_create_namespace(self):
"""
Test case for create_namespace
"""
pass
def test_create_namespaced_binding(self):
"""
Test case for create_namespaced_binding
"""
pass
def test_create_namespaced_config_map(self):
"""
Test case for create_namespaced_config_map
"""
pass
def test_create_namespaced_endpoints(self):
"""
Test case for create_namespaced_endpoints
"""
pass
def test_create_namespaced_event(self):
"""
Test case for create_namespaced_event
"""
pass
def test_create_namespaced_limit_range(self):
"""
Test case for create_namespaced_limit_range
"""
pass
def test_create_namespaced_persistent_volume_claim(self):
"""
Test case for create_namespaced_persistent_volume_claim
"""
pass
def test_create_namespaced_pod(self):
"""
Test case for create_namespaced_pod
"""
pass
def test_create_namespaced_pod_binding(self):
"""
Test case for create_namespaced_pod_binding
"""
pass
def test_create_namespaced_pod_eviction(self):
"""
Test case for create_namespaced_pod_eviction
"""
pass
def test_create_namespaced_pod_template(self):
"""
Test case for create_namespaced_pod_template
"""
pass
def test_create_namespaced_replication_controller(self):
"""
Test case for create_namespaced_replication_controller
"""
pass
def test_create_namespaced_resource_quota(self):
"""
Test case for create_namespaced_resource_quota
"""
pass
def test_create_namespaced_secret(self):
"""
Test case for create_namespaced_secret
"""
pass
def test_create_namespaced_service(self):
"""
Test case for create_namespaced_service
"""
pass
def test_create_namespaced_service_account(self):
"""
Test case for create_namespaced_service_account
"""
pass
def test_create_node(self):
"""
Test case for create_node
"""
pass
def test_create_persistent_volume(self):
"""
Test case for create_persistent_volume
"""
pass
def test_delete_collection_namespaced_config_map(self):
"""
Test case for delete_collection_namespaced_config_map
"""
pass
def test_delete_collection_namespaced_endpoints(self):
"""
Test case for delete_collection_namespaced_endpoints
"""
pass
def test_delete_collection_namespaced_event(self):
"""
Test case for delete_collection_namespaced_event
"""
pass
def test_delete_collection_namespaced_limit_range(self):
"""
Test case for delete_collection_namespaced_limit_range
"""
pass
def test_delete_collection_namespaced_persistent_volume_claim(self):
"""
Test case for delete_collection_namespaced_persistent_volume_claim
"""
pass
def test_delete_collection_namespaced_pod(self):
"""
Test case for delete_collection_namespaced_pod
"""
pass
def test_delete_collection_namespaced_pod_template(self):
"""
Test case for delete_collection_namespaced_pod_template
"""
pass
def test_delete_collection_namespaced_replication_controller(self):
"""
Test case for delete_collection_namespaced_replication_controller
"""
pass
def test_delete_collection_namespaced_resource_quota(self):
"""
Test case for delete_collection_namespaced_resource_quota
"""
pass
def test_delete_collection_namespaced_secret(self):
"""
Test case for delete_collection_namespaced_secret
"""
pass
def test_delete_collection_namespaced_service_account(self):
"""
Test case for delete_collection_namespaced_service_account
"""
pass
def test_delete_collection_node(self):
"""
Test case for delete_collection_node
"""
pass
def test_delete_collection_persistent_volume(self):
"""
Test case for delete_collection_persistent_volume
"""
pass
def test_delete_namespace(self):
"""
Test case for delete_namespace
"""
pass
def test_delete_namespaced_config_map(self):
"""
Test case for delete_namespaced_config_map
"""
pass
def test_delete_namespaced_endpoints(self):
"""
Test case for delete_namespaced_endpoints
"""
pass
def test_delete_namespaced_event(self):
"""
Test case for delete_namespaced_event
"""
pass
def test_delete_namespaced_limit_range(self):
"""
Test case for delete_namespaced_limit_range
"""
pass
def test_delete_namespaced_persistent_volume_claim(self):
"""
Test case for delete_namespaced_persistent_volume_claim
"""
pass
def test_delete_namespaced_pod(self):
"""
Test case for delete_namespaced_pod
"""
pass
def test_delete_namespaced_pod_template(self):
"""
Test case for delete_namespaced_pod_template
"""
pass
def test_delete_namespaced_replication_controller(self):
"""
Test case for delete_namespaced_replication_controller
"""
pass
def test_delete_namespaced_resource_quota(self):
"""
Test case for delete_namespaced_resource_quota
"""
pass
def test_delete_namespaced_secret(self):
"""
Test case for delete_namespaced_secret
"""
pass
def test_delete_namespaced_service(self):
"""
Test case for delete_namespaced_service
"""
pass
def test_delete_namespaced_service_account(self):
"""
Test case for delete_namespaced_service_account
"""
pass
def test_delete_node(self):
"""
Test case for delete_node
"""
pass
def test_delete_persistent_volume(self):
"""
Test case for delete_persistent_volume
"""
pass
def test_get_api_resources(self):
"""
Test case for get_api_resources
"""
pass
def test_list_component_status(self):
"""
Test case for list_component_status
"""
pass
def test_list_config_map_for_all_namespaces(self):
"""
Test case for list_config_map_for_all_namespaces
"""
pass
def test_list_endpoints_for_all_namespaces(self):
"""
Test case for list_endpoints_for_all_namespaces
"""
pass
def test_list_event_for_all_namespaces(self):
"""
Test case for list_event_for_all_namespaces
"""
pass
def test_list_limit_range_for_all_namespaces(self):
"""
Test case for list_limit_range_for_all_namespaces
"""
pass
def test_list_namespace(self):
"""
Test case for list_namespace
"""
pass
def test_list_namespaced_config_map(self):
"""
Test case for list_namespaced_config_map
"""
pass
def test_list_namespaced_endpoints(self):
"""
Test case for list_namespaced_endpoints
"""
pass
def test_list_namespaced_event(self):
"""
Test case for list_namespaced_event
"""
pass
def test_list_namespaced_limit_range(self):
"""
Test case for list_namespaced_limit_range
"""
pass
def test_list_namespaced_persistent_volume_claim(self):
"""
Test case for list_namespaced_persistent_volume_claim
"""
pass
def test_list_namespaced_pod(self):
"""
Test case for list_namespaced_pod
"""
pass
def test_list_namespaced_pod_template(self):
"""
Test case for list_namespaced_pod_template
"""
pass
def test_list_namespaced_replication_controller(self):
"""
Test case for list_namespaced_replication_controller
"""
pass
def test_list_namespaced_resource_quota(self):
"""
Test case for list_namespaced_resource_quota
"""
pass
def test_list_namespaced_secret(self):
"""
Test case for list_namespaced_secret
"""
pass
def test_list_namespaced_service(self):
"""
Test case for list_namespaced_service
"""
pass
def test_list_namespaced_service_account(self):
"""
Test case for list_namespaced_service_account
"""
pass
def test_list_node(self):
"""
Test case for list_node
"""
pass
def test_list_persistent_volume(self):
"""
Test case for list_persistent_volume
"""
pass
def test_list_persistent_volume_claim_for_all_namespaces(self):
"""
Test case for list_persistent_volume_claim_for_all_namespaces
"""
pass
def test_list_pod_for_all_namespaces(self):
"""
Test case for list_pod_for_all_namespaces
"""
pass
def test_list_pod_template_for_all_namespaces(self):
"""
Test case for list_pod_template_for_all_namespaces
"""
pass
def test_list_replication_controller_for_all_namespaces(self):
"""
Test case for list_replication_controller_for_all_namespaces
"""
pass
def test_list_resource_quota_for_all_namespaces(self):
"""
Test case for list_resource_quota_for_all_namespaces
"""
pass
def test_list_secret_for_all_namespaces(self):
"""
Test case for list_secret_for_all_namespaces
"""
pass
def test_list_service_account_for_all_namespaces(self):
"""
Test case for list_service_account_for_all_namespaces
"""
pass
def test_list_service_for_all_namespaces(self):
"""
Test case for list_service_for_all_namespaces
"""
pass
def test_patch_namespace(self):
"""
Test case for patch_namespace
"""
pass
def test_patch_namespace_status(self):
"""
Test case for patch_namespace_status
"""
pass
def test_patch_namespaced_config_map(self):
"""
Test case for patch_namespaced_config_map
"""
pass
def test_patch_namespaced_endpoints(self):
"""
Test case for patch_namespaced_endpoints
"""
pass
def test_patch_namespaced_event(self):
"""
Test case for patch_namespaced_event
"""
pass
def test_patch_namespaced_limit_range(self):
"""
Test case for patch_namespaced_limit_range
"""
pass
def test_patch_namespaced_persistent_volume_claim(self):
"""
Test case for patch_namespaced_persistent_volume_claim
"""
pass
def test_patch_namespaced_persistent_volume_claim_status(self):
"""
Test case for patch_namespaced_persistent_volume_claim_status
"""
pass
def test_patch_namespaced_pod(self):
"""
Test case for patch_namespaced_pod
"""
pass
def test_patch_namespaced_pod_status(self):
"""
Test case for patch_namespaced_pod_status
"""
pass
def test_patch_namespaced_pod_template(self):
"""
Test case for patch_namespaced_pod_template
"""
pass
def test_patch_namespaced_replication_controller(self):
"""
Test case for patch_namespaced_replication_controller
"""
pass
def test_patch_namespaced_replication_controller_scale(self):
"""
Test case for patch_namespaced_replication_controller_scale
"""
pass
def test_patch_namespaced_replication_controller_status(self):
"""
Test case for patch_namespaced_replication_controller_status
"""
pass
def test_patch_namespaced_resource_quota(self):
"""
Test case for patch_namespaced_resource_quota
"""
pass
def test_patch_namespaced_resource_quota_status(self):
"""
Test case for patch_namespaced_resource_quota_status
"""
pass
def test_patch_namespaced_secret(self):
"""
Test case for patch_namespaced_secret
"""
pass
def test_patch_namespaced_service(self):
"""
Test case for patch_namespaced_service
"""
pass
def test_patch_namespaced_service_account(self):
"""
Test case for patch_namespaced_service_account
"""
pass
def test_patch_namespaced_service_status(self):
"""
Test case for patch_namespaced_service_status
"""
pass
def test_patch_node(self):
"""
Test case for patch_node
"""
pass
def test_patch_node_status(self):
"""
Test case for patch_node_status
"""
pass
def test_patch_persistent_volume(self):
"""
Test case for patch_persistent_volume
"""
pass
def test_patch_persistent_volume_status(self):
"""
Test case for patch_persistent_volume_status
"""
pass
def test_read_component_status(self):
"""
Test case for read_component_status
"""
pass
def test_read_namespace(self):
"""
Test case for read_namespace
"""
pass
def test_read_namespace_status(self):
"""
Test case for read_namespace_status
"""
pass
def test_read_namespaced_config_map(self):
"""
Test case for read_namespaced_config_map
"""
pass
def test_read_namespaced_endpoints(self):
"""
Test case for read_namespaced_endpoints
"""
pass
def test_read_namespaced_event(self):
"""
Test case for read_namespaced_event
"""
pass
def test_read_namespaced_limit_range(self):
"""
Test case for read_namespaced_limit_range
"""
pass
def test_read_namespaced_persistent_volume_claim(self):
"""
Test case for read_namespaced_persistent_volume_claim
"""
pass
def test_read_namespaced_persistent_volume_claim_status(self):
"""
Test case for read_namespaced_persistent_volume_claim_status
"""
pass
def test_read_namespaced_pod(self):
"""
Test case for read_namespaced_pod
"""
pass
def test_read_namespaced_pod_log(self):
"""
Test case for read_namespaced_pod_log
"""
pass
def test_read_namespaced_pod_status(self):
"""
Test case for read_namespaced_pod_status
"""
pass
def test_read_namespaced_pod_template(self):
"""
Test case for read_namespaced_pod_template
"""
pass
def test_read_namespaced_replication_controller(self):
"""
Test case for read_namespaced_replication_controller
"""
pass
def test_read_namespaced_replication_controller_scale(self):
"""
Test case for read_namespaced_replication_controller_scale
"""
pass
def test_read_namespaced_replication_controller_status(self):
"""
Test case for read_namespaced_replication_controller_status
"""
pass
def test_read_namespaced_resource_quota(self):
"""
Test case for read_namespaced_resource_quota
"""
pass
def test_read_namespaced_resource_quota_status(self):
"""
Test case for read_namespaced_resource_quota_status
"""
pass
def test_read_namespaced_secret(self):
"""
Test case for read_namespaced_secret
"""
pass
def test_read_namespaced_service(self):
"""
Test case for read_namespaced_service
"""
pass
def test_read_namespaced_service_account(self):
"""
Test case for read_namespaced_service_account
"""
pass
def test_read_namespaced_service_status(self):
"""
Test case for read_namespaced_service_status
"""
pass
def test_read_node(self):
"""
Test case for read_node
"""
pass
def test_read_node_status(self):
"""
Test case for read_node_status
"""
pass
def test_read_persistent_volume(self):
"""
Test case for read_persistent_volume
"""
pass
def test_read_persistent_volume_status(self):
"""
Test case for read_persistent_volume_status
"""
pass
def test_replace_namespace(self):
"""
Test case for replace_namespace
"""
pass
def test_replace_namespace_finalize(self):
"""
Test case for replace_namespace_finalize
"""
pass
def test_replace_namespace_status(self):
"""
Test case for replace_namespace_status
"""
pass
def test_replace_namespaced_config_map(self):
"""
Test case for replace_namespaced_config_map
"""
pass
def test_replace_namespaced_endpoints(self):
"""
Test case for replace_namespaced_endpoints
"""
pass
def test_replace_namespaced_event(self):
"""
Test case for replace_namespaced_event
"""
pass
def test_replace_namespaced_limit_range(self):
"""
Test case for replace_namespaced_limit_range
"""
pass
def test_replace_namespaced_persistent_volume_claim(self):
"""
Test case for replace_namespaced_persistent_volume_claim
"""
pass
def test_replace_namespaced_persistent_volume_claim_status(self):
"""
Test case for replace_namespaced_persistent_volume_claim_status
"""
pass
def test_replace_namespaced_pod(self):
"""
Test case for replace_namespaced_pod
"""
pass
def test_replace_namespaced_pod_status(self):
"""
Test case for replace_namespaced_pod_status
"""
pass
def test_replace_namespaced_pod_template(self):
"""
Test case for replace_namespaced_pod_template
"""
pass
def test_replace_namespaced_replication_controller(self):
"""
Test case for replace_namespaced_replication_controller
"""
pass
def test_replace_namespaced_replication_controller_scale(self):
"""
Test case for replace_namespaced_replication_controller_scale
"""
pass
def test_replace_namespaced_replication_controller_status(self):
"""
Test case for replace_namespaced_replication_controller_status
"""
pass
def test_replace_namespaced_resource_quota(self):
"""
Test case for replace_namespaced_resource_quota
"""
pass
def test_replace_namespaced_resource_quota_status(self):
"""
Test case for replace_namespaced_resource_quota_status
"""
pass
def test_replace_namespaced_secret(self):
"""
Test case for replace_namespaced_secret
"""
pass
def test_replace_namespaced_service(self):
"""
Test case for replace_namespaced_service
"""
pass
def test_replace_namespaced_service_account(self):
"""
Test case for replace_namespaced_service_account
"""
pass
def test_replace_namespaced_service_status(self):
"""
Test case for replace_namespaced_service_status
"""
pass
def test_replace_node(self):
"""
Test case for replace_node
"""
pass
def test_replace_node_status(self):
"""
Test case for replace_node_status
"""
pass
def test_replace_persistent_volume(self):
"""
Test case for replace_persistent_volume
"""
pass
def test_replace_persistent_volume_status(self):
"""
Test case for replace_persistent_volume_status
"""
pass
if __name__ == '__main__':
unittest.main()
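# A hypothetical sketch of fleshing out one of the stubs above; it needs a
# reachable cluster (or a mocked API client) and a configured kubeconfig.
def example_list_namespace():
    from kubernetes import config
    config.load_kube_config()  # or config.load_incluster_config() inside a pod
    api = kubernetes.client.CoreV1Api()
    for ns in api.list_namespace().items:
        print(ns.metadata.name)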
| 19.267736
| 105
| 0.581596
|
b4a08069b9de44bf7978fba19bdf918708a73d9e
| 758
|
py
|
Python
|
core/src/zeit/cms/generation/evolve5.py
|
rickdg/vivi
|
16134ac954bf8425646d4ad47bdd1f372e089355
|
[
"BSD-3-Clause"
] | 5
|
2019-05-16T09:51:29.000Z
|
2021-05-31T09:30:03.000Z
|
core/src/zeit/cms/generation/evolve5.py
|
rickdg/vivi
|
16134ac954bf8425646d4ad47bdd1f372e089355
|
[
"BSD-3-Clause"
] | 107
|
2019-05-24T12:19:02.000Z
|
2022-03-23T15:05:56.000Z
|
core/src/zeit/cms/generation/evolve5.py
|
rickdg/vivi
|
16134ac954bf8425646d4ad47bdd1f372e089355
|
[
"BSD-3-Clause"
] | 3
|
2020-08-14T11:01:17.000Z
|
2022-01-08T17:32:19.000Z
|
import zope.component
import zeit.cms.generation
import zeit.cms.generation.install
import zeit.cms.syndication.interfaces
import zeit.cms.workingcopy.interfaces
def update(root):
# Change the storage type of hidden containers from zc.set to TreeSet
    # We don't migrate the user preferences here; we just remove them.
workingcopy_location = zope.component.getUtility(
zeit.cms.workingcopy.interfaces.IWorkingcopyLocation)
for name in workingcopy_location:
workingcopy = workingcopy_location[name]
targets = zeit.cms.syndication.interfaces.IMySyndicationTargets(
workingcopy)
del targets._targets
targets.__init__()
def evolve(context):
zeit.cms.generation.do_evolve(context, update)
| 31.583333
| 73
| 0.751979
|
0fd80aeb25884597254565adbe75d33f64de3b82
| 507
|
py
|
Python
|
kontranto_igra/migrations/0003_auto_20210105_2257.py
|
zd-mioc/Kontranto
|
928f8e6ca1c8c136878d0dd036321053ab461049
|
[
"MIT"
] | 1
|
2020-07-19T11:11:08.000Z
|
2020-07-19T11:11:08.000Z
|
kontranto_igra/migrations/0003_auto_20210105_2257.py
|
zd-mioc/Kontranto
|
928f8e6ca1c8c136878d0dd036321053ab461049
|
[
"MIT"
] | null | null | null |
kontranto_igra/migrations/0003_auto_20210105_2257.py
|
zd-mioc/Kontranto
|
928f8e6ca1c8c136878d0dd036321053ab461049
|
[
"MIT"
] | 1
|
2021-03-01T08:39:41.000Z
|
2021-03-01T08:39:41.000Z
|
# Generated by Django 3.1.4 on 2021-01-05 21:57
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('kontranto_igra', '0002_auto_20210105_2249'),
]
operations = [
migrations.AlterField(
model_name='move',
name='game_id',
field=models.ForeignKey(db_column='white_score', on_delete=django.db.models.deletion.CASCADE, to='kontranto_igra.game'),
),
]
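# Standard Django workflow to apply or roll back this migration (assumes the
# project settings are configured):
#   python manage.py migrate kontranto_igra 0003
#   python manage.py migrate kontranto_igra 0002   # revert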
| 25.35
| 132
| 0.654832
|
ad3d8a7153d80f73860ee80ad0fb9b8822dde3d1
| 5,137
|
py
|
Python
|
charge_density_methods_VASP/lib.py
|
benwmcdowell/charge_density_methods_VASP
|
c1d965b62e638e4509c8b2b94fc797568aa46919
|
[
"MIT"
] | 4
|
2021-04-13T13:57:48.000Z
|
2021-04-15T03:37:52.000Z
|
charge_density_methods_VASP/lib.py
|
benwmcdowell/charge_density_methods_VASP
|
c1d965b62e638e4509c8b2b94fc797568aa46919
|
[
"MIT"
] | null | null | null |
charge_density_methods_VASP/lib.py
|
benwmcdowell/charge_density_methods_VASP
|
c1d965b62e638e4509c8b2b94fc797568aa46919
|
[
"MIT"
] | null | null | null |
from numpy import zeros, dot, cross, shape
from numpy.linalg import inv
#reads the total charge density from a CHGCAR file
def parse_CHGCAR(ifile, **args):
if 'scale' in args:
rescale=False
else:
rescale=True
#reads atomic positions and lattice vectors
lv=zeros((3,3))
with open(ifile,'r') as chgcar:
for i in range(8):
line=chgcar.readline().split()
if i==1:
sf=float(line[0])
if i>1 and i<5:
for j in range(3):
lv[i-2][j]=float(line[j])*sf
if i==5:
atomtypes=line
if i==6:
atomnums=line
for j in range(len(atomnums)):
atomnums[j]=int(atomnums[j])
if i==7:
mode=line[0]
coord=zeros((sum(atomnums),3))
for i in range(sum(atomnums)):
line=chgcar.readline().split()
for j in range(3):
coord[i][j]=float(line[j])
if mode[0]=='D':
coord[i]=dot(coord[i],lv)
line=chgcar.readline()
#starts reading charge density info
line=chgcar.readline().split()
x=0
y=0
z=0
dim=[int(i) for i in line]
e=zeros((dim[0],dim[1],dim[2]))
searching=True
while searching:
line=chgcar.readline().split()
for i in line:
e[x][y][z]=float(i)
x+=1
if x==dim[0]:
x=0
y+=1
if y==dim[1]:
y=0
z+=1
if z==dim[2]:
searching=False
break
if rescale:
print('charge density values rescaled to electrons per cubic Angstrom')
vol=dot(cross(lv[0],lv[1]),lv[2])
e/=vol
return e, lv, coord, atomtypes, atomnums
def write_CHGCAR(filepath, e, lv, coord, atomtypes, atomnums):
with open(filepath, 'w+') as file:
file.write('\n1.0\n')
for i in range(3):
for j in range(3):
file.write(' {}'.format(lv[i][j]))
file.write('\n')
for i in [atomtypes,atomnums]:
for j in i:
file.write(' {}'.format(j))
file.write('\n')
file.write('Direct\n')
for i in range(len(coord)):
coord[i]=dot(coord[i],inv(lv))
for j in coord[i]:
file.write(' {}'.format(j))
file.write('\n')
file.write('\n')
dim=shape(e)
for i in dim:
file.write(' {}'.format(i))
writing=True
x=0
y=0
z=0
while writing:
file.write('\n')
for i in range(5):
file.write(' {:.11e}'.format(e[x][y][z]))
x+=1
if x==dim[0]:
y+=1
x=0
if y==dim[1]:
z+=1
y=0
if z==dim[2]:
writing=False
break
#reads the electrostatic potential from a LOCPOT file
def parse_LOCPOT(ifile):
#reads atomic positions and lattice vectors
lv=zeros((3,3))
with open(ifile,'r') as chgcar:
for i in range(8):
line=chgcar.readline().split()
if i==1:
sf=float(line[0])
if i>1 and i<5:
for j in range(3):
lv[i-2][j]=float(line[j])*sf
if i==5:
atomtypes=line
if i==6:
atomnums=line
for j in range(len(atomnums)):
atomnums[j]=int(atomnums[j])
if i==7:
mode=line[0]
coord=zeros((sum(atomnums),3))
for i in range(sum(atomnums)):
line=chgcar.readline().split()
for j in range(3):
coord[i][j]=float(line[j])
if mode[0]=='D':
coord[i]=dot(coord[i],lv)
line=chgcar.readline()
#starts reading charge density info
line=chgcar.readline().split()
x=0
y=0
z=0
dim=[int(i) for i in line]
pot=zeros((dim[0],dim[1],dim[2]))
searching=True
counter=0
while searching:
line=chgcar.readline().split()
if not line:
break
for i in line:
pot[x][y][z]+=float(i)
x+=1
if x==dim[0]:
x=0
y+=1
if y==dim[1]:
y=0
z+=1
if z==dim[2]:
z=0
counter+=1
if counter==2:
searching=False
break
if counter==2:
pot/=2.0
return pot, lv, coord, atomtypes, atomnums
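# A minimal usage sketch (hypothetical file names; assumes VASP output files
# named CHGCAR and LOCPOT exist in the working directory).
if __name__ == '__main__':
    e, lv, coord, atomtypes, atomnums = parse_CHGCAR('CHGCAR')
    print('charge-density grid:', shape(e))
    write_CHGCAR('CHGCAR_copy', e, lv, coord, atomtypes, atomnums)
    pot, lv, coord, atomtypes, atomnums = parse_LOCPOT('LOCPOT')
    print('potential grid:', shape(pot))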
| 31.133333
| 80
| 0.406658
|
d4efd0584dc3796b74adb818ce29953c0b6c512a
| 52,943
|
py
|
Python
|
aicsimage/io/omexml.py
|
HelmholtzAI-Consultants-Munich/pytorch_fnet
|
879784bd0f8e76ab8f0ed8de4235180a316e12d8
|
[
"Unlicense"
] | 16
|
2021-03-12T01:37:36.000Z
|
2022-02-07T22:02:15.000Z
|
aicsimage/io/omexml.py
|
HelmholtzAI-Consultants-Munich/pytorch_fnet
|
879784bd0f8e76ab8f0ed8de4235180a316e12d8
|
[
"Unlicense"
] | 2
|
2021-03-26T04:12:15.000Z
|
2022-03-30T07:34:34.000Z
|
aicsimage/io/omexml.py
|
HelmholtzAI-Consultants-Munich/pytorch_fnet
|
879784bd0f8e76ab8f0ed8de4235180a316e12d8
|
[
"Unlicense"
] | 3
|
2020-07-08T09:03:00.000Z
|
2021-11-29T07:17:13.000Z
|
# Python-bioformats is distributed under the GNU General Public
# License, but this file is licensed under the more permissive BSD
# license. See the accompanying file LICENSE for details.
#
# Copyright (c) 2009-2014 Broad Institute
# All rights reserved.
"""omexml.py read and write OME xml
"""
from __future__ import absolute_import, unicode_literals
import sys
import xml.etree.ElementTree as ElementTree
if sys.version_info.major == 3:
from io import StringIO
uenc = 'unicode'
else:
from cStringIO import StringIO
uenc = 'utf-8'
import datetime
import logging
from functools import reduce
logger = logging.getLogger(__file__)
import re
import uuid
def xsd_now():
'''Return the current time in xsd:dateTime format'''
return datetime.datetime.now().isoformat()
DEFAULT_NOW = xsd_now()
#
# The namespaces
#
NS_BINARY_FILE = "http://www.openmicroscopy.org/Schemas/BinaryFile/2013-06"
NS_ORIGINAL_METADATA = "openmicroscopy.org/OriginalMetadata"
NS_DEFAULT = "http://www.openmicroscopy.org/Schemas/{ns_key}/2013-06"
NS_RE = r"http://www.openmicroscopy.org/Schemas/(?P<ns_key>.*)/[0-9/-]"
default_xml = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Warning: this comment is an OME-XML metadata block, which contains crucial dimensional parameters and other important metadata. Please edit cautiously (if at all), and back up the original data before doing so. For more information, see the OME-TIFF web site: http://ome-xml.org/wiki/OmeTiff. -->
<OME xmlns="{ns_ome_default}"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.openmicroscopy.org/Schemas/OME/2013-06 http://www.openmicroscopy.org/Schemas/OME/2012-03/ome.xsd">
<Image ID="Image:0" Name="default.png">
<AcquisitionDate>{timestamp}</AcquisitionDate>
<Pixels DimensionOrder="XYCTZ"
ID="Pixels:0"
SizeC="1"
SizeT="1"
SizeX="512"
SizeY="512"
SizeZ="1"
Type="uint8">
<Channel ID="Channel:0:0" SamplesPerPixel="1">
<LightPath/>
</Channel>
</Pixels>
</Image>
</OME>""".format(ns_ome_default=NS_DEFAULT.format(ns_key='ome'), timestamp=xsd_now())
#
# These are the OME-XML pixel types - not all supported by subimager
#
PT_INT8 = "int8"
PT_INT16 = "int16"
PT_INT32 = "int32"
PT_UINT8 = "uint8"
PT_UINT16 = "uint16"
PT_UINT32 = "uint32"
PT_FLOAT = "float"
PT_BIT = "bit"
PT_DOUBLE = "double"
PT_COMPLEX = "complex"
PT_DOUBLECOMPLEX = "double-complex"
#
# The allowed dimension types
#
DO_XYZCT = "XYZCT"
DO_XYZTC = "XYZTC"
DO_XYCTZ = "XYCTZ"
DO_XYCZT = "XYCZT"
DO_XYTCZ = "XYTCZ"
DO_XYTZC = "XYTZC"
#
# Original metadata corresponding to TIFF tags
# The text for these can be found in
# loci.formats.in.BaseTiffReader.initStandardMetadata
#
'''IFD # 254'''
OM_NEW_SUBFILE_TYPE = "NewSubfileType"
'''IFD # 256'''
OM_IMAGE_WIDTH = "ImageWidth"
'''IFD # 257'''
OM_IMAGE_LENGTH = "ImageLength"
'''IFD # 258'''
OM_BITS_PER_SAMPLE = "BitsPerSample"
'''IFD # 262'''
OM_PHOTOMETRIC_INTERPRETATION = "PhotometricInterpretation"
PI_WHITE_IS_ZERO = "WhiteIsZero"
PI_BLACK_IS_ZERO = "BlackIsZero"
PI_RGB = "RGB"
PI_RGB_PALETTE = "Palette"
PI_TRANSPARENCY_MASK = "Transparency Mask"
PI_CMYK = "CMYK"
PI_Y_CB_CR = "YCbCr"
PI_CIE_LAB = "CIELAB"
PI_CFA_ARRAY = "Color Filter Array"
'''BioFormats infers the image type from the photometric interpretation'''
OM_METADATA_PHOTOMETRIC_INTERPRETATION = "MetaDataPhotometricInterpretation"
MPI_RGB = "RGB"
MPI_MONOCHROME = "Monochrome"
MPI_CMYK = "CMYK"
'''IFD # 263'''
OM_THRESHHOLDING = "Threshholding" # (sic)
'''IFD # 264 (but can be 265 if the orientation = 8)'''
OM_CELL_WIDTH = "CellWidth"
'''IFD # 265'''
OM_CELL_LENGTH = "CellLength"
'''IFD # 266'''
OM_FILL_ORDER = "FillOrder"
'''IFD # 279'''
OM_DOCUMENT_NAME = "Document Name"
'''IFD # 271'''
OM_MAKE = "Make"
'''IFD # 272'''
OM_MODEL = "Model"
'''IFD # 274'''
OM_ORIENTATION = "Orientation"
'''IFD # 277'''
OM_SAMPLES_PER_PIXEL = "SamplesPerPixel"
'''IFD # 280'''
OM_MIN_SAMPLE_VALUE = "MinSampleValue"
'''IFD # 281'''
OM_MAX_SAMPLE_VALUE = "MaxSampleValue"
'''IFD # 282'''
OM_X_RESOLUTION = "XResolution"
'''IFD # 283'''
OM_Y_RESOLUTION = "YResolution"
'''IFD # 284'''
OM_PLANAR_CONFIGURATION = "PlanarConfiguration"
PC_CHUNKY = "Chunky"
PC_PLANAR = "Planar"
'''IFD # 286'''
OM_X_POSITION = "XPosition"
'''IFD # 287'''
OM_Y_POSITION = "YPosition"
'''IFD # 288'''
OM_FREE_OFFSETS = "FreeOffsets"
'''IFD # 289'''
OM_FREE_BYTECOUNTS = "FreeByteCounts"
'''IFD # 290'''
OM_GRAY_RESPONSE_UNIT = "GrayResponseUnit"
'''IFD # 291'''
OM_GRAY_RESPONSE_CURVE = "GrayResponseCurve"
'''IFD # 292'''
OM_T4_OPTIONS = "T4Options"
'''IFD # 293'''
OM_T6_OPTIONS = "T6Options"
'''IFD # 296'''
OM_RESOLUTION_UNIT = "ResolutionUnit"
'''IFD # 297'''
OM_PAGE_NUMBER = "PageNumber"
'''IFD # 301'''
OM_TRANSFER_FUNCTION = "TransferFunction"
'''IFD # 305'''
OM_SOFTWARE = "Software"
'''IFD # 306'''
OM_DATE_TIME = "DateTime"
'''IFD # 315'''
OM_ARTIST = "Artist"
'''IFD # 316'''
OM_HOST_COMPUTER = "HostComputer"
'''IFD # 317'''
OM_PREDICTOR = "Predictor"
'''IFD # 318'''
OM_WHITE_POINT = "WhitePoint"
'''IFD # 322'''
OM_TILE_WIDTH = "TileWidth"
'''IFD # 323'''
OM_TILE_LENGTH = "TileLength"
'''IFD # 324'''
OM_TILE_OFFSETS = "TileOffsets"
'''IFD # 325'''
OM_TILE_BYTE_COUNT = "TileByteCount"
'''IFD # 332'''
OM_INK_SET = "InkSet"
'''IFD # 33432'''
OM_COPYRIGHT = "Copyright"
#
# Well row/column naming conventions
#
NC_LETTER = "letter"
NC_NUMBER = "number"
def page_name_original_metadata(index):
'''Get the key name for the page name metadata data for the indexed tiff page
These are TIFF IFD #'s 285+
index - zero-based index of the page
'''
return "PageName #%d" % index
def get_text(node):
'''Get the contents of text nodes in a parent node'''
return node.text
def set_text(node, text):
'''Set the text of a parent'''
node.text = text
def qn(namespace, tag_name):
'''Return the qualified name for a given namespace and tag name
This is the ElementTree representation of a qualified name
'''
return "{%s}%s" % (namespace, tag_name)
def split_qn(qn):
    '''Split a qualified tag name into (namespace, tag_name), or return None if no namespace is present'''
    m = re.match(r'\{(.*)\}(.*)', qn)
    return (m.group(1), m.group(2)) if m else None
def get_namespaces(node):
'''Get top-level XML namespaces from a node.'''
ns_lib = {'ome': None, 'sa': None, 'spw': None}
    for child in node.iter():
        parts = split_qn(child.tag)
        if parts is None:
            continue  # tag without a namespace
        match = re.match(NS_RE, parts[0])
        if match:
            ns_key = match.group('ns_key').lower()
            ns_lib[ns_key] = parts[0]
    return ns_lib
def get_float_attr(node, attribute):
'''Cast an element attribute to a float or return None if not present'''
attr = node.get(attribute)
return None if attr is None else float(attr)
def get_int_attr(node, attribute):
'''Cast an element attribute to an int or return None if not present'''
attr = node.get(attribute)
return None if attr is None else int(attr)
def make_text_node(parent, namespace, tag_name, text):
'''Either make a new node and add the given text or replace the text
parent - the parent node to the node to be created or found
namespace - the namespace of the node's qualified name
tag_name - the tag name of the node's qualified name
text - the text to be inserted
'''
qname = qn(namespace, tag_name)
node = parent.find(qname)
if node is None:
node = ElementTree.SubElement(parent, qname)
set_text(node, text)
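# A tiny illustration of the qualified-name helpers above (hypothetical
# namespace URI; not part of the original module).
def _demo_qualified_names():
    tag = qn("http://example.com/ns", "Image")
    assert tag == "{http://example.com/ns}Image"
    assert split_qn(tag) == ("http://example.com/ns", "Image")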
class OMEXML(object):
'''Reads and writes OME-XML with methods to get and set it.
The OMEXML class has four main purposes: to parse OME-XML, to output
OME-XML, to provide a structured mechanism for inspecting OME-XML and to
let the caller create and modify OME-XML.
There are two ways to invoke the constructor. If you supply XML as a string
or unicode string, the constructor will parse it and will use it as the
base for any inspection and modification. If you don't supply XML, you'll
get a bland OME-XML object which has a one-channel image. You can modify
it programatically and get the modified OME-XML back out by calling to_xml.
There are two ways to get at the XML. The arduous way is to get the
root_node of the DOM and explore it yourself using the DOM API
(http://docs.python.org/library/xml.dom.html#module-xml.dom). The easy way,
where it's supported is to use properties on OMEXML and on some of its
derived objects. For instance:
>>> o = OMEXML()
    >>> print(o.image().AcquisitionDate)
will get you the date that image # 0 was acquired.
>>> o = OMEXML()
>>> o.image().Name = "MyImage"
will set the image name to "MyImage".
You can add and remove objects using the "count" properties. Each of these
handles hooking up and removing orphaned elements for you and should be
less error prone than creating orphaned elements and attaching them. For
instance, to create a three-color image:
>>> o = OMEXML()
>>> o.image().Pixels.channel_count = 3
>>> o.image().Pixels.Channel(0).Name = "Red"
>>> o.image().Pixels.Channel(1).Name = "Green"
>>> o.image().Pixels.Channel(2).Name = "Blue"
See the `OME-XML schema documentation <http://git.openmicroscopy.org/src/develop/components/specification/Documentation/Generated/OME-2011-06/ome.html>`_.
'''
def __init__(self, xml=None, rootnode=None):
if xml is None and rootnode is None:
xml = default_xml
if rootnode is None:
if sys.platform.startswith('win'):
enc = 'ISO-8859-1'
else:
enc = 'UTF-8'
self.dom = ElementTree.fromstring(xml, ElementTree.XMLParser(encoding=enc))
else:
self.dom = rootnode
# determine OME namespaces
self.ns = get_namespaces(self.dom)
        if self.ns['ome'] is None:
            raise Exception("Error: String not in OME-XML format")
# generate a uuid if there is none
# < OME UUID = "urn:uuid:ef8af211-b6c1-44d4-97de-daca46f16346"
omeElem = self.dom
if not omeElem.get('UUID'):
omeElem.set('UUID', 'urn:uuid:'+str(uuid.uuid4()))
self.uuidStr = omeElem.get('UUID')
def __str__(self):
#
# need to register the ome namespace because BioFormats expects
# that namespace to be the default or to be explicitly named "ome"
#
for ns_key in ["ome"]:
ns = self.ns.get(ns_key) or NS_DEFAULT.format(ns_key=ns_key)
# ElementTree.register_namespace(ns_key, ns)
ElementTree.register_namespace('', ns)
# ElementTree.register_namespace("om", NS_ORIGINAL_METADATA)
result = StringIO()
ElementTree.ElementTree(self.root_node).write(result,
encoding=uenc,
method="xml",
xml_declaration = True
# default_namespace = 'http://www.openmicroscopy.org/Schemas/ome/2013-06'
)
return result.getvalue()
def to_xml(self, indent="\t", newline="\n", encoding=uenc):
return str(self)
def get_ns(self, key):
return self.ns[key]
@property
def root_node(self):
return self.dom
def get_image_count(self):
'''The number of images (= series) specified by the XML'''
return len(self.root_node.findall(qn(self.ns['ome'], "Image")))
def set_image_count(self, value):
'''Add or remove image nodes as needed'''
assert value > 0
root = self.root_node
if self.image_count > value:
image_nodes = root.find(qn(self.ns['ome'], "Image"))
for image_node in image_nodes[value:]:
root.remove(image_node)
while(self.image_count < value):
new_image = self.Image(ElementTree.SubElement(root, qn(self.ns['ome'], "Image")))
new_image.ID = str(uuid.uuid4())
new_image.Name = "default.png"
new_image.AcquisitionDate = xsd_now()
new_pixels = self.Pixels(
ElementTree.SubElement(new_image.node, qn(self.ns['ome'], "Pixels")))
new_pixels.ome_uuid = self.uuidStr
new_pixels.ID = str(uuid.uuid4())
new_pixels.DimensionOrder = DO_XYCTZ
new_pixels.PixelType = PT_UINT8
new_pixels.SizeC = 1
new_pixels.SizeT = 1
new_pixels.SizeX = 512
new_pixels.SizeY = 512
new_pixels.SizeZ = 1
new_channel = self.Channel(
ElementTree.SubElement(new_pixels.node, qn(self.ns['ome'], "Channel")))
new_channel.ID = "Channel%d:0" % self.image_count
new_channel.Name = new_channel.ID
new_channel.SamplesPerPixel = 1
image_count = property(get_image_count, set_image_count)
@property
def plates(self):
return self.PlatesDucktype(self.root_node)
@property
def structured_annotations(self):
'''Return the structured annotations container
returns a wrapping of OME/StructuredAnnotations. It creates
the element if it doesn't exist.
'''
node = self.root_node.find(qn(self.ns['sa'], "StructuredAnnotations"))
if node is None:
node = ElementTree.SubElement(
self.root_node, qn(self.ns['sa'], "StructuredAnnotations"))
return self.StructuredAnnotations(node)
class Image(object):
'''Representation of the OME/Image element'''
def __init__(self, node):
'''Initialize with the DOM Image node'''
self.node = node
self.ns = get_namespaces(self.node)
def get_ID(self):
return self.node.get("ID")
def set_ID(self, value):
self.node.set("ID", value)
ID = property(get_ID, set_ID)
def get_Name(self):
return self.node.get("Name")
def set_Name(self, value):
self.node.set("Name", value)
Name = property(get_Name, set_Name)
def get_AcquisitionDate(self):
'''The date in ISO-8601 format'''
acquired_date = self.node.find(qn(self.ns["ome"], "AcquisitionDate"))
if acquired_date is None:
return None
return get_text(acquired_date)
def set_AcquisitionDate(self, date):
acquired_date = self.node.find(qn(self.ns["ome"], "AcquisitionDate"))
if acquired_date is None:
acquired_date = ElementTree.SubElement(
self.node, qn(self.ns["ome"], "AcquisitionDate"))
set_text(acquired_date, date)
AcquisitionDate = property(get_AcquisitionDate, set_AcquisitionDate)
@property
def Pixels(self):
'''The OME/Image/Pixels element.
Example:
>>> md = bioformats.omexml.OMEXML(xml)
>>> pixels = omemetadata.image(i).Pixels
>>> channel_count = pixels.SizeC
>>> stack_count = pixels.SizeZ
>>> timepoint_count = pixels.SizeT
'''
return OMEXML.Pixels(self.node.find(qn(self.ns['ome'], "Pixels")))
def image(self, index=0):
'''Return an image node by index'''
return self.Image(self.root_node.findall(qn(self.ns['ome'], "Image"))[index])
class Channel(object):
'''The OME/Image/Pixels/Channel element'''
def __init__(self, node):
self.node = node
self.ns = get_namespaces(node)
def get_ID(self):
return self.node.get("ID")
def set_ID(self, value):
self.node.set("ID", value)
ID = property(get_ID, set_ID)
def get_Name(self):
return self.node.get("Name")
def set_Name(self, value):
self.node.set("Name", value)
Name = property(get_Name, set_Name)
def get_SamplesPerPixel(self):
return get_int_attr(self.node, "SamplesPerPixel")
def set_SamplesPerPixel(self, value):
self.node.set("SamplesPerPixel", str(value))
SamplesPerPixel = property(get_SamplesPerPixel, set_SamplesPerPixel)
def get_Color(self):
return get_int_attr(self.node, "Color")
def set_Color(self, value):
self.node.set("Color", str(value))
Color = property(get_Color, set_Color)
class TiffData(object):
'''The OME/Image/Pixels/TiffData element
<TiffData FirstC="0" FirstT="0" FirstZ="0" IFD="0" PlaneCount="1">
<UUID FileName="img40_1.ome.tif">urn:uuid:ef8af211-b6c1-44d4-97de-daca46f16346</UUID>
</TiffData>
For our purposes, there will be one TiffData per 2-dimensional image plane.
'''
def __init__(self, node):
self.node = node
self.ns = get_namespaces(self.node)
def get_FirstZ(self):
'''The Z index of the plane'''
return get_int_attr(self.node, "FirstZ")
def set_FirstZ(self, value):
self.node.set("FirstZ", str(value))
FirstZ = property(get_FirstZ, set_FirstZ)
def get_FirstC(self):
'''The channel index of the plane'''
return get_int_attr(self.node, "FirstC")
def set_FirstC(self, value):
self.node.set("FirstC", str(value))
FirstC = property(get_FirstC, set_FirstC)
def get_FirstT(self):
'''The T index of the plane'''
return get_int_attr(self.node, "FirstT")
def set_FirstT(self, value):
self.node.set("FirstT", str(value))
FirstT = property(get_FirstT, set_FirstT)
def get_IFD(self):
'''plane index within tiff file'''
return get_int_attr(self.node, "IFD")
def set_IFD(self, value):
self.node.set("IFD", str(value))
IFD = property(get_IFD, set_IFD)
def get_PlaneCount(self):
'''How many planes in this TiffData. Should always be 1'''
return get_int_attr(self.node, "PlaneCount")
def set_PlaneCount(self, value):
self.node.set("PlaneCount", str(value))
PlaneCount = property(get_PlaneCount, set_PlaneCount)
class Plane(object):
'''The OME/Image/Pixels/Plane element
The Plane element represents one 2-dimensional image plane. It
has the Z, C and T indices of the plane and optionally has the
X, Y, Z, exposure time and a relative time delta.
'''
def __init__(self, node):
self.node = node
self.ns = get_namespaces(self.node)
def get_TheZ(self):
'''The Z index of the plane'''
return get_int_attr(self.node, "TheZ")
def set_TheZ(self, value):
self.node.set("TheZ", str(value))
TheZ = property(get_TheZ, set_TheZ)
def get_TheC(self):
'''The channel index of the plane'''
return get_int_attr(self.node, "TheC")
def set_TheC(self, value):
self.node.set("TheC", str(value))
TheC = property(get_TheC, set_TheC)
def get_TheT(self):
'''The T index of the plane'''
return get_int_attr(self.node, "TheT")
def set_TheT(self, value):
self.node.set("TheT", str(value))
TheT = property(get_TheT, set_TheT)
def get_DeltaT(self):
'''# of seconds since the beginning of the experiment'''
return get_float_attr(self.node, "DeltaT")
def set_DeltaT(self, value):
self.node.set("DeltaT", str(value))
DeltaT = property(get_DeltaT, set_DeltaT)
@property
def ExposureTime(self):
'''Units are seconds. Duration of acquisition????'''
exposure_time = self.node.get("ExposureTime")
if exposure_time is not None:
return float(exposure_time)
return None
def get_PositionX(self):
'''X position of stage'''
position_x = self.node.get("PositionX")
if position_x is not None:
return float(position_x)
return None
def set_PositionX(self, value):
self.node.set("PositionX", str(value))
PositionX = property(get_PositionX, set_PositionX)
def get_PositionY(self):
'''Y position of stage'''
return get_float_attr(self.node, "PositionY")
def set_PositionY(self, value):
self.node.set("PositionY", str(value))
PositionY = property(get_PositionY, set_PositionY)
def get_PositionZ(self):
'''Z position of stage'''
return get_float_attr(self.node, "PositionZ")
def set_PositionZ(self, value):
self.node.set("PositionZ", str(value))
PositionZ = property(get_PositionZ, set_PositionZ)
class Pixels(object):
'''The OME/Image/Pixels element
The Pixels element represents the pixels in an OME image and, for
an OME-XML encoded image, will actually contain the base-64 encoded
pixel data. It has the X, Y, Z, C, and T extents of the image
and it specifies the channel interleaving and channel depth.
'''
def __init__(self, node):
self.node = node
self.ns = get_namespaces(self.node)
self.ome_uuid = ""
self.node.set("BigEndian", "true")
def get_ID(self):
return self.node.get("ID")
def set_ID(self, value):
self.node.set("ID", value)
ID = property(get_ID, set_ID)
def get_DimensionOrder(self):
'''The ordering of image planes in the file
A 5-letter code indicating the ordering of pixels, from the most
rapidly varying to least. Use the DO_* constants (for instance
DO_XYZCT) to compare and set this.
'''
return self.node.get("DimensionOrder")
def set_DimensionOrder(self, value):
self.node.set("DimensionOrder", value)
DimensionOrder = property(get_DimensionOrder, set_DimensionOrder)
def get_PixelType(self):
'''The pixel bit type, for instance PT_UINT8
The pixel type specifies the datatype used to encode pixels
in the image data. You can use the PT_* constants to compare
and set the pixel type.
'''
return self.node.get("Type")
def set_PixelType(self, value):
self.node.set("Type", value)
PixelType = property(get_PixelType, set_PixelType)
def get_SizeX(self):
'''The dimensions of the image in the X direction in pixels'''
return get_int_attr(self.node, "SizeX")
def set_SizeX(self, value):
self.node.set("SizeX", str(value))
SizeX = property(get_SizeX, set_SizeX)
def get_SizeY(self):
'''The dimensions of the image in the Y direction in pixels'''
return get_int_attr(self.node, "SizeY")
def set_SizeY(self, value):
self.node.set("SizeY", str(value))
SizeY = property(get_SizeY, set_SizeY)
def get_SizeZ(self):
'''The dimensions of the image in the Z direction in pixels'''
return get_int_attr(self.node, "SizeZ")
def set_SizeZ(self, value):
self.node.set("SizeZ", str(value))
SizeZ = property(get_SizeZ, set_SizeZ)
def get_SizeT(self):
'''The dimensions of the image in the T direction in pixels'''
return get_int_attr(self.node, "SizeT")
def set_SizeT(self, value):
self.node.set("SizeT", str(value))
SizeT = property(get_SizeT, set_SizeT)
def get_SizeC(self):
'''The dimensions of the image in the C direction in pixels'''
return get_int_attr(self.node, "SizeC")
def set_SizeC(self, value):
self.node.set("SizeC", str(value))
SizeC = property(get_SizeC, set_SizeC)
def get_PhysicalSizeX(self):
'''The dimensions of the image in the X direction in physical units'''
return get_float_attr(self.node, "PhysicalSizeX")
def set_PhysicalSizeX(self, value):
self.node.set("PhysicalSizeX", str(value))
PhysicalSizeX = property(get_PhysicalSizeX, set_PhysicalSizeX)
def get_PhysicalSizeY(self):
'''The dimensions of the image in the Y direction in physical units'''
return get_float_attr(self.node, "PhysicalSizeY")
def set_PhysicalSizeY(self, value):
self.node.set("PhysicalSizeY", str(value))
PhysicalSizeY = property(get_PhysicalSizeY, set_PhysicalSizeY)
def get_PhysicalSizeZ(self):
'''The dimensions of the image in the Z direction in physical units'''
return get_float_attr(self.node, "PhysicalSizeZ")
def set_PhysicalSizeZ(self, value):
self.node.set("PhysicalSizeZ", str(value))
PhysicalSizeZ = property(get_PhysicalSizeZ, set_PhysicalSizeZ)
def get_channel_count(self):
'''The number of channels in the image
You can change the number of channels in the image by
setting the channel_count:
pixels.channel_count = 3
pixels.Channel(0).Name = "Red"
...
'''
return len(self.node.findall(qn(self.ns['ome'], "Channel")))
def set_channel_count(self, value):
assert value >= 0
channel_count = self.channel_count
if channel_count > value:
channels = self.node.findall(qn(self.ns['ome'], "Channel"))
for channel in channels[value:]:
self.node.remove(channel)
else:
for _ in range(channel_count, value):
new_channel = OMEXML.Channel(
ElementTree.SubElement(self.node, qn(self.ns['ome'], "Channel")))
new_channel.ID = str(uuid.uuid4())
new_channel.Name = new_channel.ID
new_channel.SamplesPerPixel = 1
channel_count = property(get_channel_count, set_channel_count)
def Channel(self, index=0):
'''Get the indexed channel from the Pixels element'''
channel = self.node.findall(qn(self.ns['ome'], "Channel"))[index]
return OMEXML.Channel(channel)
def get_plane_count(self):
'''The number of planes in the image
An image with only one plane or an interleaved color plane will
often not have any Plane elements.
You can change the number of planes in the image by
setting the plane_count:
pixels.plane_count = 3
pixels.Plane(0).TheZ=pixels.Plane(0).TheC=pixels.Plane(0).TheT=0
...
'''
return len(self.node.findall(qn(self.ns['ome'], "Plane")))
def set_plane_count(self, value):
assert value >= 0
plane_count = self.plane_count
if plane_count > value:
planes = self.node.findall(qn(self.ns['ome'], "Plane"))
for plane in planes[value:]:
self.node.remove(plane)
else:
for _ in range(plane_count, value):
new_plane = OMEXML.Plane(
ElementTree.SubElement(self.node, qn(self.ns['ome'], "Plane")))
plane_count = property(get_plane_count, set_plane_count)
def Plane(self, index=0):
'''Get the indexed plane from the Pixels element'''
plane = self.node.findall(qn(self.ns['ome'], "Plane"))[index]
return OMEXML.Plane(plane)
def TiffData(self, index=0):
'''Get the indexed TiffData from the Pixels element'''
tiffData = self.node.findall(qn(self.ns['ome'], "TiffData"))[index]
return OMEXML.TiffData(tiffData)
def get_planes_of_channel(self, index):
planes = self.node.findall(qn(self.ns['ome'], "Plane[@TheC='"+str(index)+"']"))
return planes
# does not fix up any indices
def remove_channel(self, index):
channel = self.node.findall(qn(self.ns['ome'], "Channel"))[index]
self.node.remove(channel)
planes = self.get_planes_of_channel(index)
for p in planes:
self.node.remove(p)
def append_channel(self, index, name):
# add channel
new_channel = OMEXML.Channel(
ElementTree.SubElement(self.node, qn(self.ns['ome'], "Channel")))
new_channel.SamplesPerPixel = 1
new_channel.ID = "Channel:0:"+str(index)
new_channel.Name = name
# add a bunch of planes with "TheC"=str(index)
for t in range(self.get_SizeT()):
for z in range(self.get_SizeZ()):
new_plane = OMEXML.Plane(
ElementTree.SubElement(self.node, qn(self.ns['ome'], "Plane")))
new_plane.TheC = str(index)
new_plane.TheZ = str(z)
new_plane.TheT = str(t)
# update SizeC
self.set_SizeC(self.get_SizeC() + 1)
# can be done as a single step just prior to final output
def populate_TiffData(self):
''' assuming Pixels has its sizes, set up tiffdata elements'''
assert self.SizeC is not None
assert self.SizeZ is not None
assert self.SizeT is not None
total = self.SizeC * self.SizeT * self.SizeZ
# blow away the old ones.
tiffdatas = self.node.findall(qn(self.ns['ome'], "TiffData"))
for td in tiffdatas:
self.node.remove(td)
# assumes xyczt
ifd = 0
for i in range(self.SizeT):
for j in range(self.SizeZ):
for k in range(self.SizeC):
new_tiffdata = OMEXML.TiffData(
ElementTree.SubElement(self.node, qn(self.ns['ome'], "TiffData")))
new_tiffdata.set_FirstC(k)
new_tiffdata.set_FirstZ(j)
new_tiffdata.set_FirstT(i)
new_tiffdata.set_IFD(ifd)
new_tiffdata.set_PlaneCount(1)
# child element <UUID FileName=""></UUID> is omitted here for single file ome tiffs
# UUID has an optional FileName attribute for image data that
# are split among several files but we do not currently support it.
# uuidelem = ElementTree.SubElement(new_tiffdata.node, qn(self.ns['ome'], "UUID"))
# uuidelem.text = self.ome_uuid
ifd = ifd + 1
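# Worked example for populate_TiffData above (illustrative): with
# SizeC=2, SizeZ=3 and SizeT=4 it emits 24 TiffData elements whose IFD
# follows ifd = c + SizeC * (z + SizeZ * t) for the XYCZT ordering
# assumed above, so the plane (c=1, z=2, t=0) lands at IFD 5.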
class StructuredAnnotations(dict):
'''The OME/StructuredAnnotations element
Structured annotations let OME-XML represent metadata from other file
formats, for example the tag metadata in TIFF files. The
StructuredAnnotations element is a container for the structured
annotations.
Images can have structured annotation references. These match to
the IDs of structured annotations in the StructuredAnnotations
element. You can get the structured annotations in an OME-XML document
using a dictionary interface to StructuredAnnotations.
Pragmatically, TIFF tag metadata is stored as key/value pairs in
OriginalMetadata annotations - in the context of CellProfiler,
callers will be using these to read tag data that's not represented
in OME-XML such as the bits per sample and min and max sample values.
'''
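# A short usage sketch (hedged: it assumes the enclosing OMEXML object
# exposes this container, e.g. as o.structured_annotations):
#     sa = o.structured_annotations
#     ann_id = sa.add_original_metadata("BitsPerSample", "16")
#     sa.get_original_metadata_value("BitsPerSample")   # -> "16"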
def __init__(self, node):
self.node = node
self.ns = get_namespaces(self.node)
def __getitem__(self, key):
for child in self.node:
if child.get("ID") == key:
return child
raise KeyError('ID "%s" not found' % key)
def __contains__(self, key):
return self.has_key(key)
def keys(self):
return [child.get("ID") for child in self.node
if child.get("ID") is not None]
def has_key(self, key):
for child in self.node:
if child.get("ID") == key:
return True
return False
def add_original_metadata(self, key, value):
'''Create an original data key/value pair
key - the original metadata's key name, for instance OM_PHOTOMETRIC_INTERPRETATION
value - the value, for instance, "RGB"
returns the ID for the structured annotation.
'''
xml_annotation = ElementTree.SubElement(
self.node, qn(self.ns['sa'], "XMLAnnotation"))
node_id = str(uuid.uuid4())
xml_annotation.set("ID", node_id)
xa_value = ElementTree.SubElement(xml_annotation, qn(self.ns['sa'], "Value"))
ov = ElementTree.SubElement(
xa_value, qn(NS_ORIGINAL_METADATA, "OriginalMetadata"))
ov_key = ElementTree.SubElement(ov, qn(NS_ORIGINAL_METADATA, "Key"))
set_text(ov_key, key)
ov_value = ElementTree.SubElement(
ov, qn(NS_ORIGINAL_METADATA, "Value"))
set_text(ov_value, value)
return node_id
def iter_original_metadata(self):
'''An iterator over the original metadata in structured annotations
returns (<annotation ID>, (<key, value>))
where <annotation ID> is the ID attribute of the annotation (which
can be used to tie an annotation to an image)
<key> is the original metadata key, typically one of the
OM_* names of a TIFF tag
<value> is the value for the metadata
'''
#
# Here's the XML we're traversing:
#
# <StructuredAnnotations>
# <XMLAnnotation>
# <Value>
# <OriginalMetadata>
# <Key>Foo</Key>
# <Value>Bar</Value>
# </OriginalMetadata>
# </Value>
# </XMLAnnotation>
# </StructuredAnnotations>
#
for annotation_node in self.node.findall(qn(self.ns['sa'], "XMLAnnotation")):
# <XMLAnnotation/>
annotation_id = annotation_node.get("ID")
for xa_value_node in annotation_node.findall(qn(self.ns['sa'], "Value")):
# <Value/>
for om_node in xa_value_node.findall(
qn(NS_ORIGINAL_METADATA, "OriginalMetadata")):
# <OriginalMetadata>
key_node = om_node.find(qn(NS_ORIGINAL_METADATA, "Key"))
value_node = om_node.find(qn(NS_ORIGINAL_METADATA, "Value"))
if key_node is not None and value_node is not None:
key_text = get_text(key_node)
value_text = get_text(value_node)
if key_text is not None and value_text is not None:
yield annotation_id, (key_text, value_text)
else:
logger.warn("Original metadata was missing key or value:" + om_node.toxml())
return
def has_original_metadata(self, key):
'''True if there is an original metadata item with the given key'''
return any([k == key
for annotation_id, (k, v)
in self.iter_original_metadata()])
def get_original_metadata_value(self, key, default=None):
'''Return the value for a particular original metadata key
key - key to search for
default - default value to return if not found
'''
for annotation_id, (k, v) in self.iter_original_metadata():
if k == key:
return v
return default
def get_original_metadata_refs(self, ids):
'''For a given ID, get the matching original metadata references
ids - collection of IDs to match
returns a dictionary of key to value
'''
d = {}
for annotation_id, (k,v) in self.iter_original_metadata():
if annotation_id in ids:
d[k] = v
return d
@property
def OriginalMetadata(self):
return OMEXML.OriginalMetadata(self)
class OriginalMetadata(dict):
'''View original metadata as a dictionary
Original metadata holds "vendor-specific" metadata including TIFF
tag values.
'''
def __init__(self, sa):
'''Initialized with the structured_annotations class instance'''
self.sa = sa
def __getitem__(self, key):
return self.sa.get_original_metadata_value(key)
def __setitem__(self, key, value):
self.sa.add_original_metadata(key, value)
def __contains__(self, key):
return self.has_key(key)
def __iter__(self):
for annotation_id, (key, value) in self.sa.iter_original_metadata():
yield key
def __len__(self):
return len(list(self.sa.iter_original_metadata()))
def keys(self):
return [key
for annotation_id, (key, value)
in self.sa.iter_original_metadata()]
def has_key(self, key):
for annotation_id, (k, value) in self.sa.iter_original_metadata():
if k == key:
return True
return False
def iteritems(self):
for annotation_id, (key, value) in self.sa.iter_original_metadata():
yield key, value
class PlatesDucktype(object):
'''It looks like a list of plates'''
def __init__(self, root):
self.root = root
self.ns = get_namespaces(self.root)
def __getitem__(self, key):
plates = self.root.findall(qn(self.ns['spw'], "Plate"))
if isinstance(key, slice):
return [OMEXML.Plate(plate) for plate in plates[key]]
return OMEXML.Plate(plates[key])
def __len__(self):
return len(self.root.findall(qn(self.ns['spw'], "Plate")))
def __iter__(self):
for plate in self.root.iterfind(qn(self.ns['spw'], "Plate")):
yield OMEXML.Plate(plate)
def newPlate(self, name, plate_id=None):
# generate the default ID per call; a default of str(uuid.uuid4()) in
# the signature is evaluated once and shared by every new plate
if plate_id is None:
plate_id = str(uuid.uuid4())
new_plate_node = ElementTree.SubElement(
self.root, qn(self.ns['spw'], "Plate"))
new_plate = OMEXML.Plate(new_plate_node)
new_plate.ID = plate_id
new_plate.Name = name
return new_plate
class Plate(object):
'''The SPW:Plate element
This represents the plate element of the SPW schema:
http://www.openmicroscopy.org/Schemas/SPW/2007-06/
'''
def __init__(self, node):
self.node = node
self.ns = get_namespaces(self.node)
def get_ID(self):
return self.node.get("ID")
def set_ID(self, value):
self.node.set("ID", value)
ID = property(get_ID, set_ID)
def get_Name(self):
return self.node.get("Name")
def set_Name(self, value):
self.node.set("Name", value)
Name = property(get_Name, set_Name)
def get_Status(self):
return self.node.get("Status")
def set_Status(self, value):
self.node.set("Status", value)
Status = property(get_Status, set_Status)
def get_ExternalIdentifier(self):
return self.node.get("ExternalIdentifier")
def set_ExternalIdentifier(self, value):
return self.node.set("ExternalIdentifier", value)
ExternalIdentifier = property(get_ExternalIdentifier, set_ExternalIdentifier)
def get_ColumnNamingConvention(self):
# Consider defaulting to NC_NUMBER if not defined
return self.node.get("ColumnNamingConvention")
def set_ColumnNamingConvention(self, value):
assert value in (NC_LETTER, NC_NUMBER)
self.node.set("ColumnNamingConvention", value)
ColumnNamingConvention = property(get_ColumnNamingConvention,
set_ColumnNamingConvention)
def get_RowNamingConvention(self):
# Consider defaulting to NC_LETTER if not defined
return self.node.get("RowNamingConvention")
def set_RowNamingConvention(self, value):
assert value in (NC_LETTER, NC_NUMBER)
self.node.set("RowNamingConvention", value)
RowNamingConvention = property(get_RowNamingConvention,
set_RowNamingConvention)
def get_WellOriginX(self):
return get_float_attr(self.node, "WellOriginX")
def set_WellOriginX(self, value):
self.node.set("WellOriginX", str(value))
WellOriginX = property(get_WellOriginX, set_WellOriginX)
def get_WellOriginY(self):
return get_float_attr(self.node, "WellOriginY")
def set_WellOriginY(self, value):
self.node.set("WellOriginY", str(value))
WellOriginY = property(get_WellOriginY, set_WellOriginY)
def get_Rows(self):
return get_int_attr(self.node, "Rows")
def set_Rows(self, value):
self.node.set("Rows", str(value))
Rows = property(get_Rows, set_Rows)
def get_Columns(self):
return get_int_attr(self.node, "Columns")
def set_Columns(self, value):
self.node.set("Columns", str(value))
Columns = property(get_Columns, set_Columns)
def get_Description(self):
description = self.node.find(qn(self.ns['spw'], "Description"))
if description is None:
return None
return get_text(description)
def set_Description(self, text):
make_text_node(self.node, NS_SPW, "Description", test)
Description = property(get_Description, set_Description)
def get_Well(self):
'''The well dictionary / list'''
return OMEXML.WellsDucktype(self)
Well = property(get_Well)
def get_well_name(self, well):
'''Get a well's name, using the row and column convention'''
result = "".join([
"%02d" % (i+1) if convention == NC_NUMBER
else "ABCDEFGHIJKLMNOP"[i]
for i, convention
in ((well.Row, self.RowNamingConvention or NC_LETTER),
(well.Column, self.ColumnNamingConvention or NC_NUMBER))])
return result
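# Example for get_well_name above: with the default conventions
# (NC_LETTER rows, NC_NUMBER columns), a well with Row=1 and Column=2
# is named "B03".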
class WellsDucktype(dict):
'''The WellsDucktype lets you retrieve and create wells
The WellsDucktype looks like a dictionary but lets you reference
the wells in a plate using indexing. Types of indexes:
list indexing: e.g. plate.Well[14] gets the 14th well as it appears
in the XML
dictionary_indexing:
by well name - e.g. plate.Well["A08"]
by row and column - e.g. plate.Well[1,3] (B03)
by ID - e.g. plate.Well["Well:0:0:0"]
If the ducktype is unable to parse a well name, it assumes you're
using an ID.
'''
def __init__(self, plate):
self.plate_node = plate.node
self.plate = plate
self.ns = get_namespaces(self.plate_node)
def __len__(self):
return len(self.plate_node.findall(qn(self.ns['spw'], "Well")))
def __getitem__(self, key):
all_wells = self.plate_node.findall(qn(self.ns['spw'], "Well"))
if isinstance(key, slice):
return [OMEXML.Well(w) for w in all_wells[key]]
if hasattr(key, "__len__") and len(key) == 2:
well = OMEXML.Well(None)
for w in all_wells:
well.node = w
if well.Row == key[0] and well.Column == key[1]:
return well
if isinstance(key, int):
return OMEXML.Well(all_wells[key])
well = OMEXML.Well(None)
for w in all_wells:
well.node = w
if self.plate.get_well_name(well) == key:
return well
if well.ID == key:
return well
return None
def __iter__(self):
'''Return the standard name for all wells on the plate
for instance, 'B03' for a well with Row=1, Column=2 for a plate
with the standard row and column naming convention
'''
all_wells = self.plate_node.findall(qn(self.ns['spw'], "Well"))
well = OMEXML.Well(None)
for w in all_wells:
well.node = w
yield self.plate.get_well_name(well)
def new(self, row, column, well_id=None):
'''Create a new well at the given row and column
row - index of well's row
column - index of well's column
well_id - the ID attribute for the well (a fresh UUID by default)
'''
# generate the default per call rather than once at definition time
if well_id is None:
well_id = str(uuid.uuid4())
well_node = ElementTree.SubElement(
self.plate_node, qn(self.ns['spw'], "Well"))
well = OMEXML.Well(well_node)
well.Row = row
well.Column = column
well.ID = well_id
return well
class Well(object):
def __init__(self, node):
self.node = node
def get_Column(self):
return get_int_attr(self.node, "Column")
def set_Column(self, value):
self.node.set("Column", str(value))
Column = property(get_Column, set_Column)
def get_Row(self):
return get_int_attr(self.node, "Row")
def set_Row(self, value):
self.node.set("Row", str(value))
Row = property(get_Row, set_Row)
def get_ID(self):
return self.node.get("ID")
def set_ID(self, value):
self.node.set("ID", value)
ID = property(get_ID, set_ID)
def get_Sample(self):
return OMEXML.WellSampleDucktype(self.node)
Sample = property(get_Sample)
def get_ExternalDescription(self):
return self.node.get("ExternalDescription")
def set_ExternalDescription(self, value):
return self.node.set("ExternalDescription", value)
ExternalDescription = property(get_ExternalDescription, set_ExternalDescription)
def get_ExternalIdentifier(self):
return self.node.get("ExternalIdentifier")
def set_ExternalIdentifier(self, value):
return self.node.set("ExternalIdentifier", value)
ExternalIdentifier = property(get_ExternalIdentifier, set_ExternalIdentifier)
def get_Color(self):
return int(self.node.get("Color"))
def set_Color(self, value):
self.node.set("Color", str(value))
class WellSampleDucktype(list):
'''The WellSample elements in a well
This is made to look like an indexable list so that you can do
things like:
wellsamples[0:2]
'''
def __init__(self, well_node):
self.well_node = well_node
self.ns = get_namespaces(self.well_node)
def __len__(self):
return len(self.well_node.findall(qn(self.ns['spw'], "WellSample")))
def __getitem__(self, key):
all_samples = self.well_node.findall(qn(self.ns['spw'], "WellSample"))
if isinstance(key, slice):
return [OMEXML.WellSample(s)
for s in all_samples[key]]
return OMEXML.WellSample(all_samples[int(key)])
def __iter__(self):
'''Iterate through the well samples.'''
all_samples = self.well_node.findall(qn(self.ns['spw'], "WellSample"))
for s in all_samples:
yield OMEXML.WellSample(s)
def new(self, wellsample_id=None, index=None):
'''Create a new well sample
'''
# generate the default ID per call rather than once at definition time
if wellsample_id is None:
wellsample_id = str(uuid.uuid4())
if index is None:
index = reduce(max, [s.Index for s in self], -1) + 1
new_node = ElementTree.SubElement(
self.well_node, qn(self.ns['spw'], "WellSample"))
s = OMEXML.WellSample(new_node)
s.ID = wellsample_id
s.Index = index
return s
class WellSample(object):
'''The WellSample is a location within a well'''
def __init__(self, node):
self.node = node
self.ns = get_namespaces(self.node)
def get_ID(self):
return self.node.get("ID")
def set_ID(self, value):
self.node.set("ID", value)
ID = property(get_ID, set_ID)
def get_PositionX(self):
return get_float_attr(self.node, "PositionX")
def set_PositionX(self, value):
self.node.set("PositionX", str(value))
PositionX = property(get_PositionX, set_PositionX)
def get_PositionY(self):
return get_float_attr(self.node, "PositionY")
def set_PositionY(self, value):
self.node.set("PositionY", str(value))
PositionY = property(get_PositionY, set_PositionY)
def get_Timepoint(self):
return self.node.get("Timepoint")
def set_Timepoint(self, value):
if isinstance(value, datetime.datetime):
value = value.isoformat()
self.node.set("Timepoint", value)
Timepoint = property(get_Timepoint, set_Timepoint)
def get_Index(self):
return get_int_attr(self.node, "Index")
def set_Index(self, value):
self.node.set("Index", str(value))
Index = property(get_Index, set_Index)
def get_ImageRef(self):
'''Get the ID of the image of this site'''
ref = self.node.find(qn(self.ns['spw'], "ImageRef"))
if ref is None:
return None
return ref.get("ID")
def set_ImageRef(self, value):
'''Add a reference to the image of this site'''
ref = self.node.find(qn(self.ns['spw'], "ImageRef"))
if ref is None:
ref = ElementTree.SubElement(self.node, qn(self.ns['spw'], "ImageRef"))
ref.set("ID", value)
ImageRef = property(get_ImageRef, set_ImageRef)
| 37.152982
| 302
| 0.573655
|
fd626201c898afed834e028b4188d0f4108591c5
| 404
|
py
|
Python
|
tests/test_sqlalchemy/tables.py
|
cedar-team/snapshot-queries
|
e033d7f8cc221617e1922e913cae2f32b409faf6
|
[
"MIT"
] | null | null | null |
tests/test_sqlalchemy/tables.py
|
cedar-team/snapshot-queries
|
e033d7f8cc221617e1922e913cae2f32b409faf6
|
[
"MIT"
] | 6
|
2022-03-30T16:19:01.000Z
|
2022-03-30T16:32:08.000Z
|
tests/test_sqlalchemy/tables.py
|
cedar-team/snapshot-queries
|
e033d7f8cc221617e1922e913cae2f32b409faf6
|
[
"MIT"
] | 1
|
2022-03-31T19:22:22.000Z
|
2022-03-31T19:22:22.000Z
|
from sqlalchemy import Column, Date, Integer, MetaData, String, Table
tables = MetaData()
students = Table(
"students",
tables,
Column("id", Integer, primary_key=True),
Column("first_name", String),
Column("last_name", String),
)
classes = Table(
"classes",
tables,
Column("id", Integer, primary_key=True),
Column("name", String),
Column("start_date", Date),
)
| 20.2
| 69
| 0.64604
|
21a1df12b6a8331435fd0ce41aae9c5c7e4ca73e
| 4,265
|
py
|
Python
|
backend/webserver/db.py
|
Kevinwochan/Ultracast
|
0d32bd176f4423d6cc7346e9864b020dbdd0cbb4
|
[
"BSD-3-Clause"
] | null | null | null |
backend/webserver/db.py
|
Kevinwochan/Ultracast
|
0d32bd176f4423d6cc7346e9864b020dbdd0cbb4
|
[
"BSD-3-Clause"
] | null | null | null |
backend/webserver/db.py
|
Kevinwochan/Ultracast
|
0d32bd176f4423d6cc7346e9864b020dbdd0cbb4
|
[
"BSD-3-Clause"
] | null | null | null |
from . import models
from . import schema
import re
import magic
import mimetypes
import boto3
from botocore.client import Config
from mongoengine import connect
from pydub import AudioSegment
import io
import hashlib
from base64 import urlsafe_b64encode
#MONGO_URI = f'mongodb://{MONGO_USERNAME}:{MONGO_PASSWORD}@{MONGO_IP}/{MONGO_DB}?authSource={MONGO_AUTH_DB}'
config = None
'''
Defaults
Modified when init_app() called
'''
REGION = 'sfo2'
STATIC_FILE_BASE_URL = f'https://{REGION}.digitaloceanspaces.com'
session = boto3.session.Session()
client = session.client('s3',
region_name=REGION,
endpoint_url=STATIC_FILE_BASE_URL,
aws_access_key_id='CUT4SK6OYILHJJV3B5LD',
aws_secret_access_key='yyIXed9h9kn6n9V4c/b64+ZRHtP8baR89lp3dqvOY34')
BUCKET = 'ultracast-files'
FILE_ACCESS = 'public-read'
def init_app(app):
'''
Init based off the app's config
'''
# without these declarations the assignments below would only create
# locals, and the module-level defaults would never be updated
global config, REGION, STATIC_FILE_BASE_URL, client, BUCKET, FILE_ACCESS
config = app.config
REGION = app.config["S3"]["REGION"]
STATIC_FILE_BASE_URL = f'https://{REGION}.digitaloceanspaces.com'
client = session.client('s3',
region_name=REGION,
endpoint_url=STATIC_FILE_BASE_URL,
aws_access_key_id=app.config["S3"]["AWS_ACCESS_KEY"],
aws_secret_access_key=app.config["S3"]["AWS_SECRET_ACCESS_KEY"])
BUCKET = app.config["S3"]["BUCKET"]
FILE_ACCESS = app.config["S3"]["FILE_ACCESS"]
def connect_mongo(app_config):
mongo_uri = "mongodb://{u}:{p}@{ip}/{db}?authSource={auth_db}".format(
u=app_config["MONGO_USERNAME"], p=app_config["MONGO_PASSWORD"],
ip=app_config["MONGO_IP"], db=app_config["MONGO_DB"], auth_db=app_config["MONGO_AUTH_DB"])
connect(host=mongo_uri)
# Digital Ocean Space (Static-Files)
class IllegalMimeException(Exception):
pass
def get_bucket_url():
return re.sub(r"^https://", f"https://{BUCKET}.", STATIC_FILE_BASE_URL)
def get_file_url(filename):
return get_bucket_url() + f"/{filename}"
def get_key_from_url(url):
return re.sub(get_bucket_url() + "/", "", url)
def get_key_from_binary_data(data, ext=""):
return urlsafe_b64encode(hashlib.sha256(data).digest()).decode('UTF-8') + ext
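# Example (illustrative): keys are content-addressed, so identical
# payloads always map to the same object name.
#     get_key_from_binary_data(b"...", ".txt")
#     # -> the sha256 digest of the bytes, urlsafe-base64 encoded,
#     #    with the extension appended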
def check_status(resp, ok_statuses, op):
if resp['ResponseMetadata']['HTTPStatusCode'] not in ok_statuses:
raise Exception(f"Error for operation [{op}] - Response: {resp}")
def file_exists(key):
try:
client.head_object(Bucket=BUCKET, Key=key)
return True
except Exception:
return False
def url_exists(url):
return file_exists(get_key_from_url(url))
def get_key(data, key=None, ext=""):
if key is None:
return get_key_from_binary_data(data, ext)
else:
return key
def check_mime(data, valid_mimes):
try:
mime_type = magic.from_buffer(data, mime=True)
except Exception:
raise IllegalMimeException("Could not interpret MIME type of payload")
if mime_type not in valid_mimes:
raise IllegalMimeException(f"MIME type {mime_type} not allowed")
return mime_type
def add_file(data, key=None, valid_mimes=[], override=False):
mime_type = check_mime(data, valid_mimes)
extension = mimetypes.guess_extension(mime_type)
key = get_key(data, key, extension)
if not override and file_exists(key):
return get_file_url(key)
resp = client.put_object(
Body=data,
Bucket=BUCKET,
Key=key,
ACL=FILE_ACCESS,
ContentType=mime_type)
check_status(resp, [200], 'Add File')
return get_file_url(key)
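# Usage sketch for add_file (the payload and MIME list are illustrative):
#     url = add_file(mp3_bytes, valid_mimes=["audio/mpeg"])
# Because the key defaults to a hash of the payload, re-adding identical
# bytes simply returns the existing object's URL instead of re-uploading.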
def remove_file(url, key=None):
if key is None:
resp = client.delete_object(Bucket=BUCKET, Key=get_key_from_url(url))
else:
resp = client.delete_object(Bucket=BUCKET, Key=key)
check_status(resp, [200, 204], 'Remove File')
def update_file(old_url, data, new_key=None, valid_mimes=[]):
if url_exists(old_url):
remove_file(old_url)
return add_file(data, new_key, valid_mimes)
def audio_file_duration_secs(data):
try:
audio = AudioSegment.from_file(io.BytesIO(data), format="mp3")
return int(round(audio.duration_seconds))
except Exception:
return -1
| 27.875817
| 108
| 0.668699
|
fde4b29837510262b82f5e0eac3e8a6f9d153cdf
| 22,519
|
py
|
Python
|
assign-random-peer-reviewer-by-section.py
|
gqmaguirejr/Canvas-tools
|
f58db8be12470353ce296b2833839c95a78808fa
|
[
"MIT"
] | 26
|
2019-01-08T16:34:57.000Z
|
2021-09-13T20:53:40.000Z
|
assign-random-peer-reviewer-by-section.py
|
gqmaguirejr/Canvas-tools
|
f58db8be12470353ce296b2833839c95a78808fa
|
[
"MIT"
] | 1
|
2020-09-10T19:32:19.000Z
|
2020-09-10T20:03:35.000Z
|
assign-random-peer-reviewer-by-section.py
|
gqmaguirejr/Canvas-tools
|
f58db8be12470353ce296b2833839c95a78808fa
|
[
"MIT"
] | 7
|
2019-09-28T23:10:47.000Z
|
2022-03-15T12:57:18.000Z
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# -*- mode: python; python-indent-offset: 4 -*-
#
# ./assign-random-peer-reviewer-by-section.py course_id new_assignment_id [old_assignment_id ]
#
# This program assigns each user in a course (course_id) with a randomly assigned peer reviewer from within their section for a given assignment (new_assignment).
# Note that this program ignores all sections that do not have a single quote or the word "section" in them.
#
# Note also that there are some permutations that cannot meet the above two conditions and the additional condition of not having a person assigned
# to review two different persons. In this case the program tries with a new starting permutation. It will try up to 99 times before giving
# up doing peer reviewing assignments for this section. I know this is an arbitrary number, but hope that it works in practice.
#
# Example:
#
# ./assign-random-peer-reviewer-by-section.py --testing 28715 159758
# ./assign-random-peer-reviewer-by-section.py 28850 160120
#
# ./assign-random-peer-reviewer-by-section.py 28715 159753
#
# G. Q. Maguire Jr.
#
# 2021.09.16 based on earlier copy-peer-reviewer-assignments.py program
#
import requests, time
from pprint import pprint
import optparse
import sys
import json
import random
#############################
###### EDIT THIS STUFF ######
#############################
global baseUrl # the base URL used for access to Canvas
global header # the header for all HTML requests
global payload # place to store additionally payload when needed for options to HTML requests
# Based upon the options to the program, initialize the variables used to access Canvas via HTTP requests
def initialize(options):
global baseUrl, header, payload
# styled based upon https://martin-thoma.com/configuration-files-in-python/
if options.config_filename:
config_file=options.config_filename
else:
config_file='config.json'
try:
with open(config_file) as json_data_file:
configuration = json.load(json_data_file)
access_token=configuration["canvas"]["access_token"]
if options.containers:
baseUrl="http://"+configuration["canvas"]["host"]+"/api/v1"
print("using HTTP for the container environment")
else:
baseUrl="https://"+configuration["canvas"]["host"]+"/api/v1"
header = {'Authorization' : 'Bearer ' + access_token}
payload = {}
except:
print("Unable to open configuration file named {}".format(config_file))
print("Please create a suitable configuration file, the default name is config.json")
sys.exit()
##############################################################################
## ONLY update the code below if you are experimenting with other API calls ##
##############################################################################
def summarize_assignments(list_of_assignments):
summary_of_assignments={}
for assignm in list_of_assignments:
summary_of_assignments[assignm['id']]=assignm['name']
print("summary_of_assignments={}".format(summary_of_assignments))
def list_assignments(course_id):
assignments_found_thus_far=[]
# Use the Canvas API to get the list of assignments for the course
#GET /api/v1/courses/:course_id/assignments
url = "{0}/courses/{1}/assignments".format(baseUrl, course_id)
if Verbose_Flag:
print("url: {}".format(url))
r = requests.get(url, headers = header)
if Verbose_Flag:
print("result of getting assignments: {}".format(r.text))
if r.status_code == requests.codes.ok:
page_response=r.json()
for p_response in page_response:
assignments_found_thus_far.append(p_response)
# the following is needed when the response has been paginated
# i.e., when the response is split into pieces - each returning only some of the list of modules
# see "Handling Pagination" - Discussion created by tyler.clair@usu.edu on Apr 27, 2015, https://community.canvaslms.com/thread/1500
while r.links.get('next', False):
r = requests.get(r.links['next']['url'], headers=header)
page_response = r.json()
for p_response in page_response:
assignments_found_thus_far.append(p_response)
return assignments_found_thus_far
def list_peer_reviews(course_id, assignment_id):
reviews_found_thus_far=[]
# Use the Canvas API to get the list of peer reviews for the course
# GET /api/v1/courses/:course_id/assignments/:assignment_id/peer_reviews
url = "{0}/courses/{1}/assignments/{2}/peer_reviews".format(baseUrl, course_id, assignment_id)
if Verbose_Flag:
print("url: {}".format(url))
r = requests.get(url, headers = header)
if Verbose_Flag:
print("result of getting peer reviews: {}".format(r.text))
if r.status_code == requests.codes.ok:
page_response=r.json()
for p_response in page_response:
reviews_found_thus_far.append(p_response)
# the following is needed when the response has been paginated
# i.e., when the response is split into pieces - each returning only some of the list of modules
# see "Handling Pagination" - Discussion created by tyler.clair@usu.edu on Apr 27, 2015, https://community.canvaslms.com/thread/1500
if 'link' in r.headers:
while r.links.get('next', False):
r = requests.get(r.links['next']['url'], headers=header)
page_response = r.json()
for p_response in page_response:
reviews_found_thus_far.append(p_response)
return reviews_found_thus_far
def submission_for_assignment_by_user(course_id, assignment_id, user_id):
# return the submission information for a single user's assignment for a specific course as a dict
#
# Use the Canvas API to get a user's submission for a course for a specific assignment
# GET /api/v1/courses/:course_id/assignments/:assignment_id/submissions/:user_id
url = "{0}/courses/{1}/assignments/{2}/submissions/{3}".format(baseUrl, course_id, assignment_id, user_id)
if Verbose_Flag:
print("url: {}".format(url))
#extra_parameters={'student_ids[]': 'all'}
#r = requests.get(url, params=extra_parameters, headers = header)
r = requests.get(url, headers = header)
if Verbose_Flag:
print("result of getting submissions: {}".format(r.text))
if r.status_code == requests.codes.ok:
page_response=r.json()
if Verbose_Flag:
print("page_response: " + str(page_response))
return page_response
else:
return dict()
def assign_peer_reviewer(course_id, assignment_id, user_id, submission_id):
global Verbose_Flag
# Use the Canvas API
#POST /api/v1/courses/:course_id/assignments/:assignment_id/submissions/:submission_id/peer_reviews
# Request Parameters:
#Parameter Type Description
# user_id Required integer user_id to assign as reviewer on this assignment
#
# from https://github.com/matematikk-mooc/frontend/blob/master/src/js/api/api.js
# createPeerReview: function(courseID, assignmentID, submissionID, userID, callback, error) {
# this._post({
# "callback": callback,
# "error": error,
# "uri": "/courses/" + courseID + "/assignments/" + assignmentID + "/submissions/" + submissionID + "/peer_reviews",
# "params": { user_id: userID }
# });
# },
url = "{0}/courses/{1}/assignments/{2}/submissions/{3}/peer_reviews".format(baseUrl, course_id, assignment_id, submission_id)
if Verbose_Flag:
print("url: {}".format(url))
payload={'user_id': user_id}
r = requests.post(url, headers = header, data=payload)
if Verbose_Flag:
print("result of post assigning peer reviewer: {}".format(r.text))
if r.status_code == requests.codes.ok:
page_response=r.json()
print("assigned reviewer")
return True
return False
def assign_assessor_as_peer_reviewer(course_id, assignment_id, assessor_id, user_id):
submission=submission_for_assignment_by_user(course_id, assignment_id, user_id)
if Verbose_Flag:
print("submission: {}".format(submission))
if Verbose_Flag:
print("user_id: {}".format(submission['user_id']))
if submission['user_id'] == int(user_id):
if Verbose_Flag:
print("matching submission: {}".format(submission))
output=assign_peer_reviewer(course_id, assignment_id, assessor_id, submission['id'])
return output
return "no match found"
def copy_assigned_peer_reviewers(course_id, old_assignment_id, new_assignment_id):
# students=students_in_course(course_id)
# for student in students:
old_list=list_peer_reviews(course_id, old_assignment_id)
if Verbose_Flag:
print("old_list: {}".format(old_list))
for previous_peer_assignment in old_list:
assessor_id=previous_peer_assignment['assessor_id']
user_id=previous_peer_assignment['user_id']
if Verbose_Flag:
print("assessor_id: {}".format(assessor_id))
print("user_id: {}".format(user_id))
assign_assessor_as_peer_reviewer(course_id, new_assignment_id, assessor_id, user_id)
new_list=list_peer_reviews(course_id, new_assignment_id)
if Verbose_Flag:
print("new_list: " + str(new_list))
def section_name_from_section_id(sections_info, section_id):
for i in sections_info:
if i['id'] == section_id:
return i['name']
def sections_in_course(course_id):
sections_found_thus_far=[]
# Use the Canvas API to get the list of sections for this course
#GET /api/v1/courses/:course_id/sections
url = "{0}/courses/{1}/sections".format(baseUrl,course_id)
if Verbose_Flag:
print("url: {}".format(url))
r = requests.get(url, headers = header)
if Verbose_Flag:
print("result of getting sections: {}".format(r.text))
if r.status_code == requests.codes.ok:
page_response=r.json()
for p_response in page_response:
sections_found_thus_far.append(p_response)
# the following is needed when the response has been paginated
# i.e., when the response is split into pieces - each returning only some of the list of modules
# see "Handling Pagination" - Discussion created by tyler.clair@usu.edu on Apr 27, 2015, https://community.canvaslms.com/thread/1500
while r.links.get('next', False):
r = requests.get(r.links['next']['url'], headers=header)
page_response = r.json()
for p_response in page_response:
sections_found_thus_far.append(p_response)
return sections_found_thus_far
def students_in_course(course_id):
user_found_thus_far=[]
# Use the Canvas API to get the list of users enrolled in this course
#GET /api/v1/courses/:course_id/enrollments
url = "{0}/courses/{1}/enrollments".format(baseUrl,course_id)
if Verbose_Flag:
print("url: {}".format(url))
extra_parameters={'per_page': '100', 'type[]': ['StudentEnrollment']}
r = requests.get(url, params=extra_parameters, headers = header)
if Verbose_Flag:
print("result of getting enrollments: {}".format(r.text))
if r.status_code == requests.codes.ok:
page_response=r.json()
for p_response in page_response:
user_found_thus_far.append(p_response)
# the following is needed when the response has been paginated
# i.e., when the response is split into pieces - each returning only some of the list of modules
# see "Handling Pagination" - Discussion created by tyler.clair@usu.edu on Apr 27, 2015, https://community.canvaslms.com/thread/1500
while r.links.get('next', False):
r = requests.get(r.links['next']['url'], headers=header)
page_response = r.json()
for p_response in page_response:
user_found_thus_far.append(p_response)
return user_found_thus_far
def list_groups_in_course(course_id):
groups_found_thus_far=[]
# Use the Canvas API to get the list of groups for the course
# GET /api/v1/courses/:course_id/groups
url = "{0}/courses/{1}/groups".format(baseUrl, course_id)
if Verbose_Flag:
print("url: {}".format(url))
r = requests.get(url, headers = header)
if Verbose_Flag:
print("result of getting groups: {}".format(r.text))
if r.status_code == requests.codes.ok:
page_response=r.json()
for p_response in page_response:
groups_found_thus_far.append(p_response)
# the following is needed when the response has been paginated
# i.e., when the response is split into pieces - each returning only some of the list of modules
# see "Handling Pagination" - Discussion created by tyler.clair@usu.edu on Apr 27, 2015, https://community.canvaslms.com/thread/1500
if 'link' in r.headers:
while r.links.get('next', False):
r = requests.get(r.links['next']['url'], headers=header)
page_response = r.json()
for p_response in page_response:
groups_found_thus_far.append(p_response)
return groups_found_thus_far
def members_of_groups(group_id):
members_found_thus_far=[]
# Use the Canvas API to get the list of members of group
# GET /api/v1/groups/:group_id/users
url = "{0}/groups/{1}/users".format(baseUrl, group_id)
if Verbose_Flag:
print("url: {}".format(url))
r = requests.get(url, headers = header)
if Verbose_Flag:
print("result of getting memebrs of group: {}".format(r.text))
if r.status_code == requests.codes.ok:
page_response=r.json()
for p_response in page_response:
members_found_thus_far.append(p_response)
# the following is needed when the response has been paginated
# i.e., when the response is split into pieces - each returning only some of the list of modules
# see "Handling Pagination" - Discussion created by tyler.clair@usu.edu on Apr 27, 2015, https://community.canvaslms.com/thread/1500
if 'link' in r.headers:
while r.links.get('next', False):
r = requests.get(r.links['next']['url'], headers=header)
page_response = r.json()
for p_response in page_response:
members_found_thus_far.append(p_response)
return members_found_thus_far
# remove sections that do not have a quote or the word "section" in them
def clean_up_sections(sections):
clean_sections=[]
for i in sections:
if (i['name'].find("'") >= 0) or (i['name'].find("section") >= 0):
clean_sections.append(i)
return clean_sections
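# Example for clean_up_sections above: a section named "Chip's section"
# (it contains a single quote) or "Theory section" is kept, while an
# administrative roster such as "Registered students" is filtered out.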
def relevant_section_ids(sections):
section_ids=[]
for i in sections:
section_ids.append(i['id'])
return section_ids
def same_group(s, possible_reviewer, course_groups):
for g in course_groups:
if (s in course_groups[g]['member_ids']) and (possible_reviewer in course_groups[g]['member_ids']):
return True
return False
def student_name_from_id(id, students_info):
for s in students_info:
if s['user']['id'] == id:
return s['user']['name']
return ''
def try_to_assign_reviewers(student_ids_for_section, course_groups):
global Verbose_Flag
assignments={} # will be of the form assigned_reviewer: student_reviewed
for s in student_ids_for_section:
# draw a fresh random ordering of candidate reviewers for each student
reviewers=random.sample(student_ids_for_section, k=len(student_ids_for_section))
if Verbose_Flag:
print("s={}".format(s))
if len(reviewers) >= 1:
possible_reviewer=reviewers.pop()
else:
print("empty list of reviewers, s={}".format(s))
return False
# reject a candidate who is the student themselves, shares a group with
# them, or has already been assigned someone to review
while (s == possible_reviewer) or same_group(s, possible_reviewer, course_groups) or assignments.get(possible_reviewer, False):
random.shuffle(reviewers)
if Verbose_Flag:
print("reviewers={}".format(reviewers))
if len(reviewers) >= 1:
possible_reviewer=reviewers.pop()
else:
print("empty list of reviewers #2, s={}".format(s))
return False
if Verbose_Flag:
print("s={0}, possible_reviewer={1}".format(s, possible_reviewer))
assignments[possible_reviewer]=s
if Verbose_Flag:
print("assigned s={0}, possible_reviewer={1}".format(s, possible_reviewer))
return assignments
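# Illustrative example (hypothetical IDs): with students [1, 2, 3] and no
# shared groups, try_to_assign_reviewers([1, 2, 3], {}) might return
# {2: 1, 3: 2, 1: 3}, i.e. student 2 reviews student 1, and every student
# reviews exactly one peer other than themselves.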
def main():
global Verbose_Flag
parser = optparse.OptionParser()
parser.add_option('-v', '--verbose',
dest="verbose",
default=False,
action="store_true",
help="Print lots of output to stdout"
)
parser.add_option("--config", dest="config_filename",
help="read configuration from FILE", metavar="FILE")
parser.add_option('-C', '--containers',
dest="containers",
default=False,
action="store_true",
help="for the container enviroment in the virtual machine"
)
parser.add_option('-t', '--testing',
dest="testing",
default=False,
action="store_true",
help="execute test code"
)
options, remainder = parser.parse_args()
Verbose_Flag=options.verbose
if Verbose_Flag:
print('ARGV :', sys.argv[1:])
print('VERBOSE :', options.verbose)
print('REMAINING :', remainder)
initialize(options)
if (len(remainder) < 2):
print("Insufficient arguments\n must provide course_id new_assignment_id [old_assignment_id]")
return
course_id=remainder[0]
new_assignment_id=remainder[1]
if (len(remainder) > 2):
old_assignment_id=remainder[2]
sections=sections_in_course(course_id)
sections=clean_up_sections(sections)
print("sections={}".format(sections))
s_ids=relevant_section_ids(sections)
print("s_ids={}".format(s_ids))
students=students_in_course(course_id)
students_by_section={}
for s in students:
s_id=s['course_section_id']
if s_id in s_ids:
current_section_membership=students_by_section.get(s_id, [])
current_section_membership.append(s)
students_by_section[s_id]=current_section_membership
# course_id comes from the command line as a string
if options.testing and course_id == '28715':
emils_section=39696
chips_section=39698
e_names=[s['user']['name'] for s in students_by_section[emils_section]]
e_ids=[s['user']['id'] for s in students_by_section[emils_section]]
print("Emil's section={}".format(e_names))
print("Emil's section ids={}".format(e_ids))
groups=list_groups_in_course(course_id)
print("number of groups={}".format(len(groups)))
course_groups={}
for g in groups:
g_id=g['id']
g_name=g['name']
if g['members_count'] > 0:
members=members_of_groups(g_id)
member_ids=[x['id'] for x in members]
course_groups[g_id]={'name': g_name,
'members_count': g['members_count'],
'member_ids': member_ids,
'members': members}
#print("course_groups={}".format(course_groups))
student_ids_by_section={}
assignments={} # will be of the form assigned_reviewer: student_reviewed
for section in sections:
print("working on {}".format(section['name']))
if section['name'].find('Magnus Boman') >= 0: # skip Magnus' section
print("Skipping Magnus Boman's section")
continue
if options.testing and course_id == '28715' and section['id'] not in [emils_section, chips_section]:
continue
student_ids_for_section=[s['user']['id'] for s in students_by_section[section['id']]]
assignments_for_section=False
maximum_permutations_to_try=100
while (not assignments_for_section):
maximum_permutations_to_try=maximum_permutations_to_try-1
if maximum_permutations_to_try == 0:
print("failed to find a working permutation to asssign reviewers for {}".format(section['name']))
assignments_for_section=False
break
random.shuffle(student_ids_for_section) # shuffle the student's IDs
assignments_for_section=try_to_assign_reviewers(student_ids_for_section, course_groups)
if not assignments_for_section:
print("trying again")
if assignments_for_section:
print("assignments_for_section={}".format(assignments_for_section))
if len(assignments_for_section) != len(student_ids_for_section):
print("len(assignments_for_section) {0} not equal to len(student_ids_for_section) {1} ".format(len(assignments_for_section), len(student_ids_for_section)))
for a in assignments_for_section: # copy into the set of assignments of reviewers for the course
assignments[a]=assignments_for_section[a]
print("assignments={}".format(assignments))
if options.testing:
return
for a in assignments:
reviewer=a
reviewee=assignments[reviewer]
print("{0} is assigned to review: {1}".format(student_name_from_id(reviewer, students), student_name_from_id(reviewee, students)))
if not options.testing:
assign_assessor_as_peer_reviewer(course_id, new_assignment_id, reviewer, reviewee)
return
if __name__ == "__main__": main()
| 39.437828
| 171
| 0.646876
|
5ae41e24f7f5e283c535c54343519735c3c42f63
| 1,826
|
py
|
Python
|
res/res/biz/flavors_get.py
|
onap/vfc-gvnfm-vnfres
|
2ff32469650ac5b6dc6b65d99cc27f3f7aab4161
|
[
"Apache-2.0"
] | 1
|
2021-10-15T15:26:31.000Z
|
2021-10-15T15:26:31.000Z
|
res/res/biz/flavors_get.py
|
onap/vfc-gvnfm-vnfres
|
2ff32469650ac5b6dc6b65d99cc27f3f7aab4161
|
[
"Apache-2.0"
] | null | null | null |
res/res/biz/flavors_get.py
|
onap/vfc-gvnfm-vnfres
|
2ff32469650ac5b6dc6b65d99cc27f3f7aab4161
|
[
"Apache-2.0"
] | null | null | null |
# Copyright @ 2020 China Mobile (SuZhou) Software Technology Co.,Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from res.biz.base import BaseService
from res.pub.database.models import FlavourInstModel
from res.resources.serializers import FlavorInfoSerializer
logger = logging.getLogger(__name__)
class GetFlavorsService(BaseService):
def __init__(self):
super(GetFlavorsService, self).__init__()
def get_flavors(self, vnf_instance_id):
return self.query_resources(
res_type="Flavors",
logger=logger,
resources=FlavourInstModel.objects.filter(
instid=vnf_instance_id),
cvt_fun=self.fill_flavours_data,
res_serializer=FlavorInfoSerializer
)
def fill_flavours_data(self, flavor):
flavours_data = {
"flavourid": flavor.flavourid,
"name": flavor.name,
"vcpu": flavor.vcpu,
"memory": flavor.memory,
"extraspecs": flavor.extraspecs,
"instid": flavor.instid,
"tenant": flavor.tenant,
"vimid": flavor.vimid,
"resouceid": flavor.resouceid,
"create_time": flavor.create_time
}
return flavours_data
| 35.115385
| 75
| 0.653888
|
695d4678e10e770448bf0fe7b30e5e71bc94adc2
| 428
|
py
|
Python
|
dskc/io/util.py
|
NovaSBE-DSKC/predict-campaing-sucess-rate
|
fec339aee7c883f55d64130eb69e490f765ee27d
|
[
"MIT"
] | null | null | null |
dskc/io/util.py
|
NovaSBE-DSKC/predict-campaing-sucess-rate
|
fec339aee7c883f55d64130eb69e490f765ee27d
|
[
"MIT"
] | null | null | null |
dskc/io/util.py
|
NovaSBE-DSKC/predict-campaing-sucess-rate
|
fec339aee7c883f55d64130eb69e490f765ee27d
|
[
"MIT"
] | null | null | null |
import os
def get_root_path():
'''
:return: root path of the project
'''
path = os.getcwd()
MAX_ITERATIONS = 10
i = 0
while True:
# check inside folders
for x in os.walk(path):
if x[0].endswith("dskc"):
return path
# go to the parent directory
path = os.path.normpath(path + os.sep + os.pardir)
# check whether the maximum number of iterations has been exceeded
i += 1
if i > MAX_ITERATIONS:
return
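# Example for get_root_path above: if the working directory is
# <root>/notebooks/eda and the repository contains a <root>/dskc folder,
# the walk climbs two levels and returns <root>.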
| 15.851852
| 54
| 0.591121
|
41b9a683c60f42fc7d4a6077118d1fd8ef04a105
| 280,301
|
py
|
Python
|
python/jsbeautifier/tests/generated/tests.py
|
Abd-Ur-Rehman/js-beautify
|
f47d361887a33fbd998975dac0abe33c9b70019c
|
[
"MIT"
] | 56
|
2015-02-12T16:19:16.000Z
|
2022-01-29T12:12:51.000Z
|
python/jsbeautifier/tests/generated/tests.py
|
Abd-Ur-Rehman/js-beautify
|
f47d361887a33fbd998975dac0abe33c9b70019c
|
[
"MIT"
] | 2
|
2016-04-20T20:29:43.000Z
|
2020-06-11T09:12:04.000Z
|
python/jsbeautifier/tests/generated/tests.py
|
Abd-Ur-Rehman/js-beautify
|
f47d361887a33fbd998975dac0abe33c9b70019c
|
[
"MIT"
] | 9
|
2015-07-14T19:23:21.000Z
|
2022-03-01T04:15:06.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
AUTO-GENERATED. DO NOT MODIFY.
Script: test/generate-tests.js
Template: test/data/javascript/python.mustache
Data: test/data/javascript/tests.js
The MIT License (MIT)
Copyright (c) 2007-2018 Einar Lielmanis, Liam Newman, and contributors.
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation files
(the "Software"), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
import re
import unittest
import jsbeautifier
import six
import copy
class TestJSBeautifier(unittest.TestCase):
options = None
@classmethod
def setUpClass(cls):
cls.wrapregex = re.compile('^(.+)$', re.MULTILINE)
def reset_options(self):
true = True
false = False
default_options = jsbeautifier.default_options()
default_options.indent_size = 4
default_options.indent_char = ' '
default_options.preserve_newlines = true
default_options.jslint_happy = false
default_options.indent_level = 0
default_options.break_chained_methods = false
default_options.eol = '\n'
self.options = copy.copy(default_options)
def test_unescape(self):
# Test cases contributed by <chrisjshull on GitHub.com>
test_fragment = self.decodesto
self.reset_options()
bt = self.bt
def unicode_char(value):
return six.unichr(value)
bt('"\\\\s"') # == "\\s" in the js source
bt("'\\\\s'") # == '\\s' in the js source
bt("'\\\\\\s'") # == '\\\s' in the js source
bt("'\\s'") # == '\s' in the js source
bt('"•"')
bt('"—"')
bt('"\\x41\\x42\\x43\\x01"', '"\\x41\\x42\\x43\\x01"')
bt('"\\u2022"', '"\\u2022"')
bt('a = /\s+/')
#bt('a = /\\x41/','a = /A/')
bt('"\\u2022";a = /\s+/;"\\x41\\x42\\x43\\x01".match(/\\x41/);','"\\u2022";\na = /\s+/;\n"\\x41\\x42\\x43\\x01".match(/\\x41/);')
test_fragment('"\\x22\\x27",\'\\x22\\x27\',"\\x5c",\'\\x5c\',"\\xff and \\xzz","unicode \\u0000 \\u0022 \\u0027 \\u005c \\uffff \\uzzzz"', '"\\x22\\x27", \'\\x22\\x27\', "\\x5c", \'\\x5c\', "\\xff and \\xzz", "unicode \\u0000 \\u0022 \\u0027 \\u005c \\uffff \\uzzzz"')
self.options.unescape_strings = True
bt('"\\x41\\x42\\x43\\x01"', '"ABC\\x01"')
test_fragment('"\\x20\\x40\\x4a"', '" @J"')
test_fragment('"\\xff\\x40\\x4a"')
test_fragment('"\\u0072\\u016B\\u0137\\u012B\\u0074\\u0069\\u0073"', six.u('"\u0072\u016B\u0137\u012B\u0074\u0069\u0073"'))
bt('a = /\s+/')
test_fragment('"\\x22\\x27",\'\\x22\\x27\',"\\x5c",\'\\x5c\',"\\xff","unicode \\u0000 \\u0022 \\u0027 \\u005c \\uffff"',
'"\\"\\\'", \'\\"\\\'\', "\\\\", \'\\\\\', "\\xff", "unicode \\u0000 \\" \\\' \\\\ ' + unicode_char(0xffff) + '"')
# For error case, return the string unchanged
test_fragment('"\\x22\\x27",\'\\x22\\x27\',"\\x5c",\'\\x5c\',"\\xff and \\xzz","unicode \\u0000 \\u0022 \\u0027 \\u005c \\uffff \\uzzzz"',
'"\\"\\\'", \'\\"\\\'\', "\\\\", \'\\\\\', "\\xff and \\xzz", "unicode \\u0000 \\u0022 \\u0027 \\u005c \\uffff \\uzzzz"')
self.options.unescape_strings = False
def test_beautifier(self):
test_fragment = self.decodesto
bt = self.bt
true = True
false = False
def unicode_char(value):
return six.unichr(value)
##============================================================
# Line wrap test inputs
#....---------1---------2---------3---------4---------5---------6---------7
#....1234567890123456789012345678901234567890123456789012345678901234567890
wrap_input_1=(
'foo.bar().baz().cucumber((f && "sass") || (leans && mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n.but_this_can\n' +
'return between_return_and_expression_should_never_wrap.but_this_can\n' +
'throw between_throw_and_expression_should_never_wrap.but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_\n.okay();\n' +
'object_literal = {\n' +
' propertx: first_token + 12345678.99999E-6,\n' +
' property: first_token_should_never_wrap + but_this_can,\n' +
' propertz: first_token_should_never_wrap + !but_this_can,\n' +
' proper: "first_token_should_never_wrap" + "but_this_can"\n' +
'}');
#....---------1---------2---------3---------4---------5---------6---------7
#....1234567890123456789012345678901234567890123456789012345678901234567890
wrap_input_2=(
'{\n' +
' foo.bar().baz().cucumber((f && "sass") || (leans && mean));\n' +
' Test_very_long_variable_name_this_should_never_wrap\n.but_this_can\n' +
' return between_return_and_expression_should_never_wrap.but_this_can\n' +
' throw between_throw_and_expression_should_never_wrap.but_this_can\n' +
' if (wraps_can_occur && inside_an_if_block) that_is_\n.okay();\n' +
' object_literal = {\n' +
' propertx: first_token + 12345678.99999E-6,\n' +
' property: first_token_should_never_wrap + but_this_can,\n' +
' propertz: first_token_should_never_wrap + !but_this_can,\n' +
' proper: "first_token_should_never_wrap" + "but_this_can"\n' +
' }' +
'}');
#============================================================
# Unicode Support
self.reset_options()
bt('var ' + unicode_char(3232) + '_' + unicode_char(3232) + ' = "hi";')
bt(
'var ' + unicode_char(228) + 'x = {\n' +
' ' + unicode_char(228) + 'rgerlich: true\n' +
'};')
bt(
'var \\u00E4\\u0ca0\\u0cA0\\u0Ca0 = {\n' +
' \\u0ca0rgerlich: true\n' +
'};')
bt(
'var \\u00E4add\\u0025 = {\n' +
' \\u0044rgerlich\\u0ca0: true\n' +
'};')
bt(
'var' + unicode_char(160) + unicode_char(3232) + '_' + unicode_char(3232) + ' = "hi";',
# -- output --
'var ' + unicode_char(3232) + '_' + unicode_char(3232) + ' = "hi";')
#============================================================
# Test template and continuation strings
self.reset_options()
bt('`This is a ${template} string.`')
bt(
'`This\n' +
' is\n' +
' a\n' +
' ${template}\n' +
' string.`')
bt(
'a = `This is a continuation\\\n' +
'string.`')
bt(
'a = "This is a continuation\\\n' +
'string."')
bt(
'`SELECT\n' +
' nextval(\'${this.options.schema ? `${this.options.schema}.` : \'\'}"${this.tableName}_${this.autoIncrementField}_seq"\'::regclass\n' +
' ) nextval;`')
# Tests for #1030
bt(
'const composeUrl = (host) => {\n' +
' return `${host `test`}`;\n' +
'};')
bt(
'const composeUrl = (host, api, key, data) => {\n' +
' switch (api) {\n' +
' case "Init":\n' +
' return `${host}/vwapi/Init?VWID=${key}&DATA=${encodeURIComponent(\n' +
' Object.keys(data).map((k) => `${k}=${ data[k]}` ).join(";")\n' +
' )}`;\n' +
' case "Pay":\n' +
' return `${host}/vwapi/Pay?SessionId=${par}`;\n' +
' };\n' +
'};')
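# These cases nest template literals inside ${...} interpolations (the
# SELECT query and the #1030 composeUrl cases), so template scanning has
# to recurse rather than stop at the first closing backtick.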
#============================================================
# ES7 Decorators
self.reset_options()
bt('@foo')
bt('@foo(bar)')
bt(
'@foo(function(k, v) {\n' +
' implementation();\n' +
'})')
#============================================================
# ES7 exponential
self.reset_options()
bt('x ** 2')
bt('x ** -2')
#============================================================
# Spread operator
self.reset_options()
self.options.brace_style = "collapse,preserve-inline"
bt('const m = { ...item, c: 3 };')
bt(
'const m = {\n' +
' ...item,\n' +
' c: 3\n' +
'};')
bt('const m = { c: 3, ...item };')
bt('const m = [...item, 3];')
bt('const m = [3, ...item];')
#============================================================
# Object literal shorthand functions
self.reset_options()
bt(
'return {\n' +
' foo() {\n' +
' return 42;\n' +
' }\n' +
'}')
bt(
'var foo = {\n' +
' * bar() {\n' +
' yield 42;\n' +
' }\n' +
'};')
bt(
'var foo = {bar(){return 42;},*barGen(){yield 42;}};',
# -- output --
'var foo = {\n' +
' bar() {\n' +
' return 42;\n' +
' },\n' +
' * barGen() {\n' +
' yield 42;\n' +
' }\n' +
'};')
# also handle generator shorthand in class - #1013
bt(
'class A {\n' +
' fn() {\n' +
' return true;\n' +
' }\n' +
'\n' +
' * gen() {\n' +
' return true;\n' +
' }\n' +
'}')
bt(
'class A {\n' +
' * gen() {\n' +
' return true;\n' +
' }\n' +
'\n' +
' fn() {\n' +
' return true;\n' +
' }\n' +
'}')
#============================================================
# End With Newline - (end_with_newline = "true")
self.reset_options()
self.options.end_with_newline = true
test_fragment('', '\n')
test_fragment(' return .5', ' return .5\n')
test_fragment(
' \n' +
'\n' +
'return .5\n' +
'\n' +
'\n' +
'\n',
# -- output --
' return .5\n')
test_fragment('\n')
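# With end_with_newline on, even empty input gains a trailing newline
# and trailing blank lines collapse to exactly one.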
# End With Newline - (end_with_newline = "false")
self.reset_options()
self.options.end_with_newline = false
test_fragment('')
test_fragment(' return .5')
test_fragment(
' \n' +
'\n' +
'return .5\n' +
'\n' +
'\n' +
'\n',
# -- output --
' return .5')
test_fragment('\n', '')
#============================================================
# Support Indent Level Options and Base Indent Autodetection - ()
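# With no indent_level set, the base indent is auto-detected from the
# leading whitespace of the fragment's first line and re-applied to the
# whole beautified result.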
self.reset_options()
test_fragment(' a')
test_fragment(
' function test(){\n' +
' console.log("this is a test");\n' +
'}',
# -- output --
' function test() {\n' +
' console.log("this is a test");\n' +
' }')
test_fragment(
' // This is a random comment\n' +
'function test(){\n' +
' console.log("this is a test");\n' +
'}',
# -- output --
' // This is a random comment\n' +
' function test() {\n' +
' console.log("this is a test");\n' +
' }')
# Support Indent Level Options and Base Indent Autodetection - (indent_level = "0")
self.reset_options()
self.options.indent_level = 0
test_fragment(' a')
test_fragment(
' function test(){\n' +
' console.log("this is a test");\n' +
'}',
# -- output --
' function test() {\n' +
' console.log("this is a test");\n' +
' }')
test_fragment(
' // This is a random comment\n' +
'function test(){\n' +
' console.log("this is a test");\n' +
'}',
# -- output --
' // This is a random comment\n' +
' function test() {\n' +
' console.log("this is a test");\n' +
' }')
# Support Indent Level Options and Base Indent Autodetection - (indent_level = "1")
self.reset_options()
self.options.indent_level = 1
test_fragment(' a', ' a')
test_fragment(
' function test(){\n' +
' console.log("this is a test");\n' +
'}',
# -- output --
' function test() {\n' +
' console.log("this is a test");\n' +
' }')
test_fragment(
' // This is a random comment\n' +
'function test(){\n' +
' console.log("this is a test");\n' +
'}',
# -- output --
' // This is a random comment\n' +
' function test() {\n' +
' console.log("this is a test");\n' +
' }')
# Support Indent Level Options and Base Indent Autodetection - (indent_level = "2")
self.reset_options()
self.options.indent_level = 2
test_fragment('a', ' a')
test_fragment(
'function test(){\n' +
' console.log("this is a test");\n' +
'}',
# -- output --
' function test() {\n' +
' console.log("this is a test");\n' +
' }')
test_fragment(
'// This is a random comment\n' +
'function test(){\n' +
' console.log("this is a test");\n' +
'}',
# -- output --
' // This is a random comment\n' +
' function test() {\n' +
' console.log("this is a test");\n' +
' }')
# Support Indent Level Options and Base Indent Autodetection - (indent_with_tabs = "true", indent_level = "2")
self.reset_options()
self.options.indent_with_tabs = true
self.options.indent_level = 2
test_fragment('a', '\t\ta')
test_fragment(
'function test(){\n' +
' console.log("this is a test");\n' +
'}',
# -- output --
'\t\tfunction test() {\n' +
'\t\t\tconsole.log("this is a test");\n' +
'\t\t}')
test_fragment(
'// This is a random comment\n' +
'function test(){\n' +
' console.log("this is a test");\n' +
'}',
# -- output --
'\t\t// This is a random comment\n' +
'\t\tfunction test() {\n' +
'\t\t\tconsole.log("this is a test");\n' +
'\t\t}')
# Support Indent Level Options and Base Indent Autodetection - (indent_level = "0")
self.reset_options()
self.options.indent_level = 0
test_fragment('\t a')
test_fragment(
'\t function test(){\n' +
' console.log("this is a test");\n' +
'}',
# -- output --
'\t function test() {\n' +
'\t console.log("this is a test");\n' +
'\t }')
test_fragment(
'\t // This is a random comment\n' +
'function test(){\n' +
' console.log("this is a test");\n' +
'}',
# -- output --
'\t // This is a random comment\n' +
'\t function test() {\n' +
'\t console.log("this is a test");\n' +
'\t }')
#============================================================
# Support simple language specific option inheritance/overriding - (js = "{ "indent_size": 3 }", css = "{ "indent_size": 5 }")
self.reset_options()
self.options.js = { 'indent_size': 3 }
self.options.css = { 'indent_size': 5 }
bt(
'if (a == b) {\n' +
' test();\n' +
'}')
# Support simple language specific option inheritance/overriding - (html = "{ "js": { "indent_size": 3 }, "css": { "indent_size": 5 } }")
self.reset_options()
self.options.html = { 'js': { 'indent_size': 3 }, 'css': { 'indent_size': 5 } }
bt(
'if (a == b) {\n' +
' test();\n' +
'}')
# Support simple language specific option inheritance/overriding - (indent_size = "9", html = "{ "js": { "indent_size": 3 }, "css": { "indent_size": 5 }, "indent_size": 2}", js = "{ "indent_size": 4 }", css = "{ "indent_size": 3 }")
self.reset_options()
self.options.indent_size = 9
self.options.html = { 'js': { 'indent_size': 3 }, 'css': { 'indent_size': 5 }, 'indent_size': 2}
self.options.js = { 'indent_size': 4 }
self.options.css = { 'indent_size': 3 }
bt(
'if (a == b) {\n' +
' test();\n' +
'}')
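# Note: all three permutations expect the same 3-space indent: the
# js-scoped indent_size of 3 (nested under html in the later cases) wins
# over the css and top-level values and, in the last case, over the
# direct js setting of 4.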
#============================================================
# Brace style permutations - (brace_style = ""collapse,preserve-inline"")
self.reset_options()
self.options.brace_style = 'collapse,preserve-inline'
bt(
'var a ={a: 2};\n' +
'var a ={a: 2};',
# -- output --
'var a = { a: 2 };\n' +
'var a = { a: 2 };')
bt(
'//case 1\n' +
'if (a == 1){}\n' +
'//case 2\n' +
'else if (a == 2){}',
# -- output --
'//case 1\n' +
'if (a == 1) {}\n' +
'//case 2\n' +
'else if (a == 2) {}')
bt('if(1){2}else{3}', 'if (1) { 2 } else { 3 }')
bt('try{a();}catch(b){c();}catch(d){}finally{e();}', 'try { a(); } catch (b) { c(); } catch (d) {} finally { e(); }')
# Brace style permutations - (brace_style = ""collapse,preserve-inline"")
self.reset_options()
self.options.brace_style = 'collapse,preserve-inline'
bt(
'var a =\n' +
'{\n' +
'a: 2\n' +
'}\n' +
';\n' +
'var a =\n' +
'{\n' +
'a: 2\n' +
'}\n' +
';',
# -- output --
'var a = {\n' +
' a: 2\n' +
'};\n' +
'var a = {\n' +
' a: 2\n' +
'};')
bt(
'//case 1\n' +
'if (a == 1)\n' +
'{}\n' +
'//case 2\n' +
'else if (a == 2)\n' +
'{}',
# -- output --
'//case 1\n' +
'if (a == 1) {}\n' +
'//case 2\n' +
'else if (a == 2) {}')
bt(
'if(1)\n' +
'{\n' +
'2\n' +
'}\n' +
'else\n' +
'{\n' +
'3\n' +
'}',
# -- output --
'if (1) {\n' +
' 2\n' +
'} else {\n' +
' 3\n' +
'}')
bt(
'try\n' +
'{\n' +
'a();\n' +
'}\n' +
'catch(b)\n' +
'{\n' +
'c();\n' +
'}\n' +
'catch(d)\n' +
'{}\n' +
'finally\n' +
'{\n' +
'e();\n' +
'}',
# -- output --
'try {\n' +
' a();\n' +
'} catch (b) {\n' +
' c();\n' +
'} catch (d) {} finally {\n' +
' e();\n' +
'}')
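# preserve-inline keeps already-inline braces on one line (previous
# group), while multi-line sources (this group) still collapse to the
# standard collapse style.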
# Brace style permutations - ()
self.reset_options()
bt(
'var a ={a: 2};\n' +
'var a ={a: 2};',
# -- output --
'var a = {\n' +
' a: 2\n' +
'};\n' +
'var a = {\n' +
' a: 2\n' +
'};')
bt(
'//case 1\n' +
'if (a == 1){}\n' +
'//case 2\n' +
'else if (a == 2){}',
# -- output --
'//case 1\n' +
'if (a == 1) {}\n' +
'//case 2\n' +
'else if (a == 2) {}')
bt(
'if(1){2}else{3}',
# -- output --
'if (1) {\n' +
' 2\n' +
'} else {\n' +
' 3\n' +
'}')
bt(
'try{a();}catch(b){c();}catch(d){}finally{e();}',
# -- output --
'try {\n' +
' a();\n' +
'} catch (b) {\n' +
' c();\n' +
'} catch (d) {} finally {\n' +
' e();\n' +
'}')
# Brace style permutations - (brace_style = ""collapse"")
self.reset_options()
self.options.brace_style = 'collapse'
bt(
'var a ={a: 2};\n' +
'var a ={a: 2};',
# -- output --
'var a = {\n' +
' a: 2\n' +
'};\n' +
'var a = {\n' +
' a: 2\n' +
'};')
bt(
'//case 1\n' +
'if (a == 1){}\n' +
'//case 2\n' +
'else if (a == 2){}',
# -- output --
'//case 1\n' +
'if (a == 1) {}\n' +
'//case 2\n' +
'else if (a == 2) {}')
bt(
'if(1){2}else{3}',
# -- output --
'if (1) {\n' +
' 2\n' +
'} else {\n' +
' 3\n' +
'}')
bt(
'try{a();}catch(b){c();}catch(d){}finally{e();}',
# -- output --
'try {\n' +
' a();\n' +
'} catch (b) {\n' +
' c();\n' +
'} catch (d) {} finally {\n' +
' e();\n' +
'}')
# Brace style permutations - (brace_style = ""collapse"")
self.reset_options()
self.options.brace_style = 'collapse'
bt(
'var a =\n' +
'{\n' +
'a: 2\n' +
'}\n' +
';\n' +
'var a =\n' +
'{\n' +
'a: 2\n' +
'}\n' +
';',
# -- output --
'var a = {\n' +
' a: 2\n' +
'};\n' +
'var a = {\n' +
' a: 2\n' +
'};')
bt(
'//case 1\n' +
'if (a == 1)\n' +
'{}\n' +
'//case 2\n' +
'else if (a == 2)\n' +
'{}',
# -- output --
'//case 1\n' +
'if (a == 1) {}\n' +
'//case 2\n' +
'else if (a == 2) {}')
bt(
'if(1)\n' +
'{\n' +
'2\n' +
'}\n' +
'else\n' +
'{\n' +
'3\n' +
'}',
# -- output --
'if (1) {\n' +
' 2\n' +
'} else {\n' +
' 3\n' +
'}')
bt(
'try\n' +
'{\n' +
'a();\n' +
'}\n' +
'catch(b)\n' +
'{\n' +
'c();\n' +
'}\n' +
'catch(d)\n' +
'{}\n' +
'finally\n' +
'{\n' +
'e();\n' +
'}',
# -- output --
'try {\n' +
' a();\n' +
'} catch (b) {\n' +
' c();\n' +
'} catch (d) {} finally {\n' +
' e();\n' +
'}')
#============================================================
# Comma-first option - (comma_first = "false")
self.reset_options()
self.options.comma_first = false
bt(
'{a:1, b:2}',
# -- output --
'{\n' +
' a: 1,\n' +
' b: 2\n' +
'}')
bt(
'var a=1, b=c[d], e=6;',
# -- output --
'var a = 1,\n' +
' b = c[d],\n' +
' e = 6;')
bt(
'for(var a=1,b=2,c=3;d<3;d++)\n' +
'e',
# -- output --
'for (var a = 1, b = 2, c = 3; d < 3; d++)\n' +
' e')
bt(
'for(var a=1,b=2,\n' +
'c=3;d<3;d++)\n' +
'e',
# -- output --
'for (var a = 1, b = 2,\n' +
' c = 3; d < 3; d++)\n' +
' e')
bt(
'function foo() {\n' +
' return [\n' +
' "one",\n' +
' "two"\n' +
' ];\n' +
'}')
bt(
'a=[[1,2],[4,5],[7,8]]',
# -- output --
'a = [\n' +
' [1, 2],\n' +
' [4, 5],\n' +
' [7, 8]\n' +
']')
bt(
'a=[[1,2],[4,5],[7,8],]',
# -- output --
'a = [\n' +
' [1, 2],\n' +
' [4, 5],\n' +
' [7, 8],\n' +
']')
bt(
'a=[[1,2],[4,5],function(){},[7,8]]',
# -- output --
'a = [\n' +
' [1, 2],\n' +
' [4, 5],\n' +
' function() {},\n' +
' [7, 8]\n' +
']')
bt(
'a=[[1,2],[4,5],function(){},function(){},[7,8]]',
# -- output --
'a = [\n' +
' [1, 2],\n' +
' [4, 5],\n' +
' function() {},\n' +
' function() {},\n' +
' [7, 8]\n' +
']')
bt(
'a=[[1,2],[4,5],function(){},[7,8]]',
# -- output --
'a = [\n' +
' [1, 2],\n' +
' [4, 5],\n' +
' function() {},\n' +
' [7, 8]\n' +
']')
bt('a=[b,c,function(){},function(){},d]', 'a = [b, c, function() {}, function() {}, d]')
bt(
'a=[b,c,\n' +
'function(){},function(){},d]',
# -- output --
'a = [b, c,\n' +
' function() {},\n' +
' function() {},\n' +
' d\n' +
']')
bt('a=[a[1],b[4],c[d[7]]]', 'a = [a[1], b[4], c[d[7]]]')
bt('[1,2,[3,4,[5,6],7],8]', '[1, 2, [3, 4, [5, 6], 7], 8]')
bt(
'[[["1","2"],["3","4"]],[["5","6","7"],["8","9","0"]],[["1","2","3"],["4","5","6","7"],["8","9","0"]]]',
# -- output --
'[\n' +
' [\n' +
' ["1", "2"],\n' +
' ["3", "4"]\n' +
' ],\n' +
' [\n' +
' ["5", "6", "7"],\n' +
' ["8", "9", "0"]\n' +
' ],\n' +
' [\n' +
' ["1", "2", "3"],\n' +
' ["4", "5", "6", "7"],\n' +
' ["8", "9", "0"]\n' +
' ]\n' +
']')
bt(
'changeCollection.add({\n' +
' name: "Jonathan" // New line inserted after this line on every save\n' +
' , age: 25\n' +
'});',
# -- output --
'changeCollection.add({\n' +
' name: "Jonathan" // New line inserted after this line on every save\n' +
' ,\n' +
' age: 25\n' +
'});')
bt(
'changeCollection.add(\n' +
' function() {\n' +
' return true;\n' +
' },\n' +
' function() {\n' +
' return true;\n' +
' }\n' +
');')
# Comma-first option - (comma_first = "true")
self.reset_options()
self.options.comma_first = true
bt(
'{a:1, b:2}',
# -- output --
'{\n' +
' a: 1\n' +
' , b: 2\n' +
'}')
bt(
'var a=1, b=c[d], e=6;',
# -- output --
'var a = 1\n' +
' , b = c[d]\n' +
' , e = 6;')
bt(
'for(var a=1,b=2,c=3;d<3;d++)\n' +
'e',
# -- output --
'for (var a = 1, b = 2, c = 3; d < 3; d++)\n' +
' e')
bt(
'for(var a=1,b=2,\n' +
'c=3;d<3;d++)\n' +
'e',
# -- output --
'for (var a = 1, b = 2\n' +
' , c = 3; d < 3; d++)\n' +
' e')
bt(
'function foo() {\n' +
' return [\n' +
' "one"\n' +
' , "two"\n' +
' ];\n' +
'}')
bt(
'a=[[1,2],[4,5],[7,8]]',
# -- output --
'a = [\n' +
' [1, 2]\n' +
' , [4, 5]\n' +
' , [7, 8]\n' +
']')
bt(
'a=[[1,2],[4,5],[7,8],]',
# -- output --
'a = [\n' +
' [1, 2]\n' +
' , [4, 5]\n' +
' , [7, 8]\n' +
', ]')
bt(
'a=[[1,2],[4,5],function(){},[7,8]]',
# -- output --
'a = [\n' +
' [1, 2]\n' +
' , [4, 5]\n' +
' , function() {}\n' +
' , [7, 8]\n' +
']')
bt(
'a=[[1,2],[4,5],function(){},function(){},[7,8]]',
# -- output --
'a = [\n' +
' [1, 2]\n' +
' , [4, 5]\n' +
' , function() {}\n' +
' , function() {}\n' +
' , [7, 8]\n' +
']')
bt(
'a=[[1,2],[4,5],function(){},[7,8]]',
# -- output --
'a = [\n' +
' [1, 2]\n' +
' , [4, 5]\n' +
' , function() {}\n' +
' , [7, 8]\n' +
']')
bt('a=[b,c,function(){},function(){},d]', 'a = [b, c, function() {}, function() {}, d]')
bt(
'a=[b,c,\n' +
'function(){},function(){},d]',
# -- output --
'a = [b, c\n' +
' , function() {}\n' +
' , function() {}\n' +
' , d\n' +
']')
bt('a=[a[1],b[4],c[d[7]]]', 'a = [a[1], b[4], c[d[7]]]')
bt('[1,2,[3,4,[5,6],7],8]', '[1, 2, [3, 4, [5, 6], 7], 8]')
bt(
'[[["1","2"],["3","4"]],[["5","6","7"],["8","9","0"]],[["1","2","3"],["4","5","6","7"],["8","9","0"]]]',
# -- output --
'[\n' +
' [\n' +
' ["1", "2"]\n' +
' , ["3", "4"]\n' +
' ]\n' +
' , [\n' +
' ["5", "6", "7"]\n' +
' , ["8", "9", "0"]\n' +
' ]\n' +
' , [\n' +
' ["1", "2", "3"]\n' +
' , ["4", "5", "6", "7"]\n' +
' , ["8", "9", "0"]\n' +
' ]\n' +
']')
bt(
'changeCollection.add({\n' +
' name: "Jonathan" // New line inserted after this line on every save\n' +
' , age: 25\n' +
'});')
bt(
'changeCollection.add(\n' +
' function() {\n' +
' return true;\n' +
' },\n' +
' function() {\n' +
' return true;\n' +
' }\n' +
');',
# -- output --
'changeCollection.add(\n' +
' function() {\n' +
' return true;\n' +
' }\n' +
' , function() {\n' +
' return true;\n' +
' }\n' +
');')
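# With comma_first on, separators move to the head of the continuation
# line; note the trailing-comma array keeps its comma attached as ", ]".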
#============================================================
# Unindent chained functions - (unindent_chained_methods = "true")
self.reset_options()
self.options.unindent_chained_methods = true
bt(
'f().f().f()\n' +
' .f().f();',
# -- output --
'f().f().f()\n' +
'.f().f();')
bt(
'f()\n' +
' .f()\n' +
' .f();',
# -- output --
'f()\n' +
'.f()\n' +
'.f();')
bt(
'f(function() {\n' +
' f()\n' +
' .f()\n' +
' .f();\n' +
'});',
# -- output --
'f(function() {\n' +
' f()\n' +
' .f()\n' +
' .f();\n' +
'});')
# regression test for fix #1378
bt(
'f(function() {\n' +
' if(g === 1)\n' +
' g = 0;\n' +
' else\n' +
' g = 1;\n' +
'\n' +
' f()\n' +
' .f()\n' +
' .f();\n' +
'});',
# -- output --
'f(function() {\n' +
' if (g === 1)\n' +
' g = 0;\n' +
' else\n' +
' g = 1;\n' +
'\n' +
' f()\n' +
' .f()\n' +
' .f();\n' +
'});')
# regression test for fix #1533
bt(
'angular.module("test").controller("testCtrl", function($scope) {\n' +
' $scope.tnew;\n' +
' $scope.toggle_tnew = function() {\n' +
' $scope.mode = 0;\n' +
' if (!$scope.tnew) {\n' +
' $scope.tnew = {};\n' +
' } else $scope.tnew = null;\n' +
' }\n' +
' $scope.fn = function() {\n' +
' return null;\n' +
' }\n' +
'});')
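# unindent_chained_methods removes the extra indent before a leading "."
# at statement level; the #1378 and #1533 regressions ensure the
# de-indent does not disturb surrounding if/else and function bodies.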
#============================================================
# Space in parens tests - (space_in_paren = "false", space_in_empty_paren = "false")
self.reset_options()
self.options.space_in_paren = false
self.options.space_in_empty_paren = false
bt('if(p) foo(a,b);', 'if (p) foo(a, b);')
bt(
'try{while(true){willThrow()}}catch(result)switch(result){case 1:++result }',
# -- output --
'try {\n' +
' while (true) {\n' +
' willThrow()\n' +
' }\n' +
'} catch (result) switch (result) {\n' +
' case 1:\n' +
' ++result\n' +
'}')
bt('((e/((a+(b)*c)-d))^2)*5;', '((e / ((a + (b) * c) - d)) ^ 2) * 5;')
bt(
'function f(a,b) {if(a) b()}function g(a,b) {if(!a) b()}',
# -- output --
'function f(a, b) {\n' +
' if (a) b()\n' +
'}\n' +
'\n' +
'function g(a, b) {\n' +
' if (!a) b()\n' +
'}')
bt('a=[][ ]( );', 'a = [][]();')
bt('a=()( )[ ];', 'a = ()()[];')
bt('a=[b,c,d];', 'a = [b, c, d];')
bt('a= f[b];', 'a = f[b];')
bt(
'{\n' +
' files: a[][ {\n' +
' expand: true,\n' +
' cwd: "www/gui/",\n' +
' src: b(c)[ "im/design_standards/*.*" ],\n' +
' dest: "www/gui/build"\n' +
' } ]\n' +
'}',
# -- output --
'{\n' +
' files: a[][{\n' +
' expand: true,\n' +
' cwd: "www/gui/",\n' +
' src: b(c)["im/design_standards/*.*"],\n' +
' dest: "www/gui/build"\n' +
' }]\n' +
'}')
# Space in parens tests - (space_in_paren = "false", space_in_empty_paren = "true")
self.reset_options()
self.options.space_in_paren = false
self.options.space_in_empty_paren = true
bt('if(p) foo(a,b);', 'if (p) foo(a, b);')
bt(
'try{while(true){willThrow()}}catch(result)switch(result){case 1:++result }',
# -- output --
'try {\n' +
' while (true) {\n' +
' willThrow()\n' +
' }\n' +
'} catch (result) switch (result) {\n' +
' case 1:\n' +
' ++result\n' +
'}')
bt('((e/((a+(b)*c)-d))^2)*5;', '((e / ((a + (b) * c) - d)) ^ 2) * 5;')
bt(
'function f(a,b) {if(a) b()}function g(a,b) {if(!a) b()}',
# -- output --
'function f(a, b) {\n' +
' if (a) b()\n' +
'}\n' +
'\n' +
'function g(a, b) {\n' +
' if (!a) b()\n' +
'}')
bt('a=[][ ]( );', 'a = [][]();')
bt('a=()( )[ ];', 'a = ()()[];')
bt('a=[b,c,d];', 'a = [b, c, d];')
bt('a= f[b];', 'a = f[b];')
bt(
'{\n' +
' files: a[][ {\n' +
' expand: true,\n' +
' cwd: "www/gui/",\n' +
' src: b(c)[ "im/design_standards/*.*" ],\n' +
' dest: "www/gui/build"\n' +
' } ]\n' +
'}',
# -- output --
'{\n' +
' files: a[][{\n' +
' expand: true,\n' +
' cwd: "www/gui/",\n' +
' src: b(c)["im/design_standards/*.*"],\n' +
' dest: "www/gui/build"\n' +
' }]\n' +
'}')
# Space in parens tests - (space_in_paren = "true", space_in_empty_paren = "false")
self.reset_options()
self.options.space_in_paren = true
self.options.space_in_empty_paren = false
bt('if(p) foo(a,b);', 'if ( p ) foo( a, b );')
bt(
'try{while(true){willThrow()}}catch(result)switch(result){case 1:++result }',
# -- output --
'try {\n' +
' while ( true ) {\n' +
' willThrow()\n' +
' }\n' +
'} catch ( result ) switch ( result ) {\n' +
' case 1:\n' +
' ++result\n' +
'}')
bt('((e/((a+(b)*c)-d))^2)*5;', '( ( e / ( ( a + ( b ) * c ) - d ) ) ^ 2 ) * 5;')
bt(
'function f(a,b) {if(a) b()}function g(a,b) {if(!a) b()}',
# -- output --
'function f( a, b ) {\n' +
' if ( a ) b()\n' +
'}\n' +
'\n' +
'function g( a, b ) {\n' +
' if ( !a ) b()\n' +
'}')
bt('a=[][ ]( );', 'a = [][]();')
bt('a=()( )[ ];', 'a = ()()[];')
bt('a=[b,c,d];', 'a = [ b, c, d ];')
bt('a= f[b];', 'a = f[ b ];')
bt(
'{\n' +
' files: a[][ {\n' +
' expand: true,\n' +
' cwd: "www/gui/",\n' +
' src: b(c)[ "im/design_standards/*.*" ],\n' +
' dest: "www/gui/build"\n' +
' } ]\n' +
'}',
# -- output --
'{\n' +
' files: a[][ {\n' +
' expand: true,\n' +
' cwd: "www/gui/",\n' +
' src: b( c )[ "im/design_standards/*.*" ],\n' +
' dest: "www/gui/build"\n' +
' } ]\n' +
'}')
# Space in parens tests - (space_in_paren = "true", space_in_empty_paren = "true")
self.reset_options()
self.options.space_in_paren = true
self.options.space_in_empty_paren = true
bt('if(p) foo(a,b);', 'if ( p ) foo( a, b );')
bt(
'try{while(true){willThrow()}}catch(result)switch(result){case 1:++result }',
# -- output --
'try {\n' +
' while ( true ) {\n' +
' willThrow( )\n' +
' }\n' +
'} catch ( result ) switch ( result ) {\n' +
' case 1:\n' +
' ++result\n' +
'}')
bt('((e/((a+(b)*c)-d))^2)*5;', '( ( e / ( ( a + ( b ) * c ) - d ) ) ^ 2 ) * 5;')
bt(
'function f(a,b) {if(a) b()}function g(a,b) {if(!a) b()}',
# -- output --
'function f( a, b ) {\n' +
' if ( a ) b( )\n' +
'}\n' +
'\n' +
'function g( a, b ) {\n' +
' if ( !a ) b( )\n' +
'}')
bt('a=[][ ]( );', 'a = [ ][ ]( );')
bt('a=()( )[ ];', 'a = ( )( )[ ];')
bt('a=[b,c,d];', 'a = [ b, c, d ];')
bt('a= f[b];', 'a = f[ b ];')
bt(
'{\n' +
' files: a[][ {\n' +
' expand: true,\n' +
' cwd: "www/gui/",\n' +
' src: b(c)[ "im/design_standards/*.*" ],\n' +
' dest: "www/gui/build"\n' +
' } ]\n' +
'}',
# -- output --
'{\n' +
' files: a[ ][ {\n' +
' expand: true,\n' +
' cwd: "www/gui/",\n' +
' src: b( c )[ "im/design_standards/*.*" ],\n' +
' dest: "www/gui/build"\n' +
' } ]\n' +
'}')
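# Across the four permutations: space_in_paren pads non-empty () and []
# pairs, while space_in_empty_paren only pads empty pairs and is a no-op
# unless space_in_paren is also enabled (the false/true group matches
# false/false).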
#============================================================
# general preserve_newlines tests - (preserve_newlines = "false")
self.reset_options()
self.options.preserve_newlines = false
bt(
'if (foo) // comment\n' +
' bar();')
bt(
'if (foo) // comment\n' +
' bar();')
bt(
'if (foo) // comment\n' +
' (bar());')
bt(
'if (foo) // comment\n' +
' (bar());')
bt(
'if (foo) // comment\n' +
' /asdf/;')
bt(
'this.oa = new OAuth(\n' +
' _requestToken,\n' +
' _accessToken,\n' +
' consumer_key\n' +
');',
# -- output --
'this.oa = new OAuth(_requestToken, _accessToken, consumer_key);')
bt(
'foo = {\n' +
' x: y, // #44\n' +
' w: z // #44\n' +
'}')
bt(
'switch (x) {\n' +
' case "a":\n' +
' // comment on newline\n' +
' break;\n' +
' case "b": // comment on same line\n' +
' break;\n' +
'}')
bt(
'this.type =\n' +
' this.options =\n' +
' // comment\n' +
' this.enabled null;',
# -- output --
'this.type = this.options =\n' +
' // comment\n' +
' this.enabled null;')
bt(
'someObj\n' +
' .someFunc1()\n' +
' // This comment should not break the indent\n' +
' .someFunc2();',
# -- output --
'someObj.someFunc1()\n' +
' // This comment should not break the indent\n' +
' .someFunc2();')
bt(
'if (true ||\n' +
'!true) return;',
# -- output --
'if (true || !true) return;')
bt(
'if\n' +
'(foo)\n' +
'if\n' +
'(bar)\n' +
'if\n' +
'(baz)\n' +
'whee();\n' +
'a();',
# -- output --
'if (foo)\n' +
' if (bar)\n' +
' if (baz) whee();\n' +
'a();')
bt(
'if\n' +
'(foo)\n' +
'if\n' +
'(bar)\n' +
'if\n' +
'(baz)\n' +
'whee();\n' +
'else\n' +
'a();',
# -- output --
'if (foo)\n' +
' if (bar)\n' +
' if (baz) whee();\n' +
' else a();')
bt(
'if (foo)\n' +
'bar();\n' +
'else\n' +
'car();',
# -- output --
'if (foo) bar();\n' +
'else car();')
bt(
'if (foo) if (bar) if (baz);\n' +
'a();',
# -- output --
'if (foo)\n' +
' if (bar)\n' +
' if (baz);\n' +
'a();')
bt(
'if (foo) if (bar) if (baz) whee();\n' +
'a();',
# -- output --
'if (foo)\n' +
' if (bar)\n' +
' if (baz) whee();\n' +
'a();')
bt(
'if (foo) a()\n' +
'if (bar) if (baz) whee();\n' +
'a();',
# -- output --
'if (foo) a()\n' +
'if (bar)\n' +
' if (baz) whee();\n' +
'a();')
bt(
'if (foo);\n' +
'if (bar) if (baz) whee();\n' +
'a();',
# -- output --
'if (foo);\n' +
'if (bar)\n' +
' if (baz) whee();\n' +
'a();')
bt(
'if (options)\n' +
' for (var p in options)\n' +
' this[p] = options[p];',
# -- output --
'if (options)\n' +
' for (var p in options) this[p] = options[p];')
bt(
'if (options) for (var p in options) this[p] = options[p];',
# -- output --
'if (options)\n' +
' for (var p in options) this[p] = options[p];')
bt(
'if (options) do q(); while (b());',
# -- output --
'if (options)\n' +
' do q(); while (b());')
bt(
'if (options) while (b()) q();',
# -- output --
'if (options)\n' +
' while (b()) q();')
bt(
'if (options) do while (b()) q(); while (a());',
# -- output --
'if (options)\n' +
' do\n' +
' while (b()) q(); while (a());')
bt(
'function f(a, b, c,\n' +
'd, e) {}',
# -- output --
'function f(a, b, c, d, e) {}')
bt(
'function f(a,b) {if(a) b()}function g(a,b) {if(!a) b()}',
# -- output --
'function f(a, b) {\n' +
' if (a) b()\n' +
'}\n' +
'\n' +
'function g(a, b) {\n' +
' if (!a) b()\n' +
'}')
bt(
'function f(a,b) {if(a) b()}\n' +
'\n' +
'\n' +
'\n' +
'function g(a,b) {if(!a) b()}',
# -- output --
'function f(a, b) {\n' +
' if (a) b()\n' +
'}\n' +
'\n' +
'function g(a, b) {\n' +
' if (!a) b()\n' +
'}')
bt(
'(if(a) b())(if(a) b())',
# -- output --
'(\n' +
' if (a) b())(\n' +
' if (a) b())')
bt(
'(if(a) b())\n' +
'\n' +
'\n' +
'(if(a) b())',
# -- output --
'(\n' +
' if (a) b())\n' +
'(\n' +
' if (a) b())')
bt(
'if\n' +
'(a)\n' +
'b();',
# -- output --
'if (a) b();')
bt(
'var a =\n' +
'foo',
# -- output --
'var a = foo')
bt(
'var a = {\n' +
'"a":1,\n' +
'"b":2}',
# -- output --
'var a = {\n' +
' "a": 1,\n' +
' "b": 2\n' +
'}')
bt(
'var a = {\n' +
'\'a\':1,\n' +
'\'b\':2}',
# -- output --
'var a = {\n' +
' \'a\': 1,\n' +
' \'b\': 2\n' +
'}')
bt('var a = /*i*/ "b";')
bt(
'var a = /*i*/\n' +
'"b";',
# -- output --
'var a = /*i*/ "b";')
bt(
'{\n' +
'\n' +
'\n' +
'"x"\n' +
'}',
# -- output --
'{\n' +
' "x"\n' +
'}')
bt(
'if(a &&\n' +
'b\n' +
'||\n' +
'c\n' +
'||d\n' +
'&&\n' +
'e) e = f',
# -- output --
'if (a && b || c || d && e) e = f')
bt(
'if(a &&\n' +
'(b\n' +
'||\n' +
'c\n' +
'||d)\n' +
'&&\n' +
'e) e = f',
# -- output --
'if (a && (b || c || d) && e) e = f')
test_fragment(
'\n' +
'\n' +
'"x"',
# -- output --
'"x"')
test_fragment(
'{\n' +
'\n' +
'"x"\n' +
'h=5;\n' +
'}',
# -- output --
'{\n' +
' "x"\n' +
' h = 5;\n' +
'}')
bt(
'var a = "foo" +\n' +
' "bar";',
# -- output --
'var a = "foo" + "bar";')
bt(
'var a = 42; // foo\n' +
'\n' +
'var b;',
# -- output --
'var a = 42; // foo\n' +
'var b;')
bt(
'var a = 42; // foo\n' +
'\n' +
'\n' +
'var b;',
# -- output --
'var a = 42; // foo\n' +
'var b;')
bt(
'a = 1;\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'b = 2;',
# -- output --
'a = 1;\n' +
'b = 2;')
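# With preserve_newlines off, blank-line runs and soft line breaks all
# collapse; only breaks required by comments and statement structure
# survive.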
# general preserve_newlines tests - (preserve_newlines = "true")
self.reset_options()
self.options.preserve_newlines = true
bt(
'if (foo) // comment\n' +
' bar();')
bt(
'if (foo) // comment\n' +
' bar();')
bt(
'if (foo) // comment\n' +
' (bar());')
bt(
'if (foo) // comment\n' +
' (bar());')
bt(
'if (foo) // comment\n' +
' /asdf/;')
bt(
'this.oa = new OAuth(\n' +
' _requestToken,\n' +
' _accessToken,\n' +
' consumer_key\n' +
');')
bt(
'foo = {\n' +
' x: y, // #44\n' +
' w: z // #44\n' +
'}')
bt(
'switch (x) {\n' +
' case "a":\n' +
' // comment on newline\n' +
' break;\n' +
' case "b": // comment on same line\n' +
' break;\n' +
'}')
bt(
'this.type =\n' +
' this.options =\n' +
' // comment\n' +
' this.enabled null;')
bt(
'someObj\n' +
' .someFunc1()\n' +
' // This comment should not break the indent\n' +
' .someFunc2();')
bt(
'if (true ||\n' +
'!true) return;',
# -- output --
'if (true ||\n' +
' !true) return;')
bt(
'if\n' +
'(foo)\n' +
'if\n' +
'(bar)\n' +
'if\n' +
'(baz)\n' +
'whee();\n' +
'a();',
# -- output --
'if (foo)\n' +
' if (bar)\n' +
' if (baz)\n' +
' whee();\n' +
'a();')
bt(
'if\n' +
'(foo)\n' +
'if\n' +
'(bar)\n' +
'if\n' +
'(baz)\n' +
'whee();\n' +
'else\n' +
'a();',
# -- output --
'if (foo)\n' +
' if (bar)\n' +
' if (baz)\n' +
' whee();\n' +
' else\n' +
' a();')
bt(
'if (foo)\n' +
'bar();\n' +
'else\n' +
'car();',
# -- output --
'if (foo)\n' +
' bar();\n' +
'else\n' +
' car();')
bt(
'if (foo) if (bar) if (baz);\n' +
'a();',
# -- output --
'if (foo)\n' +
' if (bar)\n' +
' if (baz);\n' +
'a();')
bt(
'if (foo) if (bar) if (baz) whee();\n' +
'a();',
# -- output --
'if (foo)\n' +
' if (bar)\n' +
' if (baz) whee();\n' +
'a();')
bt(
'if (foo) a()\n' +
'if (bar) if (baz) whee();\n' +
'a();',
# -- output --
'if (foo) a()\n' +
'if (bar)\n' +
' if (baz) whee();\n' +
'a();')
bt(
'if (foo);\n' +
'if (bar) if (baz) whee();\n' +
'a();',
# -- output --
'if (foo);\n' +
'if (bar)\n' +
' if (baz) whee();\n' +
'a();')
bt(
'if (options)\n' +
' for (var p in options)\n' +
' this[p] = options[p];')
bt(
'if (options) for (var p in options) this[p] = options[p];',
# -- output --
'if (options)\n' +
' for (var p in options) this[p] = options[p];')
bt(
'if (options) do q(); while (b());',
# -- output --
'if (options)\n' +
' do q(); while (b());')
bt(
'if (options) while (b()) q();',
# -- output --
'if (options)\n' +
' while (b()) q();')
bt(
'if (options) do while (b()) q(); while (a());',
# -- output --
'if (options)\n' +
' do\n' +
' while (b()) q(); while (a());')
bt(
'function f(a, b, c,\n' +
'd, e) {}',
# -- output --
'function f(a, b, c,\n' +
' d, e) {}')
bt(
'function f(a,b) {if(a) b()}function g(a,b) {if(!a) b()}',
# -- output --
'function f(a, b) {\n' +
' if (a) b()\n' +
'}\n' +
'\n' +
'function g(a, b) {\n' +
' if (!a) b()\n' +
'}')
bt(
'function f(a,b) {if(a) b()}\n' +
'\n' +
'\n' +
'\n' +
'function g(a,b) {if(!a) b()}',
# -- output --
'function f(a, b) {\n' +
' if (a) b()\n' +
'}\n' +
'\n' +
'\n' +
'\n' +
'function g(a, b) {\n' +
' if (!a) b()\n' +
'}')
bt(
'(if(a) b())(if(a) b())',
# -- output --
'(\n' +
' if (a) b())(\n' +
' if (a) b())')
bt(
'(if(a) b())\n' +
'\n' +
'\n' +
'(if(a) b())',
# -- output --
'(\n' +
' if (a) b())\n' +
'\n' +
'\n' +
'(\n' +
' if (a) b())')
bt(
'if\n' +
'(a)\n' +
'b();',
# -- output --
'if (a)\n' +
' b();')
bt(
'var a =\n' +
'foo',
# -- output --
'var a =\n' +
' foo')
bt(
'var a = {\n' +
'"a":1,\n' +
'"b":2}',
# -- output --
'var a = {\n' +
' "a": 1,\n' +
' "b": 2\n' +
'}')
bt(
'var a = {\n' +
'\'a\':1,\n' +
'\'b\':2}',
# -- output --
'var a = {\n' +
' \'a\': 1,\n' +
' \'b\': 2\n' +
'}')
bt('var a = /*i*/ "b";')
bt(
'var a = /*i*/\n' +
'"b";',
# -- output --
'var a = /*i*/\n' +
' "b";')
bt(
'{\n' +
'\n' +
'\n' +
'"x"\n' +
'}',
# -- output --
'{\n' +
'\n' +
'\n' +
' "x"\n' +
'}')
bt(
'if(a &&\n' +
'b\n' +
'||\n' +
'c\n' +
'||d\n' +
'&&\n' +
'e) e = f',
# -- output --
'if (a &&\n' +
' b ||\n' +
' c ||\n' +
' d &&\n' +
' e) e = f')
bt(
'if(a &&\n' +
'(b\n' +
'||\n' +
'c\n' +
'||d)\n' +
'&&\n' +
'e) e = f',
# -- output --
'if (a &&\n' +
' (b ||\n' +
' c ||\n' +
' d) &&\n' +
' e) e = f')
test_fragment(
'\n' +
'\n' +
'"x"',
# -- output --
'"x"')
test_fragment(
'{\n' +
'\n' +
'"x"\n' +
'h=5;\n' +
'}',
# -- output --
'{\n' +
'\n' +
' "x"\n' +
' h = 5;\n' +
'}')
bt(
'var a = "foo" +\n' +
' "bar";')
bt(
'var a = 42; // foo\n' +
'\n' +
'var b;')
bt(
'var a = 42; // foo\n' +
'\n' +
'\n' +
'var b;')
bt(
'a = 1;\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'b = 2;')
#============================================================
# break chained methods - (break_chained_methods = "false", preserve_newlines = "false")
self.reset_options()
self.options.break_chained_methods = false
self.options.preserve_newlines = false
bt(
'foo\n' +
'.bar()\n' +
'.baz().cucumber(fat)',
# -- output --
'foo.bar().baz().cucumber(fat)')
bt(
'foo\n' +
'.bar()\n' +
'.baz().cucumber(fat); foo.bar().baz().cucumber(fat)',
# -- output --
'foo.bar().baz().cucumber(fat);\n' +
'foo.bar().baz().cucumber(fat)')
bt(
'foo\n' +
'.bar()\n' +
'.baz().cucumber(fat)\n' +
' foo.bar().baz().cucumber(fat)',
# -- output --
'foo.bar().baz().cucumber(fat)\n' +
'foo.bar().baz().cucumber(fat)')
bt(
'this\n' +
'.something = foo.bar()\n' +
'.baz().cucumber(fat)',
# -- output --
'this.something = foo.bar().baz().cucumber(fat)')
bt('this.something.xxx = foo.moo.bar()')
bt(
'this\n' +
'.something\n' +
'.xxx = foo.moo\n' +
'.bar()',
# -- output --
'this.something.xxx = foo.moo.bar()')
# break chained methods - (break_chained_methods = "false", preserve_newlines = "true")
self.reset_options()
self.options.break_chained_methods = false
self.options.preserve_newlines = true
bt(
'foo\n' +
'.bar()\n' +
'.baz().cucumber(fat)',
# -- output --
'foo\n' +
' .bar()\n' +
' .baz().cucumber(fat)')
bt(
'foo\n' +
'.bar()\n' +
'.baz().cucumber(fat); foo.bar().baz().cucumber(fat)',
# -- output --
'foo\n' +
' .bar()\n' +
' .baz().cucumber(fat);\n' +
'foo.bar().baz().cucumber(fat)')
bt(
'foo\n' +
'.bar()\n' +
'.baz().cucumber(fat)\n' +
' foo.bar().baz().cucumber(fat)',
# -- output --
'foo\n' +
' .bar()\n' +
' .baz().cucumber(fat)\n' +
'foo.bar().baz().cucumber(fat)')
bt(
'this\n' +
'.something = foo.bar()\n' +
'.baz().cucumber(fat)',
# -- output --
'this\n' +
' .something = foo.bar()\n' +
' .baz().cucumber(fat)')
bt('this.something.xxx = foo.moo.bar()')
bt(
'this\n' +
'.something\n' +
'.xxx = foo.moo\n' +
'.bar()',
# -- output --
'this\n' +
' .something\n' +
' .xxx = foo.moo\n' +
' .bar()')
# break chained methods - (break_chained_methods = "true", preserve_newlines = "false")
self.reset_options()
self.options.break_chained_methods = true
self.options.preserve_newlines = false
bt(
'foo\n' +
'.bar()\n' +
'.baz().cucumber(fat)',
# -- output --
'foo.bar()\n' +
' .baz()\n' +
' .cucumber(fat)')
bt(
'foo\n' +
'.bar()\n' +
'.baz().cucumber(fat); foo.bar().baz().cucumber(fat)',
# -- output --
'foo.bar()\n' +
' .baz()\n' +
' .cucumber(fat);\n' +
'foo.bar()\n' +
' .baz()\n' +
' .cucumber(fat)')
bt(
'foo\n' +
'.bar()\n' +
'.baz().cucumber(fat)\n' +
' foo.bar().baz().cucumber(fat)',
# -- output --
'foo.bar()\n' +
' .baz()\n' +
' .cucumber(fat)\n' +
'foo.bar()\n' +
' .baz()\n' +
' .cucumber(fat)')
bt(
'this\n' +
'.something = foo.bar()\n' +
'.baz().cucumber(fat)',
# -- output --
'this.something = foo.bar()\n' +
' .baz()\n' +
' .cucumber(fat)')
bt('this.something.xxx = foo.moo.bar()')
bt(
'this\n' +
'.something\n' +
'.xxx = foo.moo\n' +
'.bar()',
# -- output --
'this.something.xxx = foo.moo.bar()')
# break chained methods - (break_chained_methods = "true", preserve_newlines = "true")
self.reset_options()
self.options.break_chained_methods = true
self.options.preserve_newlines = true
bt(
'foo\n' +
'.bar()\n' +
'.baz().cucumber(fat)',
# -- output --
'foo\n' +
' .bar()\n' +
' .baz()\n' +
' .cucumber(fat)')
bt(
'foo\n' +
'.bar()\n' +
'.baz().cucumber(fat); foo.bar().baz().cucumber(fat)',
# -- output --
'foo\n' +
' .bar()\n' +
' .baz()\n' +
' .cucumber(fat);\n' +
'foo.bar()\n' +
' .baz()\n' +
' .cucumber(fat)')
bt(
'foo\n' +
'.bar()\n' +
'.baz().cucumber(fat)\n' +
' foo.bar().baz().cucumber(fat)',
# -- output --
'foo\n' +
' .bar()\n' +
' .baz()\n' +
' .cucumber(fat)\n' +
'foo.bar()\n' +
' .baz()\n' +
' .cucumber(fat)')
bt(
'this\n' +
'.something = foo.bar()\n' +
'.baz().cucumber(fat)',
# -- output --
'this\n' +
' .something = foo.bar()\n' +
' .baz()\n' +
' .cucumber(fat)')
bt('this.something.xxx = foo.moo.bar()')
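# The property-only chain above stays on one line in every permutation:
# break_chained_methods only breaks before a "." that follows a method
# call, not plain property access.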
bt(
'this\n' +
'.something\n' +
'.xxx = foo.moo\n' +
'.bar()',
# -- output --
'this\n' +
' .something\n' +
' .xxx = foo.moo\n' +
' .bar()')
#============================================================
# line wrapping 0
self.reset_options()
self.options.preserve_newlines = false
self.options.wrap_line_length = 0
test_fragment(
'' + wrap_input_1 + '',
# -- output --
'foo.bar().baz().cucumber((f && "sass") || (leans && mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap.but_this_can\n' +
'return between_return_and_expression_should_never_wrap.but_this_can\n' +
'throw between_throw_and_expression_should_never_wrap.but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_.okay();\n' +
'object_literal = {\n' +
' propertx: first_token + 12345678.99999E-6,\n' +
' property: first_token_should_never_wrap + but_this_can,\n' +
' propertz: first_token_should_never_wrap + !but_this_can,\n' +
' proper: "first_token_should_never_wrap" + "but_this_can"\n' +
'}')
test_fragment(
'' + wrap_input_2 + '',
# -- output --
'{\n' +
' foo.bar().baz().cucumber((f && "sass") || (leans && mean));\n' +
' Test_very_long_variable_name_this_should_never_wrap.but_this_can\n' +
' return between_return_and_expression_should_never_wrap.but_this_can\n' +
' throw between_throw_and_expression_should_never_wrap.but_this_can\n' +
' if (wraps_can_occur && inside_an_if_block) that_is_.okay();\n' +
' object_literal = {\n' +
' propertx: first_token + 12345678.99999E-6,\n' +
' property: first_token_should_never_wrap + but_this_can,\n' +
' propertz: first_token_should_never_wrap + !but_this_can,\n' +
' proper: "first_token_should_never_wrap" + "but_this_can"\n' +
' }\n' +
'}')
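# Each wrap_line_length group below re-runs wrap_input_1 and
# wrap_input_2; 0 means no wrapping limit, and 40/41/45 probe off-by-one
# behavior around the ruler columns marked above the inputs.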
#============================================================
# line wrapping 70
self.reset_options()
self.options.preserve_newlines = false
self.options.wrap_line_length = 70
test_fragment(
'' + wrap_input_1 + '',
# -- output --
'foo.bar().baz().cucumber((f && "sass") || (leans && mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap.but_this_can\n' +
'return between_return_and_expression_should_never_wrap.but_this_can\n' +
'throw between_throw_and_expression_should_never_wrap.but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_.okay();\n' +
'object_literal = {\n' +
' propertx: first_token + 12345678.99999E-6,\n' +
' property: first_token_should_never_wrap + but_this_can,\n' +
' propertz: first_token_should_never_wrap + !but_this_can,\n' +
' proper: "first_token_should_never_wrap" + "but_this_can"\n' +
'}')
test_fragment(
'' + wrap_input_2 + '',
# -- output --
'{\n' +
' foo.bar().baz().cucumber((f && "sass") || (leans && mean));\n' +
' Test_very_long_variable_name_this_should_never_wrap.but_this_can\n' +
' return between_return_and_expression_should_never_wrap\n' +
' .but_this_can\n' +
' throw between_throw_and_expression_should_never_wrap.but_this_can\n' +
' if (wraps_can_occur && inside_an_if_block) that_is_.okay();\n' +
' object_literal = {\n' +
' propertx: first_token + 12345678.99999E-6,\n' +
' property: first_token_should_never_wrap + but_this_can,\n' +
' propertz: first_token_should_never_wrap + !but_this_can,\n' +
' proper: "first_token_should_never_wrap" + "but_this_can"\n' +
' }\n' +
'}')
#============================================================
# line wrapping 40
self.reset_options()
self.options.preserve_newlines = false
self.options.wrap_line_length = 40
test_fragment(
'' + wrap_input_1 + '',
# -- output --
'foo.bar().baz().cucumber((f &&\n' +
' "sass") || (leans && mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n' +
' .but_this_can\n' +
'return between_return_and_expression_should_never_wrap\n' +
' .but_this_can\n' +
'throw between_throw_and_expression_should_never_wrap\n' +
' .but_this_can\n' +
'if (wraps_can_occur &&\n' +
' inside_an_if_block) that_is_.okay();\n' +
'object_literal = {\n' +
' propertx: first_token +\n' +
' 12345678.99999E-6,\n' +
' property: first_token_should_never_wrap +\n' +
' but_this_can,\n' +
' propertz: first_token_should_never_wrap +\n' +
' !but_this_can,\n' +
' proper: "first_token_should_never_wrap" +\n' +
' "but_this_can"\n' +
'}')
test_fragment(
'' + wrap_input_2 + '',
# -- output --
'{\n' +
' foo.bar().baz().cucumber((f &&\n' +
' "sass") || (leans &&\n' +
' mean));\n' +
' Test_very_long_variable_name_this_should_never_wrap\n' +
' .but_this_can\n' +
' return between_return_and_expression_should_never_wrap\n' +
' .but_this_can\n' +
' throw between_throw_and_expression_should_never_wrap\n' +
' .but_this_can\n' +
' if (wraps_can_occur &&\n' +
' inside_an_if_block) that_is_\n' +
' .okay();\n' +
' object_literal = {\n' +
' propertx: first_token +\n' +
' 12345678.99999E-6,\n' +
' property: first_token_should_never_wrap +\n' +
' but_this_can,\n' +
' propertz: first_token_should_never_wrap +\n' +
' !but_this_can,\n' +
' proper: "first_token_should_never_wrap" +\n' +
' "but_this_can"\n' +
' }\n' +
'}')
#============================================================
# line wrapping 41
self.reset_options()
self.options.preserve_newlines = false
self.options.wrap_line_length = 41
test_fragment(
'' + wrap_input_1 + '',
# -- output --
'foo.bar().baz().cucumber((f && "sass") ||\n' +
' (leans && mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n' +
' .but_this_can\n' +
'return between_return_and_expression_should_never_wrap\n' +
' .but_this_can\n' +
'throw between_throw_and_expression_should_never_wrap\n' +
' .but_this_can\n' +
'if (wraps_can_occur &&\n' +
' inside_an_if_block) that_is_.okay();\n' +
'object_literal = {\n' +
' propertx: first_token +\n' +
' 12345678.99999E-6,\n' +
' property: first_token_should_never_wrap +\n' +
' but_this_can,\n' +
' propertz: first_token_should_never_wrap +\n' +
' !but_this_can,\n' +
' proper: "first_token_should_never_wrap" +\n' +
' "but_this_can"\n' +
'}')
test_fragment(
'' + wrap_input_2 + '',
# -- output --
'{\n' +
' foo.bar().baz().cucumber((f &&\n' +
' "sass") || (leans &&\n' +
' mean));\n' +
' Test_very_long_variable_name_this_should_never_wrap\n' +
' .but_this_can\n' +
' return between_return_and_expression_should_never_wrap\n' +
' .but_this_can\n' +
' throw between_throw_and_expression_should_never_wrap\n' +
' .but_this_can\n' +
' if (wraps_can_occur &&\n' +
' inside_an_if_block) that_is_\n' +
' .okay();\n' +
' object_literal = {\n' +
' propertx: first_token +\n' +
' 12345678.99999E-6,\n' +
' property: first_token_should_never_wrap +\n' +
' but_this_can,\n' +
' propertz: first_token_should_never_wrap +\n' +
' !but_this_can,\n' +
' proper: "first_token_should_never_wrap" +\n' +
' "but_this_can"\n' +
' }\n' +
'}')
#============================================================
# line wrapping 45
self.reset_options()
self.options.preserve_newlines = false
self.options.wrap_line_length = 45
test_fragment(
'' + wrap_input_1 + '',
# -- output --
'foo.bar().baz().cucumber((f && "sass") || (\n' +
' leans && mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n' +
' .but_this_can\n' +
'return between_return_and_expression_should_never_wrap\n' +
' .but_this_can\n' +
'throw between_throw_and_expression_should_never_wrap\n' +
' .but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block)\n' +
' that_is_.okay();\n' +
'object_literal = {\n' +
' propertx: first_token +\n' +
' 12345678.99999E-6,\n' +
' property: first_token_should_never_wrap +\n' +
' but_this_can,\n' +
' propertz: first_token_should_never_wrap +\n' +
' !but_this_can,\n' +
' proper: "first_token_should_never_wrap" +\n' +
' "but_this_can"\n' +
'}')
test_fragment(
'' + wrap_input_2 + '',
# -- output --
'{\n' +
' foo.bar().baz().cucumber((f && "sass") ||\n' +
' (leans && mean));\n' +
' Test_very_long_variable_name_this_should_never_wrap\n' +
' .but_this_can\n' +
' return between_return_and_expression_should_never_wrap\n' +
' .but_this_can\n' +
' throw between_throw_and_expression_should_never_wrap\n' +
' .but_this_can\n' +
' if (wraps_can_occur &&\n' +
' inside_an_if_block) that_is_.okay();\n' +
' object_literal = {\n' +
' propertx: first_token +\n' +
' 12345678.99999E-6,\n' +
' property: first_token_should_never_wrap +\n' +
' but_this_can,\n' +
' propertz: first_token_should_never_wrap +\n' +
' !but_this_can,\n' +
' proper: "first_token_should_never_wrap" +\n' +
' "but_this_can"\n' +
' }\n' +
'}')
#============================================================
# line wrapping 0
self.reset_options()
self.options.preserve_newlines = true
self.options.wrap_line_length = 0
test_fragment(
'' + wrap_input_1 + '',
# -- output --
'foo.bar().baz().cucumber((f && "sass") || (leans && mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n' +
' .but_this_can\n' +
'return between_return_and_expression_should_never_wrap.but_this_can\n' +
'throw between_throw_and_expression_should_never_wrap.but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_\n' +
' .okay();\n' +
'object_literal = {\n' +
' propertx: first_token + 12345678.99999E-6,\n' +
' property: first_token_should_never_wrap + but_this_can,\n' +
' propertz: first_token_should_never_wrap + !but_this_can,\n' +
' proper: "first_token_should_never_wrap" + "but_this_can"\n' +
'}')
test_fragment(
'' + wrap_input_2 + '',
# -- output --
'{\n' +
' foo.bar().baz().cucumber((f && "sass") || (leans && mean));\n' +
' Test_very_long_variable_name_this_should_never_wrap\n' +
' .but_this_can\n' +
' return between_return_and_expression_should_never_wrap.but_this_can\n' +
' throw between_throw_and_expression_should_never_wrap.but_this_can\n' +
' if (wraps_can_occur && inside_an_if_block) that_is_\n' +
' .okay();\n' +
' object_literal = {\n' +
' propertx: first_token + 12345678.99999E-6,\n' +
' property: first_token_should_never_wrap + but_this_can,\n' +
' propertz: first_token_should_never_wrap + !but_this_can,\n' +
' proper: "first_token_should_never_wrap" + "but_this_can"\n' +
' }\n' +
'}')
#============================================================
# line wrapping 70
self.reset_options()
self.options.preserve_newlines = true
self.options.wrap_line_length = 70
test_fragment(
'' + wrap_input_1 + '',
# -- output --
'foo.bar().baz().cucumber((f && "sass") || (leans && mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n' +
' .but_this_can\n' +
'return between_return_and_expression_should_never_wrap.but_this_can\n' +
'throw between_throw_and_expression_should_never_wrap.but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_\n' +
' .okay();\n' +
'object_literal = {\n' +
' propertx: first_token + 12345678.99999E-6,\n' +
' property: first_token_should_never_wrap + but_this_can,\n' +
' propertz: first_token_should_never_wrap + !but_this_can,\n' +
' proper: "first_token_should_never_wrap" + "but_this_can"\n' +
'}')
test_fragment(
'' + wrap_input_2 + '',
# -- output --
'{\n' +
' foo.bar().baz().cucumber((f && "sass") || (leans && mean));\n' +
' Test_very_long_variable_name_this_should_never_wrap\n' +
' .but_this_can\n' +
' return between_return_and_expression_should_never_wrap\n' +
' .but_this_can\n' +
' throw between_throw_and_expression_should_never_wrap.but_this_can\n' +
' if (wraps_can_occur && inside_an_if_block) that_is_\n' +
' .okay();\n' +
' object_literal = {\n' +
' propertx: first_token + 12345678.99999E-6,\n' +
' property: first_token_should_never_wrap + but_this_can,\n' +
' propertz: first_token_should_never_wrap + !but_this_can,\n' +
' proper: "first_token_should_never_wrap" + "but_this_can"\n' +
' }\n' +
'}')
#============================================================
# line wrapping 40
self.reset_options()
self.options.preserve_newlines = true
self.options.wrap_line_length = 40
test_fragment(
'' + wrap_input_1 + '',
# -- output --
'foo.bar().baz().cucumber((f &&\n' +
' "sass") || (leans && mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n' +
' .but_this_can\n' +
'return between_return_and_expression_should_never_wrap\n' +
' .but_this_can\n' +
'throw between_throw_and_expression_should_never_wrap\n' +
' .but_this_can\n' +
'if (wraps_can_occur &&\n' +
' inside_an_if_block) that_is_\n' +
' .okay();\n' +
'object_literal = {\n' +
' propertx: first_token +\n' +
' 12345678.99999E-6,\n' +
' property: first_token_should_never_wrap +\n' +
' but_this_can,\n' +
' propertz: first_token_should_never_wrap +\n' +
' !but_this_can,\n' +
' proper: "first_token_should_never_wrap" +\n' +
' "but_this_can"\n' +
'}')
test_fragment(
'' + wrap_input_2 + '',
# -- output --
'{\n' +
' foo.bar().baz().cucumber((f &&\n' +
' "sass") || (leans &&\n' +
' mean));\n' +
' Test_very_long_variable_name_this_should_never_wrap\n' +
' .but_this_can\n' +
' return between_return_and_expression_should_never_wrap\n' +
' .but_this_can\n' +
' throw between_throw_and_expression_should_never_wrap\n' +
' .but_this_can\n' +
' if (wraps_can_occur &&\n' +
' inside_an_if_block) that_is_\n' +
' .okay();\n' +
' object_literal = {\n' +
' propertx: first_token +\n' +
' 12345678.99999E-6,\n' +
' property: first_token_should_never_wrap +\n' +
' but_this_can,\n' +
' propertz: first_token_should_never_wrap +\n' +
' !but_this_can,\n' +
' proper: "first_token_should_never_wrap" +\n' +
' "but_this_can"\n' +
' }\n' +
'}')
#============================================================
# line wrapping 41
self.reset_options()
self.options.preserve_newlines = true
self.options.wrap_line_length = 41
test_fragment(
'' + wrap_input_1 + '',
# -- output --
'foo.bar().baz().cucumber((f && "sass") ||\n' +
' (leans && mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n' +
' .but_this_can\n' +
'return between_return_and_expression_should_never_wrap\n' +
' .but_this_can\n' +
'throw between_throw_and_expression_should_never_wrap\n' +
' .but_this_can\n' +
'if (wraps_can_occur &&\n' +
' inside_an_if_block) that_is_\n' +
' .okay();\n' +
'object_literal = {\n' +
' propertx: first_token +\n' +
' 12345678.99999E-6,\n' +
' property: first_token_should_never_wrap +\n' +
' but_this_can,\n' +
' propertz: first_token_should_never_wrap +\n' +
' !but_this_can,\n' +
' proper: "first_token_should_never_wrap" +\n' +
' "but_this_can"\n' +
'}')
test_fragment(
'' + wrap_input_2 + '',
# -- output --
'{\n' +
' foo.bar().baz().cucumber((f &&\n' +
' "sass") || (leans &&\n' +
' mean));\n' +
' Test_very_long_variable_name_this_should_never_wrap\n' +
' .but_this_can\n' +
' return between_return_and_expression_should_never_wrap\n' +
' .but_this_can\n' +
' throw between_throw_and_expression_should_never_wrap\n' +
' .but_this_can\n' +
' if (wraps_can_occur &&\n' +
' inside_an_if_block) that_is_\n' +
' .okay();\n' +
' object_literal = {\n' +
' propertx: first_token +\n' +
' 12345678.99999E-6,\n' +
' property: first_token_should_never_wrap +\n' +
' but_this_can,\n' +
' propertz: first_token_should_never_wrap +\n' +
' !but_this_can,\n' +
' proper: "first_token_should_never_wrap" +\n' +
' "but_this_can"\n' +
' }\n' +
'}')
#============================================================
# line wrapping 45
self.reset_options()
self.options.preserve_newlines = true
self.options.wrap_line_length = 45
test_fragment(
'' + wrap_input_1 + '',
# -- output --
'foo.bar().baz().cucumber((f && "sass") || (\n' +
' leans && mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n' +
' .but_this_can\n' +
'return between_return_and_expression_should_never_wrap\n' +
' .but_this_can\n' +
'throw between_throw_and_expression_should_never_wrap\n' +
' .but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block)\n' +
' that_is_\n' +
' .okay();\n' +
'object_literal = {\n' +
' propertx: first_token +\n' +
' 12345678.99999E-6,\n' +
' property: first_token_should_never_wrap +\n' +
' but_this_can,\n' +
' propertz: first_token_should_never_wrap +\n' +
' !but_this_can,\n' +
' proper: "first_token_should_never_wrap" +\n' +
' "but_this_can"\n' +
'}')
test_fragment(
'' + wrap_input_2 + '',
# -- output --
'{\n' +
' foo.bar().baz().cucumber((f && "sass") ||\n' +
' (leans && mean));\n' +
' Test_very_long_variable_name_this_should_never_wrap\n' +
' .but_this_can\n' +
' return between_return_and_expression_should_never_wrap\n' +
' .but_this_can\n' +
' throw between_throw_and_expression_should_never_wrap\n' +
' .but_this_can\n' +
' if (wraps_can_occur &&\n' +
' inside_an_if_block) that_is_\n' +
' .okay();\n' +
' object_literal = {\n' +
' propertx: first_token +\n' +
' 12345678.99999E-6,\n' +
' property: first_token_should_never_wrap +\n' +
' but_this_can,\n' +
' propertz: first_token_should_never_wrap +\n' +
' !but_this_can,\n' +
' proper: "first_token_should_never_wrap" +\n' +
' "but_this_can"\n' +
' }\n' +
'}')
#============================================================
# general preserve_newlines tests - preserve limit
self.reset_options()
self.options.preserve_newlines = true
self.options.max_preserve_newlines = 8
bt(
'a = 1;\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'b = 2;',
# -- output --
'a = 1;\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'\n' +
'b = 2;')
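# max_preserve_newlines = 8 caps runs at 8 consecutive newlines, so the
# 21 blank lines in the input collapse to 7 between the two statements.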
#============================================================
# more random test
self.reset_options()
bt('return function();')
bt('var a = function();')
bt('var a = 5 + function();')
# actionscript import
bt('import foo.*;')
# actionscript
bt('function f(a: a, b: b)')
bt(
'function a(a) {} function b(b) {} function c(c) {}',
# -- output --
'function a(a) {}\n' +
'\n' +
'function b(b) {}\n' +
'\n' +
'function c(c) {}')
bt('foo(a, function() {})')
bt('foo(a, /regex/)')
bt(
'/* foo */\n' +
'"x"')
test_fragment(
'roo = {\n' +
' /*\n' +
' ****\n' +
' FOO\n' +
' ****\n' +
' */\n' +
' BAR: 0\n' +
'};')
test_fragment(
'if (zz) {\n' +
' // ....\n' +
'}\n' +
'(function')
bt(
'a = //comment\n' +
' /regex/;')
bt('var a = new function();')
bt('new function')
bt(
'if (a)\n' +
'{\n' +
'b;\n' +
'}\n' +
'else\n' +
'{\n' +
'c;\n' +
'}',
# -- output --
'if (a) {\n' +
' b;\n' +
'} else {\n' +
' c;\n' +
'}')
#============================================================
# operator_position option - ensure no newlines if preserve_newlines is false - (preserve_newlines = "false")
self.reset_options()
self.options.preserve_newlines = false
bt(
'var res = a + b - c / d * e % f;\n' +
'var res = g & h | i ^ j;\n' +
'var res = (k && l || m) ? n : o;\n' +
'var res = p >> q << r >>> s;\n' +
'var res = t === u !== v != w == x >= y <= z > aa < ab;\n' +
'ac + -ad')
bt(
'var res = a + b\n' +
'- c /\n' +
'd * e\n' +
'%\n' +
'f;\n' +
' var res = g & h\n' +
'| i ^\n' +
'j;\n' +
'var res = (k &&\n' +
'l\n' +
'|| m) ?\n' +
'n\n' +
': o\n' +
';\n' +
'var res = p\n' +
'>> q <<\n' +
'r\n' +
'>>> s;\n' +
'var res\n' +
' = t\n' +
'\n' +
' === u !== v\n' +
' !=\n' +
'w\n' +
'== x >=\n' +
'y <= z > aa <\n' +
'ab;\n' +
'ac +\n' +
'-ad',
# -- output --
'var res = a + b - c / d * e % f;\n' +
'var res = g & h | i ^ j;\n' +
'var res = (k && l || m) ? n : o;\n' +
'var res = p >> q << r >>> s;\n' +
'var res = t === u !== v != w == x >= y <= z > aa < ab;\n' +
'ac + -ad')
# operator_position option - ensure no newlines if preserve_newlines is false - (operator_position = ""before-newline"", preserve_newlines = "false")
self.reset_options()
self.options.operator_position = 'before-newline'
self.options.preserve_newlines = false
bt(
'var res = a + b - c / d * e % f;\n' +
'var res = g & h | i ^ j;\n' +
'var res = (k && l || m) ? n : o;\n' +
'var res = p >> q << r >>> s;\n' +
'var res = t === u !== v != w == x >= y <= z > aa < ab;\n' +
'ac + -ad')
bt(
'var res = a + b\n' +
'- c /\n' +
'd * e\n' +
'%\n' +
'f;\n' +
' var res = g & h\n' +
'| i ^\n' +
'j;\n' +
'var res = (k &&\n' +
'l\n' +
'|| m) ?\n' +
'n\n' +
': o\n' +
';\n' +
'var res = p\n' +
'>> q <<\n' +
'r\n' +
'>>> s;\n' +
'var res\n' +
' = t\n' +
'\n' +
' === u !== v\n' +
' !=\n' +
'w\n' +
'== x >=\n' +
'y <= z > aa <\n' +
'ab;\n' +
'ac +\n' +
'-ad',
# -- output --
'var res = a + b - c / d * e % f;\n' +
'var res = g & h | i ^ j;\n' +
'var res = (k && l || m) ? n : o;\n' +
'var res = p >> q << r >>> s;\n' +
'var res = t === u !== v != w == x >= y <= z > aa < ab;\n' +
'ac + -ad')
# operator_position option - ensure no newlines if preserve_newlines is false - (operator_position = ""after-newline"", preserve_newlines = "false")
self.reset_options()
self.options.operator_position = 'after-newline'
self.options.preserve_newlines = false
bt(
'var res = a + b - c / d * e % f;\n' +
'var res = g & h | i ^ j;\n' +
'var res = (k && l || m) ? n : o;\n' +
'var res = p >> q << r >>> s;\n' +
'var res = t === u !== v != w == x >= y <= z > aa < ab;\n' +
'ac + -ad')
bt(
'var res = a + b\n' +
'- c /\n' +
'd * e\n' +
'%\n' +
'f;\n' +
' var res = g & h\n' +
'| i ^\n' +
'j;\n' +
'var res = (k &&\n' +
'l\n' +
'|| m) ?\n' +
'n\n' +
': o\n' +
';\n' +
'var res = p\n' +
'>> q <<\n' +
'r\n' +
'>>> s;\n' +
'var res\n' +
' = t\n' +
'\n' +
' === u !== v\n' +
' !=\n' +
'w\n' +
'== x >=\n' +
'y <= z > aa <\n' +
'ab;\n' +
'ac +\n' +
'-ad',
# -- output --
'var res = a + b - c / d * e % f;\n' +
'var res = g & h | i ^ j;\n' +
'var res = (k && l || m) ? n : o;\n' +
'var res = p >> q << r >>> s;\n' +
'var res = t === u !== v != w == x >= y <= z > aa < ab;\n' +
'ac + -ad')
# operator_position option - ensure no newlines if preserve_newlines is false - (operator_position = ""preserve-newline"", preserve_newlines = "false")
self.reset_options()
self.options.operator_position = 'preserve-newline'
self.options.preserve_newlines = false
bt(
'var res = a + b - c / d * e % f;\n' +
'var res = g & h | i ^ j;\n' +
'var res = (k && l || m) ? n : o;\n' +
'var res = p >> q << r >>> s;\n' +
'var res = t === u !== v != w == x >= y <= z > aa < ab;\n' +
'ac + -ad')
bt(
'var res = a + b\n' +
'- c /\n' +
'd * e\n' +
'%\n' +
'f;\n' +
' var res = g & h\n' +
'| i ^\n' +
'j;\n' +
'var res = (k &&\n' +
'l\n' +
'|| m) ?\n' +
'n\n' +
': o\n' +
';\n' +
'var res = p\n' +
'>> q <<\n' +
'r\n' +
'>>> s;\n' +
'var res\n' +
' = t\n' +
'\n' +
' === u !== v\n' +
' !=\n' +
'w\n' +
'== x >=\n' +
'y <= z > aa <\n' +
'ab;\n' +
'ac +\n' +
'-ad',
# -- output --
'var res = a + b - c / d * e % f;\n' +
'var res = g & h | i ^ j;\n' +
'var res = (k && l || m) ? n : o;\n' +
'var res = p >> q << r >>> s;\n' +
'var res = t === u !== v != w == x >= y <= z > aa < ab;\n' +
'ac + -ad')
#============================================================
# operator_position option - set to "before-newline" (default value) - ()
self.reset_options()
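# In the default 'before-newline' mode a binary operator that the author
# split across lines is pulled back to the end of the first line, and only
# the right-hand operand wraps onto the continuation line. A hedged sketch
# (assuming the public jsbeautifier module API):
#
#   import jsbeautifier
#   opts = jsbeautifier.default_options()
#   opts.operator_position = 'before-newline'  # the default
#   jsbeautifier.beautify('var res = a\n+ b;', opts)
#   # -> 'var res = a +\n    b;'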
# comprehensive, various newlines
bt(
'var res = a + b\n' +
'- c /\n' +
'd * e\n' +
'%\n' +
'f;\n' +
' var res = g & h\n' +
'| i ^\n' +
'j;\n' +
'var res = (k &&\n' +
'l\n' +
'|| m) ?\n' +
'n\n' +
': o\n' +
';\n' +
'var res = p\n' +
'>> q <<\n' +
'r\n' +
'>>> s;\n' +
'var res\n' +
' = t\n' +
'\n' +
' === u !== v\n' +
' !=\n' +
'w\n' +
'== x >=\n' +
'y <= z > aa <\n' +
'ab;\n' +
'ac +\n' +
'-ad',
# -- output --
'var res = a + b -\n' +
' c /\n' +
' d * e %\n' +
' f;\n' +
'var res = g & h |\n' +
' i ^\n' +
' j;\n' +
'var res = (k &&\n' +
' l ||\n' +
' m) ?\n' +
' n :\n' +
' o;\n' +
'var res = p >>\n' +
' q <<\n' +
' r >>>\n' +
' s;\n' +
'var res = t\n' +
'\n' +
' ===\n' +
' u !== v !=\n' +
' w ==\n' +
' x >=\n' +
' y <= z > aa <\n' +
' ab;\n' +
'ac +\n' +
' -ad')
# colon special case
bt(
'var a = {\n' +
' b\n' +
': bval,\n' +
' c:\n' +
'cval\n' +
' ,d: dval\n' +
'};\n' +
'var e = f ? g\n' +
': h;\n' +
'var i = j ? k :\n' +
'l;',
# -- output --
'var a = {\n' +
' b: bval,\n' +
' c: cval,\n' +
' d: dval\n' +
'};\n' +
'var e = f ? g :\n' +
' h;\n' +
'var i = j ? k :\n' +
' l;')
# catch-all, includes brackets and various other code
bt(
'var d = 1;\n' +
'if (a === b\n' +
' && c) {\n' +
' d = (c * everything\n' +
' / something_else) %\n' +
' b;\n' +
' e\n' +
' += d;\n' +
'\n' +
'} else if (!(complex && simple) ||\n' +
' (emotion && emotion.name === "happy")) {\n' +
' cryTearsOfJoy(many ||\n' +
' anOcean\n' +
' || aRiver);\n' +
'}',
# -- output --
'var d = 1;\n' +
'if (a === b &&\n' +
' c) {\n' +
' d = (c * everything /\n' +
' something_else) %\n' +
' b;\n' +
' e\n' +
' += d;\n' +
'\n' +
'} else if (!(complex && simple) ||\n' +
' (emotion && emotion.name === "happy")) {\n' +
' cryTearsOfJoy(many ||\n' +
' anOcean ||\n' +
' aRiver);\n' +
'}')
# operator_position option - set to "before-newline" (default value) - (operator_position = ""before-newline"")
self.reset_options()
self.options.operator_position = 'before-newline'
# comprehensive, various newlines
bt(
'var res = a + b\n' +
'- c /\n' +
'd * e\n' +
'%\n' +
'f;\n' +
' var res = g & h\n' +
'| i ^\n' +
'j;\n' +
'var res = (k &&\n' +
'l\n' +
'|| m) ?\n' +
'n\n' +
': o\n' +
';\n' +
'var res = p\n' +
'>> q <<\n' +
'r\n' +
'>>> s;\n' +
'var res\n' +
' = t\n' +
'\n' +
' === u !== v\n' +
' !=\n' +
'w\n' +
'== x >=\n' +
'y <= z > aa <\n' +
'ab;\n' +
'ac +\n' +
'-ad',
# -- output --
'var res = a + b -\n' +
' c /\n' +
' d * e %\n' +
' f;\n' +
'var res = g & h |\n' +
' i ^\n' +
' j;\n' +
'var res = (k &&\n' +
' l ||\n' +
' m) ?\n' +
' n :\n' +
' o;\n' +
'var res = p >>\n' +
' q <<\n' +
' r >>>\n' +
' s;\n' +
'var res = t\n' +
'\n' +
' ===\n' +
' u !== v !=\n' +
' w ==\n' +
' x >=\n' +
' y <= z > aa <\n' +
' ab;\n' +
'ac +\n' +
' -ad')
# colon special case
bt(
'var a = {\n' +
' b\n' +
': bval,\n' +
' c:\n' +
'cval\n' +
' ,d: dval\n' +
'};\n' +
'var e = f ? g\n' +
': h;\n' +
'var i = j ? k :\n' +
'l;',
# -- output --
'var a = {\n' +
' b: bval,\n' +
' c: cval,\n' +
' d: dval\n' +
'};\n' +
'var e = f ? g :\n' +
' h;\n' +
'var i = j ? k :\n' +
' l;')
# catch-all, includes brackets and various other code
bt(
'var d = 1;\n' +
'if (a === b\n' +
' && c) {\n' +
' d = (c * everything\n' +
' / something_else) %\n' +
' b;\n' +
' e\n' +
' += d;\n' +
'\n' +
'} else if (!(complex && simple) ||\n' +
' (emotion && emotion.name === "happy")) {\n' +
' cryTearsOfJoy(many ||\n' +
' anOcean\n' +
' || aRiver);\n' +
'}',
# -- output --
'var d = 1;\n' +
'if (a === b &&\n' +
' c) {\n' +
' d = (c * everything /\n' +
' something_else) %\n' +
' b;\n' +
' e\n' +
' += d;\n' +
'\n' +
'} else if (!(complex && simple) ||\n' +
' (emotion && emotion.name === "happy")) {\n' +
' cryTearsOfJoy(many ||\n' +
' anOcean ||\n' +
' aRiver);\n' +
'}')
#============================================================
# operator_position option - set to "after-newline"
self.reset_options()
self.options.operator_position = 'after-newline'
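# 'after-newline' does the opposite of the default: the operator is moved to
# the start of the continuation line, in front of the wrapped operand.
# Sketch (assuming the public jsbeautifier module API):
#
#   import jsbeautifier
#   opts = jsbeautifier.default_options()
#   opts.operator_position = 'after-newline'
#   jsbeautifier.beautify('var res = a +\nb;', opts)
#   # -> 'var res = a\n    + b;'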
# comprehensive, various newlines
bt(
'var res = a + b\n' +
'- c /\n' +
'd * e\n' +
'%\n' +
'f;\n' +
' var res = g & h\n' +
'| i ^\n' +
'j;\n' +
'var res = (k &&\n' +
'l\n' +
'|| m) ?\n' +
'n\n' +
': o\n' +
';\n' +
'var res = p\n' +
'>> q <<\n' +
'r\n' +
'>>> s;\n' +
'var res\n' +
' = t\n' +
'\n' +
' === u !== v\n' +
' !=\n' +
'w\n' +
'== x >=\n' +
'y <= z > aa <\n' +
'ab;\n' +
'ac +\n' +
'-ad',
# -- output --
'var res = a + b\n' +
' - c\n' +
' / d * e\n' +
' % f;\n' +
'var res = g & h\n' +
' | i\n' +
' ^ j;\n' +
'var res = (k\n' +
' && l\n' +
' || m)\n' +
' ? n\n' +
' : o;\n' +
'var res = p\n' +
' >> q\n' +
' << r\n' +
' >>> s;\n' +
'var res = t\n' +
'\n' +
' === u !== v\n' +
' != w\n' +
' == x\n' +
' >= y <= z > aa\n' +
' < ab;\n' +
'ac\n' +
' + -ad')
# colon special case
bt(
'var a = {\n' +
' b\n' +
': bval,\n' +
' c:\n' +
'cval\n' +
' ,d: dval\n' +
'};\n' +
'var e = f ? g\n' +
': h;\n' +
'var i = j ? k :\n' +
'l;',
# -- output --
'var a = {\n' +
' b: bval,\n' +
' c: cval,\n' +
' d: dval\n' +
'};\n' +
'var e = f ? g\n' +
' : h;\n' +
'var i = j ? k\n' +
' : l;')
# catch-all, includes brackets and various other code
bt(
'var d = 1;\n' +
'if (a === b\n' +
' && c) {\n' +
' d = (c * everything\n' +
' / something_else) %\n' +
' b;\n' +
' e\n' +
' += d;\n' +
'\n' +
'} else if (!(complex && simple) ||\n' +
' (emotion && emotion.name === "happy")) {\n' +
' cryTearsOfJoy(many ||\n' +
' anOcean\n' +
' || aRiver);\n' +
'}',
# -- output --
'var d = 1;\n' +
'if (a === b\n' +
' && c) {\n' +
' d = (c * everything\n' +
' / something_else)\n' +
' % b;\n' +
' e\n' +
' += d;\n' +
'\n' +
'} else if (!(complex && simple)\n' +
' || (emotion && emotion.name === "happy")) {\n' +
' cryTearsOfJoy(many\n' +
' || anOcean\n' +
' || aRiver);\n' +
'}')
#============================================================
# operator_position option - set to "preserve-newline"
self.reset_options()
self.options.operator_position = 'preserve-newline'
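# 'preserve-newline' keeps each operator on whichever side of the break the
# author wrote it; the beautifier only normalizes the continuation indent.
# Sketch (assuming the public jsbeautifier module API):
#
#   import jsbeautifier
#   opts = jsbeautifier.default_options()
#   opts.operator_position = 'preserve-newline'
#   jsbeautifier.beautify('var res = a +\nb\n- c;', opts)
#   # -> 'var res = a +\n    b\n    - c;'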
# comprehensive, various newlines
bt(
'var res = a + b\n' +
'- c /\n' +
'd * e\n' +
'%\n' +
'f;\n' +
' var res = g & h\n' +
'| i ^\n' +
'j;\n' +
'var res = (k &&\n' +
'l\n' +
'|| m) ?\n' +
'n\n' +
': o\n' +
';\n' +
'var res = p\n' +
'>> q <<\n' +
'r\n' +
'>>> s;\n' +
'var res\n' +
' = t\n' +
'\n' +
' === u !== v\n' +
' !=\n' +
'w\n' +
'== x >=\n' +
'y <= z > aa <\n' +
'ab;\n' +
'ac +\n' +
'-ad',
# -- output --
'var res = a + b\n' +
' - c /\n' +
' d * e\n' +
' %\n' +
' f;\n' +
'var res = g & h\n' +
' | i ^\n' +
' j;\n' +
'var res = (k &&\n' +
' l\n' +
' || m) ?\n' +
' n\n' +
' : o;\n' +
'var res = p\n' +
' >> q <<\n' +
' r\n' +
' >>> s;\n' +
'var res = t\n' +
'\n' +
' === u !== v\n' +
' !=\n' +
' w\n' +
' == x >=\n' +
' y <= z > aa <\n' +
' ab;\n' +
'ac +\n' +
' -ad')
# colon special case
bt(
'var a = {\n' +
' b\n' +
': bval,\n' +
' c:\n' +
'cval\n' +
' ,d: dval\n' +
'};\n' +
'var e = f ? g\n' +
': h;\n' +
'var i = j ? k :\n' +
'l;',
# -- output --
'var a = {\n' +
' b: bval,\n' +
' c: cval,\n' +
' d: dval\n' +
'};\n' +
'var e = f ? g\n' +
' : h;\n' +
'var i = j ? k :\n' +
' l;')
# catch-all, includes brackets and various other code
bt(
'var d = 1;\n' +
'if (a === b\n' +
' && c) {\n' +
' d = (c * everything\n' +
' / something_else) %\n' +
' b;\n' +
' e\n' +
' += d;\n' +
'\n' +
'} else if (!(complex && simple) ||\n' +
' (emotion && emotion.name === "happy")) {\n' +
' cryTearsOfJoy(many ||\n' +
' anOcean\n' +
' || aRiver);\n' +
'}')
#============================================================
# Yield tests
self.reset_options()
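# The cases below pin down spacing around "yield": the delegation star binds
# tightly to the keyword ("yield*", never "yield *") and is separated from
# the delegated expression by a single space. An equivalent standalone
# sketch with default options (assuming the public jsbeautifier module API):
#
#   import jsbeautifier
#   jsbeautifier.beautify('yield * bar();')  # -> 'yield* bar();'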
bt('yield /foo\\//;')
bt('result = yield pgClient.query_(queryString);')
bt('yield [1, 2]')
bt('yield function() {};')
bt('yield* bar();')
# yield should have no space between yield and star
bt('yield * bar();', 'yield* bar();')
# yield should have space between star and generator
bt('yield *bar();', 'yield* bar();')
#============================================================
# Async / await tests
self.reset_options()
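# "async" is only a contextual keyword, so the beautifier has to add a space
# when it introduces a function or arrow ("async () => ...") but leave plain
# identifier uses alone (e.g. the popular "async" library's "async.map(...)").
# The tests below cover both sides. A minimal sketch with default options:
#
#   import jsbeautifier
#   jsbeautifier.beautify('async() => {}')   # -> 'async () => {}'
#   jsbeautifier.beautify('async.map(fn)')   # unchanged: ordinary identifier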
bt('async function foo() {}')
bt('let w = async function foo() {}')
bt(
'async function foo() {}\n' +
'var x = await foo();')
# async function as an input to another function
bt('wrapper(async function foo() {})')
# await on inline anonymous function. should have a space after await
bt(
'async function() {\n' +
' var w = await(async function() {\n' +
' return await foo();\n' +
' })();\n' +
'}',
# -- output --
'async function() {\n' +
' var w = await (async function() {\n' +
' return await foo();\n' +
' })();\n' +
'}')
# Regression test #1228
bt('const module = await import("...")')
# ensure that this doesn't break anyone with the async library
bt('async.map(function(t) {})')
# async on arrow function. should have a space after async
bt(
'async() => {}',
# -- output --
'async () => {}')
# async on arrow function. should have a space after async
bt(
'async() => {\n' +
' return 5;\n' +
'}',
# -- output --
'async () => {\n' +
' return 5;\n' +
'}')
# async on arrow function returning expression. should have a space after async
bt(
'async() => 5;',
# -- output --
'async () => 5;')
# async on arrow function returning object literal. should have a space after async
bt(
'async(x) => ({\n' +
' foo: "5"\n' +
'})',
# -- output --
'async (x) => ({\n' +
' foo: "5"\n' +
'})')
bt(
'async (x) => {\n' +
' return x * 2;\n' +
'}')
bt('async () => 5;')
bt('async x => x * 2;')
bt(
'async function() {\n' +
' const obj = {\n' +
' a: 1,\n' +
' b: await fn(),\n' +
' c: 2\n' +
' };\n' +
'}')
bt(
'const a = 1,\n' +
' b = a ? await foo() : b,\n' +
' c = await foo(),\n' +
' d = 3,\n' +
' e = (await foo()),\n' +
' f = 4;')
bt(
'a = {\n' +
' myVar: async function() {\n' +
' return a;\n' +
' },\n' +
' myOtherVar: async function() {\n' +
' yield b;\n' +
' }\n' +
'}')
bt(
'a = {\n' +
' myVar: async () => {\n' +
' return a;\n' +
' },\n' +
' myOtherVar: async async () => {\n' +
' yield b;\n' +
' }\n' +
'}')
#============================================================
# e4x - Test that e4x literals are passed through when the e4x option is enabled
self.reset_options()
self.options.e4x = true
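# With e4x enabled the tokenizer treats XML/JSX literals as opaque blocks
# and passes their interiors through untouched; only the surrounding
# JavaScript is reformatted. Sketch (assuming the public jsbeautifier
# module API):
#
#   import jsbeautifier
#   opts = jsbeautifier.default_options()
#   opts.e4x = True
#   jsbeautifier.beautify('xml=<a b="c"/>;', opts)  # -> 'xml = <a b="c"/>;'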
bt(
'xml=<a b="c"><d/><e>\n' +
' foo</e>x</a>;',
# -- output --
'xml = <a b="c"><d/><e>\n' +
' foo</e>x</a>;')
bt('<a b=\'This is a quoted "c".\'/>')
bt('<a b="This is a quoted \'c\'."/>')
bt('<a b="A quote \' inside string."/>')
bt('<a b=\'A quote " inside string.\'/>')
bt('<a b=\'Some """ quotes "" inside string.\'/>')
# Handles inline expressions
bt(
'xml=<{a} b="c"><d/><e v={z}>\n' +
' foo</e>x</{a}>;',
# -- output --
'xml = <{a} b="c"><d/><e v={z}>\n' +
' foo</e>x</{a}>;')
bt(
'xml=<{a} b="c">\n' +
' <e v={z}>\n' +
' foo</e>x</{a}>;',
# -- output --
'xml = <{a} b="c">\n' +
' <e v={z}>\n' +
' foo</e>x</{a}>;')
# xml literals with special characters in elem names - see http://www.w3.org/TR/REC-xml/#NT-NameChar
bt('xml = <_:.valid.xml- _:.valid.xml-="123"/>;')
# xml literals with attributes without equal sign
bt('xml = <elem someAttr/>;')
# Handles CDATA
bt(
'xml=<![CDATA[ b="c"><d/><e v={z}>\n' +
' foo</e>x/]]>;',
# -- output --
'xml = <![CDATA[ b="c"><d/><e v={z}>\n' +
' foo</e>x/]]>;')
bt('xml=<![CDATA[]]>;', 'xml = <![CDATA[]]>;')
bt('xml=<a b="c"><![CDATA[d/></a></{}]]></a>;', 'xml = <a b="c"><![CDATA[d/></a></{}]]></a>;')
# JSX - working jsx from http://prettydiff.com/unit_tests/beautification_javascript_jsx.txt
bt(
'var ListItem = React.createClass({\n' +
' render: function() {\n' +
' return (\n' +
' <li className="ListItem">\n' +
' <a href={ "/items/" + this.props.item.id }>\n' +
' this.props.item.name\n' +
' </a>\n' +
' </li>\n' +
' );\n' +
' }\n' +
'});')
bt(
'var List = React.createClass({\n' +
' renderList: function() {\n' +
' return this.props.items.map(function(item) {\n' +
' return <ListItem item={item} key={item.id} />;\n' +
' });\n' +
' },\n' +
'\n' +
' render: function() {\n' +
' return <ul className="List">\n' +
' this.renderList()\n' +
' </ul>\n' +
' }\n' +
'});')
bt(
'var Mist = React.createClass({\n' +
' renderList: function() {\n' +
' return this.props.items.map(function(item) {\n' +
' return <ListItem item={return <tag>{item}</tag>} key={item.id} />;\n' +
' });\n' +
' }\n' +
'});')
bt(
'// JSX\n' +
'var box = <Box>\n' +
' {shouldShowAnswer(user) ?\n' +
' <Answer value={false}>no</Answer> : <Box.Comment>\n' +
' Text Content\n' +
' </Box.Comment>}\n' +
' </Box>;\n' +
'var a = function() {\n' +
' return <tsdf>asdf</tsdf>;\n' +
'};\n' +
'\n' +
'var HelloMessage = React.createClass({\n' +
' render: function() {\n' +
' return <div {someAttr}>Hello {this.props.name}</div>;\n' +
' }\n' +
'});\n' +
'React.render(<HelloMessage name="John" />, mountNode);')
bt(
'var Timer = React.createClass({\n' +
' getInitialState: function() {\n' +
' return {\n' +
' secondsElapsed: 0\n' +
' };\n' +
' },\n' +
' tick: function() {\n' +
' this.setState({\n' +
' secondsElapsed: this.state.secondsElapsed + 1\n' +
' });\n' +
' },\n' +
' componentDidMount: function() {\n' +
' this.interval = setInterval(this.tick, 1000);\n' +
' },\n' +
' componentWillUnmount: function() {\n' +
' clearInterval(this.interval);\n' +
' },\n' +
' render: function() {\n' +
' return (\n' +
' <div>Seconds Elapsed: {this.state.secondsElapsed}</div>\n' +
' );\n' +
' }\n' +
'});\n' +
'React.render(<Timer />, mountNode);')
bt(
'var TodoList = React.createClass({\n' +
' render: function() {\n' +
' var createItem = function(itemText) {\n' +
' return <li>{itemText}</li>;\n' +
' };\n' +
' return <ul>{this.props.items.map(createItem)}</ul>;\n' +
' }\n' +
'});')
bt(
'var TodoApp = React.createClass({\n' +
' getInitialState: function() {\n' +
' return {\n' +
' items: [],\n' +
' text: \'\'\n' +
' };\n' +
' },\n' +
' onChange: function(e) {\n' +
' this.setState({\n' +
' text: e.target.value\n' +
' });\n' +
' },\n' +
' handleSubmit: function(e) {\n' +
' e.preventDefault();\n' +
' var nextItems = this.state.items.concat([this.state.text]);\n' +
' var nextText = \'\';\n' +
' this.setState({\n' +
' items: nextItems,\n' +
' text: nextText\n' +
' });\n' +
' },\n' +
' render: function() {\n' +
' return (\n' +
' <div>\n' +
' <h3 {someAttr}>TODO</h3>\n' +
' <TodoList items={this.state.items} />\n' +
' <form onSubmit={this.handleSubmit}>\n' +
' <input onChange={this.onChange} value={this.state.text} />\n' +
' <button>{\'Add #\' + (this.state.items.length + 1)}</button>\n' +
' </form>\n' +
' </div>\n' +
' );\n' +
' }\n' +
'});\n' +
'React.render(<TodoApp />, mountNode);')
bt(
'var converter = new Showdown.converter();\n' +
'var MarkdownEditor = React.createClass({\n' +
' getInitialState: function() {\n' +
' return {value: \'Type some *markdown* here!\'};\n' +
' },\n' +
' handleChange: function() {\n' +
' this.setState({value: this.refs.textarea.getDOMNode().value});\n' +
' },\n' +
' render: function() {\n' +
' return (\n' +
' <div className="MarkdownEditor">\n' +
' <h3>Input</h3>\n' +
' <textarea\n' +
' onChange={this.handleChange}\n' +
' ref="textarea"\n' +
' defaultValue={this.state.value} />\n' +
' <h3>Output</h3>\n' +
' <div\n' +
' className="content"\n' +
' dangerouslySetInnerHTML=\n' +
' />\n' +
' </div>\n' +
' );\n' +
' }\n' +
'});\n' +
'React.render(<MarkdownEditor />, mountNode);',
# -- output --
'var converter = new Showdown.converter();\n' +
'var MarkdownEditor = React.createClass({\n' +
' getInitialState: function() {\n' +
' return {\n' +
' value: \'Type some *markdown* here!\'\n' +
' };\n' +
' },\n' +
' handleChange: function() {\n' +
' this.setState({\n' +
' value: this.refs.textarea.getDOMNode().value\n' +
' });\n' +
' },\n' +
' render: function() {\n' +
' return (\n' +
' <div className="MarkdownEditor">\n' +
' <h3>Input</h3>\n' +
' <textarea\n' +
' onChange={this.handleChange}\n' +
' ref="textarea"\n' +
' defaultValue={this.state.value} />\n' +
' <h3>Output</h3>\n' +
' <div\n' +
' className="content"\n' +
' dangerouslySetInnerHTML=\n' +
' />\n' +
' </div>\n' +
' );\n' +
' }\n' +
'});\n' +
'React.render(<MarkdownEditor />, mountNode);')
# JSX - Not quite correct jsx formatting that still works
bt(
'var content = (\n' +
' <Nav>\n' +
' {/* child comment, put {} around */}\n' +
' <Person\n' +
' /* multi\n' +
' line\n' +
' comment */\n' +
' //attr="test"\n' +
' name={window.isLoggedIn ? window.name : \'\'} // end of line comment\n' +
' />\n' +
' </Nav>\n' +
' );\n' +
'var qwer = <DropDown> A dropdown list <Menu> <MenuItem>Do Something</MenuItem> <MenuItem>Do Something Fun!</MenuItem> <MenuItem>Do Something Else</MenuItem> </Menu> </DropDown>;\n' +
'render(dropdown);',
# -- output --
'var content = (\n' +
' <Nav>\n' +
' {/* child comment, put {} around */}\n' +
' <Person\n' +
' /* multi\n' +
' line\n' +
' comment */\n' +
' //attr="test"\n' +
' name={window.isLoggedIn ? window.name : \'\'} // end of line comment\n' +
' />\n' +
' </Nav>\n' +
');\n' +
'var qwer = <DropDown> A dropdown list <Menu> <MenuItem>Do Something</MenuItem> <MenuItem>Do Something Fun!</MenuItem> <MenuItem>Do Something Else</MenuItem> </Menu> </DropDown>;\n' +
'render(dropdown);')
# Handles messed up tags, as long as it isn't the same name
# as the root tag. Also handles tags of same name as root tag
# as long as nesting matches.
bt(
'xml=<a x="jn"><c></b></f><a><d jnj="jnn"><f></a ></nj></a>;',
# -- output --
'xml = <a x="jn"><c></b></f><a><d jnj="jnn"><f></a ></nj></a>;')
# If xml is not terminated, the remainder of the file is treated
# as part of the xml-literal (passed through unaltered)
test_fragment(
'xml=<a></b>\n' +
'c<b;',
# -- output --
'xml = <a></b>\n' +
'c<b;')
# Issue #646 - whitespace is allowed in attribute declarations
bt(
'let a = React.createClass({\n' +
' render() {\n' +
' return (\n' +
' <p className=\'a\'>\n' +
' <span>c</span>\n' +
' </p>\n' +
' );\n' +
' }\n' +
'});')
bt(
'let a = React.createClass({\n' +
' render() {\n' +
' return (\n' +
' <p className = \'b\'>\n' +
' <span>c</span>\n' +
' </p>\n' +
' );\n' +
' }\n' +
'});')
bt(
'let a = React.createClass({\n' +
' render() {\n' +
' return (\n' +
' <p className = "c">\n' +
' <span>c</span>\n' +
' </p>\n' +
' );\n' +
' }\n' +
'});')
bt(
'let a = React.createClass({\n' +
' render() {\n' +
' return (\n' +
' <{e} className = {d}>\n' +
' <span>c</span>\n' +
' </{e}>\n' +
' );\n' +
' }\n' +
'});')
# Issue #914 - Multiline attribute in root tag
bt(
'return (\n' +
' <a href="#"\n' +
' onClick={e => {\n' +
' e.preventDefault()\n' +
' onClick()\n' +
' }}>\n' +
' {children}\n' +
' </a>\n' +
');')
bt(
'return (\n' +
' <{\n' +
' a + b\n' +
' } href="#"\n' +
' onClick={e => {\n' +
' e.preventDefault()\n' +
' onClick()\n' +
' }}>\n' +
' {children}\n' +
' </{\n' +
' a + b\n' +
' }>\n' +
');')
bt(
'return (\n' +
' <{\n' +
' a + b\n' +
' } href="#"\n' +
' onClick={e => {\n' +
' e.preventDefault()\n' +
' onClick()\n' +
' }}>\n' +
' {children}\n' +
' </{a + b}>\n' +
' );',
# -- output --
'return (\n' +
' <{\n' +
' a + b\n' +
' } href="#"\n' +
' onClick={e => {\n' +
' e.preventDefault()\n' +
' onClick()\n' +
' }}>\n' +
' {children}\n' +
' </{a + b}>\n' +
');')
#============================================================
#
self.reset_options()
#============================================================
# e4x disabled
self.reset_options()
self.options.e4x = false
bt(
'xml=<a b="c"><d/><e>\n' +
' foo</e>x</a>;',
# -- output --
'xml = < a b = "c" > < d / > < e >\n' +
' foo < /e>x</a > ;')
#============================================================
# Multiple braces
self.reset_options()
bt(
'{{}/z/}',
# -- output --
'{\n' +
' {}\n' +
' /z/\n' +
'}')
#============================================================
# Space before conditional - (space_before_conditional = "false")
self.reset_options()
self.options.space_before_conditional = false
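# space_before_conditional controls only the gap between a conditional
# keyword (if/while/do...while) and its opening parenthesis; statement
# bodies are indented the same either way. A minimal sketch (assuming the
# public jsbeautifier module API):
#
#   import jsbeautifier
#   opts = jsbeautifier.default_options()
#   opts.space_before_conditional = False
#   jsbeautifier.beautify('if(a) b()', opts)  # -> 'if(a) b()' (no space added)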
bt('if(a) b()')
bt('while(a) b()')
bt(
'do\n' +
' c();\n' +
'while(a) b()')
bt(
'if(a)\n' +
'b();',
# -- output --
'if(a)\n' +
' b();')
bt(
'while(a)\n' +
'b();',
# -- output --
'while(a)\n' +
' b();')
bt(
'do\n' +
'c();\n' +
'while(a);',
# -- output --
'do\n' +
' c();\n' +
'while(a);')
bt('return [];')
bt('return ();')
# Space before conditional - (space_before_conditional = "true")
self.reset_options()
self.options.space_before_conditional = true
bt('if (a) b()')
bt('while (a) b()')
bt(
'do\n' +
' c();\n' +
'while (a) b()')
bt(
'if(a)\n' +
'b();',
# -- output --
'if (a)\n' +
' b();')
bt(
'while(a)\n' +
'b();',
# -- output --
'while (a)\n' +
' b();')
bt(
'do\n' +
'c();\n' +
'while(a);',
# -- output --
'do\n' +
' c();\n' +
'while (a);')
bt('return [];')
bt('return ();')
#============================================================
# Beautify preserve formatting
self.reset_options()
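# The preserve and ignore directives are ordinary block comments that fence
# off a region: "preserve" keeps the region's formatting verbatim while
# still tokenizing it, and "ignore" passes the raw text through even when it
# is not valid JavaScript. Sketch with default options (assuming the public
# jsbeautifier module API):
#
#   import jsbeautifier
#   src = '/* beautify preserve:start */\n   var a=1;\n/* beautify preserve:end */'
#   jsbeautifier.beautify(src)  # region comes back with its odd spacing intact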
bt(
'/* beautify preserve:start */\n' +
'/* beautify preserve:end */')
bt(
'/* beautify preserve:start */\n' +
' var a = 1;\n' +
'/* beautify preserve:end */')
bt(
'var a = 1;\n' +
'/* beautify preserve:start */\n' +
' var a = 1;\n' +
'/* beautify preserve:end */')
bt('/* beautify preserve:start */ {asdklgh;y;;{}dd2d}/* beautify preserve:end */')
bt(
'var a = 1;\n' +
'/* beautify preserve:start */\n' +
' var a = 1;\n' +
'/* beautify preserve:end */',
# -- output --
'var a = 1;\n' +
'/* beautify preserve:start */\n' +
' var a = 1;\n' +
'/* beautify preserve:end */')
bt(
'var a = 1;\n' +
' /* beautify preserve:start */\n' +
' var a = 1;\n' +
'/* beautify preserve:end */',
# -- output --
'var a = 1;\n' +
'/* beautify preserve:start */\n' +
' var a = 1;\n' +
'/* beautify preserve:end */')
bt(
'var a = {\n' +
' /* beautify preserve:start */\n' +
' one : 1\n' +
' two : 2,\n' +
' three : 3,\n' +
' ten : 10\n' +
' /* beautify preserve:end */\n' +
'};')
bt(
'var a = {\n' +
'/* beautify preserve:start */\n' +
' one : 1,\n' +
' two : 2,\n' +
' three : 3,\n' +
' ten : 10\n' +
'/* beautify preserve:end */\n' +
'};',
# -- output --
'var a = {\n' +
' /* beautify preserve:start */\n' +
' one : 1,\n' +
' two : 2,\n' +
' three : 3,\n' +
' ten : 10\n' +
'/* beautify preserve:end */\n' +
'};')
# one space before and after is required, and only single spaces are allowed inside.
bt(
'var a = {\n' +
'/* beautify preserve:start */\n' +
' one : 1,\n' +
' two : 2,\n' +
' three : 3,\n' +
' ten : 10\n' +
'};',
# -- output --
'var a = {\n' +
' /* beautify preserve:start */\n' +
' one: 1,\n' +
' two: 2,\n' +
' three: 3,\n' +
' ten: 10\n' +
'};')
bt(
'var a = {\n' +
'/*beautify preserve:start*/\n' +
' one : 1,\n' +
' two : 2,\n' +
' three : 3,\n' +
' ten : 10\n' +
'};',
# -- output --
'var a = {\n' +
' /*beautify preserve:start*/\n' +
' one: 1,\n' +
' two: 2,\n' +
' three: 3,\n' +
' ten: 10\n' +
'};')
bt(
'var a = {\n' +
'/*beautify preserve:start*/\n' +
' one : 1,\n' +
' two : 2,\n' +
' three : 3,\n' +
' ten : 10\n' +
'};',
# -- output --
'var a = {\n' +
' /*beautify preserve:start*/\n' +
' one: 1,\n' +
' two: 2,\n' +
' three: 3,\n' +
' ten: 10\n' +
'};')
# Directive: ignore
bt(
'/* beautify ignore:start */\n' +
'/* beautify ignore:end */')
bt(
'/* beautify ignore:start */\n' +
' var a,,,{ 1;\n' +
' /* beautify ignore:end */')
bt(
'var a = 1;\n' +
'/* beautify ignore:start */\n' +
' var a = 1;\n' +
'/* beautify ignore:end */')
# ignore starts _after_ the start comment, ends after the end comment
bt('/* beautify ignore:start */ {asdklgh;y;+++;dd2d}/* beautify ignore:end */')
bt('/* beautify ignore:start */ {asdklgh;y;+++;dd2d} /* beautify ignore:end */')
bt(
'var a = 1;\n' +
'/* beautify ignore:start */\n' +
' var a,,,{ 1;\n' +
'/*beautify ignore:end*/',
# -- output --
'var a = 1;\n' +
'/* beautify ignore:start */\n' +
' var a,,,{ 1;\n' +
'/*beautify ignore:end*/')
bt(
'var a = 1;\n' +
' /* beautify ignore:start */\n' +
' var a,,,{ 1;\n' +
'/* beautify ignore:end */',
# -- output --
'var a = 1;\n' +
'/* beautify ignore:start */\n' +
' var a,,,{ 1;\n' +
'/* beautify ignore:end */')
bt(
'var a = {\n' +
' /* beautify ignore:start */\n' +
' one : 1\n' +
' two : 2,\n' +
' three : {\n' +
' ten : 10\n' +
' /* beautify ignore:end */\n' +
'};')
bt(
'var a = {\n' +
'/* beautify ignore:start */\n' +
' one : 1\n' +
' two : 2,\n' +
' three : {\n' +
' ten : 10\n' +
'/* beautify ignore:end */\n' +
'};',
# -- output --
'var a = {\n' +
' /* beautify ignore:start */\n' +
' one : 1\n' +
' two : 2,\n' +
' three : {\n' +
' ten : 10\n' +
'/* beautify ignore:end */\n' +
'};')
# Directives - multiple and interacting
bt(
'var a = {\n' +
'/* beautify preserve:start */\n' +
'/* beautify preserve:start */\n' +
' one : 1,\n' +
' /* beautify preserve:end */\n' +
' two : 2,\n' +
' three : 3,\n' +
'/* beautify preserve:start */\n' +
' ten : 10\n' +
'/* beautify preserve:end */\n' +
'};',
# -- output --
'var a = {\n' +
' /* beautify preserve:start */\n' +
'/* beautify preserve:start */\n' +
' one : 1,\n' +
' /* beautify preserve:end */\n' +
' two: 2,\n' +
' three: 3,\n' +
' /* beautify preserve:start */\n' +
' ten : 10\n' +
'/* beautify preserve:end */\n' +
'};')
bt(
'var a = {\n' +
'/* beautify ignore:start */\n' +
' one : 1\n' +
' /* beautify ignore:end */\n' +
' two : 2,\n' +
'/* beautify ignore:start */\n' +
' three : {\n' +
' ten : 10\n' +
'/* beautify ignore:end */\n' +
'};',
# -- output --
'var a = {\n' +
' /* beautify ignore:start */\n' +
' one : 1\n' +
' /* beautify ignore:end */\n' +
' two: 2,\n' +
' /* beautify ignore:start */\n' +
' three : {\n' +
' ten : 10\n' +
'/* beautify ignore:end */\n' +
'};')
# Starts can occur together, ignore:end must occur alone.
bt(
'var a = {\n' +
'/* beautify ignore:start */\n' +
' one : 1\n' +
' NOTE: ignore end block does not support starting other directives\n' +
' This does not match the ending the ignore...\n' +
' /* beautify ignore:end preserve:start */\n' +
' two : 2,\n' +
'/* beautify ignore:start */\n' +
' three : {\n' +
' ten : 10\n' +
' ==The next comment ends the starting ignore==\n' +
'/* beautify ignore:end */\n' +
'};',
# -- output --
'var a = {\n' +
' /* beautify ignore:start */\n' +
' one : 1\n' +
' NOTE: ignore end block does not support starting other directives\n' +
' This does not match the ending the ignore...\n' +
' /* beautify ignore:end preserve:start */\n' +
' two : 2,\n' +
'/* beautify ignore:start */\n' +
' three : {\n' +
' ten : 10\n' +
' ==The next comment ends the starting ignore==\n' +
'/* beautify ignore:end */\n' +
'};')
bt(
'var a = {\n' +
'/* beautify ignore:start preserve:start */\n' +
' one : {\n' +
' /* beautify ignore:end */\n' +
' two : 2,\n' +
' /* beautify ignore:start */\n' +
' three : {\n' +
'/* beautify ignore:end */\n' +
' ten : 10\n' +
' // This is all preserved\n' +
'};',
# -- output --
'var a = {\n' +
' /* beautify ignore:start preserve:start */\n' +
' one : {\n' +
' /* beautify ignore:end */\n' +
' two : 2,\n' +
' /* beautify ignore:start */\n' +
' three : {\n' +
'/* beautify ignore:end */\n' +
' ten : 10\n' +
' // This is all preserved\n' +
'};')
bt(
'var a = {\n' +
'/* beautify ignore:start preserve:start */\n' +
' one : {\n' +
' /* beautify ignore:end */\n' +
' two : 2,\n' +
' /* beautify ignore:start */\n' +
' three : {\n' +
'/* beautify ignore:end */\n' +
' ten : 10,\n' +
'/* beautify preserve:end */\n' +
' eleven: 11\n' +
'};',
# -- output --
'var a = {\n' +
' /* beautify ignore:start preserve:start */\n' +
' one : {\n' +
' /* beautify ignore:end */\n' +
' two : 2,\n' +
' /* beautify ignore:start */\n' +
' three : {\n' +
'/* beautify ignore:end */\n' +
' ten : 10,\n' +
'/* beautify preserve:end */\n' +
' eleven: 11\n' +
'};')
#============================================================
# Comments and tests
self.reset_options()
# #913
bt(
'class test {\n' +
' method1() {\n' +
' let resp = null;\n' +
' }\n' +
' /**\n' +
' * @param {String} id\n' +
' */\n' +
' method2(id) {\n' +
' let resp2 = null;\n' +
' }\n' +
'}')
# #1090
bt(
'for (var i = 0; i < 20; ++i) // loop\n' +
' if (i % 3) {\n' +
' console.log(i);\n' +
' }\n' +
'console.log("done");')
# #1043
bt(
'var o = {\n' +
' k: 0\n' +
'}\n' +
'// ...\n' +
'foo(o)')
# #713 and #964
bt(
'Meteor.call("foo", bar, function(err, result) {\n' +
' Session.set("baz", result.lorem)\n' +
'})\n' +
'//blah blah')
# #815
bt(
'foo()\n' +
'// this is a comment\n' +
'bar()\n' +
'\n' +
'const foo = 5\n' +
'// comment\n' +
'bar()')
# This shows current behavior. Note #1069 is not addressed yet.
bt(
'if (modulus === 2) {\n' +
' // i might be odd here\n' +
' i += (i & 1);\n' +
' // now i is guaranteed to be even\n' +
' // this block is obviously about the statement above\n' +
'\n' +
' // #1069 This should attach to the block below\n' +
' // this comment is about the block after it.\n' +
'} else {\n' +
' // rounding up using integer arithmetic only\n' +
' if (i % modulus)\n' +
' i += modulus - (i % modulus);\n' +
' // now i is divisible by modulus\n' +
' // behavior of comments should be different for single statements vs block statements/expressions\n' +
'}\n' +
'\n' +
'if (modulus === 2)\n' +
' // i might be odd here\n' +
' i += (i & 1);\n' +
'// now i is guaranteed to be even\n' +
'// non-braced comments unindent immediately\n' +
'\n' +
'// this comment is about the block after it.\n' +
'else\n' +
' // rounding up using integer arithmetic only\n' +
' if (i % modulus)\n' +
' i += modulus - (i % modulus);\n' +
'// behavior of comments should be different for single statements vs block statements/expressions')
#============================================================
# Template Formatting
self.reset_options()
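# Template delimiters (PHP "<?php ?>"/"<?= ?>" and ERB/ASP-style "<% %>")
# are treated as single opaque tokens, so server-side code embedded in the
# JavaScript survives beautification byte-for-byte. Sketch with default
# options (assuming the public jsbeautifier module API):
#
#   import jsbeautifier
#   jsbeautifier.beautify('a = <?= external() ?>;')  # template tag untouched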
bt('<?=$view["name"]; ?>')
bt('a = <?= external() ?>;')
bt(
'<?php\n' +
'for($i = 1; $i <= 100; $i++;) {\n' +
' #count to 100!\n' +
' echo($i . "</br>");\n' +
'}\n' +
'?>')
bt('a = <%= external() %>;')
#============================================================
# minimal template handling - PHP "<?php ... ?>" tags
self.reset_options()
bt('var a = <?php$view["name"]; ?>;', 'var a = <?php$view["name"]; ?>;')
bt(
'a = abc<?php\n' +
'for($i = 1; $i <= 100; $i++;) {\n' +
' #count to 100!\n' +
' echo($i . "</br>");\n' +
'}\n' +
'?>;')
test_fragment(
'<?php ?>\n' +
'test.met<?php someValue ?>hod();')
bt(
'<?php "A" ?>abc<?php "D" ?>;\n' +
'<?php "B" ?>.test();\n' +
'" <?php "C" \'D\' ?> "')
bt(
'<?php\n' +
'echo "A";\n' +
'?>;\n' +
'test.method();')
# minimal template handling - PHP short echo "<?= ... ?>" tags
self.reset_options()
bt('var a = <?=$view["name"]; ?>;', 'var a = <?=$view["name"]; ?>;')
bt(
'a = abc<?=\n' +
'for($i = 1; $i <= 100; $i++;) {\n' +
' #count to 100!\n' +
' echo($i . "</br>");\n' +
'}\n' +
'?>;')
test_fragment(
'<?= ?>\n' +
'test.met<?= someValue ?>hod();')
bt(
'<?= "A" ?>abc<?= "D" ?>;\n' +
'<?= "B" ?>.test();\n' +
'" <?= "C" \'D\' ?> "')
bt(
'<?=\n' +
'echo "A";\n' +
'?>;\n' +
'test.method();')
# minimal template handling - ERB/ASP-style "<% ... %>" tags
self.reset_options()
bt('var a = <%$view["name"]; %>;', 'var a = <%$view["name"]; %>;')
bt(
'a = abc<%\n' +
'for($i = 1; $i <= 100; $i++;) {\n' +
' #count to 100!\n' +
' echo($i . "</br>");\n' +
'}\n' +
'%>;')
test_fragment(
'<% %>\n' +
'test.met<% someValue %>hod();')
bt(
'<% "A" %>abc<% "D" %>;\n' +
'<% "B" %>.test();\n' +
'" <% "C" \'D\' %> "')
bt(
'<%\n' +
'echo "A";\n' +
'%>;\n' +
'test.method();')
#============================================================
# jslint and space after anon function - (jslint_happy = "true", space_after_anon_function = "true")
self.reset_options()
self.options.jslint_happy = true
self.options.space_after_anon_function = true
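# jslint_happy implies a space after anonymous "function" and flattens
# switch case labels to the switch's own indent; space_after_anon_function
# controls only the former, so the four option combinations below tease the
# two apart. Sketch (assuming the public jsbeautifier module API):
#
#   import jsbeautifier
#   opts = jsbeautifier.default_options()
#   opts.jslint_happy = True
#   jsbeautifier.beautify('x=function(){}', opts)  # -> 'x = function () {}'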
bt(
'a=typeof(x)',
# -- output --
'a = typeof (x)')
bt(
'x();\n' +
'\n' +
'function(){}',
# -- output --
'x();\n' +
'\n' +
'function () {}')
bt(
'x();\n' +
'\n' +
'function y(){}',
# -- output --
'x();\n' +
'\n' +
'function y() {}')
bt(
'x();\n' +
'\n' +
'var x = {\n' +
'x: function(){}\n' +
'}',
# -- output --
'x();\n' +
'\n' +
'var x = {\n' +
' x: function () {}\n' +
'}')
bt(
'x();\n' +
'\n' +
'var x = {\n' +
'x: function y(){}\n' +
'}',
# -- output --
'x();\n' +
'\n' +
'var x = {\n' +
' x: function y() {}\n' +
'}')
bt(
'function () {\n' +
' var a, b, c, d, e = [],\n' +
' f;\n' +
'}')
bt(
'switch(x) {case 0: case 1: a(); break; default: break}',
# -- output --
'switch (x) {\n' +
'case 0:\n' +
'case 1:\n' +
' a();\n' +
' break;\n' +
'default:\n' +
' break\n' +
'}')
bt(
'switch(x){case -1:break;case !y:break;}',
# -- output --
'switch (x) {\n' +
'case -1:\n' +
' break;\n' +
'case !y:\n' +
' break;\n' +
'}')
# typical greasemonkey start
test_fragment(
'// comment 2\n' +
'(function ()')
bt(
'var a2, b2, c2, d2 = 0, c = function() {}, d = \'\';',
# -- output --
'var a2, b2, c2, d2 = 0,\n' +
' c = function () {},\n' +
' d = \'\';')
bt(
'var a2, b2, c2, d2 = 0, c = function yoohoo() {}, d = \'\';',
# -- output --
'var a2, b2, c2, d2 = 0,\n' +
' c = function yoohoo() {},\n' +
' d = \'\';')
bt(
'var a2, b2, c2, d2 = 0, c = function() {},\n' +
'd = \'\';',
# -- output --
'var a2, b2, c2, d2 = 0,\n' +
' c = function () {},\n' +
' d = \'\';')
bt(
'var o2=$.extend(a);function(){alert(x);}',
# -- output --
'var o2 = $.extend(a);\n' +
'\n' +
'function () {\n' +
' alert(x);\n' +
'}')
bt(
'var o2=$.extend(a);function yoohoo(){alert(x);}',
# -- output --
'var o2 = $.extend(a);\n' +
'\n' +
'function yoohoo() {\n' +
' alert(x);\n' +
'}')
bt(
'function*() {\n' +
' yield 1;\n' +
'}',
# -- output --
'function* () {\n' +
' yield 1;\n' +
'}')
bt(
'function* yoohoo() {\n' +
' yield 1;\n' +
'}')
bt(
'function* x() {\n' +
' yield 1;\n' +
'}')
bt(
'async x() {\n' +
' yield 1;\n' +
'}')
bt(
'var a={data(){},\n' +
'data2(){}}',
# -- output --
'var a = {\n' +
' data() {},\n' +
' data2() {}\n' +
'}')
bt(
'new Vue({\n' +
'data(){},\n' +
'data2(){}, a:1})',
# -- output --
'new Vue({\n' +
' data() {},\n' +
' data2() {},\n' +
' a: 1\n' +
'})')
bt(
'export default {data(){},\n' +
'data2(){},\n' +
'a:1}',
# -- output --
'export default {\n' +
' data() {},\n' +
' data2() {},\n' +
' a: 1\n' +
'}')
bt(
'var a={*data(){},*data2(){}}',
# -- output --
'var a = {\n' +
' * data() {},\n' +
' * data2() {}\n' +
'}')
bt(
'new Vue({\n' +
'*data(){},*data2(){}, a:1})',
# -- output --
'new Vue({\n' +
' * data() {},\n' +
' * data2() {},\n' +
' a: 1\n' +
'})')
bt(
'export default {*data(){},*data2(){},\n' +
'a:1}',
# -- output --
'export default {\n' +
' * data() {},\n' +
' * data2() {},\n' +
' a: 1\n' +
'}')
# jslint and space after anon function - (jslint_happy = "true", space_after_anon_function = "false")
self.reset_options()
self.options.jslint_happy = true
self.options.space_after_anon_function = false
bt(
'a=typeof(x)',
# -- output --
'a = typeof (x)')
bt(
'x();\n' +
'\n' +
'function(){}',
# -- output --
'x();\n' +
'\n' +
'function () {}')
bt(
'x();\n' +
'\n' +
'function y(){}',
# -- output --
'x();\n' +
'\n' +
'function y() {}')
bt(
'x();\n' +
'\n' +
'var x = {\n' +
'x: function(){}\n' +
'}',
# -- output --
'x();\n' +
'\n' +
'var x = {\n' +
' x: function () {}\n' +
'}')
bt(
'x();\n' +
'\n' +
'var x = {\n' +
'x: function y(){}\n' +
'}',
# -- output --
'x();\n' +
'\n' +
'var x = {\n' +
' x: function y() {}\n' +
'}')
bt(
'function () {\n' +
' var a, b, c, d, e = [],\n' +
' f;\n' +
'}')
bt(
'switch(x) {case 0: case 1: a(); break; default: break}',
# -- output --
'switch (x) {\n' +
'case 0:\n' +
'case 1:\n' +
' a();\n' +
' break;\n' +
'default:\n' +
' break\n' +
'}')
bt(
'switch(x){case -1:break;case !y:break;}',
# -- output --
'switch (x) {\n' +
'case -1:\n' +
' break;\n' +
'case !y:\n' +
' break;\n' +
'}')
# typical greasemonkey start
test_fragment(
'// comment 2\n' +
'(function ()')
bt(
'var a2, b2, c2, d2 = 0, c = function() {}, d = \'\';',
# -- output --
'var a2, b2, c2, d2 = 0,\n' +
' c = function () {},\n' +
' d = \'\';')
bt(
'var a2, b2, c2, d2 = 0, c = function yoohoo() {}, d = \'\';',
# -- output --
'var a2, b2, c2, d2 = 0,\n' +
' c = function yoohoo() {},\n' +
' d = \'\';')
bt(
'var a2, b2, c2, d2 = 0, c = function() {},\n' +
'd = \'\';',
# -- output --
'var a2, b2, c2, d2 = 0,\n' +
' c = function () {},\n' +
' d = \'\';')
bt(
'var o2=$.extend(a);function(){alert(x);}',
# -- output --
'var o2 = $.extend(a);\n' +
'\n' +
'function () {\n' +
' alert(x);\n' +
'}')
bt(
'var o2=$.extend(a);function yoohoo(){alert(x);}',
# -- output --
'var o2 = $.extend(a);\n' +
'\n' +
'function yoohoo() {\n' +
' alert(x);\n' +
'}')
bt(
'function*() {\n' +
' yield 1;\n' +
'}',
# -- output --
'function* () {\n' +
' yield 1;\n' +
'}')
bt(
'function* yoohoo() {\n' +
' yield 1;\n' +
'}')
bt(
'function* x() {\n' +
' yield 1;\n' +
'}')
bt(
'async x() {\n' +
' yield 1;\n' +
'}')
bt(
'var a={data(){},\n' +
'data2(){}}',
# -- output --
'var a = {\n' +
' data() {},\n' +
' data2() {}\n' +
'}')
bt(
'new Vue({\n' +
'data(){},\n' +
'data2(){}, a:1})',
# -- output --
'new Vue({\n' +
' data() {},\n' +
' data2() {},\n' +
' a: 1\n' +
'})')
bt(
'export default {data(){},\n' +
'data2(){},\n' +
'a:1}',
# -- output --
'export default {\n' +
' data() {},\n' +
' data2() {},\n' +
' a: 1\n' +
'}')
bt(
'var a={*data(){},*data2(){}}',
# -- output --
'var a = {\n' +
' * data() {},\n' +
' * data2() {}\n' +
'}')
bt(
'new Vue({\n' +
'*data(){},*data2(){}, a:1})',
# -- output --
'new Vue({\n' +
' * data() {},\n' +
' * data2() {},\n' +
' a: 1\n' +
'})')
bt(
'export default {*data(){},*data2(){},\n' +
'a:1}',
# -- output --
'export default {\n' +
' * data() {},\n' +
' * data2() {},\n' +
' a: 1\n' +
'}')
# jslint and space after anon function - (jslint_happy = "false", space_after_anon_function = "true")
self.reset_options()
self.options.jslint_happy = false
self.options.space_after_anon_function = true
bt(
'a=typeof(x)',
# -- output --
'a = typeof (x)')
bt(
'x();\n' +
'\n' +
'function(){}',
# -- output --
'x();\n' +
'\n' +
'function () {}')
bt(
'x();\n' +
'\n' +
'function y(){}',
# -- output --
'x();\n' +
'\n' +
'function y() {}')
bt(
'x();\n' +
'\n' +
'var x = {\n' +
'x: function(){}\n' +
'}',
# -- output --
'x();\n' +
'\n' +
'var x = {\n' +
' x: function () {}\n' +
'}')
bt(
'x();\n' +
'\n' +
'var x = {\n' +
'x: function y(){}\n' +
'}',
# -- output --
'x();\n' +
'\n' +
'var x = {\n' +
' x: function y() {}\n' +
'}')
bt(
'function () {\n' +
' var a, b, c, d, e = [],\n' +
' f;\n' +
'}')
bt(
'switch(x) {case 0: case 1: a(); break; default: break}',
# -- output --
'switch (x) {\n' +
' case 0:\n' +
' case 1:\n' +
' a();\n' +
' break;\n' +
' default:\n' +
' break\n' +
'}')
bt(
'switch(x){case -1:break;case !y:break;}',
# -- output --
'switch (x) {\n' +
' case -1:\n' +
' break;\n' +
' case !y:\n' +
' break;\n' +
'}')
# typical greasemonkey start
test_fragment(
'// comment 2\n' +
'(function ()')
bt(
'var a2, b2, c2, d2 = 0, c = function() {}, d = \'\';',
# -- output --
'var a2, b2, c2, d2 = 0,\n' +
' c = function () {},\n' +
' d = \'\';')
bt(
'var a2, b2, c2, d2 = 0, c = function yoohoo() {}, d = \'\';',
# -- output --
'var a2, b2, c2, d2 = 0,\n' +
' c = function yoohoo() {},\n' +
' d = \'\';')
bt(
'var a2, b2, c2, d2 = 0, c = function() {},\n' +
'd = \'\';',
# -- output --
'var a2, b2, c2, d2 = 0,\n' +
' c = function () {},\n' +
' d = \'\';')
bt(
'var o2=$.extend(a);function(){alert(x);}',
# -- output --
'var o2 = $.extend(a);\n' +
'\n' +
'function () {\n' +
' alert(x);\n' +
'}')
bt(
'var o2=$.extend(a);function yoohoo(){alert(x);}',
# -- output --
'var o2 = $.extend(a);\n' +
'\n' +
'function yoohoo() {\n' +
' alert(x);\n' +
'}')
bt(
'function*() {\n' +
' yield 1;\n' +
'}',
# -- output --
'function* () {\n' +
' yield 1;\n' +
'}')
bt(
'function* yoohoo() {\n' +
' yield 1;\n' +
'}')
bt(
'function* x() {\n' +
' yield 1;\n' +
'}')
bt(
'async x() {\n' +
' yield 1;\n' +
'}')
bt(
'var a={data(){},\n' +
'data2(){}}',
# -- output --
'var a = {\n' +
' data() {},\n' +
' data2() {}\n' +
'}')
bt(
'new Vue({\n' +
'data(){},\n' +
'data2(){}, a:1})',
# -- output --
'new Vue({\n' +
' data() {},\n' +
' data2() {},\n' +
' a: 1\n' +
'})')
bt(
'export default {data(){},\n' +
'data2(){},\n' +
'a:1}',
# -- output --
'export default {\n' +
' data() {},\n' +
' data2() {},\n' +
' a: 1\n' +
'}')
bt(
'var a={*data(){},*data2(){}}',
# -- output --
'var a = {\n' +
' * data() {},\n' +
' * data2() {}\n' +
'}')
bt(
'new Vue({\n' +
'*data(){},*data2(){}, a:1})',
# -- output --
'new Vue({\n' +
' * data() {},\n' +
' * data2() {},\n' +
' a: 1\n' +
'})')
bt(
'export default {*data(){},*data2(){},\n' +
'a:1}',
# -- output --
'export default {\n' +
' * data() {},\n' +
' * data2() {},\n' +
' a: 1\n' +
'}')
# jslint and space after anon function - (jslint_happy = "false", space_after_anon_function = "false")
self.reset_options()
self.options.jslint_happy = false
self.options.space_after_anon_function = false
bt(
'a=typeof(x)',
# -- output --
'a = typeof(x)')
bt(
'x();\n' +
'\n' +
'function(){}',
# -- output --
'x();\n' +
'\n' +
'function() {}')
bt(
'x();\n' +
'\n' +
'function y(){}',
# -- output --
'x();\n' +
'\n' +
'function y() {}')
bt(
'x();\n' +
'\n' +
'var x = {\n' +
'x: function(){}\n' +
'}',
# -- output --
'x();\n' +
'\n' +
'var x = {\n' +
' x: function() {}\n' +
'}')
bt(
'x();\n' +
'\n' +
'var x = {\n' +
'x: function y(){}\n' +
'}',
# -- output --
'x();\n' +
'\n' +
'var x = {\n' +
' x: function y() {}\n' +
'}')
bt(
'function () {\n' +
' var a, b, c, d, e = [],\n' +
' f;\n' +
'}',
# -- output --
'function() {\n' +
' var a, b, c, d, e = [],\n' +
' f;\n' +
'}')
bt(
'switch(x) {case 0: case 1: a(); break; default: break}',
# -- output --
'switch (x) {\n' +
' case 0:\n' +
' case 1:\n' +
' a();\n' +
' break;\n' +
' default:\n' +
' break\n' +
'}')
bt(
'switch(x){case -1:break;case !y:break;}',
# -- output --
'switch (x) {\n' +
' case -1:\n' +
' break;\n' +
' case !y:\n' +
' break;\n' +
'}')
# typical greasemonkey start
test_fragment(
'// comment 2\n' +
'(function()')
bt(
'var a2, b2, c2, d2 = 0, c = function() {}, d = \'\';',
# -- output --
'var a2, b2, c2, d2 = 0,\n' +
' c = function() {},\n' +
' d = \'\';')
bt(
'var a2, b2, c2, d2 = 0, c = function yoohoo() {}, d = \'\';',
# -- output --
'var a2, b2, c2, d2 = 0,\n' +
' c = function yoohoo() {},\n' +
' d = \'\';')
bt(
'var a2, b2, c2, d2 = 0, c = function() {},\n' +
'd = \'\';',
# -- output --
'var a2, b2, c2, d2 = 0,\n' +
' c = function() {},\n' +
' d = \'\';')
bt(
'var o2=$.extend(a);function(){alert(x);}',
# -- output --
'var o2 = $.extend(a);\n' +
'\n' +
'function() {\n' +
' alert(x);\n' +
'}')
bt(
'var o2=$.extend(a);function yoohoo(){alert(x);}',
# -- output --
'var o2 = $.extend(a);\n' +
'\n' +
'function yoohoo() {\n' +
' alert(x);\n' +
'}')
bt(
'function*() {\n' +
' yield 1;\n' +
'}')
bt(
'function* yoohoo() {\n' +
' yield 1;\n' +
'}')
bt(
'function* x() {\n' +
' yield 1;\n' +
'}')
bt(
'async x() {\n' +
' yield 1;\n' +
'}')
bt(
'var a={data(){},\n' +
'data2(){}}',
# -- output --
'var a = {\n' +
' data() {},\n' +
' data2() {}\n' +
'}')
bt(
'new Vue({\n' +
'data(){},\n' +
'data2(){}, a:1})',
# -- output --
'new Vue({\n' +
' data() {},\n' +
' data2() {},\n' +
' a: 1\n' +
'})')
bt(
'export default {data(){},\n' +
'data2(){},\n' +
'a:1}',
# -- output --
'export default {\n' +
' data() {},\n' +
' data2() {},\n' +
' a: 1\n' +
'}')
bt(
'var a={*data(){},*data2(){}}',
# -- output --
'var a = {\n' +
' * data() {},\n' +
' * data2() {}\n' +
'}')
bt(
'new Vue({\n' +
'*data(){},*data2(){}, a:1})',
# -- output --
'new Vue({\n' +
' * data() {},\n' +
' * data2() {},\n' +
' a: 1\n' +
'})')
bt(
'export default {*data(){},*data2(){},\n' +
'a:1}',
# -- output --
'export default {\n' +
' * data() {},\n' +
' * data2() {},\n' +
' a: 1\n' +
'}')
# jslint and space after anon function - (space_after_named_function = "true")
self.reset_options()
self.options.space_after_named_function = true
bt(
'a=typeof(x)',
# -- output --
'a = typeof(x)')
bt(
'x();\n' +
'\n' +
'function(){}',
# -- output --
'x();\n' +
'\n' +
'function() {}')
bt(
'x();\n' +
'\n' +
'function y(){}',
# -- output --
'x();\n' +
'\n' +
'function y () {}')
bt(
'x();\n' +
'\n' +
'var x = {\n' +
'x: function(){}\n' +
'}',
# -- output --
'x();\n' +
'\n' +
'var x = {\n' +
' x: function() {}\n' +
'}')
bt(
'x();\n' +
'\n' +
'var x = {\n' +
'x: function y(){}\n' +
'}',
# -- output --
'x();\n' +
'\n' +
'var x = {\n' +
' x: function y () {}\n' +
'}')
bt(
'function () {\n' +
' var a, b, c, d, e = [],\n' +
' f;\n' +
'}',
# -- output --
'function() {\n' +
' var a, b, c, d, e = [],\n' +
' f;\n' +
'}')
bt(
'switch(x) {case 0: case 1: a(); break; default: break}',
# -- output --
'switch (x) {\n' +
' case 0:\n' +
' case 1:\n' +
' a();\n' +
' break;\n' +
' default:\n' +
' break\n' +
'}')
bt(
'switch(x){case -1:break;case !y:break;}',
# -- output --
'switch (x) {\n' +
' case -1:\n' +
' break;\n' +
' case !y:\n' +
' break;\n' +
'}')
# typical greasemonkey start
test_fragment(
'// comment 2\n' +
'(function()')
bt(
'var a2, b2, c2, d2 = 0, c = function() {}, d = \'\';',
# -- output --
'var a2, b2, c2, d2 = 0,\n' +
' c = function() {},\n' +
' d = \'\';')
bt(
'var a2, b2, c2, d2 = 0, c = function yoohoo() {}, d = \'\';',
# -- output --
'var a2, b2, c2, d2 = 0,\n' +
' c = function yoohoo () {},\n' +
' d = \'\';')
bt(
'var a2, b2, c2, d2 = 0, c = function() {},\n' +
'd = \'\';',
# -- output --
'var a2, b2, c2, d2 = 0,\n' +
' c = function() {},\n' +
' d = \'\';')
bt(
'var o2=$.extend(a);function(){alert(x);}',
# -- output --
'var o2 = $.extend(a);\n' +
'\n' +
'function() {\n' +
' alert(x);\n' +
'}')
bt(
'var o2=$.extend(a);function yoohoo(){alert(x);}',
# -- output --
'var o2 = $.extend(a);\n' +
'\n' +
'function yoohoo () {\n' +
' alert(x);\n' +
'}')
bt(
'function*() {\n' +
' yield 1;\n' +
'}')
bt(
'function* yoohoo() {\n' +
' yield 1;\n' +
'}',
# -- output --
'function* yoohoo () {\n' +
' yield 1;\n' +
'}')
bt(
'function* x() {\n' +
' yield 1;\n' +
'}',
# -- output --
'function* x () {\n' +
' yield 1;\n' +
'}')
bt(
'async x() {\n' +
' yield 1;\n' +
'}',
# -- output --
'async x () {\n' +
' yield 1;\n' +
'}')
bt(
'var a={data(){},\n' +
'data2(){}}',
# -- output --
'var a = {\n' +
' data () {},\n' +
' data2 () {}\n' +
'}')
bt(
'new Vue({\n' +
'data(){},\n' +
'data2(){}, a:1})',
# -- output --
'new Vue({\n' +
' data () {},\n' +
' data2 () {},\n' +
' a: 1\n' +
'})')
bt(
'export default {data(){},\n' +
'data2(){},\n' +
'a:1}',
# -- output --
'export default {\n' +
' data () {},\n' +
' data2 () {},\n' +
' a: 1\n' +
'}')
bt(
'var a={*data(){},*data2(){}}',
# -- output --
'var a = {\n' +
' * data () {},\n' +
' * data2 () {}\n' +
'}')
bt(
'new Vue({\n' +
'*data(){},*data2(){}, a:1})',
# -- output --
'new Vue({\n' +
' * data () {},\n' +
' * data2 () {},\n' +
' a: 1\n' +
'})')
bt(
'export default {*data(){},*data2(){},\n' +
'a:1}',
# -- output --
'export default {\n' +
' * data () {},\n' +
' * data2 () {},\n' +
' a: 1\n' +
'}')
#============================================================
# Regression tests
self.reset_options()
# Issue 241
bt(
'obj\n' +
' .last({\n' +
' foo: 1,\n' +
' bar: 2\n' +
' });\n' +
'var test = 1;')
bt(
'obj\n' +
' .last(a, function() {\n' +
' var test;\n' +
' });\n' +
'var test = 1;')
bt(
'obj.first()\n' +
' .second()\n' +
' .last(function(err, response) {\n' +
' console.log(err);\n' +
' });')
# Issue 268 and 275
bt(
'obj.last(a, function() {\n' +
' var test;\n' +
'});\n' +
'var test = 1;')
bt(
'obj.last(a,\n' +
' function() {\n' +
' var test;\n' +
' });\n' +
'var test = 1;')
bt(
'(function() {if (!window.FOO) window.FOO || (window.FOO = function() {var b = {bar: "zort"};});})();',
# -- output --
'(function() {\n' +
' if (!window.FOO) window.FOO || (window.FOO = function() {\n' +
' var b = {\n' +
' bar: "zort"\n' +
' };\n' +
' });\n' +
'})();')
# Issue 281
bt(
'define(["dojo/_base/declare", "my/Employee", "dijit/form/Button",\n' +
' "dojo/_base/lang", "dojo/Deferred"\n' +
'], function(declare, Employee, Button, lang, Deferred) {\n' +
' return declare(Employee, {\n' +
' constructor: function() {\n' +
' new Button({\n' +
' onClick: lang.hitch(this, function() {\n' +
' new Deferred().then(lang.hitch(this, function() {\n' +
' this.salary * 0.25;\n' +
' }));\n' +
' })\n' +
' });\n' +
' }\n' +
' });\n' +
'});')
bt(
'define(["dojo/_base/declare", "my/Employee", "dijit/form/Button",\n' +
' "dojo/_base/lang", "dojo/Deferred"\n' +
' ],\n' +
' function(declare, Employee, Button, lang, Deferred) {\n' +
' return declare(Employee, {\n' +
' constructor: function() {\n' +
' new Button({\n' +
' onClick: lang.hitch(this, function() {\n' +
' new Deferred().then(lang.hitch(this, function() {\n' +
' this.salary * 0.25;\n' +
' }));\n' +
' })\n' +
' });\n' +
' }\n' +
' });\n' +
' });')
# Issue 459
bt(
'(function() {\n' +
' return {\n' +
' foo: function() {\n' +
' return "bar";\n' +
' },\n' +
' bar: ["bar"]\n' +
' };\n' +
'}());')
# Issue 505 - strings should end at newline unless continued by backslash
bt(
'var name = "a;\n' +
'name = "b";')
bt(
'var name = "a;\\\n' +
' name = b";')
# Issue 514 - some operators require spaces to distinguish them
bt('var c = "_ACTION_TO_NATIVEAPI_" + ++g++ + +new Date;')
bt('var c = "_ACTION_TO_NATIVEAPI_" - --g-- - -new Date;')
# Issue 440 - reserved words can be used as object property names
bt(
'a = {\n' +
' function: {},\n' +
' "function": {},\n' +
' throw: {},\n' +
' "throw": {},\n' +
' var: {},\n' +
' "var": {},\n' +
' set: {},\n' +
' "set": {},\n' +
' get: {},\n' +
' "get": {},\n' +
' if: {},\n' +
' "if": {},\n' +
' then: {},\n' +
' "then": {},\n' +
' else: {},\n' +
' "else": {},\n' +
' yay: {}\n' +
'};')
# Issue 331 - if-else with braces edge case
bt(
'if(x){a();}else{b();}if(y){c();}',
# -- output --
'if (x) {\n' +
' a();\n' +
'} else {\n' +
' b();\n' +
'}\n' +
'if (y) {\n' +
' c();\n' +
'}')
# Issue 485 - ensure function declarations behave the same in arrays as elsewhere
bt(
'var v = ["a",\n' +
' function() {\n' +
' return;\n' +
' }, {\n' +
' id: 1\n' +
' }\n' +
'];')
bt(
'var v = ["a", function() {\n' +
' return;\n' +
'}, {\n' +
' id: 1\n' +
'}];')
# Issue 382 - initial totally cursory support for es6 module export
bt(
'module "Even" {\n' +
' import odd from "Odd";\n' +
' export function sum(x, y) {\n' +
' return x + y;\n' +
' }\n' +
' export var pi = 3.141593;\n' +
' export default moduleName;\n' +
'}')
bt(
'module "Even" {\n' +
' export default function div(x, y) {}\n' +
'}')
# Issue 889 - export default { ... }
bt(
'export default {\n' +
' func1() {},\n' +
' func2() {}\n' +
' func3() {}\n' +
'}')
bt(
'export default {\n' +
' a() {\n' +
' return 1;\n' +
' },\n' +
' b() {\n' +
' return 2;\n' +
' },\n' +
' c() {\n' +
' return 3;\n' +
' }\n' +
'}')
# Issue 508
bt('set["name"]')
bt('get["name"]')
bt(
'a = {\n' +
' set b(x) {},\n' +
' c: 1,\n' +
' d: function() {}\n' +
'};')
bt(
'a = {\n' +
' get b() {\n' +
' retun 0;\n' +
' },\n' +
' c: 1,\n' +
' d: function() {}\n' +
'};')
# Issue 298 - do not under-indent if/while/for conditional expressions
bt(
'\'use strict\';\n' +
'if ([].some(function() {\n' +
' return false;\n' +
' })) {\n' +
' console.log("hello");\n' +
'}')
# Issue 298 - do not under-indent if/while/for conditional expressions
bt(
'\'use strict\';\n' +
'if ([].some(function() {\n' +
' return false;\n' +
' })) {\n' +
' console.log("hello");\n' +
'}')
# Issue 552 - Typescript? Okay... we didn't break it before, so try not to break it now.
bt(
'class Test {\n' +
' blah: string[];\n' +
' foo(): number {\n' +
' return 0;\n' +
' }\n' +
' bar(): number {\n' +
' return 0;\n' +
' }\n' +
'}')
# Issue 1544 - Typescript declare formatting (no newline).
bt(
'declare const require: any;\n' +
'declare function greet(greeting: string): void;\n' +
'declare var foo: number;\n' +
'declare namespace myLib {\n' +
' function makeGreeting(s: string): string;\n' +
' let numberOfGreetings: number;\n' +
'}\n' +
'declare let test: any;')
bt(
'interface Test {\n' +
' blah: string[];\n' +
' foo(): number {\n' +
' return 0;\n' +
' }\n' +
' bar(): number {\n' +
' return 0;\n' +
' }\n' +
'}')
# Issue 583 - Functions with comments after them should still indent correctly.
bt(
'function exit(code) {\n' +
' setTimeout(function() {\n' +
' phantom.exit(code);\n' +
' }, 0);\n' +
' phantom.onError = function() {};\n' +
'}\n' +
'// Comment')
# Issue 806 - newline arrow functions
bt(
'a.b("c",\n' +
' () => d.e\n' +
')')
# Issue 810 - es6 object literal detection
bt(
'function badFormatting() {\n' +
' return {\n' +
' a,\n' +
' b: c,\n' +
' d: e,\n' +
' f: g,\n' +
' h,\n' +
' i,\n' +
' j: k\n' +
' }\n' +
'}\n' +
'\n' +
'function goodFormatting() {\n' +
' return {\n' +
' a: b,\n' +
' c,\n' +
' d: e,\n' +
' f: g,\n' +
' h,\n' +
' i,\n' +
' j: k\n' +
' }\n' +
'}')
# Issue 602 - ES6 object literal shorthand functions
bt(
'return {\n' +
' fn1() {},\n' +
' fn2() {}\n' +
'}')
bt(
'throw {\n' +
' fn1() {},\n' +
' fn2() {}\n' +
'}')
bt(
'foo({\n' +
' fn1(a) {}\n' +
' fn2(a) {}\n' +
'})')
bt(
'foo("text", {\n' +
' fn1(a) {}\n' +
' fn2(a) {}\n' +
'})')
bt(
'oneArg = {\n' +
' fn1(a) {\n' +
' do();\n' +
' },\n' +
' fn2() {}\n' +
'}')
bt(
'multiArg = {\n' +
' fn1(a, b, c) {\n' +
' do();\n' +
' },\n' +
' fn2() {}\n' +
'}')
bt(
'noArgs = {\n' +
' fn1() {\n' +
' do();\n' +
' },\n' +
' fn2() {}\n' +
'}')
bt(
'emptyFn = {\n' +
' fn1() {},\n' +
' fn2() {}\n' +
'}')
bt(
'nested = {\n' +
' fns: {\n' +
' fn1() {},\n' +
' fn2() {}\n' +
' }\n' +
'}')
bt(
'array = [{\n' +
' fn1() {},\n' +
' prop: val,\n' +
' fn2() {}\n' +
'}]')
bt(
'expr = expr ? expr : {\n' +
' fn1() {},\n' +
' fn2() {}\n' +
'}')
bt(
'strange = valid + {\n' +
' fn1() {},\n' +
' fn2() {\n' +
' return 1;\n' +
' }\n' +
'}.fn2()')
# Issue 854 - Arrow function with statement block
bt(
'test(() => {\n' +
' var a = {}\n' +
'\n' +
' a.what = () => true ? 1 : 2\n' +
'\n' +
' a.thing = () => {\n' +
' b();\n' +
' }\n' +
'})')
# Issue 406 - Multiline array
bt(
'var tempName = [\n' +
' "temp",\n' +
' process.pid,\n' +
' (Math.random() * 0x1000000000).toString(36),\n' +
' new Date().getTime()\n' +
'].join("-");')
# Issue 1374 - Parameters starting with ! or [ merged into single line
bt(
'fn(\n' +
' 1,\n' +
' !1,\n' +
' 1,\n' +
' [1]\n' +
')')
# Issue 1288 - Negative numbers remove newlines in array
bt(
'var array = [\n' +
' -1,\n' +
' 0,\n' +
' "a",\n' +
' -2,\n' +
' 1,\n' +
' -3,\n' +
'];')
# Issue 1229 - Negated expressions in array
bt(
'a = [\n' +
' true && 1,\n' +
' true && 1,\n' +
' true && 1\n' +
']\n' +
'a = [\n' +
' !true && 1,\n' +
' !true && 1,\n' +
' !true && 1\n' +
']')
# Issue #996 - Input ends with backslash throws exception
test_fragment(
'sd = 1;\n' +
'/')
# Issue #1079 - unbraced if with comments should still look right
bt(
'if (console.log)\n' +
' for (var i = 0; i < 20; ++i)\n' +
' if (i % 3)\n' +
' console.log(i);\n' +
'// all done\n' +
'console.log("done");')
# Issue #1085 - function should not have blank line in a number of cases
bt(
'var transformer =\n' +
' options.transformer ||\n' +
' globalSettings.transformer ||\n' +
' function(x) {\n' +
' return x;\n' +
' };')
# Issue #569 - function should not have blank line in a number of cases
bt(
'(function(global) {\n' +
' "use strict";\n' +
'\n' +
' /* jshint ignore:start */\n' +
' include "somefile.js"\n' +
' /* jshint ignore:end */\n' +
'}(this));')
bt(
'function bindAuthEvent(eventName) {\n' +
' self.auth.on(eventName, function(event, meta) {\n' +
' self.emit(eventName, event, meta);\n' +
' });\n' +
'}\n' +
'["logged_in", "logged_out", "signed_up", "updated_user"].forEach(bindAuthEvent);\n' +
'\n' +
'function bindBrowserEvent(eventName) {\n' +
' browser.on(eventName, function(event, meta) {\n' +
' self.emit(eventName, event, meta);\n' +
' });\n' +
'}\n' +
'["navigating"].forEach(bindBrowserEvent);')
# Issue #892 - new line between chained methods
bt(
'foo\n' +
' .who()\n' +
'\n' +
' .knows()\n' +
' // comment\n' +
' .nothing() // comment\n' +
'\n' +
' .more()')
# Issue #1107 - Missing space between words for label
bt(
'function f(a) {c: do if (x) {} else if (y) {} while(0); return 0;}',
# -- output --
'function f(a) {\n' +
' c: do\n' +
' if (x) {} else if (y) {}\n' +
' while (0);\n' +
' return 0;\n' +
'}')
bt(
'function f(a) {c: if (x) {} else if (y) {} return 0;}',
# -- output --
'function f(a) {\n' +
' c: if (x) {} else if (y) {}\n' +
' return 0;\n' +
'}')
#============================================================
# Test non-positionable-ops
self.reset_options()
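# Compound assignment operators are "non-positionable": unlike the binary
# operators exercised above, they are never split across lines, so every
# operator_position mode must leave them alone. Sketch with default options
# (assuming the public jsbeautifier module API):
#
#   import jsbeautifier
#   jsbeautifier.beautify('a+=2;')  # -> 'a += 2;'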
bt('a += 2;')
bt('a -= 2;')
bt('a *= 2;')
bt('a /= 2;')
bt('a %= 2;')
bt('a &= 2;')
bt('a ^= 2;')
bt('a |= 2;')
bt('a **= 2;')
bt('a <<= 2;')
bt('a >>= 2;')
#============================================================
#
self.reset_options()
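# Numeric-literal handling: the tokenizer consumes the longest run of
# characters valid for the literal's base and leaves the remainder as
# separate tokens, which is why the malformed inputs below split apart.
# Sketch with default options (assuming the public jsbeautifier module API):
#
#   import jsbeautifier
#   jsbeautifier.beautify('a=0x30e-5')  # -> 'a = 0x30e - 5' ('e' is a hex digit)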
# exponent literals
bt('a = 1e10')
bt('a = 1.3e10')
bt('a = 1.3e-10')
bt('a = -12345.3e-10')
bt('a = .12345e-10')
bt('a = 06789e-10')
bt('a = e - 10')
bt('a = 1.3e+10')
bt('a = 1.e-7')
bt('a = -12345.3e+10')
bt('a = .12345e+10')
bt('a = 06789e+10')
bt('a = e + 10')
bt('a=0e-12345.3e-10', 'a = 0e-12345 .3e-10')
bt('a=0.e-12345.3e-10', 'a = 0.e-12345 .3e-10')
bt('a=0x.e-12345.3e-10', 'a = 0x.e - 12345.3e-10')
bt('a=0x0.e-12345.3e-10', 'a = 0x0.e - 12345.3e-10')
bt('a=0x0.0e-12345.3e-10', 'a = 0x0 .0e-12345 .3e-10')
bt('a=0g-12345.3e-10', 'a = 0 g - 12345.3e-10')
bt('a=0.g-12345.3e-10', 'a = 0. g - 12345.3e-10')
bt('a=0x.g-12345.3e-10', 'a = 0x.g - 12345.3e-10')
bt('a=0x0.g-12345.3e-10', 'a = 0x0.g - 12345.3e-10')
bt('a=0x0.0g-12345.3e-10', 'a = 0x0 .0 g - 12345.3e-10')
# Decimal literals
bt('a = 0123456789;')
bt('a = 9876543210;')
bt('a = 5647308291;')
bt('a=030e-5', 'a = 030e-5')
bt('a=00+4', 'a = 00 + 4')
bt('a=32+4', 'a = 32 + 4')
bt('a=0.6g+4', 'a = 0.6 g + 4')
bt('a=01.10', 'a = 01.10')
bt('a=a.10', 'a = a .10')
bt('a=00B0x0', 'a = 00 B0x0')
bt('a=00B0xb0', 'a = 00 B0xb0')
bt('a=00B0x0b0', 'a = 00 B0x0b0')
bt('a=0090x0', 'a = 0090 x0')
bt('a=0g0b0o0', 'a = 0 g0b0o0')
# Hexadecimal literals
bt('a = 0x0123456789abcdef;')
bt('a = 0X0123456789ABCDEF;')
bt('a = 0xFeDcBa9876543210;')
bt('a=0x30e-5', 'a = 0x30e - 5')
bt('a=0xF0+4', 'a = 0xF0 + 4')
bt('a=0Xff+4', 'a = 0Xff + 4')
bt('a=0Xffg+4', 'a = 0Xff g + 4')
bt('a=0x01.10', 'a = 0x01 .10')
bt('a = 0xb0ce;')
bt('a = 0x0b0;')
bt('a=0x0B0x0', 'a = 0x0B0 x0')
bt('a=0x0B0xb0', 'a = 0x0B0 xb0')
bt('a=0x0B0x0b0', 'a = 0x0B0 x0b0')
bt('a=0X090x0', 'a = 0X090 x0')
bt('a=0Xg0b0o0', 'a = 0X g0b0o0')
# Octal literals
bt('a = 0o01234567;')
bt('a = 0O01234567;')
bt('a = 0o34120675;')
bt('a=0o30e-5', 'a = 0o30 e - 5')
bt('a=0o70+4', 'a = 0o70 + 4')
bt('a=0O77+4', 'a = 0O77 + 4')
bt('a=0O778+4', 'a = 0O77 8 + 4')
bt('a=0O77a+4', 'a = 0O77 a + 4')
bt('a=0o01.10', 'a = 0o01 .10')
bt('a=0o0B0x0', 'a = 0o0 B0x0')
bt('a=0o0B0xb0', 'a = 0o0 B0xb0')
bt('a=0o0B0x0b0', 'a = 0o0 B0x0b0')
bt('a=0O090x0', 'a = 0O0 90 x0')
bt('a=0Og0b0o0', 'a = 0O g0b0o0')
# Binary literals
bt('a = 0b010011;')
bt('a = 0B010011;')
bt('a = 0b01001100001111;')
bt('a=0b10e-5', 'a = 0b10 e - 5')
bt('a=0b10+4', 'a = 0b10 + 4')
bt('a=0B11+4', 'a = 0B11 + 4')
bt('a=0B112+4', 'a = 0B11 2 + 4')
bt('a=0B11a+4', 'a = 0B11 a + 4')
bt('a=0b01.10', 'a = 0b01 .10')
bt('a=0b0B0x0', 'a = 0b0 B0x0')
bt('a=0b0B0xb0', 'a = 0b0 B0xb0')
bt('a=0b0B0x0b0', 'a = 0b0 B0x0b0')
bt('a=0B090x0', 'a = 0B0 90 x0')
bt('a=0Bg0b0o0', 'a = 0B g0b0o0')
# BigInt literals
bt('a = 1n;')
bt('a = 1234567890123456789n;')
bt('a = -1234567890123456789n;')
bt('a = 1234567890123456789 N;')
bt('a=0b10e-5n', 'a = 0b10 e - 5n')
bt('a=.0n', 'a = .0 n')
bt('a=1.0n', 'a = 1.0 n')
bt('a=1e0n', 'a = 1e0 n')
bt('a=0n11a+4', 'a = 0n 11 a + 4')
#============================================================
        # brace_style ,preserve-inline tests - (brace_style = "collapse,preserve-inline")
self.reset_options()
self.options.brace_style = 'collapse,preserve-inline'
bt('import { asdf } from "asdf";')
bt('import { get } from "asdf";')
bt('function inLine() { console.log("oh em gee"); }')
bt('if (cancer) { console.log("Im sorry but you only have so long to live..."); }')
bt('if (ding) { console.log("dong"); } else { console.log("dang"); }')
bt(
'function kindaComplex() {\n' +
' var a = 2;\n' +
' var obj = {};\n' +
' var obj2 = { a: "a", b: "b" };\n' +
' var obj3 = {\n' +
' c: "c",\n' +
' d: "d",\n' +
' e: "e"\n' +
' };\n' +
'}')
bt(
'function complex() {\n' +
' console.log("wowe");\n' +
' (function() { var a = 2; var b = 3; })();\n' +
' $.each(arr, function(el, idx) { return el; });\n' +
' var obj = {\n' +
' a: function() { console.log("test"); },\n' +
' b() {\n' +
' console.log("test2");\n' +
' }\n' +
' };\n' +
'}',
# -- output --
'function complex() {\n' +
' console.log("wowe");\n' +
' (function() { var a = 2; var b = 3; })();\n' +
' $.each(arr, function(el, idx) { return el; });\n' +
' var obj = {\n' +
' a: function() { console.log("test"); },\n' +
' b() {\n' +
' console.log("test2");\n' +
' }\n' +
' };\n' +
'}')
        # brace_style ,preserve-inline tests - (brace_style = "expand,preserve-inline")
self.reset_options()
self.options.brace_style = 'expand,preserve-inline'
bt('import { asdf } from "asdf";')
bt('import { get } from "asdf";')
bt('function inLine() { console.log("oh em gee"); }')
bt('if (cancer) { console.log("Im sorry but you only have so long to live..."); }')
bt(
'if (ding) { console.log("dong"); } else { console.log("dang"); }',
# -- output --
'if (ding) { console.log("dong"); }\n' +
'else { console.log("dang"); }')
bt(
'function kindaComplex() {\n' +
' var a = 2;\n' +
' var obj = {};\n' +
' var obj2 = { a: "a", b: "b" };\n' +
' var obj3 = {\n' +
' c: "c",\n' +
' d: "d",\n' +
' e: "e"\n' +
' };\n' +
'}',
# -- output --
'function kindaComplex()\n' +
'{\n' +
' var a = 2;\n' +
' var obj = {};\n' +
' var obj2 = { a: "a", b: "b" };\n' +
' var obj3 = {\n' +
' c: "c",\n' +
' d: "d",\n' +
' e: "e"\n' +
' };\n' +
'}')
bt(
'function complex() {\n' +
' console.log("wowe");\n' +
' (function() { var a = 2; var b = 3; })();\n' +
' $.each(arr, function(el, idx) { return el; });\n' +
' var obj = {\n' +
' a: function() { console.log("test"); },\n' +
' b() {\n' +
' console.log("test2");\n' +
' }\n' +
' };\n' +
'}',
# -- output --
'function complex()\n' +
'{\n' +
' console.log("wowe");\n' +
' (function() { var a = 2; var b = 3; })();\n' +
' $.each(arr, function(el, idx) { return el; });\n' +
' var obj = {\n' +
' a: function() { console.log("test"); },\n' +
' b()\n' +
' {\n' +
' console.log("test2");\n' +
' }\n' +
' };\n' +
'}')
        # brace_style ,preserve-inline tests - (brace_style = "end-expand,preserve-inline")
self.reset_options()
self.options.brace_style = 'end-expand,preserve-inline'
bt('import { asdf } from "asdf";')
bt('import { get } from "asdf";')
bt('function inLine() { console.log("oh em gee"); }')
bt('if (cancer) { console.log("Im sorry but you only have so long to live..."); }')
bt(
'if (ding) { console.log("dong"); } else { console.log("dang"); }',
# -- output --
'if (ding) { console.log("dong"); }\n' +
'else { console.log("dang"); }')
bt(
'function kindaComplex() {\n' +
' var a = 2;\n' +
' var obj = {};\n' +
' var obj2 = { a: "a", b: "b" };\n' +
' var obj3 = {\n' +
' c: "c",\n' +
' d: "d",\n' +
' e: "e"\n' +
' };\n' +
'}')
bt(
'function complex() {\n' +
' console.log("wowe");\n' +
' (function() { var a = 2; var b = 3; })();\n' +
' $.each(arr, function(el, idx) { return el; });\n' +
' var obj = {\n' +
' a: function() { console.log("test"); },\n' +
' b() {\n' +
' console.log("test2");\n' +
' }\n' +
' };\n' +
'}',
# -- output --
'function complex() {\n' +
' console.log("wowe");\n' +
' (function() { var a = 2; var b = 3; })();\n' +
' $.each(arr, function(el, idx) { return el; });\n' +
' var obj = {\n' +
' a: function() { console.log("test"); },\n' +
' b() {\n' +
' console.log("test2");\n' +
' }\n' +
' };\n' +
'}')
        # brace_style ,preserve-inline tests - (brace_style = "none,preserve-inline")
self.reset_options()
self.options.brace_style = 'none,preserve-inline'
bt('import { asdf } from "asdf";')
bt('import { get } from "asdf";')
bt('function inLine() { console.log("oh em gee"); }')
bt('if (cancer) { console.log("Im sorry but you only have so long to live..."); }')
bt('if (ding) { console.log("dong"); } else { console.log("dang"); }')
bt(
'function kindaComplex() {\n' +
' var a = 2;\n' +
' var obj = {};\n' +
' var obj2 = { a: "a", b: "b" };\n' +
' var obj3 = {\n' +
' c: "c",\n' +
' d: "d",\n' +
' e: "e"\n' +
' };\n' +
'}')
bt(
'function complex() {\n' +
' console.log("wowe");\n' +
' (function() { var a = 2; var b = 3; })();\n' +
' $.each(arr, function(el, idx) { return el; });\n' +
' var obj = {\n' +
' a: function() { console.log("test"); },\n' +
' b() {\n' +
' console.log("test2");\n' +
' }\n' +
' };\n' +
'}',
# -- output --
'function complex() {\n' +
' console.log("wowe");\n' +
' (function() { var a = 2; var b = 3; })();\n' +
' $.each(arr, function(el, idx) { return el; });\n' +
' var obj = {\n' +
' a: function() { console.log("test"); },\n' +
' b() {\n' +
' console.log("test2");\n' +
' }\n' +
' };\n' +
'}')
        # brace_style ,preserve-inline tests - (brace_style = "collapse-preserve-inline")
self.reset_options()
self.options.brace_style = 'collapse-preserve-inline'
bt('import { asdf } from "asdf";')
bt('import { get } from "asdf";')
bt('function inLine() { console.log("oh em gee"); }')
bt('if (cancer) { console.log("Im sorry but you only have so long to live..."); }')
bt('if (ding) { console.log("dong"); } else { console.log("dang"); }')
bt(
'function kindaComplex() {\n' +
' var a = 2;\n' +
' var obj = {};\n' +
' var obj2 = { a: "a", b: "b" };\n' +
' var obj3 = {\n' +
' c: "c",\n' +
' d: "d",\n' +
' e: "e"\n' +
' };\n' +
'}')
bt(
'function complex() {\n' +
' console.log("wowe");\n' +
' (function() { var a = 2; var b = 3; })();\n' +
' $.each(arr, function(el, idx) { return el; });\n' +
' var obj = {\n' +
' a: function() { console.log("test"); },\n' +
' b() {\n' +
' console.log("test2");\n' +
' }\n' +
' };\n' +
'}',
# -- output --
'function complex() {\n' +
' console.log("wowe");\n' +
' (function() { var a = 2; var b = 3; })();\n' +
' $.each(arr, function(el, idx) { return el; });\n' +
' var obj = {\n' +
' a: function() { console.log("test"); },\n' +
' b() {\n' +
' console.log("test2");\n' +
' }\n' +
' };\n' +
'}')
#============================================================
# Destructured and related
self.reset_options()
self.options.brace_style = 'collapse,preserve-inline'
# Issue 382 - import destructured
bt(
'module "Even" {\n' +
' import { odd, oddly } from "Odd";\n' +
'}')
bt(
'import defaultMember from "module-name";\n' +
'import * as name from "module-name";\n' +
'import { member } from "module-name";\n' +
'import { member as alias } from "module-name";\n' +
'import { member1, member2 } from "module-name";\n' +
'import { member1, member2 as alias2 } from "module-name";\n' +
'import defaultMember, { member, member2 } from "module-name";\n' +
'import defaultMember, * as name from "module-name";\n' +
'import "module-name";\n' +
'import("module-name")')
# Issue #1393 - dynamic import()
bt(
'if (from < to) {\n' +
' import(`dynamic${library}`);\n' +
'} else {\n' +
' import("otherdynamic");\n' +
'}')
# Issue #1197 - dynamic import() arrow syntax
bt('frontend = Async(() => import("../frontend").then(m => m.default ))', 'frontend = Async(() => import("../frontend").then(m => m.default))')
# Issue 858 - from is a keyword only after import
bt(
'if (from < to) {\n' +
' from++;\n' +
'} else {\n' +
' from--;\n' +
'}')
        # Issue 511 - destructured
bt(
'var { b, c } = require("../stores");\n' +
'var { ProjectStore } = require("../stores");\n' +
'\n' +
'function takeThing({ prop }) {\n' +
' console.log("inner prop", prop)\n' +
'}')
# Issue 315 - Short objects
bt('var a = { b: { c: { d: e } } };')
bt(
'var a = {\n' +
' b: {\n' +
' c: { d: e }\n' +
' c3: { d: e }\n' +
' },\n' +
' b2: { c: { d: e } }\n' +
'};')
# Issue 370 - Short objects in array
bt(
'var methods = [\n' +
' { name: "to" },\n' +
' { name: "step" },\n' +
' { name: "move" },\n' +
' { name: "min" },\n' +
' { name: "max" }\n' +
'];')
# Issue 838 - Short objects in array
bt(
'function(url, callback) {\n' +
' var script = document.createElement("script")\n' +
' if (true) script.onreadystatechange = function() {\n' +
' foo();\n' +
' }\n' +
' else script.onload = callback;\n' +
'}')
# Issue 578 - Odd indenting after function
bt(
'function bindAuthEvent(eventName) {\n' +
' self.auth.on(eventName, function(event, meta) {\n' +
' self.emit(eventName, event, meta);\n' +
' });\n' +
'}\n' +
'["logged_in", "logged_out", "signed_up", "updated_user"].forEach(bindAuthEvent);')
# Issue #487 - some short expressions examples
bt(
'if (a == 1) { a++; }\n' +
'a = { a: a };\n' +
'UserDB.findOne({ username: "xyz" }, function(err, user) {});\n' +
'import { fs } from "fs";')
# Issue #982 - Fixed return expression collapse-preserve-inline
bt(
'function foo(arg) {\n' +
' if (!arg) { a(); }\n' +
' if (!arg) { return false; }\n' +
' if (!arg) { throw "inline"; }\n' +
' return true;\n' +
'}')
# Issue #338 - Short expressions
bt(
'if (someCondition) { return something; }\n' +
'if (someCondition) {\n' +
' return something;\n' +
'}\n' +
'if (someCondition) { break; }\n' +
'if (someCondition) {\n' +
' return something;\n' +
'}')
# Issue #1283 - Javascript ++ Operator get wrong indent
bt(
'{this.foo++\n' +
'bar}',
# -- output --
'{\n' +
' this.foo++\n' +
' bar\n' +
'}')
# Issue #1283 - Javascript ++ Operator get wrong indent (2)
bt(
'axios.interceptors.request.use(\n' +
' config => {\n' +
' // loading\n' +
' window.store.loading++\n' +
' let extraParams = {}\n' +
' }\n' +
')')
#============================================================
# keep_array_indentation false
self.reset_options()
        self.options.keep_array_indentation = False
bt(
'a = ["a", "b", "c",\n' +
' "d", "e", "f"]',
# -- output --
'a = ["a", "b", "c",\n' +
' "d", "e", "f"\n' +
']')
bt(
'a = ["a", "b", "c",\n' +
' "d", "e", "f",\n' +
' "g", "h", "i"]',
# -- output --
'a = ["a", "b", "c",\n' +
' "d", "e", "f",\n' +
' "g", "h", "i"\n' +
']')
bt(
'a = ["a", "b", "c",\n' +
' "d", "e", "f",\n' +
' "g", "h", "i"]',
# -- output --
'a = ["a", "b", "c",\n' +
' "d", "e", "f",\n' +
' "g", "h", "i"\n' +
']')
bt(
'var x = [{}\n' +
']',
# -- output --
'var x = [{}]')
bt(
'var x = [{foo:bar}\n' +
']',
# -- output --
'var x = [{\n' +
' foo: bar\n' +
'}]')
bt(
'a = ["something",\n' +
' "completely",\n' +
' "different"];\n' +
'if (x);',
# -- output --
'a = ["something",\n' +
' "completely",\n' +
' "different"\n' +
'];\n' +
'if (x);')
bt('a = ["a","b","c"]', 'a = ["a", "b", "c"]')
bt('a = ["a", "b","c"]', 'a = ["a", "b", "c"]')
bt(
'x = [{"a":0}]',
# -- output --
'x = [{\n' +
' "a": 0\n' +
'}]')
bt(
'{a([[a1]], {b;});}',
# -- output --
'{\n' +
' a([\n' +
' [a1]\n' +
' ], {\n' +
' b;\n' +
' });\n' +
'}')
bt(
'a ();\n' +
' [\n' +
' ["sdfsdfsd"],\n' +
' ["sdfsdfsdf"]\n' +
' ].toString();',
# -- output --
'a();\n' +
'[\n' +
' ["sdfsdfsd"],\n' +
' ["sdfsdfsdf"]\n' +
'].toString();')
bt(
'a ();\n' +
'a = [\n' +
' ["sdfsdfsd"],\n' +
' ["sdfsdfsdf"]\n' +
' ].toString();',
# -- output --
'a();\n' +
'a = [\n' +
' ["sdfsdfsd"],\n' +
' ["sdfsdfsdf"]\n' +
'].toString();')
bt(
'function() {\n' +
' Foo([\n' +
' ["sdfsdfsd"],\n' +
' ["sdfsdfsdf"]\n' +
' ]);\n' +
'}',
# -- output --
'function() {\n' +
' Foo([\n' +
' ["sdfsdfsd"],\n' +
' ["sdfsdfsdf"]\n' +
' ]);\n' +
'}')
bt(
'function foo() {\n' +
' return [\n' +
' "one",\n' +
' "two"\n' +
' ];\n' +
'}',
# -- output --
'function foo() {\n' +
' return [\n' +
' "one",\n' +
' "two"\n' +
' ];\n' +
'}')
bt(
'function foo() {\n' +
' return [\n' +
' {\n' +
' one: "x",\n' +
' two: [\n' +
' {\n' +
' id: "a",\n' +
' name: "apple"\n' +
' }, {\n' +
' id: "b",\n' +
' name: "banana"\n' +
' }\n' +
' ]\n' +
' }\n' +
' ];\n' +
'}',
# -- output --
'function foo() {\n' +
' return [{\n' +
' one: "x",\n' +
' two: [{\n' +
' id: "a",\n' +
' name: "apple"\n' +
' }, {\n' +
' id: "b",\n' +
' name: "banana"\n' +
' }]\n' +
' }];\n' +
'}')
bt(
'function foo() {\n' +
' return [\n' +
' {\n' +
' one: "x",\n' +
' two: [\n' +
' {\n' +
' id: "a",\n' +
' name: "apple"\n' +
' }, {\n' +
' id: "b",\n' +
' name: "banana"\n' +
' }\n' +
' ]\n' +
' }\n' +
' ];\n' +
'}',
# -- output --
'function foo() {\n' +
' return [{\n' +
' one: "x",\n' +
' two: [{\n' +
' id: "a",\n' +
' name: "apple"\n' +
' }, {\n' +
' id: "b",\n' +
' name: "banana"\n' +
' }]\n' +
' }];\n' +
'}')
#============================================================
# keep_array_indentation true
self.reset_options()
        self.options.keep_array_indentation = True
bt(
'a = ["a", "b", "c",\n' +
' "d", "e", "f"]',
# -- output --
'a = ["a", "b", "c",\n' +
' "d", "e", "f"]')
bt(
'a = ["a", "b", "c",\n' +
' "d", "e", "f",\n' +
' "g", "h", "i"]',
# -- output --
'a = ["a", "b", "c",\n' +
' "d", "e", "f",\n' +
' "g", "h", "i"]')
bt(
'a = ["a", "b", "c",\n' +
' "d", "e", "f",\n' +
' "g", "h", "i"]',
# -- output --
'a = ["a", "b", "c",\n' +
' "d", "e", "f",\n' +
' "g", "h", "i"]')
bt(
'var x = [{}\n' +
']',
# -- output --
'var x = [{}\n' +
']')
bt(
'var x = [{foo:bar}\n' +
']',
# -- output --
'var x = [{\n' +
' foo: bar\n' +
' }\n' +
']')
bt(
'a = ["something",\n' +
' "completely",\n' +
' "different"];\n' +
'if (x);',
# -- output --
'a = ["something",\n' +
' "completely",\n' +
' "different"];\n' +
'if (x);')
bt('a = ["a","b","c"]', 'a = ["a", "b", "c"]')
bt('a = ["a", "b","c"]', 'a = ["a", "b", "c"]')
bt(
'x = [{"a":0}]',
# -- output --
'x = [{\n' +
' "a": 0\n' +
'}]')
bt(
'{a([[a1]], {b;});}',
# -- output --
'{\n' +
' a([[a1]], {\n' +
' b;\n' +
' });\n' +
'}')
bt(
'a ();\n' +
' [\n' +
' ["sdfsdfsd"],\n' +
' ["sdfsdfsdf"]\n' +
' ].toString();',
# -- output --
'a();\n' +
' [\n' +
' ["sdfsdfsd"],\n' +
' ["sdfsdfsdf"]\n' +
' ].toString();')
bt(
'a ();\n' +
'a = [\n' +
' ["sdfsdfsd"],\n' +
' ["sdfsdfsdf"]\n' +
' ].toString();',
# -- output --
'a();\n' +
'a = [\n' +
' ["sdfsdfsd"],\n' +
' ["sdfsdfsdf"]\n' +
' ].toString();')
bt(
'function() {\n' +
' Foo([\n' +
' ["sdfsdfsd"],\n' +
' ["sdfsdfsdf"]\n' +
' ]);\n' +
'}',
# -- output --
'function() {\n' +
' Foo([\n' +
' ["sdfsdfsd"],\n' +
' ["sdfsdfsdf"]\n' +
' ]);\n' +
'}')
bt(
'function foo() {\n' +
' return [\n' +
' "one",\n' +
' "two"\n' +
' ];\n' +
'}',
# -- output --
'function foo() {\n' +
' return [\n' +
' "one",\n' +
' "two"\n' +
' ];\n' +
'}')
bt(
'function foo() {\n' +
' return [\n' +
' {\n' +
' one: "x",\n' +
' two: [\n' +
' {\n' +
' id: "a",\n' +
' name: "apple"\n' +
' }, {\n' +
' id: "b",\n' +
' name: "banana"\n' +
' }\n' +
' ]\n' +
' }\n' +
' ];\n' +
'}',
# -- output --
'function foo() {\n' +
' return [\n' +
' {\n' +
' one: "x",\n' +
' two: [\n' +
' {\n' +
' id: "a",\n' +
' name: "apple"\n' +
' }, {\n' +
' id: "b",\n' +
' name: "banana"\n' +
' }\n' +
' ]\n' +
' }\n' +
' ];\n' +
'}')
#============================================================
# Old tests
self.reset_options()
bt('')
test_fragment(' return .5')
test_fragment(
' return .5;\n' +
' a();')
test_fragment(
' return .5;\n' +
' a();')
test_fragment(
' return .5;\n' +
' a();')
test_fragment(' < div')
bt('a = 1', 'a = 1')
bt('a=1', 'a = 1')
bt('(3) / 2')
bt('["a", "b"].join("")')
bt(
'a();\n' +
'\n' +
'b();')
bt(
'var a = 1 var b = 2',
# -- output --
'var a = 1\n' +
'var b = 2')
bt(
'var a=1, b=c[d], e=6;',
# -- output --
'var a = 1,\n' +
' b = c[d],\n' +
' e = 6;')
bt(
'var a,\n' +
' b,\n' +
' c;')
bt(
'let a = 1 let b = 2',
# -- output --
'let a = 1\n' +
'let b = 2')
bt(
'let a=1, b=c[d], e=6;',
# -- output --
'let a = 1,\n' +
' b = c[d],\n' +
' e = 6;')
bt(
'let a,\n' +
' b,\n' +
' c;')
bt(
'const a = 1 const b = 2',
# -- output --
'const a = 1\n' +
'const b = 2')
bt(
'const a=1, b=c[d], e=6;',
# -- output --
'const a = 1,\n' +
' b = c[d],\n' +
' e = 6;')
bt(
'const a,\n' +
' b,\n' +
' c;')
bt('a = " 12345 "')
bt('a = \' 12345 \'')
bt('if (a == 1) b = 2;')
bt(
'if(1){2}else{3}',
# -- output --
'if (1) {\n' +
' 2\n' +
'} else {\n' +
' 3\n' +
'}')
bt('if(1||2);', 'if (1 || 2);')
bt('(a==1)||(b==2)', '(a == 1) || (b == 2)')
bt(
'var a = 1 if (2) 3;',
# -- output --
'var a = 1\n' +
'if (2) 3;')
bt('a = a + 1')
bt('a = a == 1')
bt('/12345[^678]*9+/.match(a)')
bt('a /= 5')
bt('a = 0.5 * 3')
bt('a *= 10.55')
bt('a < .5')
bt('a <= .5')
bt('a<.5', 'a < .5')
bt('a<=.5', 'a <= .5')
bt('a = [1, 2, 3, 4]')
bt('F*(g/=f)*g+b', 'F * (g /= f) * g + b')
bt(
'a.b({c:d})',
# -- output --
'a.b({\n' +
' c: d\n' +
'})')
bt(
'a.b\n' +
'(\n' +
'{\n' +
'c:\n' +
'd\n' +
'}\n' +
')',
# -- output --
'a.b({\n' +
' c: d\n' +
'})')
bt(
'a.b({c:"d"})',
# -- output --
'a.b({\n' +
' c: "d"\n' +
'})')
bt(
'a.b\n' +
'(\n' +
'{\n' +
'c:\n' +
'"d"\n' +
'}\n' +
')',
# -- output --
'a.b({\n' +
' c: "d"\n' +
'})')
bt('a=!b', 'a = !b')
bt('a=!!b', 'a = !!b')
bt('a?b:c', 'a ? b : c')
bt('a?1:2', 'a ? 1 : 2')
bt('a?(b):c', 'a ? (b) : c')
bt(
'x={a:1,b:w=="foo"?x:y,c:z}',
# -- output --
'x = {\n' +
' a: 1,\n' +
' b: w == "foo" ? x : y,\n' +
' c: z\n' +
'}')
bt('x=a?b?c?d:e:f:g;', 'x = a ? b ? c ? d : e : f : g;')
bt(
'x=a?b?c?d:{e1:1,e2:2}:f:g;',
# -- output --
'x = a ? b ? c ? d : {\n' +
' e1: 1,\n' +
' e2: 2\n' +
'} : f : g;')
bt('function void(void) {}')
bt('if(!a)foo();', 'if (!a) foo();')
bt('a=~a', 'a = ~a')
bt(
'a;/*comment*/b;',
# -- output --
'a; /*comment*/\n' +
'b;')
bt(
'a;/* comment */b;',
# -- output --
'a; /* comment */\n' +
'b;')
# simple comments don't get touched at all
test_fragment(
'a;/*\n' +
'comment\n' +
'*/b;',
# -- output --
'a;\n' +
'/*\n' +
'comment\n' +
'*/\n' +
'b;')
bt(
'a;/**\n' +
'* javadoc\n' +
'*/b;',
# -- output --
'a;\n' +
'/**\n' +
' * javadoc\n' +
' */\n' +
'b;')
test_fragment(
'a;/**\n' +
'\n' +
'no javadoc\n' +
'*/b;',
# -- output --
'a;\n' +
'/**\n' +
'\n' +
'no javadoc\n' +
'*/\n' +
'b;')
# comment blocks detected and reindented even w/o javadoc starter
bt(
'a;/*\n' +
'* javadoc\n' +
'*/b;',
# -- output --
'a;\n' +
'/*\n' +
' * javadoc\n' +
' */\n' +
'b;')
bt('if(a)break;', 'if (a) break;')
bt(
'if(a){break}',
# -- output --
'if (a) {\n' +
' break\n' +
'}')
bt('if((a))foo();', 'if ((a)) foo();')
bt('for(var i=0;;) a', 'for (var i = 0;;) a')
bt(
'for(var i=0;;)\n' +
'a',
# -- output --
'for (var i = 0;;)\n' +
' a')
bt('a++;')
bt('for(;;i++)a()', 'for (;; i++) a()')
bt(
'for(;;i++)\n' +
'a()',
# -- output --
'for (;; i++)\n' +
' a()')
bt('for(;;++i)a', 'for (;; ++i) a')
bt('return(1)', 'return (1)')
bt(
'try{a();}catch(b){c();}finally{d();}',
# -- output --
'try {\n' +
' a();\n' +
'} catch (b) {\n' +
' c();\n' +
'} finally {\n' +
' d();\n' +
'}')
# magic function call
bt('(xx)()')
# another magic function call
bt('a[1]()')
bt(
'if(a){b();}else if(c) foo();',
# -- output --
'if (a) {\n' +
' b();\n' +
'} else if (c) foo();')
bt(
'switch(x) {case 0: case 1: a(); break; default: break}',
# -- output --
'switch (x) {\n' +
' case 0:\n' +
' case 1:\n' +
' a();\n' +
' break;\n' +
' default:\n' +
' break\n' +
'}')
bt(
'switch(x) {default: case 1: a(); break; case 0: break}',
# -- output --
'switch (x) {\n' +
' default:\n' +
' case 1:\n' +
' a();\n' +
' break;\n' +
' case 0:\n' +
' break\n' +
'}')
bt(
'switch(x){case -1:break;case !y:break;}',
# -- output --
'switch (x) {\n' +
' case -1:\n' +
' break;\n' +
' case !y:\n' +
' break;\n' +
'}')
bt('a !== b')
bt(
'if (a) b(); else c();',
# -- output --
'if (a) b();\n' +
'else c();')
# typical greasemonkey start
bt(
'// comment\n' +
'(function something() {})')
# duplicating newlines
bt(
'{\n' +
'\n' +
' x();\n' +
'\n' +
'}')
bt('if (a in b) foo();')
bt('if (a of b) foo();')
bt('if (a of [1, 2, 3]) foo();')
bt(
'if(X)if(Y)a();else b();else c();',
# -- output --
'if (X)\n' +
' if (Y) a();\n' +
' else b();\n' +
'else c();')
bt(
'if (foo) bar();\n' +
'else break')
bt('var a, b;')
bt('var a = new function();')
test_fragment('new function')
bt('var a, b')
bt(
'{a:1, b:2}',
# -- output --
'{\n' +
' a: 1,\n' +
' b: 2\n' +
'}')
bt(
'a={1:[-1],2:[+1]}',
# -- output --
'a = {\n' +
' 1: [-1],\n' +
' 2: [+1]\n' +
'}')
bt(
'var l = {\'a\':\'1\', \'b\':\'2\'}',
# -- output --
'var l = {\n' +
' \'a\': \'1\',\n' +
' \'b\': \'2\'\n' +
'}')
bt('if (template.user[n] in bk) foo();')
bt('return 45')
bt(
'return this.prevObject ||\n' +
'\n' +
' this.constructor(null);')
bt('If[1]')
bt('Then[1]')
bt('a = 1;// comment', 'a = 1; // comment')
bt('a = 1; // comment')
bt(
'a = 1;\n' +
' // comment',
# -- output --
'a = 1;\n' +
'// comment')
bt('a = [-1, -1, -1]')
bt(
'// a\n' +
'// b\n' +
'\n' +
'\n' +
'\n' +
'// c\n' +
'// d')
bt(
'// func-comment\n' +
'\n' +
'function foo() {}\n' +
'\n' +
'// end-func-comment')
# The exact formatting these should have is open for discussion, but they are at least reasonable
bt(
'a = [ // comment\n' +
' -1, -1, -1\n' +
']')
bt(
'var a = [ // comment\n' +
' -1, -1, -1\n' +
']')
bt(
'a = [ // comment\n' +
' -1, // comment\n' +
' -1, -1\n' +
']')
bt(
'var a = [ // comment\n' +
' -1, // comment\n' +
' -1, -1\n' +
']')
bt(
'o = [{a:b},{c:d}]',
# -- output --
'o = [{\n' +
' a: b\n' +
'}, {\n' +
' c: d\n' +
'}]')
# was: extra space appended
bt(
'if (a) {\n' +
' do();\n' +
'}')
# if/else statement with empty body
bt(
'if (a) {\n' +
'// comment\n' +
'}else{\n' +
'// comment\n' +
'}',
# -- output --
'if (a) {\n' +
' // comment\n' +
'} else {\n' +
' // comment\n' +
'}')
# multiple comments indentation
bt(
'if (a) {\n' +
'// comment\n' +
'// comment\n' +
'}',
# -- output --
'if (a) {\n' +
' // comment\n' +
' // comment\n' +
'}')
bt(
'if (a) b() else c();',
# -- output --
'if (a) b()\n' +
'else c();')
bt(
'if (a) b() else if c() d();',
# -- output --
'if (a) b()\n' +
'else if c() d();')
bt('{}')
bt(
'{\n' +
'\n' +
'}')
bt(
'do { a(); } while ( 1 );',
# -- output --
'do {\n' +
' a();\n' +
'} while (1);')
bt('do {} while (1);')
bt(
'do {\n' +
'} while (1);',
# -- output --
'do {} while (1);')
bt(
'do {\n' +
'\n' +
'} while (1);')
bt('var a = x(a, b, c)')
bt(
'delete x if (a) b();',
# -- output --
'delete x\n' +
'if (a) b();')
bt(
'delete x[x] if (a) b();',
# -- output --
'delete x[x]\n' +
'if (a) b();')
bt('for(var a=1,b=2)d', 'for (var a = 1, b = 2) d')
bt('for(var a=1,b=2,c=3) d', 'for (var a = 1, b = 2, c = 3) d')
bt(
'for(var a=1,b=2,c=3;d<3;d++)\n' +
'e',
# -- output --
'for (var a = 1, b = 2, c = 3; d < 3; d++)\n' +
' e')
bt(
'function x(){(a||b).c()}',
# -- output --
'function x() {\n' +
' (a || b).c()\n' +
'}')
bt(
'function x(){return - 1}',
# -- output --
'function x() {\n' +
' return -1\n' +
'}')
bt(
'function x(){return ! a}',
# -- output --
'function x() {\n' +
' return !a\n' +
'}')
bt('x => x')
bt('(x) => x')
bt(
'x => { x }',
# -- output --
'x => {\n' +
' x\n' +
'}')
bt(
'(x) => { x }',
# -- output --
'(x) => {\n' +
' x\n' +
'}')
# a common snippet in jQuery plugins
bt(
'settings = $.extend({},defaults,settings);',
# -- output --
'settings = $.extend({}, defaults, settings);')
bt('$http().then().finally().default()')
bt(
'$http()\n' +
'.then()\n' +
'.finally()\n' +
'.default()',
# -- output --
'$http()\n' +
' .then()\n' +
' .finally()\n' +
' .default()')
bt('$http().when.in.new.catch().throw()')
bt(
'$http()\n' +
'.when\n' +
'.in\n' +
'.new\n' +
'.catch()\n' +
'.throw()',
# -- output --
'$http()\n' +
' .when\n' +
' .in\n' +
' .new\n' +
' .catch()\n' +
' .throw()')
bt(
'{xxx;}()',
# -- output --
'{\n' +
' xxx;\n' +
'}()')
bt(
'a = \'a\'\n' +
'b = \'b\'')
bt('a = /reg/exp')
bt('a = /reg/')
bt('/abc/.test()')
bt('/abc/i.test()')
bt(
'{/abc/i.test()}',
# -- output --
'{\n' +
' /abc/i.test()\n' +
'}')
bt('var x=(a)/a;', 'var x = (a) / a;')
bt('x != -1')
bt('for (; s-->0;)t', 'for (; s-- > 0;) t')
bt('for (; s++>0;)u', 'for (; s++ > 0;) u')
bt('a = s++>s--;', 'a = s++ > s--;')
bt('a = s++>--s;', 'a = s++ > --s;')
bt(
'{x=#1=[]}',
# -- output --
'{\n' +
' x = #1=[]\n' +
'}')
bt(
'{a:#1={}}',
# -- output --
'{\n' +
' a: #1={}\n' +
'}')
bt(
'{a:#1#}',
# -- output --
'{\n' +
' a: #1#\n' +
'}')
test_fragment('"incomplete-string')
test_fragment('\'incomplete-string')
test_fragment('/incomplete-regex')
test_fragment('`incomplete-template-string')
test_fragment(
'{a:1},{a:2}',
# -- output --
'{\n' +
' a: 1\n' +
'}, {\n' +
' a: 2\n' +
'}')
test_fragment(
'var ary=[{a:1}, {a:2}];',
# -- output --
'var ary = [{\n' +
' a: 1\n' +
'}, {\n' +
' a: 2\n' +
'}];')
# incomplete
test_fragment(
'{a:#1',
# -- output --
'{\n' +
' a: #1')
# incomplete
test_fragment(
'{a:#',
# -- output --
'{\n' +
' a: #')
# incomplete
test_fragment(
'}}}',
# -- output --
'}\n' +
'}\n' +
'}')
test_fragment(
'<!--\n' +
'void();\n' +
'// -->')
# incomplete regexp
test_fragment('a=/regexp', 'a = /regexp')
bt(
'{a:#1=[],b:#1#,c:#999999#}',
# -- output --
'{\n' +
' a: #1=[],\n' +
' b: #1#,\n' +
' c: #999999#\n' +
'}')
bt(
'do{x()}while(a>1)',
# -- output --
'do {\n' +
' x()\n' +
'} while (a > 1)')
bt(
'x(); /reg/exp.match(something)',
# -- output --
'x();\n' +
'/reg/exp.match(something)')
test_fragment(
'something();(',
# -- output --
'something();\n' +
'(')
test_fragment(
'#!she/bangs, she bangs\n' +
'f=1',
# -- output --
'#!she/bangs, she bangs\n' +
'\n' +
'f = 1')
test_fragment(
'#!she/bangs, she bangs\n' +
'\n' +
'f=1',
# -- output --
'#!she/bangs, she bangs\n' +
'\n' +
'f = 1')
test_fragment(
'#!she/bangs, she bangs\n' +
'\n' +
'/* comment */')
test_fragment(
'#!she/bangs, she bangs\n' +
'\n' +
'\n' +
'/* comment */')
test_fragment('#')
test_fragment('#!')
test_fragment('#include')
test_fragment('#include "settings.jsxinc"')
test_fragment(
'#include "settings.jsxinc"\n' +
'\n' +
'\n' +
'/* comment */')
test_fragment(
'#include "settings.jsxinc"\n' +
'\n' +
'\n' +
'#include "settings.jsxinc"\n' +
'\n' +
'\n' +
'/* comment */')
bt('function namespace::something()')
test_fragment(
'<!--\n' +
'something();\n' +
'-->')
test_fragment(
'<!--\n' +
'if(i<0){bla();}\n' +
'-->',
# -- output --
'<!--\n' +
'if (i < 0) {\n' +
' bla();\n' +
'}\n' +
'-->')
bt(
'{foo();--bar;}',
# -- output --
'{\n' +
' foo();\n' +
' --bar;\n' +
'}')
bt(
'{foo();++bar;}',
# -- output --
'{\n' +
' foo();\n' +
' ++bar;\n' +
'}')
bt(
'{--bar;}',
# -- output --
'{\n' +
' --bar;\n' +
'}')
bt(
'{++bar;}',
# -- output --
'{\n' +
' ++bar;\n' +
'}')
bt('if(true)++a;', 'if (true) ++a;')
bt(
'if(true)\n' +
'++a;',
# -- output --
'if (true)\n' +
' ++a;')
bt('if(true)--a;', 'if (true) --a;')
bt(
'if(true)\n' +
'--a;',
# -- output --
'if (true)\n' +
' --a;')
bt('elem[array]++;')
bt('elem++ * elem[array]++;')
bt('elem-- * -elem[array]++;')
bt('elem-- + elem[array]++;')
bt('elem-- - elem[array]++;')
bt('elem-- - -elem[array]++;')
bt('elem-- - +elem[array]++;')
# Handling of newlines around unary ++ and -- operators
bt(
'{foo\n' +
'++bar;}',
# -- output --
'{\n' +
' foo\n' +
' ++bar;\n' +
'}')
bt(
'{foo++\n' +
'bar;}',
# -- output --
'{\n' +
' foo++\n' +
' bar;\n' +
'}')
# This is invalid, but harder to guard against. Issue #203.
bt(
'{foo\n' +
'++\n' +
'bar;}',
# -- output --
'{\n' +
' foo\n' +
' ++\n' +
' bar;\n' +
'}')
# regexps
bt(
'a(/abc\\/\\/def/);b()',
# -- output --
'a(/abc\\/\\/def/);\n' +
'b()')
bt(
'a(/a[b\\[\\]c]d/);b()',
# -- output --
'a(/a[b\\[\\]c]d/);\n' +
'b()')
# incomplete char class
test_fragment('a(/a[b\\[')
# allow unescaped / in char classes
bt(
'a(/[a/b]/);b()',
# -- output --
'a(/[a/b]/);\n' +
'b()')
bt('typeof /foo\\//;')
bt('throw /foo\\//;')
bt('do /foo\\//;')
bt('return /foo\\//;')
bt(
'switch (a) {\n' +
' case /foo\\//:\n' +
' b\n' +
'}')
bt(
'if (a) /foo\\//\n' +
'else /foo\\//;')
bt('if (foo) /regex/.test();')
bt('for (index in [1, 2, 3]) /^test$/i.test(s)')
bt(
'function foo() {\n' +
' return [\n' +
' "one",\n' +
' "two"\n' +
' ];\n' +
'}')
bt(
'a=[[1,2],[4,5],[7,8]]',
# -- output --
'a = [\n' +
' [1, 2],\n' +
' [4, 5],\n' +
' [7, 8]\n' +
']')
bt(
'a=[[1,2],[4,5],function(){},[7,8]]',
# -- output --
'a = [\n' +
' [1, 2],\n' +
' [4, 5],\n' +
' function() {},\n' +
' [7, 8]\n' +
']')
bt(
'a=[[1,2],[4,5],function(){},function(){},[7,8]]',
# -- output --
'a = [\n' +
' [1, 2],\n' +
' [4, 5],\n' +
' function() {},\n' +
' function() {},\n' +
' [7, 8]\n' +
']')
bt(
'a=[[1,2],[4,5],function(){},[7,8]]',
# -- output --
'a = [\n' +
' [1, 2],\n' +
' [4, 5],\n' +
' function() {},\n' +
' [7, 8]\n' +
']')
bt('a=[b,c,function(){},function(){},d]', 'a = [b, c, function() {}, function() {}, d]')
bt(
'a=[b,c,\n' +
'function(){},function(){},d]',
# -- output --
'a = [b, c,\n' +
' function() {},\n' +
' function() {},\n' +
' d\n' +
']')
bt('a=[a[1],b[4],c[d[7]]]', 'a = [a[1], b[4], c[d[7]]]')
bt('[1,2,[3,4,[5,6],7],8]', '[1, 2, [3, 4, [5, 6], 7], 8]')
bt(
'[[["1","2"],["3","4"]],[["5","6","7"],["8","9","0"]],[["1","2","3"],["4","5","6","7"],["8","9","0"]]]',
# -- output --
'[\n' +
' [\n' +
' ["1", "2"],\n' +
' ["3", "4"]\n' +
' ],\n' +
' [\n' +
' ["5", "6", "7"],\n' +
' ["8", "9", "0"]\n' +
' ],\n' +
' [\n' +
' ["1", "2", "3"],\n' +
' ["4", "5", "6", "7"],\n' +
' ["8", "9", "0"]\n' +
' ]\n' +
']')
bt(
'{[x()[0]];indent;}',
# -- output --
'{\n' +
' [x()[0]];\n' +
' indent;\n' +
'}')
bt(
'/*\n' +
' foo trailing space \n' +
' * bar trailing space \n' +
'**/')
bt(
'{\n' +
' /*\n' +
' foo \n' +
' * bar \n' +
' */\n' +
'}')
bt('return ++i')
bt(
'obj.num++\n' +
'foo()\n' +
'bar()\n' +
'\n' +
'obj.num--\n' +
'foo()\n' +
'bar()')
bt('return !!x')
bt('return !x')
bt('return [1,2]', 'return [1, 2]')
bt('return;')
bt(
'return\n' +
'func')
bt('catch(e)', 'catch (e)')
bt(
'var a=1,b={foo:2,bar:3},{baz:4,wham:5},c=4;',
# -- output --
'var a = 1,\n' +
' b = {\n' +
' foo: 2,\n' +
' bar: 3\n' +
' },\n' +
' {\n' +
' baz: 4,\n' +
' wham: 5\n' +
' }, c = 4;')
bt(
'var a=1,b={foo:2,bar:3},{baz:4,wham:5},\n' +
'c=4;',
# -- output --
'var a = 1,\n' +
' b = {\n' +
' foo: 2,\n' +
' bar: 3\n' +
' },\n' +
' {\n' +
' baz: 4,\n' +
' wham: 5\n' +
' },\n' +
' c = 4;')
# inline comment
bt(
'function x(/*int*/ start, /*string*/ foo)',
# -- output --
'function x( /*int*/ start, /*string*/ foo)')
# javadoc comment
bt(
'/**\n' +
'* foo\n' +
'*/',
# -- output --
'/**\n' +
' * foo\n' +
' */')
bt(
'{\n' +
'/**\n' +
'* foo\n' +
'*/\n' +
'}',
# -- output --
'{\n' +
' /**\n' +
' * foo\n' +
' */\n' +
'}')
# starless block comment
bt(
'/**\n' +
'foo\n' +
'*/')
bt(
'/**\n' +
'foo\n' +
'**/')
bt(
'/**\n' +
'foo\n' +
'bar\n' +
'**/')
bt(
'/**\n' +
'foo\n' +
'\n' +
'bar\n' +
'**/')
bt(
'/**\n' +
'foo\n' +
' bar\n' +
'**/')
bt(
'{\n' +
'/**\n' +
'foo\n' +
'*/\n' +
'}',
# -- output --
'{\n' +
' /**\n' +
' foo\n' +
' */\n' +
'}')
bt(
'{\n' +
'/**\n' +
'foo\n' +
'**/\n' +
'}',
# -- output --
'{\n' +
' /**\n' +
' foo\n' +
' **/\n' +
'}')
bt(
'{\n' +
'/**\n' +
'foo\n' +
'bar\n' +
'**/\n' +
'}',
# -- output --
'{\n' +
' /**\n' +
' foo\n' +
' bar\n' +
' **/\n' +
'}')
bt(
'{\n' +
'/**\n' +
'foo\n' +
'\n' +
'bar\n' +
'**/\n' +
'}',
# -- output --
'{\n' +
' /**\n' +
' foo\n' +
'\n' +
' bar\n' +
' **/\n' +
'}')
bt(
'{\n' +
'/**\n' +
'foo\n' +
' bar\n' +
'**/\n' +
'}',
# -- output --
'{\n' +
' /**\n' +
' foo\n' +
' bar\n' +
' **/\n' +
'}')
bt(
'{\n' +
' /**\n' +
' foo\n' +
'bar\n' +
' **/\n' +
'}')
bt(
'var a,b,c=1,d,e,f=2;',
# -- output --
'var a, b, c = 1,\n' +
' d, e, f = 2;')
bt(
'var a,b,c=[],d,e,f=2;',
# -- output --
'var a, b, c = [],\n' +
' d, e, f = 2;')
bt(
'function() {\n' +
' var a, b, c, d, e = [],\n' +
' f;\n' +
'}')
bt(
'do/regexp/;\n' +
'while(1);',
# -- output --
'do /regexp/;\n' +
'while (1);')
bt(
'var a = a,\n' +
'a;\n' +
'b = {\n' +
'b\n' +
'}',
# -- output --
'var a = a,\n' +
' a;\n' +
'b = {\n' +
' b\n' +
'}')
bt(
'var a = a,\n' +
' /* c */\n' +
' b;')
bt(
'var a = a,\n' +
' // c\n' +
' b;')
# weird element referencing
bt('foo.("bar");')
bt(
'if (a) a()\n' +
'else b()\n' +
'newline()')
bt(
'if (a) a()\n' +
'newline()')
bt('a=typeof(x)', 'a = typeof(x)')
bt(
'var a = function() {\n' +
' return null;\n' +
' },\n' +
' b = false;')
bt(
'var a = function() {\n' +
' func1()\n' +
'}')
bt(
'var a = function() {\n' +
' func1()\n' +
'}\n' +
'var b = function() {\n' +
' func2()\n' +
'}')
# code with and without semicolons
bt(
'var whatever = require("whatever");\n' +
'function() {\n' +
' a = 6;\n' +
'}',
# -- output --
'var whatever = require("whatever");\n' +
'\n' +
'function() {\n' +
' a = 6;\n' +
'}')
bt(
'var whatever = require("whatever")\n' +
'function() {\n' +
' a = 6\n' +
'}',
# -- output --
'var whatever = require("whatever")\n' +
'\n' +
'function() {\n' +
' a = 6\n' +
'}')
bt(
'{"x":[{"a":1,"b":3},\n' +
'7,8,8,8,8,{"b":99},{"a":11}]}',
# -- output --
'{\n' +
' "x": [{\n' +
' "a": 1,\n' +
' "b": 3\n' +
' },\n' +
' 7, 8, 8, 8, 8, {\n' +
' "b": 99\n' +
' }, {\n' +
' "a": 11\n' +
' }\n' +
' ]\n' +
'}')
bt(
'{"x":[{"a":1,"b":3},7,8,8,8,8,{"b":99},{"a":11}]}',
# -- output --
'{\n' +
' "x": [{\n' +
' "a": 1,\n' +
' "b": 3\n' +
' }, 7, 8, 8, 8, 8, {\n' +
' "b": 99\n' +
' }, {\n' +
' "a": 11\n' +
' }]\n' +
'}')
bt(
'{"1":{"1a":"1b"},"2"}',
# -- output --
'{\n' +
' "1": {\n' +
' "1a": "1b"\n' +
' },\n' +
' "2"\n' +
'}')
bt(
'{a:{a:b},c}',
# -- output --
'{\n' +
' a: {\n' +
' a: b\n' +
' },\n' +
' c\n' +
'}')
bt(
'{[y[a]];keep_indent;}',
# -- output --
'{\n' +
' [y[a]];\n' +
' keep_indent;\n' +
'}')
bt(
'if (x) {y} else { if (x) {y}}',
# -- output --
'if (x) {\n' +
' y\n' +
'} else {\n' +
' if (x) {\n' +
' y\n' +
' }\n' +
'}')
bt(
'if (foo) one()\n' +
'two()\n' +
'three()')
bt(
'if (1 + foo() && bar(baz()) / 2) one()\n' +
'two()\n' +
'three()')
bt(
'if (1 + foo() && bar(baz()) / 2) one();\n' +
'two();\n' +
'three();')
bt(
'var a=1,b={bang:2},c=3;',
# -- output --
'var a = 1,\n' +
' b = {\n' +
' bang: 2\n' +
' },\n' +
' c = 3;')
bt(
'var a={bing:1},b=2,c=3;',
# -- output --
'var a = {\n' +
' bing: 1\n' +
' },\n' +
' b = 2,\n' +
' c = 3;')
def test_beautifier_unconverted(self):
test_fragment = self.decodesto
bt = self.bt
self.reset_options()
#============================================================
bt(None, "")
self.reset_options()
#============================================================
# Test user pebkac protection, converts dash names to underscored names
setattr(self.options, 'end-with-newline', True)
test_fragment(None, '\n')
self.reset_options()
#============================================================
# Test passing dictionary or tuple
self.options = {'end_with_newline': True, 'eol': '\r\n' }
test_fragment(None, '\r\n')
self.options = {'end-with-newline': True}
test_fragment(None, '\n')
self.options = {'end-with-newline': False}
test_fragment(None, '')
self.options = ( ('end-with-newline', True), ('eol', '\r') )
test_fragment(None, '\r')
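        # (Note, my addition: as the tests above show, options may be passed
        # as a plain dict, a tuple of key/value pairs, or an options object,
        # and dash-separated keys are normalized to underscored names, so
        # 'end-with-newline' and 'end_with_newline' behave identically.)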
self.reset_options()
#============================================================
self.options.indent_size = 1
self.options.indent_char = ' '
bt('{ one_char() }', "{\n one_char()\n}")
bt('var a,b=1,c=2', 'var a, b = 1,\n c = 2')
self.options.indent_size = 4
self.options.indent_char = ' '
bt('{ one_char() }', "{\n one_char()\n}")
self.options.indent_size = 1
self.options.indent_char = "\t"
bt('{ one_char() }', "{\n\tone_char()\n}")
bt('x = a ? b : c; x;', 'x = a ? b : c;\nx;')
        # set indent_size/indent_char to values that would otherwise change the output,
        # but with indent_with_tabs on, tabs should override them
self.options.indent_size = 5
self.options.indent_char = ' '
self.options.indent_with_tabs = True
bt('{ one_char() }', "{\n\tone_char()\n}")
bt('x = a ? b : c; x;', 'x = a ? b : c;\nx;')
self.reset_options()
#============================================================
self.options.preserve_newlines = False
bt('var\na=dont_preserve_newlines;', 'var a = dont_preserve_newlines;')
# make sure the blank line between function definitions stays
# even when preserve_newlines = False
bt('function foo() {\n return 1;\n}\n\nfunction foo() {\n return 1;\n}')
bt('function foo() {\n return 1;\n}\nfunction foo() {\n return 1;\n}',
'function foo() {\n return 1;\n}\n\nfunction foo() {\n return 1;\n}'
)
bt('function foo() {\n return 1;\n}\n\n\nfunction foo() {\n return 1;\n}',
'function foo() {\n return 1;\n}\n\nfunction foo() {\n return 1;\n}'
)
self.options.preserve_newlines = True
bt('var\na=do_preserve_newlines;', 'var\n a = do_preserve_newlines;')
bt('if (foo) // comment\n{\n bar();\n}')
self.reset_options()
#============================================================
# START tests for brace positioning
# If this is ever supported, update tests for each brace style.
# test_fragment('return\n{', 'return\n{') # can't support this?, but that's an improbable and extreme case anyway.
self.options.brace_style = 'expand'
bt('//case 1\nif (a == 1)\n{}\n//case 2\nelse if (a == 2)\n{}')
bt('if(1){2}else{3}', "if (1)\n{\n 2\n}\nelse\n{\n 3\n}")
bt('try{a();}catch(b){c();}catch(d){}finally{e();}',
"try\n{\n a();\n}\ncatch (b)\n{\n c();\n}\ncatch (d)\n{}\nfinally\n{\n e();\n}")
bt('if(a){b();}else if(c) foo();',
"if (a)\n{\n b();\n}\nelse if (c) foo();")
bt("if (a) {\n// comment\n}else{\n// comment\n}",
"if (a)\n{\n // comment\n}\nelse\n{\n // comment\n}") # if/else statement with empty body
bt('if (x) {y} else { if (x) {y}}',
'if (x)\n{\n y\n}\nelse\n{\n if (x)\n {\n y\n }\n}')
bt('if (a)\n{\nb;\n}\nelse\n{\nc;\n}',
'if (a)\n{\n b;\n}\nelse\n{\n c;\n}')
test_fragment(' /*\n* xx\n*/\n// xx\nif (foo) {\n bar();\n}',
' /*\n * xx\n */\n // xx\n if (foo)\n {\n bar();\n }')
bt('if (foo)\n{}\nelse /regex/.test();')
test_fragment('if (foo) {', 'if (foo)\n{')
test_fragment('foo {', 'foo\n{')
test_fragment('return {', 'return {') # return needs the brace.
test_fragment('return /* inline */ {', 'return /* inline */ {')
test_fragment('return;\n{', 'return;\n{')
bt("throw {}")
bt("throw {\n foo;\n}")
bt('var foo = {}')
bt('function x() {\n foo();\n}zzz', 'function x()\n{\n foo();\n}\nzzz')
test_fragment('a: do {} while (); xxx', 'a: do {} while ();\nxxx')
bt('{a: do {} while (); xxx}', '{\n a: do {} while ();xxx\n}')
bt('var a = new function() {};')
bt('var a = new function a() {};', 'var a = new function a()\n{};')
bt('var a = new function()\n{};', 'var a = new function() {};')
bt('var a = new function a()\n{};')
bt('var a = new function a()\n {},\n b = new function b()\n {};')
bt("foo({\n 'a': 1\n},\n10);",
"foo(\n {\n 'a': 1\n },\n 10);")
bt('(["foo","bar"]).each(function(i) {return i;});',
'(["foo", "bar"]).each(function(i)\n{\n return i;\n});')
bt('(function(i) {return i;})();',
'(function(i)\n{\n return i;\n})();')
bt( "test( /*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
"}, /*Argument 2\n" +
" */ {\n" +
" 'Value2': '2'\n" +
"});",
# expected
"test( /*Argument 1*/\n" +
" {\n" +
" 'Value1': '1'\n" +
" },\n" +
" /*Argument 2\n" +
" */\n" +
" {\n" +
" 'Value2': '2'\n" +
" });")
bt( "test(\n" +
"/*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
"},\n" +
"/*Argument 2\n" +
" */ {\n" +
" 'Value2': '2'\n" +
"});",
# expected
"test(\n" +
" /*Argument 1*/\n" +
" {\n" +
" 'Value1': '1'\n" +
" },\n" +
" /*Argument 2\n" +
" */\n" +
" {\n" +
" 'Value2': '2'\n" +
" });")
bt( "test( /*Argument 1*/\n" +
"{\n" +
" 'Value1': '1'\n" +
"}, /*Argument 2\n" +
" */\n" +
"{\n" +
" 'Value2': '2'\n" +
"});",
# expected
"test( /*Argument 1*/\n" +
" {\n" +
" 'Value1': '1'\n" +
" },\n" +
" /*Argument 2\n" +
" */\n" +
" {\n" +
" 'Value2': '2'\n" +
" });")
self.options.brace_style = 'collapse'
bt('//case 1\nif (a == 1) {}\n//case 2\nelse if (a == 2) {}')
bt('if(1){2}else{3}', "if (1) {\n 2\n} else {\n 3\n}")
bt('try{a();}catch(b){c();}catch(d){}finally{e();}',
"try {\n a();\n} catch (b) {\n c();\n} catch (d) {} finally {\n e();\n}")
bt('if(a){b();}else if(c) foo();',
"if (a) {\n b();\n} else if (c) foo();")
bt("if (a) {\n// comment\n}else{\n// comment\n}",
"if (a) {\n // comment\n} else {\n // comment\n}") # if/else statement with empty body
bt('if (x) {y} else { if (x) {y}}',
'if (x) {\n y\n} else {\n if (x) {\n y\n }\n}')
bt('if (a)\n{\nb;\n}\nelse\n{\nc;\n}',
'if (a) {\n b;\n} else {\n c;\n}')
test_fragment(' /*\n* xx\n*/\n// xx\nif (foo) {\n bar();\n}',
' /*\n * xx\n */\n // xx\n if (foo) {\n bar();\n }')
bt('if (foo) {} else /regex/.test();')
test_fragment('if (foo) {', 'if (foo) {')
test_fragment('foo {', 'foo {')
test_fragment('return {', 'return {') # return needs the brace.
test_fragment('return /* inline */ {', 'return /* inline */ {')
test_fragment('return;\n{', 'return; {')
bt("throw {}")
bt("throw {\n foo;\n}")
bt('var foo = {}')
bt('function x() {\n foo();\n}zzz', 'function x() {\n foo();\n}\nzzz')
test_fragment('a: do {} while (); xxx', 'a: do {} while ();\nxxx')
bt('{a: do {} while (); xxx}', '{\n a: do {} while ();xxx\n}')
bt('var a = new function() {};')
bt('var a = new function a() {};')
bt('var a = new function()\n{};', 'var a = new function() {};')
bt('var a = new function a()\n{};', 'var a = new function a() {};')
bt('var a = new function a()\n {},\n b = new function b()\n {};', 'var a = new function a() {},\n b = new function b() {};')
bt("foo({\n 'a': 1\n},\n10);",
"foo({\n 'a': 1\n },\n 10);")
bt('(["foo","bar"]).each(function(i) {return i;});',
'(["foo", "bar"]).each(function(i) {\n return i;\n});')
bt('(function(i) {return i;})();',
'(function(i) {\n return i;\n})();')
bt( "test( /*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
"}, /*Argument 2\n" +
" */ {\n" +
" 'Value2': '2'\n" +
"});",
# expected
"test( /*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
" },\n" +
" /*Argument 2\n" +
" */\n" +
" {\n" +
" 'Value2': '2'\n" +
" });")
bt( "test(\n" +
"/*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
"},\n" +
"/*Argument 2\n" +
" */ {\n" +
" 'Value2': '2'\n" +
"});",
# expected
"test(\n" +
" /*Argument 1*/\n" +
" {\n" +
" 'Value1': '1'\n" +
" },\n" +
" /*Argument 2\n" +
" */\n" +
" {\n" +
" 'Value2': '2'\n" +
" });")
bt( "test( /*Argument 1*/\n" +
"{\n" +
" 'Value1': '1'\n" +
"}, /*Argument 2\n" +
" */\n" +
"{\n" +
" 'Value2': '2'\n" +
"});",
# expected
"test( /*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
" },\n" +
" /*Argument 2\n" +
" */\n" +
" {\n" +
" 'Value2': '2'\n" +
" });")
self.options.brace_style = "end-expand"
bt('//case 1\nif (a == 1) {}\n//case 2\nelse if (a == 2) {}')
bt('if(1){2}else{3}', "if (1) {\n 2\n}\nelse {\n 3\n}")
bt('try{a();}catch(b){c();}catch(d){}finally{e();}',
"try {\n a();\n}\ncatch (b) {\n c();\n}\ncatch (d) {}\nfinally {\n e();\n}")
bt('if(a){b();}else if(c) foo();',
"if (a) {\n b();\n}\nelse if (c) foo();")
bt("if (a) {\n// comment\n}else{\n// comment\n}",
"if (a) {\n // comment\n}\nelse {\n // comment\n}") # if/else statement with empty body
bt('if (x) {y} else { if (x) {y}}',
'if (x) {\n y\n}\nelse {\n if (x) {\n y\n }\n}')
bt('if (a)\n{\nb;\n}\nelse\n{\nc;\n}',
'if (a) {\n b;\n}\nelse {\n c;\n}')
test_fragment(' /*\n* xx\n*/\n// xx\nif (foo) {\n bar();\n}',
' /*\n * xx\n */\n // xx\n if (foo) {\n bar();\n }')
bt('if (foo) {}\nelse /regex/.test();')
test_fragment('if (foo) {', 'if (foo) {')
test_fragment('foo {', 'foo {')
test_fragment('return {', 'return {') # return needs the brace.
test_fragment('return /* inline */ {', 'return /* inline */ {')
test_fragment('return;\n{', 'return; {')
bt("throw {}")
bt("throw {\n foo;\n}")
bt('var foo = {}')
bt('function x() {\n foo();\n}zzz', 'function x() {\n foo();\n}\nzzz')
test_fragment('a: do {} while (); xxx', 'a: do {} while ();\nxxx')
bt('{a: do {} while (); xxx}', '{\n a: do {} while ();xxx\n}')
bt('var a = new function() {};')
bt('var a = new function a() {};')
bt('var a = new function()\n{};', 'var a = new function() {};')
bt('var a = new function a()\n{};', 'var a = new function a() {};')
bt('var a = new function a()\n {},\n b = new function b()\n {};', 'var a = new function a() {},\n b = new function b() {};')
bt("foo({\n 'a': 1\n},\n10);",
"foo({\n 'a': 1\n },\n 10);")
bt('(["foo","bar"]).each(function(i) {return i;});',
'(["foo", "bar"]).each(function(i) {\n return i;\n});')
bt('(function(i) {return i;})();',
'(function(i) {\n return i;\n})();')
bt( "test( /*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
"}, /*Argument 2\n" +
" */ {\n" +
" 'Value2': '2'\n" +
"});",
# expected
"test( /*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
" },\n" +
" /*Argument 2\n" +
" */\n" +
" {\n" +
" 'Value2': '2'\n" +
" });")
bt( "test(\n" +
"/*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
"},\n" +
"/*Argument 2\n" +
" */ {\n" +
" 'Value2': '2'\n" +
"});",
# expected
"test(\n" +
" /*Argument 1*/\n" +
" {\n" +
" 'Value1': '1'\n" +
" },\n" +
" /*Argument 2\n" +
" */\n" +
" {\n" +
" 'Value2': '2'\n" +
" });")
bt( "test( /*Argument 1*/\n" +
"{\n" +
" 'Value1': '1'\n" +
"}, /*Argument 2\n" +
" */\n" +
"{\n" +
" 'Value2': '2'\n" +
"});",
# expected
"test( /*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
" },\n" +
" /*Argument 2\n" +
" */\n" +
" {\n" +
" 'Value2': '2'\n" +
" });")
self.options.brace_style = 'none'
bt('//case 1\nif (a == 1)\n{}\n//case 2\nelse if (a == 2)\n{}')
bt('if(1){2}else{3}', "if (1) {\n 2\n} else {\n 3\n}")
bt('try{a();}catch(b){c();}catch(d){}finally{e();}',
"try {\n a();\n} catch (b) {\n c();\n} catch (d) {} finally {\n e();\n}")
bt('if(a){b();}else if(c) foo();',
"if (a) {\n b();\n} else if (c) foo();")
bt("if (a) {\n// comment\n}else{\n// comment\n}",
"if (a) {\n // comment\n} else {\n // comment\n}") # if/else statement with empty body
bt('if (x) {y} else { if (x) {y}}',
'if (x) {\n y\n} else {\n if (x) {\n y\n }\n}')
bt('if (a)\n{\nb;\n}\nelse\n{\nc;\n}',
'if (a)\n{\n b;\n}\nelse\n{\n c;\n}')
test_fragment(' /*\n* xx\n*/\n// xx\nif (foo) {\n bar();\n}',
' /*\n * xx\n */\n // xx\n if (foo) {\n bar();\n }')
bt('if (foo)\n{}\nelse /regex/.test();')
test_fragment('if (foo) {')
test_fragment('foo {')
test_fragment('return {') # return needs the brace.
test_fragment('return /* inline */ {')
test_fragment('return;\n{')
bt("throw {}")
bt("throw {\n foo;\n}")
bt('var foo = {}')
bt('function x() {\n foo();\n}zzz', 'function x() {\n foo();\n}\nzzz')
test_fragment('a: do {} while (); xxx', 'a: do {} while ();\nxxx')
bt('{a: do {} while (); xxx}', '{\n a: do {} while ();xxx\n}')
bt('var a = new function() {};')
bt('var a = new function a() {};')
bt('var a = new function()\n{};', 'var a = new function() {};')
bt('var a = new function a()\n{};')
bt('var a = new function a()\n {},\n b = new function b()\n {};')
bt("foo({\n 'a': 1\n},\n10);",
"foo({\n 'a': 1\n },\n 10);")
bt('(["foo","bar"]).each(function(i) {return i;});',
'(["foo", "bar"]).each(function(i) {\n return i;\n});')
bt('(function(i) {return i;})();',
'(function(i) {\n return i;\n})();')
bt( "test( /*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
"}, /*Argument 2\n" +
" */ {\n" +
" 'Value2': '2'\n" +
"});",
# expected
"test( /*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
" },\n" +
" /*Argument 2\n" +
" */\n" +
" {\n" +
" 'Value2': '2'\n" +
" });")
bt( "test(\n" +
"/*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
"},\n" +
"/*Argument 2\n" +
" */ {\n" +
" 'Value2': '2'\n" +
"});",
# expected
"test(\n" +
" /*Argument 1*/\n" +
" {\n" +
" 'Value1': '1'\n" +
" },\n" +
" /*Argument 2\n" +
" */\n" +
" {\n" +
" 'Value2': '2'\n" +
" });")
bt( "test( /*Argument 1*/\n" +
"{\n" +
" 'Value1': '1'\n" +
"}, /*Argument 2\n" +
" */\n" +
"{\n" +
" 'Value2': '2'\n" +
"});",
# expected
"test( /*Argument 1*/\n" +
" {\n" +
" 'Value1': '1'\n" +
" },\n" +
" /*Argument 2\n" +
" */\n" +
" {\n" +
" 'Value2': '2'\n" +
" });")
# END tests for brace position
self.reset_options()
#============================================================
self.options.preserve_newlines = True
bt("var a = 'foo' +\n 'bar';")
bt('"foo""bar""baz"', '"foo"\n"bar"\n"baz"')
bt("'foo''bar''baz'", "'foo'\n'bar'\n'baz'")
bt("{\n get foo() {}\n}")
bt("{\n var a = get\n foo();\n}")
bt("{\n set foo() {}\n}")
bt("{\n var a = set\n foo();\n}")
bt("var x = {\n get function()\n}")
bt("var x = {\n set function()\n}")
# According to my current research get/set have no special meaning outside of an object literal
bt("var x = set\n\na() {}", "var x = set\n\na() {}")
bt("var x = set\n\nfunction() {}", "var x = set\n\nfunction() {}")
bt('<!-- foo\nbar();\n-->')
bt('<!-- dont crash') # -->
bt('for () /abc/.test()')
bt('if (k) /aaa/m.test(v) && l();')
bt('switch (true) {\n case /swf/i.test(foo):\n bar();\n}')
bt('createdAt = {\n type: Date,\n default: Date.now\n}')
bt('switch (createdAt) {\n case a:\n Date,\n default:\n Date.now\n}')
self.reset_options()
#============================================================
def decodesto(self, input, expectation=None):
if expectation is None:
expectation = input
self.assertMultiLineEqual(
jsbeautifier.beautify(input, self.options), expectation)
        # If the expected output differs from the input, beautify it again:
        # beautified output should be unchanged when run through twice.
        if expectation is not None:
self.assertMultiLineEqual(
jsbeautifier.beautify(expectation, self.options), expectation)
if self.options is None or not isinstance(self.options, (dict, tuple)):
# Everywhere we do newlines, they should be replaced with opts.eol
self.options.eol = '\r\\n'
expectation = expectation.replace('\n', '\r\n')
self.options.disabled = True
self.assertMultiLineEqual(
jsbeautifier.beautify(input, self.options), input or '')
self.assertMultiLineEqual(
jsbeautifier.beautify('\n\n' + expectation, self.options), '\n\n' + expectation)
        self.options.disabled = False
self.assertMultiLineEqual(
jsbeautifier.beautify(input, self.options), expectation)
if input and input.find('\n') != -1:
input = input.replace('\n', '\r\n')
self.assertMultiLineEqual(
jsbeautifier.beautify(input, self.options), expectation)
# Ensure support for auto eol detection
self.options.eol = 'auto'
self.assertMultiLineEqual(
jsbeautifier.beautify(input, self.options), expectation)
self.options.eol = '\n'
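    # (Summary, my addition: decodesto asserts that beautify(input) matches
    # the expectation, that beautifying the expectation again leaves it
    # unchanged (idempotency), that options.disabled passes input through
    # untouched, and that '\n', '\r\n', and auto-detected line endings all
    # produce equivalent results.)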
def wrap(self, text):
return self.wrapregex.sub(' \\1', text)
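    # (Note, my addition: wrapregex is defined in the suite's setup, which is
    # not shown here; wrap() prepends one indent level to every line so that
    # bt() can re-run each case nested inside an enclosing block.)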
def bt(self, input, expectation=None):
if expectation is None:
expectation = input
self.decodesto(input, expectation)
# If we set raw, input should be unchanged
self.options.test_output_raw = True
if self.options.end_with_newline:
self.decodesto(input, input)
self.options.test_output_raw = False
current_indent_size = None
if self.options.js and self.options.js['indent_size']:
current_indent_size = self.options.js['indent_size']
if not current_indent_size:
current_indent_size = self.options.indent_size
if current_indent_size == 4 and input:
wrapped_input = '{\n%s\n foo = bar;\n}' % self.wrap(input)
wrapped_expect = '{\n%s\n foo = bar;\n}' % self.wrap(expectation)
self.decodesto(wrapped_input, wrapped_expect)
# If we set raw, input should be unchanged
self.options.test_output_raw = True
if self.options.end_with_newline:
self.decodesto(wrapped_input, wrapped_input)
self.options.test_output_raw = False
if __name__ == '__main__':
unittest.main()
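
# Minimal usage sketch of the API exercised above (my addition, not part of
# the generated suite; option values are illustrative):
#
#   import jsbeautifier
#   opts = jsbeautifier.default_options()
#   opts.indent_size = 2
#   jsbeautifier.beautify('if(x){y()}', opts)   # -> 'if (x) {\n  y()\n}'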
| 32.408487
| 276
| 0.296488
|
133dd0c034293665e363659fecf8f02a16af5ed7
| 2,506
|
py
|
Python
|
analysis/scripts/.ipynb_checkpoints/project_functions-checkpoint.py
|
data301-2020-winter1/course-project-solo_322
|
ce8e5b4e59c5ef14b3bcf479dbb30a2c19604472
|
[
"MIT"
] | null | null | null |
analysis/scripts/.ipynb_checkpoints/project_functions-checkpoint.py
|
data301-2020-winter1/course-project-solo_322
|
ce8e5b4e59c5ef14b3bcf479dbb30a2c19604472
|
[
"MIT"
] | 1
|
2020-12-06T10:14:15.000Z
|
2020-12-06T10:14:15.000Z
|
analysis/scripts/project_functions.py
|
data301-2020-winter1/course-project-solo_322
|
ce8e5b4e59c5ef14b3bcf479dbb30a2c19604472
|
[
"MIT"
] | null | null | null |
# Script that returns a section of the complete database based on the input
# integer and filters the data accordingly.
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
import pandas_profiling
def load_and_process(url_path_csv, i):
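    # Loads the survey CSV at url_path_csv and returns one of several views.
    # i selects the view: 0 = the raw frame; 1 = voting importance (Q2_1);
    # 2 = the Q29 reason columns; 3 = voting difficulty (Q16); 4 = need for
    # change (Q7); any other value = a combined frame with all of these.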
df = pd.read_csv(url_path_csv)
if i == 0:
return df
elif i == 1:
df1 = (
df.loc[:, ['RespId', 'ppage', 'gender', 'educ', 'race', 'voter_category', 'Q2_1']]
.rename(columns={'RespId': 'id', 'voter_category': 'cat', 'Q2_1': 'voting_imp'})
.dropna(subset = ['id', 'ppage', 'gender', 'educ', 'race', 'cat', 'voting_imp'])
.drop(df.loc[df['Q2_1'] < 1].index)
.sort_values("id")
.reset_index(drop=True)
)
return df1
elif i == 2:
df2 = (
df.loc[:, ['ppage', 'gender', 'Q29_1', 'Q29_2', 'Q29_3', 'Q29_4', 'Q29_5', 'Q29_6', 'Q29_7', 'Q29_8', 'Q29_9', 'voter_category']]
.rename(columns={'voter_category': 'cat'})
.sort_values("ppage")
.reset_index(drop=True)
)
return df2
elif i == 3:
df3 = (
df.loc[:, ['Q16', 'ppage', 'gender']]
.rename(columns={'Q16': 'difficulty'})
.dropna(subset = ['difficulty', 'ppage', 'gender'])
.drop(df.loc[df['Q16'] < 1].index)
.sort_values("ppage")
.reset_index(drop=True)
)
return df3
elif i == 4:
df4 = (
df.loc[:, ['RespId', 'Q7', 'ppage', 'gender', 'voter_category']]
.rename(columns={'RespId': 'id', 'voter_category': 'cat', 'Q7': 'need_change'})
.drop(df.loc[df['Q7'] < 1].index)
.dropna(subset = ['need_change', 'id', 'gender', 'cat', 'ppage'])
.sort_values("id")
.reset_index(drop=True)
)
return df4
else:
df5 = (
df.loc[:, ['RespId', 'ppage', 'gender', 'educ', 'race', 'voter_category', 'Q2_1', 'Q29_1', 'Q29_2', 'Q29_3', 'Q29_4', 'Q29_5', 'Q29_6', 'Q29_7', 'Q29_8', 'Q29_9', 'Q16', 'Q7']]
.rename(columns={'RespId': 'id', 'voter_category': 'cat', 'Q2_1': 'voting_imp', 'Q16': 'difficulty', 'Q7': 'need_change'})
.dropna(subset = ['id', 'ppage', 'gender', 'educ', 'race', 'cat', 'voting_imp', 'difficulty', 'need_change'])
.sort_values("id")
.reset_index(drop=True)
)
return df5
# ignore these statements
# .drop(df.loc[df['Q2_1'] < 1].index)
# .drop(df.loc[df['Q16'] < 1].index)
# .drop(df.loc[df['Q7'] < 1].index)
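
# Usage sketch (my addition; the CSV path below is a placeholder, not from
# the original source):
#
#   importance = load_and_process('../data/raw/survey.csv', 1)
#   importance.head()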
| 35.295775
| 184
| 0.538707
|
cc53eb0fe1b9de93345a76024f8f3961dfcc3f89
| 3,454
|
py
|
Python
|
twitter/updater.py
|
jammie080/Twitter-Bot
|
0c53bd667db92991ce1f0d0a14985265d505078f
|
[
"BSD-2-Clause"
] | null | null | null |
twitter/updater.py
|
jammie080/Twitter-Bot
|
0c53bd667db92991ce1f0d0a14985265d505078f
|
[
"BSD-2-Clause"
] | null | null | null |
twitter/updater.py
|
jammie080/Twitter-Bot
|
0c53bd667db92991ce1f0d0a14985265d505078f
|
[
"BSD-2-Clause"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from settings import config
import version
import time,os,requests
from os import rmdir, listdir
from os.path import join
from bs4 import BeautifulSoup
import re
import zipfile
from shutil import move
import shutil
class updater:
def __init__(self):
self.version = self.client_version()
self.serverVersion = self.server_version()
def check(self):
if self.version != self.serverVersion:
if self.version < self.serverVersion:
print "Update available"
print "Latest version {}".format(self.serverVersion)
print "Downloading update"
self.download('https://github.com/jammie080/Twitter-Bot/archive/master.zip')
src_dir = os.path.abspath(os.curdir)
if os.path.isfile(src_dir + '\Twitter-Bot.zip'):
os.remove('Twitter-Bot.zip')
os.rename('master.zip','Twitter-Bot.zip')
self.setup('Twitter-Bot.zip')
os.remove('Twitter-Bot.zip')
else:
			print "Already on the latest version"
print "Current version {}".format(self.version)
def download(self,url):
local_filename = url.split('/')[-1]
# NOTE the stream=True parameter
browser = requests.get(url, stream=True)
with open(local_filename, 'wb') as f:
for chunk in browser.iter_content(chunk_size=1024):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
f.flush()
return local_filename
def backup(self):
pass
def setup(self,filename):
filelist = []
fh = open(filename, 'rb')
z = zipfile.ZipFile(fh)
for name in z.namelist():
src_dir = os.path.abspath(os.curdir)
z.extract(name, src_dir)
fh.close()
abspath = os.path.abspath(__file__)
dname = os.path.dirname(abspath)
os.chdir(os.path.dirname(dname))
        root_src_dir = src_dir + '\Twitter-Bot-master' # GitHub archives extract to "<repo>-<branch>"
        root_dst_dir = os.path.dirname(dname)
        if os.path.exists(root_src_dir):
            files = os.listdir(root_src_dir)
for src_dir, dirs, files in os.walk(root_src_dir):
dst_dir = src_dir.replace(root_src_dir, root_dst_dir, 1)
if not os.path.exists(dst_dir):
os.makedirs(dst_dir)
for file_ in files:
src_file = os.path.join(src_dir, file_)
dst_file = os.path.join(dst_dir, file_)
if os.path.exists(dst_file):
os.remove(dst_file)
shutil.move(src_file, dst_dir)
        # remove the leftover extracted folder once its contents have been moved
        # (the old code also called rmtree in an else branch where the folder
        # did not exist, which raised)
        try:
            shutil.rmtree(root_src_dir)
        except OSError:
            pass # nothing left to remove
def client_version(self):
current_version = version.__version__
return current_version
def server_version(self):
browser = requests.get('https://github.com/jammie080/Twitter-Bot/releases/latest')
soup = BeautifulSoup(browser.content,"html5lib")
        m = re.search(r"(v\d\.\d\.\d\.\d|v\d\.\d\.\d)",soup.title.encode('utf-8')) # escape the dots so '.' is matched literally
serverVersion = m.group()
return serverVersion
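
# --- minimal usage sketch (assumes settings.py and version.py exist as imported above) ---
# u = updater()
# u.check()   # compares version tags and, if behind, downloads and unpacks the latest release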
| 34.888889
| 92
| 0.564273
|
6bc68ff52929721adbb7ee0f77f293700165604e
| 400
|
py
|
Python
|
oceanpy/forces/coriolis.py
|
profxj/oceanpy
|
fb6324adf783cee40b47af6894465df00a6f6d09
|
[
"BSD-3-Clause"
] | 1
|
2020-07-01T14:53:45.000Z
|
2020-07-01T14:53:45.000Z
|
oceanpy/forces/coriolis.py
|
profxj/oceanpy
|
fb6324adf783cee40b47af6894465df00a6f6d09
|
[
"BSD-3-Clause"
] | 2
|
2020-04-04T18:44:08.000Z
|
2020-07-11T13:29:00.000Z
|
oceanpy/forces/coriolis.py
|
profxj/oceanpy
|
fb6324adf783cee40b47af6894465df00a6f6d09
|
[
"BSD-3-Clause"
] | 2
|
2020-07-01T14:53:59.000Z
|
2020-07-07T21:11:28.000Z
|
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Coriolis forces
"""
import numpy as np
import unyt
def coriolis_parameter(latitude):
f = 2 * unyt.Omega_earth * np.sin(latitude)
# Return
return f
def meridional_derivative(latitude):
beta = (2*unyt.Omega_earth/unyt.R_Earth) * np.cos(latitude)
# Return
return beta.to('1/km/s')
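
# --- minimal usage sketch (assumes unyt exposes Omega_earth/R_Earth as used above) ---
# lat = np.deg2rad(45.0)             # latitude in radians
# f = coriolis_parameter(lat)        # ~1.03e-4 1/s at 45 deg N
# beta = meridional_derivative(lat)  # planetary vorticity gradient in 1/km/s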
| 18.181818
| 63
| 0.67
|
ea30c069ed4c10d6f852bbe81e935bae1db01564
| 90
|
py
|
Python
|
server/apps.py
|
nathandarnell/sal
|
464414a2666e39bdf5b4b0033a84d5129c93c053
|
[
"Apache-2.0"
] | 215
|
2015-05-04T16:57:56.000Z
|
2022-03-07T12:58:12.000Z
|
server/apps.py
|
nathandarnell/sal
|
464414a2666e39bdf5b4b0033a84d5129c93c053
|
[
"Apache-2.0"
] | 243
|
2015-07-04T18:10:56.000Z
|
2022-02-27T18:52:40.000Z
|
server/apps.py
|
nathandarnell/sal
|
464414a2666e39bdf5b4b0033a84d5129c93c053
|
[
"Apache-2.0"
] | 90
|
2015-06-29T19:26:58.000Z
|
2022-02-17T19:03:00.000Z
|
from django.apps import AppConfig
class ServerAppConfig(AppConfig):
name = "server"
| 15
| 33
| 0.755556
|
84d8775b5e77c58cdab44fd7c101178ed2205754
| 1,581
|
py
|
Python
|
Sort/14_ten_replaced_numbers_in_array.py
|
Szymon-Budziak/ASD_exercises_solutions
|
36ccbdae03a6c7e4ad141a2b7b01bef9353574ee
|
[
"MIT"
] | 7
|
2021-12-28T23:38:42.000Z
|
2022-03-29T16:36:16.000Z
|
Sort/14_ten_replaced_numbers_in_array.py
|
Szymon-Budziak/ASD_exercises_solutions
|
36ccbdae03a6c7e4ad141a2b7b01bef9353574ee
|
[
"MIT"
] | null | null | null |
Sort/14_ten_replaced_numbers_in_array.py
|
Szymon-Budziak/ASD_exercises_solutions
|
36ccbdae03a6c7e4ad141a2b7b01bef9353574ee
|
[
"MIT"
] | 4
|
2021-06-29T20:21:52.000Z
|
2022-03-12T10:04:17.000Z
|
# We are given an array with n (n >= 11) natural numbers in the range [0, k]. 10 numbers from this array
# were replaced with random numbers outside this range (e.g. much greater or negative numbers). Find an
# algorithm that sorts the array in O(n) time.
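# Why this is O(n): at most 10 values fall outside [0, k], so insertion-sorting
# them costs O(10^2) = O(1); the in-range values are assumed to still appear in
# sorted order (as in the test array at the bottom), so a single linear merge
# of the two sequences finishes the job.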
from math import inf
def insertion_sort(T):
for i in range(1, len(T)):
key = T[i]
j = i - 1
while j >= 0 and T[j] > key:
T[j + 1] = T[j]
j -= 1
T[j + 1] = key
return T
def replaced_numbers(T, k):
normal_numbers = []
out_of_range_numbers = []
for i in range(len(T)):
if T[i] >= 0 and T[i] <= k:
normal_numbers.append(T[i])
else:
out_of_range_numbers.append(T[i])
insertion_sort(out_of_range_numbers)
result = merge(normal_numbers, out_of_range_numbers)
return result
def merge(array1, array2):
result = [0] * (len(array1) + len(array2))
array1.append(inf)
array2.append(inf)
i = j = k = 0
while array1[i] != inf and array2[j] != inf:
if array1[i] <= array2[j]:
result[k] = array1[i]
i += 1
k += 1
else:
result[k] = array2[j]
j += 1
k += 1
while array1[i] != inf:
result[k] = array1[i]
i += 1
k += 1
while array2[j] != inf:
result[k] = array2[j]
j += 1
k += 1
return result
T = [1, -100, 2, 3, 32, 6, 7, 7, -203, 8, 9, -42, 14, 15, 57, 16, 67, 18, 46, 19, 65, 19, 20, 91, 134, 21, 25]
k = 25
print(replaced_numbers(T, k))
| 26.79661
| 110
| 0.522454
|
c723ff7de45b63d5b8fce6cd150cbc308804788c
| 12,631
|
py
|
Python
|
CV_computer_vision_files/lambda_function.py
|
lineality/tensorflow_lite_in_aws_lambda_function
|
004d63a6e307c1bb6070b33bcf5472713febf7cc
|
[
"MIT"
] | null | null | null |
CV_computer_vision_files/lambda_function.py
|
lineality/tensorflow_lite_in_aws_lambda_function
|
004d63a6e307c1bb6070b33bcf5472713febf7cc
|
[
"MIT"
] | null | null | null |
CV_computer_vision_files/lambda_function.py
|
lineality/tensorflow_lite_in_aws_lambda_function
|
004d63a6e307c1bb6070b33bcf5472713febf7cc
|
[
"MIT"
] | null | null | null |
# code for AWS-Lambda-function to deploy model.tflite in AWS
# for CV computer vision, using PIL/Pillow for preprocessing
"""
# See:
https://github.com/lineality/deploy_TFlite_tensorflow_lite_in_aws_lambda_function
# To install the tensorflow lite runtime package follow official tensorfow docs:
https://www.tensorflow.org/lite/guide/python
# instruction code to create python env (for uploading to AWS):
# only tflite_runtime is needed, numpy is included with tflite
$ python3 -m venv env; source env/bin/activate
$ pip3 install --upgrade pip
# version 2.8 may cause AWS errors, try 2.7
# use: pip install tflite-runtime==2.7.0
$ pip3 install pillow
$ pip3 freeze > requirements.txt
# drill down to -> env/lib/python3.8/site-packages
# this makes the main zip file
$ zip -r9 ../../../../function.zip .
# make a lambda_function.py file
# later update this with real code
$ touch lambda_function.py
# In project-root folder:
# add .py file to your zip file
$ zip -g ./function.zip -r lambda_function.py
# to update the .py file (depending on OS)
# edit file in the zipped archive
# or re-add a new .py to replace the old by repeating the same step from above
$ zip -g ./function.zip -r lambda_function.py
"""
"""
Workflow:
1. get user_input
2. S3: Connect to S3 (Make resource and client)
3. download model.tflite file from S3
4. download image file
5. PIL/Pillow: open image file (w/ Python Image Library) to specs
6. load model
7. make prediction
8. clear /tmp/
9. export result
"""
"""
Sample Output:
{
"statusCode": 200,
"about": "output_of_Tensorflow_ML_model",
"body": ?
}
"""
"""
Sample input:
{
"s3_file_path_AI_model": "FOLDER_NAME/FOLDER_NAME/model.tflite",
"s3_file_path_picture_file": "YOUR_FOLDER_NAME/PIC_NAME.jpeg",
"s3_bucket_name": "YOUR_AWS_S3_BUCKET_NAME"
}
"""
# import libraries
import boto3 # for AWS
import glob # for directory file search
import json
import os # for removing files in /tmp/ (used by clear_tmp_directory)
import numpy as np # for input and output processing
from PIL import Image # for image processing
import tflite_runtime.interpreter as tflite
from tflite_runtime.interpreter import Interpreter
###################
# Helper Functions
###################
# Helper Function
def get_file_from_S3_to_lambda_tmp(s3_resource, s3_bucket_name, s3_file_path, lambda_tmp_file_name):
# s3_resource.meta.client.download_file('YOUR_BUCKET_NAME', 'FILE_NAME.txt', '/tmp/FILE_NAME.txt')
s3_resource.meta.client.download_file( s3_bucket_name, s3_file_path, lambda_tmp_file_name )
return print("Model saved.")
# helper function to clear downloaded files (model, image) from /tmp/ directory
def clear_tmp_directory():
    """
    requires:
    import os (to remove file)
    import glob (to get file list)
    """
    # use glob to get a list of ALL remaining files
    # (the old "/tmp/*.csv" pattern missed the .tflite model and image files)
    remaining_files_list = glob.glob("/tmp/*")
    # iterate through list of remaining files
    for this_file in remaining_files_list:
        # Remove this_file
        os.remove(this_file)
    # AGAIN use glob to confirm the directory is empty
    remaining_files_list = glob.glob("/tmp/*")
    return print("""/tmp/ cleared. Check that directory is empty. remaining_files_list = """, remaining_files_list )
# helper function
def print_aws_tmp_files():
"""
requires:
import os (to remove file)
import glob (to get file list)
"""
# use glob to get a list of remaining .csv files
aws_tmp_files_list = glob.glob("/tmp/*")
return print( "/tmp/ files_list = ", aws_tmp_files_list )
################
# Main Function
################
def lambda_handler(event, context):
#################
# Get User Input
#################
# get s3_file_path_AI_model and path in s3
# Test if input exists and can be processed
try:
s3_file_path_AI_model = event["s3_file_path_AI_model"]
# terminal
print( s3_file_path_AI_model )
# slice out just the name of the model from the whole path
S3_file_name_AI_model = s3_file_path_AI_model.split('/')[-1]
except Exception as e:
output = f"""Error: No input for s3_file_path_AI_model
        Error Message = {str(e)}
"""
# print for terminal
print(output)
statusCode = 403
# End the lambda function
return {
'statusCode': statusCode,
'body': output
}
# get s3_bucket_name in s3
# Test if input exists and can be processed
try:
s3_bucket_name = event["s3_bucket_name"]
except Exception as e:
output = f"""Error: No input for s3_bucket_name
        Error Message = {str(e)}
"""
# print for terminal
print(output)
statusCode = 403
# End the lambda function
return {
'statusCode': statusCode,
'body': output
}
# get s3_file_path_picture_file in s3
# Test if input exists and can be processed
try:
s3_file_path_picture_file = event["s3_file_path_picture_file"]
# slice out just the name of the model from the whole path
S3_file_name_picture_file = s3_file_path_picture_file.split('/')[-1]
except Exception as e:
output = f"""Error: No input for s3_file_path_picture_file
        Error Message = {str(e)}
"""
# print for terminal
print(output)
statusCode = 403
# End the lambda function
return {
'statusCode': statusCode,
'body': output
}
####################################
# S3: Connect to S3 (Make resource)
####################################
try:
# make s3_resource
s3_resource = boto3.resource("s3")
# make S3 bucket-resource
s3_bucket = s3_resource.Bucket(s3_bucket_name)
except Exception as e:
output = f"""Error: Could not connect to AWS S3.
        Error Message = {str(e)}
"""
# print for terminal
print(output)
statusCode = 403
# End the lambda function
return {
'statusCode': statusCode,
'body': output
}
##########################################
# load files from S3 int AWS-python-/tmp/
##########################################
# AWS Files Name
model_aws_tmp_file_name = "/tmp/" + S3_file_name_AI_model
picture_file_aws_tmp_file_name = "/tmp/" + S3_file_name_picture_file
try:
############################
# Get AI Model file from S3
############################
"""
(Docs)
get_file_from_S3_to_lambda_tmp(s3_resource,
s3_bucket_name,
s3_file_path,
lambda_tmp_file_name
)
"""
# Get AI Model
get_file_from_S3_to_lambda_tmp(s3_resource,
s3_bucket_name,
s3_file_path_AI_model,
model_aws_tmp_file_name)
except Exception as e:
output = f"""Error: Could not get AI Model file from S3
Error Message = {str(e)}
"""
# print for terminal
print(output)
statusCode = 403
# End the lambda function
return {
'statusCode': statusCode,
'body': output
}
try:
###########################
# Get Picture file from S3
###########################
# Get Picture File
get_file_from_S3_to_lambda_tmp(s3_resource,
s3_bucket_name,
s3_file_path_picture_file,
picture_file_aws_tmp_file_name)
except Exception as e:
output = f"""Error: Could not get Picture file from S3
Error Message = {str(e)}
"""
# print for terminal
print(output)
statusCode = 403
# End the lambda function
return {
'statusCode': statusCode,
'body': output
}
# for terminal: see what files exist in aws /tmp/
print_aws_tmp_files()
##############
# Load Model
##############
try:
# Set model path (including directory)
model_path = "/tmp/" + S3_file_name_AI_model
# set up TF interpreter (point at .tflite model)
interpreter = Interpreter(model_path)
# for terminal
print("Model Loaded Successfully.")
except Exception as e:
output = f"""Error: Could not load model. Path = {model_path}
Error Message = {str(e)}
"""
# print for terminal
print(output)
statusCode = 403
# End the lambda function
return {
'statusCode': statusCode,
'body': output
}
###############
# Set up Model
###############
# set up interpreter
interpreter.allocate_tensors()
# Get input and output tensors.
input_details = interpreter.get_input_details()
output_details = interpreter.get_output_details()
# select test image
# img_path = 'PIC_NAME_HERE.1.jpg'
img_path = picture_file_aws_tmp_file_name
#################################
# PIL/Pillow image preprocessing
#################################
# load and resize image file
"""
    equivalent of this from Keras:
img = image.load_img(img_path, target_size=(224, 224))
"""
img = Image.open(img_path)
img = img.resize((224, 224))
# image -> array
"""
    equivalent of this from Keras:
img_array = image.img_to_array(img)
"""
img_array = np.asarray(img)
# already numpy
expanded_img_array = np.expand_dims(img_array, axis=0)
# already numpy
preprocessed_img = expanded_img_array / 255.
# set: input_data = preprocessed image
input_data = preprocessed_img
# type cast to float32
input_data = input_data.astype('float32')
#######################
# End of preprocessing
#######################
# y: using model, produce predicted y from X input
interpreter.set_tensor(input_details[0]['index'], input_data)
# Start interpreter
interpreter.invoke()
##################
# Make Prediction
##################
# Make Prediction
tflite_prediction_results = interpreter.get_tensor(output_details[0]['index'])
##############################
# Final Clean Up Lambda /tmp/
##############################
# Clear AWS Lambda Function /tmp/ directory
clear_tmp_directory()
############################
# process and format output
############################
"""
- remove brackets (remove from matrix/array), isolate just the number
- make type -> float
"""
# for terminal
print("1 Prediction: y =", tflite_prediction_results)
tflite_prediction_results = tflite_prediction_results[0]
# for terminal
print("2 Prediction: y =", tflite_prediction_results)
# reformat results: turn into string form of just two numbers
tflite_prediction_results = str(tflite_prediction_results)
tflite_prediction_results = tflite_prediction_results.replace("[", "")
tflite_prediction_results = tflite_prediction_results.replace("]", "")
tflite_prediction_results = tflite_prediction_results.split(" ")
# for terminal
print("3 Prediction: y =", tflite_prediction_results)
# get second probability: probability of damage
tflite_prediction_results = tflite_prediction_results[1]
# for terminal
print("4 Prediction: y =", type(tflite_prediction_results), tflite_prediction_results )
#tflite_prediction_results = float(tflite_prediction_results)
###############
# Final Output
###############
status_code = 200
output = tflite_prediction_results
return {
'statusCode': status_code,
'about': """Probability of Damage
Output of Tensor Flow Keras Transfer Learning
Computer Vision Neural Network Deep Learning Model
run on TFlite in a compact python 3.8 venv
        with Python Image Library input image preprocessing""",
'body': output
}
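
# --- hypothetical local smoke test (placeholders from the sample input above;
#     fill in real S3 values before uncommenting and running) ---
# if __name__ == "__main__":
#     test_event = {
#         "s3_file_path_AI_model": "FOLDER_NAME/FOLDER_NAME/model.tflite",
#         "s3_file_path_picture_file": "YOUR_FOLDER_NAME/PIC_NAME.jpeg",
#         "s3_bucket_name": "YOUR_AWS_S3_BUCKET_NAME",
#     }
#     print(lambda_handler(test_event, None))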
| 25.312625
| 116
| 0.578339
|
eabde14f14acdeddea982d4718e13c3db8acd2a6
| 16,630
|
py
|
Python
|
huaweicloud-sdk-ims/huaweicloudsdkims/v2/model/create_image_request_body.py
|
wuchen-huawei/huaweicloud-sdk-python-v3
|
3683d703f4320edb2b8516f36f16d485cff08fc2
|
[
"Apache-2.0"
] | 1
|
2021-04-16T07:59:28.000Z
|
2021-04-16T07:59:28.000Z
|
huaweicloud-sdk-ims/huaweicloudsdkims/v2/model/create_image_request_body.py
|
wuchen-huawei/huaweicloud-sdk-python-v3
|
3683d703f4320edb2b8516f36f16d485cff08fc2
|
[
"Apache-2.0"
] | null | null | null |
huaweicloud-sdk-ims/huaweicloudsdkims/v2/model/create_image_request_body.py
|
wuchen-huawei/huaweicloud-sdk-python-v3
|
3683d703f4320edb2b8516f36f16d485cff08fc2
|
[
"Apache-2.0"
] | 1
|
2022-01-17T02:24:18.000Z
|
2022-01-17T02:24:18.000Z
|
# coding: utf-8
import pprint
import re
import six
class CreateImageRequestBody:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'data_images': 'list[CreateDataImage]',
'description': 'str',
'enterprise_project_id': 'str',
'image_tags': 'list[TagKeyValue]',
'instance_id': 'str',
'name': 'str',
'tags': 'list[str]',
'max_ram': 'int',
'min_ram': 'int',
'os_version': 'str',
'image_url': 'str',
'min_disk': 'int',
'is_config': 'bool',
'cmk_id': 'str',
'type': 'str',
'is_quick_import': 'bool',
'architecture': 'str',
'volume_id': 'str'
}
attribute_map = {
'data_images': 'data_images',
'description': 'description',
'enterprise_project_id': 'enterprise_project_id',
'image_tags': 'image_tags',
'instance_id': 'instance_id',
'name': 'name',
'tags': 'tags',
'max_ram': 'max_ram',
'min_ram': 'min_ram',
'os_version': 'os_version',
'image_url': 'image_url',
'min_disk': 'min_disk',
'is_config': 'is_config',
'cmk_id': 'cmk_id',
'type': 'type',
'is_quick_import': 'is_quick_import',
'architecture': 'architecture',
'volume_id': 'volume_id'
}
def __init__(self, data_images=None, description=None, enterprise_project_id=None, image_tags=None, instance_id=None, name=None, tags=None, max_ram=None, min_ram=None, os_version=None, image_url=None, min_disk=None, is_config=None, cmk_id=None, type=None, is_quick_import=None, architecture=None, volume_id=None):
"""CreateImageRequestBody - a model defined in huaweicloud sdk"""
self._data_images = None
self._description = None
self._enterprise_project_id = None
self._image_tags = None
self._instance_id = None
self._name = None
self._tags = None
self._max_ram = None
self._min_ram = None
self._os_version = None
self._image_url = None
self._min_disk = None
self._is_config = None
self._cmk_id = None
self._type = None
self._is_quick_import = None
self._architecture = None
self._volume_id = None
self.discriminator = None
if data_images is not None:
self.data_images = data_images
if description is not None:
self.description = description
if enterprise_project_id is not None:
self.enterprise_project_id = enterprise_project_id
if image_tags is not None:
self.image_tags = image_tags
if instance_id is not None:
self.instance_id = instance_id
self.name = name
if tags is not None:
self.tags = tags
if max_ram is not None:
self.max_ram = max_ram
if min_ram is not None:
self.min_ram = min_ram
if os_version is not None:
self.os_version = os_version
if image_url is not None:
self.image_url = image_url
if min_disk is not None:
self.min_disk = min_disk
if is_config is not None:
self.is_config = is_config
if cmk_id is not None:
self.cmk_id = cmk_id
if type is not None:
self.type = type
if is_quick_import is not None:
self.is_quick_import = is_quick_import
if architecture is not None:
self.architecture = architecture
if volume_id is not None:
self.volume_id = volume_id
@property
def data_images(self):
"""Gets the data_images of this CreateImageRequestBody.
        Information about the data disks to be converted. Required when creating a private data-disk image from a data disk on a cloud server; left empty by default when the request is not for a data-disk image.
:return: The data_images of this CreateImageRequestBody.
:rtype: list[CreateDataImage]
"""
return self._data_images
@data_images.setter
def data_images(self, data_images):
"""Sets the data_images of this CreateImageRequestBody.
        Information about the data disks to be converted. Required when creating a private data-disk image from a data disk on a cloud server; left empty by default when the request is not for a data-disk image.
:param data_images: The data_images of this CreateImageRequestBody.
:type: list[CreateDataImage]
"""
self._data_images = data_images
@property
def description(self):
"""Gets the description of this CreateImageRequestBody.
        Image description. Letters, digits, Chinese characters, etc. are supported; carriage returns, <, and > are not. At most 1024 characters. Empty by default.
:return: The description of this CreateImageRequestBody.
:rtype: str
"""
return self._description
@description.setter
def description(self, description):
"""Sets the description of this CreateImageRequestBody.
        Image description. Letters, digits, Chinese characters, etc. are supported; carriage returns, <, and > are not. At most 1024 characters. Empty by default.
:param description: The description of this CreateImageRequestBody.
:type: str
"""
self._description = description
@property
def enterprise_project_id(self):
"""Gets the enterprise_project_id of this CreateImageRequestBody.
        Enterprise project the image belongs to. The value 0 (or no value) means the default enterprise project; a UUID means the enterprise project with that UUID.
:return: The enterprise_project_id of this CreateImageRequestBody.
:rtype: str
"""
return self._enterprise_project_id
@enterprise_project_id.setter
def enterprise_project_id(self, enterprise_project_id):
"""Sets the enterprise_project_id of this CreateImageRequestBody.
        Enterprise project the image belongs to. The value 0 (or no value) means the default enterprise project; a UUID means the enterprise project with that UUID.
:param enterprise_project_id: The enterprise_project_id of this CreateImageRequestBody.
:type: str
"""
self._enterprise_project_id = enterprise_project_id
@property
def image_tags(self):
"""Gets the image_tags of this CreateImageRequestBody.
        Image tag list in the new format. Empty by default. Only one of tags and image_tags may be used.
:return: The image_tags of this CreateImageRequestBody.
:rtype: list[TagKeyValue]
"""
return self._image_tags
@image_tags.setter
def image_tags(self, image_tags):
"""Sets the image_tags of this CreateImageRequestBody.
        Image tag list in the new format. Empty by default. Only one of tags and image_tags may be used.
:param image_tags: The image_tags of this CreateImageRequestBody.
:type: list[TagKeyValue]
"""
self._image_tags = image_tags
@property
def instance_id(self):
"""Gets the instance_id of this CreateImageRequestBody.
        ID of the cloud server to be converted. When creating a private image from a cloud server, set this field to the server ID.
:return: The instance_id of this CreateImageRequestBody.
:rtype: str
"""
return self._instance_id
@instance_id.setter
def instance_id(self, instance_id):
"""Sets the instance_id of this CreateImageRequestBody.
        ID of the cloud server to be converted. When creating a private image from a cloud server, set this field to the server ID.
:param instance_id: The instance_id of this CreateImageRequestBody.
:type: str
"""
self._instance_id = instance_id
@property
def name(self):
"""Gets the name of this CreateImageRequestBody.
        Image name
:return: The name of this CreateImageRequestBody.
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this CreateImageRequestBody.
        Image name
:param name: The name of this CreateImageRequestBody.
:type: str
"""
self._name = name
@property
def tags(self):
"""Gets the tags of this CreateImageRequestBody.
        Image tag list. Empty by default. Only one of tags and image_tags may be used.
:return: The tags of this CreateImageRequestBody.
:rtype: list[str]
"""
return self._tags
@tags.setter
def tags(self, tags):
"""Sets the tags of this CreateImageRequestBody.
        Image tag list. Empty by default. Only one of tags and image_tags may be used.
:param tags: The tags of this CreateImageRequestBody.
:type: list[str]
"""
self._tags = tags
@property
def max_ram(self):
"""Gets the max_ram of this CreateImageRequestBody.
        Maximum memory supported by the image, in MB.
:return: The max_ram of this CreateImageRequestBody.
:rtype: int
"""
return self._max_ram
@max_ram.setter
def max_ram(self, max_ram):
"""Sets the max_ram of this CreateImageRequestBody.
        Maximum memory supported by the image, in MB.
:param max_ram: The max_ram of this CreateImageRequestBody.
:type: int
"""
self._max_ram = max_ram
@property
def min_ram(self):
"""Gets the min_ram of this CreateImageRequestBody.
        Minimum memory supported by the image, in MB. The default is 0, meaning no restriction.
:return: The min_ram of this CreateImageRequestBody.
:rtype: int
"""
return self._min_ram
@min_ram.setter
def min_ram(self, min_ram):
"""Sets the min_ram of this CreateImageRequestBody.
        Minimum memory supported by the image, in MB. The default is 0, meaning no restriction.
:param min_ram: The min_ram of this CreateImageRequestBody.
:type: int
"""
self._min_ram = min_ram
@property
def os_version(self):
"""Gets the os_version of this CreateImageRequestBody.
        Operating system version. Takes effect when the image is created from an external image file uploaded to an OBS bucket. Mandatory when is_quick_import is true, i.e. when quick-importing a system-disk image from an image file.
:return: The os_version of this CreateImageRequestBody.
:rtype: str
"""
return self._os_version
@os_version.setter
def os_version(self, os_version):
"""Sets the os_version of this CreateImageRequestBody.
        Operating system version. Takes effect when the image is created from an external image file uploaded to an OBS bucket. Mandatory when is_quick_import is true, i.e. when quick-importing a system-disk image from an image file.
:param os_version: The os_version of this CreateImageRequestBody.
:type: str
"""
self._os_version = os_version
@property
def image_url(self):
"""Gets the image_url of this CreateImageRequestBody.
        Address of the external image file in the OBS bucket. Takes effect, and is mandatory, when creating the image from an external OBS file. Format: <OBS bucket name>:<OBS image file name>.
:return: The image_url of this CreateImageRequestBody.
:rtype: str
"""
return self._image_url
@image_url.setter
def image_url(self, image_url):
"""Sets the image_url of this CreateImageRequestBody.
        Address of the external image file in the OBS bucket. Takes effect, and is mandatory, when creating the image from an external OBS file. Format: <OBS bucket name>:<OBS image file name>.
:param image_url: The image_url of this CreateImageRequestBody.
:type: str
"""
self._image_url = image_url
@property
def min_disk(self):
"""Gets the min_disk of this CreateImageRequestBody.
        Minimum system disk size. Takes effect, and is mandatory, when creating the image from an external OBS file. Value range: 40 to 1024 GB.
:return: The min_disk of this CreateImageRequestBody.
:rtype: int
"""
return self._min_disk
@min_disk.setter
def min_disk(self, min_disk):
"""Sets the min_disk of this CreateImageRequestBody.
        Minimum system disk size. Takes effect, and is mandatory, when creating the image from an external OBS file. Value range: 40 to 1024 GB.
:param min_disk: The min_disk of this CreateImageRequestBody.
:type: int
"""
self._min_disk = min_disk
@property
def is_config(self):
"""Gets the is_config of this CreateImageRequestBody.
        Whether to auto-configure. true or false; set true for automatic back-end configuration, otherwise false. The default is false.
:return: The is_config of this CreateImageRequestBody.
:rtype: bool
"""
return self._is_config
@is_config.setter
def is_config(self, is_config):
"""Sets the is_config of this CreateImageRequestBody.
        Whether to auto-configure. true or false; set true for automatic back-end configuration, otherwise false. The default is false.
:param is_config: The is_config of this CreateImageRequestBody.
:type: bool
"""
self._is_config = is_config
@property
def cmk_id(self):
"""Gets the cmk_id of this CreateImageRequestBody.
        Customer master key used to create an encrypted image; see the Key Management Service User Guide for how to obtain the value.
:return: The cmk_id of this CreateImageRequestBody.
:rtype: str
"""
return self._cmk_id
@cmk_id.setter
def cmk_id(self, cmk_id):
"""Sets the cmk_id of this CreateImageRequestBody.
        Customer master key used to create an encrypted image; see the Key Management Service User Guide for how to obtain the value.
:param cmk_id: The cmk_id of this CreateImageRequestBody.
:type: str
"""
self._cmk_id = cmk_id
@property
def type(self):
"""Gets the type of this CreateImageRequestBody.
        Image type. One of ECS, BMS, FusionCompute, or Ironic; the default is ECS. ECS/FusionCompute: an image of an ECS server. BMS/Ironic: an image of a BMS server.
:return: The type of this CreateImageRequestBody.
:rtype: str
"""
return self._type
@type.setter
def type(self, type):
"""Sets the type of this CreateImageRequestBody.
        Image type. One of ECS, BMS, FusionCompute, or Ironic; the default is ECS. ECS/FusionCompute: an image of an ECS server. BMS/Ironic: an image of a BMS server.
:param type: The type of this CreateImageRequestBody.
:type: str
"""
self._type = type
@property
def is_quick_import(self):
"""Gets the is_quick_import of this CreateImageRequestBody.
        Whether to use the quick-import method to import the system-disk image from an image file. true for yes, false for no. See the quick-import documentation for constraints and limitations.
:return: The is_quick_import of this CreateImageRequestBody.
:rtype: bool
"""
return self._is_quick_import
@is_quick_import.setter
def is_quick_import(self, is_quick_import):
"""Sets the is_quick_import of this CreateImageRequestBody.
        Whether to use the quick-import method to import the system-disk image from an image file. true for yes, false for no. See the quick-import documentation for constraints and limitations.
:param is_quick_import: The is_quick_import of this CreateImageRequestBody.
:type: bool
"""
self._is_quick_import = is_quick_import
@property
def architecture(self):
"""Gets the architecture of this CreateImageRequestBody.
        Image architecture type: x86 or arm; the default is x86. When the architecture is arm, the image boot mode automatically switches to UEFI.
:return: The architecture of this CreateImageRequestBody.
:rtype: str
"""
return self._architecture
@architecture.setter
def architecture(self, architecture):
"""Sets the architecture of this CreateImageRequestBody.
        Image architecture type: x86 or arm; the default is x86. When the architecture is arm, the image boot mode automatically switches to UEFI.
:param architecture: The architecture of this CreateImageRequestBody.
:type: str
"""
self._architecture = architecture
@property
def volume_id(self):
"""Gets the volume_id of this CreateImageRequestBody.
        Volume ID of the data disk. Mandatory when creating a system-disk image from a data disk.
:return: The volume_id of this CreateImageRequestBody.
:rtype: str
"""
return self._volume_id
@volume_id.setter
def volume_id(self, volume_id):
"""Sets the volume_id of this CreateImageRequestBody.
        Volume ID of the data disk. Mandatory when creating a system-disk image from a data disk.
:param volume_id: The volume_id of this CreateImageRequestBody.
:type: str
"""
self._volume_id = volume_id
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, CreateImageRequestBody):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
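
# --- minimal usage sketch (hypothetical values; not part of the generated SDK) ---
# body = CreateImageRequestBody(
#     name="my-private-image",
#     instance_id="<cloud-server-uuid>",  # placeholder server ID
#     description="image created from a cloud server",
# )
# print(body.to_dict())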
| 29.226714
| 317
| 0.623391
|
2e20a54ed54873a3ba24825b4ee55f674661fdec
| 38,217
|
py
|
Python
|
Python/Structure/Building/BuildingGeneratorV_0_4/add_door.py
|
Perregrinne/Blend-of-things
|
b2bb1c122a21324d50a8b5337c142844739717c1
|
[
"MIT"
] | null | null | null |
Python/Structure/Building/BuildingGeneratorV_0_4/add_door.py
|
Perregrinne/Blend-of-things
|
b2bb1c122a21324d50a8b5337c142844739717c1
|
[
"MIT"
] | null | null | null |
Python/Structure/Building/BuildingGeneratorV_0_4/add_door.py
|
Perregrinne/Blend-of-things
|
b2bb1c122a21324d50a8b5337c142844739717c1
|
[
"MIT"
] | null | null | null |
bl_info = {
"name": "Door Generator",
"description": "Generate doors and a boolean cutout",
"author": "Austin Jacob",
"version": (1, 0, 0),
"blender": (2, 79, 0),
"location": "View3D > Add > Mesh",
"warning": "", # used for warning icon and text in addons panel
"wiki_url": "",
"tracker_url": "",
"category": "Add Mesh"}
import bpy
import bmesh
import mathutils
from bpy_extras import object_utils
from bpy.props import (
BoolProperty,
BoolVectorProperty,
FloatProperty,
FloatVectorProperty,
IntProperty,
EnumProperty,
)
class AddDoor(bpy.types.Operator):
"""Add a simple door mesh, door frame, and boolean box"""
bl_idname = "mesh.door_add"
bl_label = "Add Door"
bl_options = {'REGISTER', 'UNDO'}
length = FloatProperty(
name="Length",
description="Length",
min=0.01,
default=1.25,
)
door_width = FloatProperty(
name="Door Width",
description="Width of the door",
min=0.01,
default=0.05,
)
frame_width = FloatProperty(
name="Frame Width",
description="Width of the door frame",
min=0.125,
default=0.15,
)
frame_thick = FloatProperty(
name="Frame Thickness",
description="Thickness of the door frame",
min=0.01,
default=0.0325,
)
frame_inner_thick = FloatProperty(
name="Frame Inner Thickness",
description="Thickness of the inner door frame (that the door sits against)",
min=0.001,
default=0.01,
)
frame_inner_width = FloatProperty(
name="Frame Inner Width",
description="Width of the inner door frame",
min=0.001,
default=0.025,
)
door_shift = FloatProperty(
name="Door Shift",
description="Moves the door and inner door frame forward or backward",
default=0.0,
)
has_glass = BoolProperty(
name="Glass Door",
description="Whether or not to have glass in the door",
default=False,
)
door_glass_thick = FloatProperty(
name="Glass Thickness",
description="Thickness of the glass window in the door",
min=0.001,
default=0.0125,
)
door_glass_w = FloatProperty(
name="Glass Width",
description="Width of the door glass",
min=0.001,
default=0.5,
)
door_glass_h = FloatProperty(
name="Glass Height",
description="Height of the door glass",
min=0.001,
default=1.0,
)
door_glass_x = FloatProperty(
name="Glass X Position",
description="Left/Right shift of the door glass",
default=0.0,
)
door_glass_y = FloatProperty(
name="Glass Y Position",
description="Up/Down shift of the door glass",
default=0.0,
)
height = FloatProperty(
name="Height",
description="Box Depth",
min=0.01,
default=2.25,
)
gap = FloatProperty(
name="Gap",
description="Doors do not sit perfectly inside the frame",
min=0.0,
default=0.00325,
)
floor_gap = FloatProperty(
name="Floor Gap",
description="Hinged doors have a small gap between them and the door",
min=0.0,
default=0.0125,
)
hinge_diameter = FloatProperty(
name="Hinge Diameter",
description="Diameter of the door hinge",
min=0.001,
default=0.0075,
)
hinge_height = FloatProperty(
name="Hinge Height",
description="Height of the door hinge",
min=0.001,
default=0.0675,
)
hinge_cyl_height = FloatProperty(
name="Hinge Pin Height",
description="Height of the pin of the door hinge",
min=0.001,
default=0.0725,
)
hinge_width = FloatProperty(
name="Hinge Width",
description="Width of the door hinge (excluding hinge diameter)",
min=0.001,
default=0.04,
)
hinge_segments = IntProperty(
name="Hinge Segments",
description="How many segments make up the hinge cylinder",
min=4,
default=12,
)
#Perhaps make garage doors, someday
door_style = EnumProperty(
name="Door Style",
description="Type of roof the building has",
items=(('HINGE', 'Hinged', 'A door that swings on a hinge'),
('SLIDE', 'Sliding', 'A sliding door'),
('ROTATING', 'Revolving', 'A revolving door'),
('NONE', 'None', 'Just makes the hidden boolean cutout box')),
default='HINGE',
)
open_direction_hinge = EnumProperty(
name="Open Direction",
description="Where the hinge is, and which way the door opens from outside",
items=(('LI', 'Left-Inward', 'The hinge is on the left and the door swings inward'),
('LO', 'Left-Outward', 'The hinge is on the left and the door swings outward'),
('RI', 'Right-Inward', 'The hinge is on the right and the door swings inward'),
('RO', 'Right-Outward', 'The hinge is on the right and the door swings outward')),
default='RI',
)
open_direction_slide = EnumProperty(
name="Open Direction",
description="Which way the sliding door opens",
items=(('LEFT', 'Left', 'From the outside, the door slides left'),
('RIGHT', 'Right', 'From the outside, the door slides right')),
default='LEFT',
)
layers = BoolVectorProperty(
name="Layers",
description="Object Layers",
size=20,
options={'HIDDEN', 'SKIP_SAVE'},
)
# generic transform props
view_align = BoolProperty(
name="Align to View",
default=False,
)
location = FloatVectorProperty(
name="Location",
subtype='TRANSLATION',
)
rotation = FloatVectorProperty(
name="Rotation",
subtype='EULER',
)
#Display the menu sidebar
def draw(self, context):
layout = self.layout
box = layout.box()
col = box.column()
col.label(text="Dimensions", icon="ARROW_LEFTRIGHT")
col.prop(self, "length")
col.prop(self, "door_width")
col.prop(self, "frame_width")
col.prop(self, "frame_thick")
col.prop(self, "height")
box = layout.box()
col = box.column()
col.label(text="Style and Orientation", icon="MOD_WIREFRAME")
col.prop(self, "door_style")
col.prop(self, "gap")
if self.door_style == 'HINGE':
col.prop(self, "open_direction_hinge")
col.prop(self, "floor_gap")
col.prop(self, "hinge_segments")
col.prop(self, "hinge_diameter")
col.prop(self, "hinge_height")
col.prop(self, "hinge_cyl_height")
col.prop(self, "hinge_width")
col.prop(self, "frame_inner_thick")
col.prop(self, "frame_inner_width")
col.prop(self, "door_shift")
elif self.door_style == 'SLIDE':
col.prop(self, "open_direction_slide")
box = layout.box()
col = box.column()
col.label(text="Glass", icon="MOD_MIRROR")
col.prop(self, "has_glass")
if self.has_glass == True:
col.prop(self, "door_glass_thick")
col.prop(self, "door_glass_w")
col.prop(self, "door_glass_h")
col.prop(self, "door_glass_x")
col.prop(self, "door_glass_y")
box = layout.box()
col = box.column()
col.label(text="Transformations", icon="NDOF_TURN")
col.prop(self, "location")
col.prop(self, "rotation")
#Use the values from the menu
#to generate everything below
def execute(self, context):
#Rename the variables:
l = self.length
dw = self.door_width
fw = self.frame_width
ft = self.frame_thick
h = self.height
loc = self.location
rot = self.rotation
style = self.door_style
hinge = self.open_direction_hinge
slide = self.open_direction_slide
has_glass = self.has_glass
gap = self.gap
fgap = self.floor_gap
hh = self.hinge_height
hw = self.hinge_width
hd = self.hinge_diameter
hs = self.hinge_segments
hc = self.hinge_cyl_height
it = self.frame_inner_thick
iw = self.frame_inner_width
ds = self.door_shift
glass_w = self.door_glass_w
glass_h = self.door_glass_h
glass_x = self.door_glass_x
glass_y = self.door_glass_y
glass_t = self.door_glass_thick
#Create the boolean object to cut
#a hole in the wall to fit a door
mesh1 = bpy.data.meshes.new("Door_Boolean")
door_obj = bpy.data.objects.new("Door_Bool_Obj", mesh1)
scene = bpy.context.scene
scene.objects.link(door_obj)
bm1 = bmesh.new()
verts = [(+((l / 2.0) + ft), +(fw / 2.0), 0.0001),
(+((l / 2.0) + ft), -(fw / 2.0), 0.0001),
(-((l / 2.0) + ft), -(fw / 2.0), 0.0001),
(-((l / 2.0) + ft), +(fw / 2.0), 0.0001),
(+((l / 2.0) + ft), +(fw / 2.0), h + ft),
(+((l / 2.0) + ft), -(fw / 2.0), h + ft),
(-((l / 2.0) + ft), -(fw / 2.0), h + ft),
(-((l / 2.0) + ft), +(fw / 2.0), h + ft),
]
faces = [(0, 1, 2, 3),
(4, 7, 6, 5),
(0, 4, 5, 1),
(1, 5, 6, 2),
(2, 6, 7, 3),
(4, 0, 3, 7),
]
for v_co in verts:
bm1.verts.new(v_co)
bm1.verts.ensure_lookup_table()
for f_idx in faces:
bm1.faces.new([bm1.verts[i] for i in f_idx])
bm1.to_mesh(mesh1)
mesh1.update()
        #We want to hide it so
        #we can see the door.
door_obj.hide = True
#Now create the door frame
mesh2 = bpy.data.meshes.new("Door_Frame")
door_frame_obj = bpy.data.objects.new("Door_Frame_Obj", mesh2)
scene = bpy.context.scene
scene.objects.link(door_frame_obj)
bm2 = bmesh.new()
if style == 'HINGE':
verts = [(-((l / 2.0) + ft), +(fw / 2.0), 0.0),
(+((l / 2.0) + ft), +(fw / 2.0), 0.0),
(-((l / 2.0) + ft), -(fw / 2.0), 0.0),
(+((l / 2.0) + ft), -(fw / 2.0), 0.0),
(-((l / 2.0) + ft), +(fw / 2.0), h + ft),
(+((l / 2.0) + ft), +(fw / 2.0), h + ft),
(-((l / 2.0) + ft), -(fw / 2.0), h + ft),
(+((l / 2.0) + ft), -(fw / 2.0), h + ft),
(-(l / 2.0), +(fw / 2.0), 0.0),
(+(l / 2.0), +(fw / 2.0), 0.0),
(-(l / 2.0), -(fw / 2.0), 0.0),
(+(l / 2.0), -(fw / 2.0), 0.0),
(-(l / 2.0), +(fw / 2.0), h),
(+(l / 2.0), +(fw / 2.0), h),
(-(l / 2.0), -(fw / 2.0), h),
(+(l / 2.0), -(fw / 2.0), h),
(-(l / 2.0), ((fw / 2.0) + ds - dw), 0.0),
(+(l / 2.0), ((fw / 2.0) + ds - dw), 0.0),
(-(l / 2.0), ((fw / 2.0) - iw + ds - dw), 0.0),
(+(l / 2.0), ((fw / 2.0) - iw + ds - dw), 0.0),
(-(l / 2.0), ((fw / 2.0) + ds - dw), h),
(+(l / 2.0), ((fw / 2.0) + ds - dw), h),
(-(l / 2.0), ((fw / 2.0) - iw + ds - dw), h),
(+(l / 2.0), ((fw / 2.0) - iw + ds - dw), h),
(-((l / 2.0) - it), ((fw / 2.0) + ds - dw), 0.0),
(+((l / 2.0) - it), ((fw / 2.0) + ds - dw), 0.0),
(-((l / 2.0) - it), ((fw / 2.0) - iw + ds - dw), 0.0),
(+((l / 2.0) - it), ((fw / 2.0) - iw + ds - dw), 0.0),
(-((l / 2.0) - it), ((fw / 2.0) + ds - dw), h - it),
(+((l / 2.0) - it), ((fw / 2.0) + ds - dw), h - it),
(-((l / 2.0) - it), ((fw / 2.0) - iw + ds - dw), h - it),
(+((l / 2.0) - it), ((fw / 2.0) - iw + ds - dw), h - it),
]
faces = [(6, 2, 10, 14),
(6, 14, 15, 7),
(15, 11, 3, 7),
(7, 3, 1, 5),
(6, 4, 0, 2),
(6, 7, 5, 4),
(15, 13, 9, 11),
(12, 13, 15, 14),
(12, 14, 10, 8),
(4, 12, 8, 0),
(5, 13, 12, 4),
(5, 1, 9, 13),
(22, 18, 26, 30),
(22, 30, 31, 23),
(31, 27, 19, 23),
(23, 19, 17, 21),
(22, 20, 16, 18),
(22, 23, 21, 20),
(31, 29, 25, 27),
(28, 29, 31, 30),
(28, 30, 26, 24),
(20, 28, 24, 16),
(21, 29, 28, 20),
(21, 17, 25, 29),
]
elif style == 'SLIDE':
verts = [(-((l / 2.0) + ft), +(fw / 2.0), 0.0),
(+((l / 2.0) + ft), +(fw / 2.0), 0.0),
(-((l / 2.0) + ft), -(fw / 2.0), 0.0),
(+((l / 2.0) + ft), -(fw / 2.0), 0.0),
(-((l / 2.0) + ft), +(fw / 2.0), h + ft),
(+((l / 2.0) + ft), +(fw / 2.0), h + ft),
(-((l / 2.0) + ft), -(fw / 2.0), h + ft),
(+((l / 2.0) + ft), -(fw / 2.0), h + ft),
(-(l / 2.0), +(fw / 2.0), 0.0),
(+(l / 2.0), +(fw / 2.0), 0.0),
(-(l / 2.0), -(fw / 2.0), 0.0),
(+(l / 2.0), -(fw / 2.0), 0.0),
(-(l / 2.0), +(fw / 2.0), h),
(+(l / 2.0), +(fw / 2.0), h),
(-(l / 2.0), -(fw / 2.0), h),
(+(l / 2.0), -(fw / 2.0), h),
(-((l / 2.0) + (ft / 2.0)), +((fw / 2.0) - (dw / 2.0)), 0.0),
(+((l / 2.0) + (ft / 2.0)), +((fw / 2.0) - (dw / 2.0)), 0.0),
(-((l / 2.0) + (ft / 2.0)), -((fw / 2.0) - (dw / 2.0)), 0.0),
(+((l / 2.0) + (ft / 2.0)), -((fw / 2.0) - (dw / 2.0)), 0.0),
(-((l / 2.0) + (ft / 2.0)), +((fw / 2.0) - (dw / 2.0)), h + (ft / 2.0)),
(+((l / 2.0) + (ft / 2.0)), +((fw / 2.0) - (dw / 2.0)), h + (ft / 2.0)),
(-((l / 2.0) + (ft / 2.0)), -((fw / 2.0) - (dw / 2.0)), h + (ft / 2.0)),
(+((l / 2.0) + (ft / 2.0)), -((fw / 2.0) - (dw / 2.0)), h + (ft / 2.0)),
(-(l / 2.0), +((fw / 2.0) - (dw / 2.0)), 0.0),
(+(l / 2.0), +((fw / 2.0) - (dw / 2.0)), 0.0),
(-(l / 2.0), -((fw / 2.0) - (dw / 2.0)), 0.0),
(+(l / 2.0), -((fw / 2.0) - (dw / 2.0)), 0.0),
(-(l / 2.0), +((fw / 2.0) - (dw / 2.0)), h),
(+(l / 2.0), +((fw / 2.0) - (dw / 2.0)), h),
(-(l / 2.0), -((fw / 2.0) - (dw / 2.0)), h),
(+(l / 2.0), -((fw / 2.0) - (dw / 2.0)), h),
]
faces = [(6, 2, 10, 14),
(6, 14, 15, 7),
(15, 11, 3, 7),
(7, 3, 1, 5),
(6, 4, 0, 2),
(6, 7, 5, 4),
(4, 12, 8, 0),
(5, 13, 12, 4),
(5, 1, 9, 13),
(12, 28, 24, 8),
(28, 20, 16, 24),
(20, 22, 18, 16),
(22, 30, 26, 18),
(30, 14, 10, 26),
(29, 13, 9, 25),
(21, 29, 25, 17),
(23, 21, 17, 19),
(31, 23, 19, 27),
(15, 31, 27, 11),
(30, 22, 23, 31),
(20, 28, 29, 21),
(21, 23, 22, 20),
(12, 13, 29, 28),
(31, 15, 14, 30),
]
        else:
            #'ROTATING' and 'NONE' have no frame geometry yet; leave the mesh
            #empty instead of crashing on an undefined verts list
            verts = []
            faces = []
        for v_co in verts:
bm2.verts.new(v_co)
bm2.verts.ensure_lookup_table()
for f_idx in faces:
bm2.faces.new([bm2.verts[i] for i in f_idx])
if style == 'HINGE':
#Flip the door around based on the
#direction it is supposed to face:
if hinge == 'LI' or hinge == 'LO':
for v in bm2.verts:
v.co.x *= -1
if hinge == 'LO' or hinge == 'RO':
for v in bm2.verts:
v.co.y *= -1
#recalculate normals:
bmesh.ops.recalc_face_normals(bm2, faces=bm2.faces)
bm2.to_mesh(mesh2)
mesh2.update()
#Now make the door
mesh3 = bpy.data.meshes.new("Door")
door_obj = bpy.data.objects.new("Door_Obj", mesh3)
scene = bpy.context.scene
scene.objects.link(door_obj)
bm3 = bmesh.new()
#Make the door hinges if
#the door style is HINGE
dg = gap / 2.0
if style == 'HINGE':
#shift hinges by the door_gap / 2.0
verts = [(((-l / 2.0) - 0.0005 + dg), (+hd + (fw / 2.0)), (h / 6.0) - (hh / 2.0)),
(((-l / 2.0) - 0.0005 + dg), (-hw + (fw / 2.0)), (h / 6.0) - (hh / 2.0)),
(((-l / 2.0) - (hd / 2.0) + dg), (+hd + (fw / 2.0)), (h / 6.0) - (hh / 2.0)),
(((-l / 2.0) - (hd / 2.0) + dg), (-hw + (fw / 2.0)), (h / 6.0) - (hh / 2.0)),
(((-l / 2.0) - 0.0005 + dg), (+hd + (fw / 2.0)), (h / 6.0) + (hh / 2.0)),
(((-l / 2.0) - 0.0005 + dg), (-hw + (fw / 2.0)), (h / 6.0) + (hh / 2.0)),
(((-l / 2.0) - (hd / 2.0) + dg), (+hd + (fw / 2.0)), (h / 6.0) + (hh / 2.0)),
(((-l / 2.0) - (hd / 2.0) + dg), (-hw + (fw / 2.0)), (h / 6.0) + (hh / 2.0)),
(((-l / 2.0) + (hd / 2.0) + dg), (+hd + (fw / 2.0)), (h / 6.0) - (hh / 2.0)),
(((-l / 2.0) + (hd / 2.0) + dg), (-hw + (fw / 2.0)), (h / 6.0) - (hh / 2.0)),
(((-l / 2.0) + 0.0005 + dg), (+hd + (fw / 2.0)), (h / 6.0) - (hh / 2.0)),
(((-l / 2.0) + 0.0005 + dg), (-hw + (fw / 2.0)), (h / 6.0) - (hh / 2.0)),
(((-l / 2.0) + (hd / 2.0) + dg), (+hd + (fw / 2.0)), (h / 6.0) + (hh / 2.0)),
(((-l / 2.0) + (hd / 2.0) + dg), (-hw + (fw / 2.0)), (h / 6.0) + (hh / 2.0)),
(((-l / 2.0) + 0.0005 + dg), (+hd + (fw / 2.0)), (h / 6.0) + (hh / 2.0)),
(((-l / 2.0) + 0.0005 + dg), (-hw + (fw / 2.0)), (h / 6.0) + (hh / 2.0)),
(+((l / 2.0) - gap), ((fw / 2.0) + ds), fgap),
(+((l / 2.0) - gap), ((fw / 2.0) - dw + ds), fgap),
(-((l / 2.0) - gap), ((fw / 2.0) - dw + ds), fgap),
(-((l / 2.0) - gap), ((fw / 2.0) + ds), fgap),
(+((l / 2.0) - gap), ((fw / 2.0) + ds), +(h - gap)),
(+((l / 2.0) - gap), ((fw / 2.0) - dw + ds), +(h - gap)),
(-((l / 2.0) - gap), ((fw / 2.0) - dw + ds), +(h - gap)),
(-((l / 2.0) - gap), ((fw / 2.0) + ds), +(h - gap)),
(((-l / 2.0) - 0.0005 + dg), (+hd + (fw / 2.0)), (h / 2.0) - (hh / 2.0)),
(((-l / 2.0) - 0.0005 + dg), (-hw + (fw / 2.0)), (h / 2.0) - (hh / 2.0)),
(((-l / 2.0) - (hd / 2.0) + dg), (+hd + (fw / 2.0)), (h / 2.0) - (hh / 2.0)),
(((-l / 2.0) - (hd / 2.0) + dg), (-hw + (fw / 2.0)), (h / 2.0) - (hh / 2.0)),
(((-l / 2.0) - 0.0005 + dg), (+hd + (fw / 2.0)), (h / 2.0) + (hh / 2.0)),
(((-l / 2.0) - 0.0005 + dg), (-hw + (fw / 2.0)), (h / 2.0) + (hh / 2.0)),
(((-l / 2.0) - (hd / 2.0) + dg), (+hd + (fw / 2.0)), (h / 2.0) + (hh / 2.0)),
(((-l / 2.0) - (hd / 2.0) + dg), (-hw + (fw / 2.0)), (h / 2.0) + (hh / 2.0)),
(((-l / 2.0) + (hd / 2.0) + dg), (+hd + (fw / 2.0)), (h / 2.0) - (hh / 2.0)),
(((-l / 2.0) + (hd / 2.0) + dg), (-hw + (fw / 2.0)), (h / 2.0) - (hh / 2.0)),
(((-l / 2.0) + 0.0005 + dg), (+hd + (fw / 2.0)), (h / 2.0) - (hh / 2.0)),
(((-l / 2.0) + 0.0005 + dg), (-hw + (fw / 2.0)), (h / 2.0) - (hh / 2.0)),
(((-l / 2.0) + (hd / 2.0) + dg), (+hd + (fw / 2.0)), (h / 2.0) + (hh / 2.0)),
(((-l / 2.0) + (hd / 2.0) + dg), (-hw + (fw / 2.0)), (h / 2.0) + (hh / 2.0)),
(((-l / 2.0) + 0.0005 + dg), (+hd + (fw / 2.0)), (h / 2.0) + (hh / 2.0)),
(((-l / 2.0) + 0.0005 + dg), (-hw + (fw / 2.0)), (h / 2.0) + (hh / 2.0)),
(((-l / 2.0) - 0.0005 + dg), (+hd + (fw / 2.0)), ((5 * h) / 6.0) - (hh / 2.0)),
(((-l / 2.0) - 0.0005 + dg), (-hw + (fw / 2.0)), ((5 * h) / 6.0) - (hh / 2.0)),
(((-l / 2.0) - (hd / 2.0) + dg), (+hd + (fw / 2.0)), ((5 * h) / 6.0) - (hh / 2.0)),
(((-l / 2.0) - (hd / 2.0) + dg), (-hw + (fw / 2.0)), ((5 * h) / 6.0) - (hh / 2.0)),
(((-l / 2.0) - 0.0005 + dg), (+hd + (fw / 2.0)), ((5 * h) / 6.0) + (hh / 2.0)),
(((-l / 2.0) - 0.0005 + dg), (-hw + (fw / 2.0)), ((5 * h) / 6.0) + (hh / 2.0)),
(((-l / 2.0) - (hd / 2.0) + dg), (+hd + (fw / 2.0)), ((5 * h) / 6.0) + (hh / 2.0)),
(((-l / 2.0) - (hd / 2.0) + dg), (-hw + (fw / 2.0)), ((5 * h) / 6.0) + (hh / 2.0)),
(((-l / 2.0) + (hd / 2.0) + dg), (+hd + (fw / 2.0)), ((5 * h) / 6.0) - (hh / 2.0)),
(((-l / 2.0) + (hd / 2.0) + dg), (-hw + (fw / 2.0)), ((5 * h) / 6.0) - (hh / 2.0)),
(((-l / 2.0) + 0.0005 + dg), (+hd + (fw / 2.0)), ((5 * h) / 6.0) - (hh / 2.0)),
(((-l / 2.0) + 0.0005 + dg), (-hw + (fw / 2.0)), ((5 * h) / 6.0) - (hh / 2.0)),
(((-l / 2.0) + (hd / 2.0) + dg), (+hd + (fw / 2.0)), ((5 * h) / 6.0) + (hh / 2.0)),
(((-l / 2.0) + (hd / 2.0) + dg), (-hw + (fw / 2.0)), ((5 * h) / 6.0) + (hh / 2.0)),
(((-l / 2.0) + 0.0005 + dg), (+hd + (fw / 2.0)), ((5 * h) / 6.0) + (hh / 2.0)),
(((-l / 2.0) + 0.0005 + dg), (-hw + (fw / 2.0)), ((5 * h) / 6.0) + (hh / 2.0)),
]
faces = [(0, 1, 3, 2),
(4, 6, 7, 5),
(5, 1, 0, 4),
(6, 4, 0, 2),
(7, 6, 2, 3),
(5, 7, 3, 1),
(8, 9, 11, 10),
(12, 14, 15, 13),
(13, 9, 8, 12),
(14, 12, 8, 10),
(15, 14, 10, 11),
(13, 15, 11, 9),
(16, 17, 18, 19),
(20, 23, 22, 21),
(16, 20, 21, 17),
(17, 21, 22, 18),
(18, 22, 23, 19),
(20, 16, 19, 23),
(24, 25, 27, 26),
(28, 30, 31, 29),
(29, 25, 24, 28),
(30, 28, 24, 26),
(31, 30, 26, 27),
(29, 31, 27, 25),
(32, 33, 35, 34),
(36, 38, 39, 37),
(37, 33, 32, 36),
(38, 36, 32, 34),
(39, 38, 34, 35),
(37, 39, 35, 33),
(40, 41, 43, 42),
(44, 46, 47, 45),
(45, 41, 40, 44),
(46, 44, 40, 42),
(47, 46, 42, 43),
(45, 47, 43, 41),
(48, 49, 51, 50),
(52, 54, 55, 53),
(53, 49, 48, 52),
(54, 52, 48, 50),
(55, 54, 50, 51),
(53, 55, 51, 49),
]
for v_co in verts:
bm3.verts.new(v_co)
bm3.verts.ensure_lookup_table()
for f_idx in faces:
bm3.faces.new([bm3.verts[i] for i in f_idx])
if style == 'HINGE':
#Flip the door around based on the
#direction it is supposed to face:
if hinge == 'LI' or hinge == 'LO':
for v in bm3.verts:
v.co.x *= -1
if hinge == 'LO' or hinge == 'RO':
for v in bm3.verts:
v.co.y *= -1
#recalculate normals:
bmesh.ops.recalc_face_normals(bm3, faces=bm3.faces)
bm3.to_mesh(mesh3)
mesh3.update()
#Now for the hinge pins
mesh4 = bpy.data.meshes.new("Hinge")
hinge_obj = bpy.data.objects.new("Hinge_Obj", mesh4)
scene.objects.link(hinge_obj)
bm4 = bmesh.new()
#Bottom hinge pin:
cyl_loc = mathutils.Matrix.Translation((((-l / 2.0) + dg), (+(fw / 2.0) + hd), (h / 6.0)))
bmesh.ops.create_cone(bm4, cap_ends=False, cap_tris=False, segments=hs, diameter1=hd, diameter2=hd, depth=hc, matrix=cyl_loc, calc_uvs=False)
#Middle hinge pin:
cyl_loc = mathutils.Matrix.Translation((((-l / 2.0) + dg), (+(fw / 2.0) + hd), (h / 2.0)))
bmesh.ops.create_cone(bm4, cap_ends=False, cap_tris=False, segments=hs, diameter1=hd, diameter2=hd, depth=hc, matrix=cyl_loc, calc_uvs=False)
#Top hinge pin:
cyl_loc = mathutils.Matrix.Translation((((-l / 2.0) + dg), (+(fw / 2.0) + hd), ((5 * h) / 6.0)))
bmesh.ops.create_cone(bm4, cap_ends=False, cap_tris=False, segments=hs, diameter1=hd, diameter2=hd, depth=hc, matrix=cyl_loc, calc_uvs=False)
#Now smooth all faces
for f in bm4.faces:
f.smooth = True
#Add the top and bottom faces
hinge_top=mathutils.Matrix.Translation((((-l / 2.0) + dg), (+(fw / 2.0) + hd), ((h / 6.0) + (hc / 2.0))))
hinge_bottom=mathutils.Matrix.Translation((((-l / 2.0) + dg), (+(fw / 2.0) + hd), ((h / 6.0) - (hc / 2.0))))
bmesh.ops.create_circle(bm4, cap_ends=True, cap_tris=False, segments=hs, diameter=hd, matrix=hinge_top, calc_uvs=False)
bmesh.ops.create_circle(bm4, cap_ends=True, cap_tris=False, segments=hs, diameter=hd, matrix=hinge_bottom, calc_uvs=False)
#Middle hinge pin faces:
hinge_top=mathutils.Matrix.Translation((((-l / 2.0) + dg), (+(fw / 2.0) + hd), ((h / 2.0) + (hc / 2.0))))
hinge_bottom=mathutils.Matrix.Translation((((-l / 2.0) + dg), (+(fw / 2.0) + hd), ((h / 2.0) - (hc / 2.0))))
bmesh.ops.create_circle(bm4, cap_ends=True, cap_tris=False, segments=hs, diameter=hd, matrix=hinge_top, calc_uvs=False)
bmesh.ops.create_circle(bm4, cap_ends=True, cap_tris=False, segments=hs, diameter=hd, matrix=hinge_bottom, calc_uvs=False)
#Top hinge pin faces:
hinge_top=mathutils.Matrix.Translation((((-l / 2.0) + dg), (+(fw / 2.0) + hd), (((5 * h) / 6.0) + (hc / 2.0))))
hinge_bottom=mathutils.Matrix.Translation((((-l / 2.0) + dg), (+(fw / 2.0) + hd), (((5 * h) / 6.0) - (hc / 2.0))))
bmesh.ops.create_circle(bm4, cap_ends=True, cap_tris=False, segments=hs, diameter=hd, matrix=hinge_top, calc_uvs=False)
bmesh.ops.create_circle(bm4, cap_ends=True, cap_tris=False, segments=hs, diameter=hd, matrix=hinge_bottom, calc_uvs=False)
#Flip the door around based on the
#direction it is supposed to face:
if hinge == 'LI' or hinge == 'LO':
for v in bm4.verts:
v.co.x *= -1
if hinge == 'LO' or hinge == 'RO':
for v in bm4.verts:
v.co.y *= -1
#recalculate normals:
bmesh.ops.recalc_face_normals(bm4, faces=bm4.faces)
#make the bmesh data into mesh data
bm4.to_mesh(mesh4)
mesh4.update()
#Combine the hinge object with the door
bpy.ops.object.select_all(action='DESELECT')
hinge_obj.select=True
door_obj.select=True
bpy.context.scene.objects.active = door_obj
bpy.ops.object.join()
bpy.ops.object.select_all(action='DESELECT')
#Otherwise, if it's a SLIDE door:
elif style == 'SLIDE':
verts = [(+((l / 2.0) + (ft / 2.0) - dg), +((fw / 2.0) - dg - ft), 0.0),
(+((l / 2.0) + (ft / 2.0) - dg), -((fw / 2.0) - dg - ft), 0.0),
(-((l / 2.0) + (ft / 2.0) - dg), +((fw / 2.0) - dg - ft), 0.0),
(-((l / 2.0) + (ft / 2.0) - dg), -((fw / 2.0) - dg - ft), 0.0),
(+((l / 2.0) + (ft / 2.0) - dg), +((fw / 2.0) - dg - ft), +(h + (ft / 2.0) - dg)),
(+((l / 2.0) + (ft / 2.0) - dg), -((fw / 2.0) - dg - ft), +(h + (ft / 2.0) - dg)),
(-((l / 2.0) + (ft / 2.0) - dg), +((fw / 2.0) - dg - ft), +(h + (ft / 2.0) - dg)),
(-((l / 2.0) + (ft / 2.0) - dg), -((fw / 2.0) - dg - ft), +(h + (ft / 2.0) - dg)),
]
faces = [(0, 1, 3, 2),
(4, 6, 7, 5),
(5, 1, 0, 4),
(6, 4, 0, 2),
(7, 6, 2, 3),
(5, 7, 3, 1),
]
for v_co in verts:
bm3.verts.new(v_co)
bm3.verts.ensure_lookup_table()
for f_idx in faces:
bm3.faces.new([bm3.verts[i] for i in f_idx])
bm3.to_mesh(mesh3)
mesh3.update()
#boolean out space for glass if needed:
if has_glass == True:
#If they want the whole door to be
#glass then just swap the material
if glass_w < l and glass_h < h:
mesh5 = bpy.data.meshes.new("Glass_Bool")
glass_bool_obj = bpy.data.objects.new("Glass_Bool_Obj", mesh5)
scene.objects.link(glass_bool_obj)
bm5 = bmesh.new()
verts = [(((glass_w / 2.0) + glass_x), +((fw + 0.125) / 2.0), +((-glass_h / 2.0) + glass_y + (h / 2.0))),
(((glass_w / 2.0) + glass_x), -((fw + 0.125) / 2.0), +((-glass_h / 2.0) + glass_y + (h / 2.0))),
(((-glass_w / 2.0) + glass_x), +((fw + 0.125) / 2.0), +((-glass_h / 2.0) + glass_y + (h / 2.0))),
(((-glass_w / 2.0) + glass_x), -((fw + 0.125) / 2.0), +((-glass_h / 2.0) + glass_y + (h / 2.0))),
(((glass_w / 2.0) + glass_x), +((fw + 0.125) / 2.0), +((glass_h / 2.0) + glass_y + (h / 2.0))),
(((glass_w / 2.0) + glass_x), -((fw + 0.125) / 2.0), +((glass_h / 2.0) + glass_y + (h / 2.0))),
(((-glass_w / 2.0) + glass_x), +((fw + 0.125) / 2.0), +((glass_h / 2.0) + glass_y + (h / 2.0))),
(((-glass_w / 2.0) + glass_x), -((fw + 0.125) / 2.0), +((glass_h / 2.0) + glass_y + (h / 2.0))),
]
faces = [(0, 1, 3, 2),
(4, 6, 7, 5),
(5, 1, 0, 4),
(6, 4, 0, 2),
(7, 6, 2, 3),
(5, 7, 3, 1),
]
for v_co in verts:
bm5.verts.new(v_co)
bm5.verts.ensure_lookup_table()
for f_idx in faces:
bm5.faces.new([bm5.verts[i] for i in f_idx])
bm5.to_mesh(mesh5)
mesh5.update()
bpy.ops.object.select_all(action='DESELECT')
bpy.context.scene.objects.active = door_obj
door_obj.data = door_obj.data.copy()
cut = door_obj.modifiers.new("cut_window", type='BOOLEAN')
cut.operation = 'DIFFERENCE'
cut.object = glass_bool_obj
bpy.ops.object.modifier_apply(apply_as='DATA', modifier=cut.name)
bpy.ops.object.select_all(action='DESELECT')
glass_bool_obj.select = True
bpy.ops.object.delete()
#Make the glass that goes in that space
mesh6 = bpy.data.meshes.new("Glass")
glass_obj = bpy.data.objects.new("Glass_Obj", mesh6)
scene.objects.link(glass_obj)
bm6 = bmesh.new()
if style == 'HINGE':
verts = [(((glass_w / 2.0) + glass_x), ((glass_t / 2.0) + dw + ds), ((glass_h / 2.0) + glass_y + (h / 2.0))),
(((glass_w / 2.0) + glass_x), ((glass_t / 2.0) + dw + ds), ((-glass_h / 2.0) + glass_y + (h / 2.0))),
(((-glass_w / 2.0) + glass_x), ((glass_t / 2.0) + dw + ds), ((-glass_h / 2.0) + glass_y + (h / 2.0))),
(((-glass_w / 2.0) + glass_x), ((glass_t / 2.0) + dw + ds), ((glass_h / 2.0) + glass_y + (h / 2.0))),
(((glass_w / 2.0) + glass_x), ((-glass_t / 2.0) + dw + ds), ((glass_h / 2.0) + glass_y + (h / 2.0))),
(((glass_w / 2.0) + glass_x), ((-glass_t / 2.0) + dw + ds), ((-glass_h / 2.0) + glass_y + (h / 2.0))),
(((-glass_w / 2.0) + glass_x), ((-glass_t / 2.0) + dw + ds), ((-glass_h / 2.0) + glass_y + (h / 2.0))),
(((-glass_w / 2.0) + glass_x), ((-glass_t / 2.0) + dw + ds), ((glass_h / 2.0) + glass_y + (h / 2.0))),
]
elif style == 'SLIDE':
verts = [(((glass_w / 2.0) + glass_x), (glass_t / 2.0), ((glass_h / 2.0) + glass_y + (h / 2.0))),
(((glass_w / 2.0) + glass_x), (glass_t / 2.0), ((-glass_h / 2.0) + glass_y + (h / 2.0))),
(((-glass_w / 2.0) + glass_x), (glass_t / 2.0), ((-glass_h / 2.0) + glass_y + (h / 2.0))),
(((-glass_w / 2.0) + glass_x), (glass_t / 2.0), ((glass_h / 2.0) + glass_y + (h / 2.0))),
(((glass_w / 2.0) + glass_x), (-glass_t / 2.0), ((glass_h / 2.0) + glass_y + (h / 2.0))),
(((glass_w / 2.0) + glass_x), (-glass_t / 2.0), ((-glass_h / 2.0) + glass_y + (h / 2.0))),
(((-glass_w / 2.0) + glass_x), (-glass_t / 2.0), ((-glass_h / 2.0) + glass_y + (h / 2.0))),
(((-glass_w / 2.0) + glass_x), (-glass_t / 2.0), ((glass_h / 2.0) + glass_y + (h / 2.0))),
]
faces = [(0, 1, 2, 3),
(7, 6, 5, 4),
]
for v_co in verts:
bm6.verts.new(v_co)
bm6.verts.ensure_lookup_table()
for f_idx in faces:
bm6.faces.new([bm6.verts[i] for i in f_idx])
bm6.to_mesh(mesh6)
mesh6.update()
bpy.ops.object.select_all(action='DESELECT')
door_obj.select = True
glass_obj.select = True
bpy.context.scene.objects.active = door_obj
bpy.ops.object.join()
bpy.ops.object.select_all(action='DESELECT')
return {'FINISHED'}
def menu_func(self, context):
self.layout.operator(AddDoor.bl_idname, icon='MOD_WIREFRAME')
def register():
bpy.utils.register_class(AddDoor)
bpy.types.INFO_MT_mesh_add.append(menu_func)
def unregister():
bpy.utils.unregister_class(AddDoor)
bpy.types.INFO_MT_mesh_add.remove(menu_func)
if __name__ == "__main__":
register()
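
# --- hypothetical console usage (Blender 2.79) after enabling the add-on ---
# bpy.ops.mesh.door_add(length=1.25, height=2.25, door_style='HINGE',
#                       open_direction_hinge='RI', has_glass=False)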
| 44.855634
| 153
| 0.3752
|
9ffd3248224554e4fcdbde46b76b6f8866c94e5e
| 6,726
|
py
|
Python
|
testCases/200325-smFinals/smAGCbase2.ltd.py
|
thadhaines/PSLTDSim
|
1bc598f3733c1369c164f54249e5f7757e6bf466
|
[
"MIT"
] | null | null | null |
testCases/200325-smFinals/smAGCbase2.ltd.py
|
thadhaines/PSLTDSim
|
1bc598f3733c1369c164f54249e5f7757e6bf466
|
[
"MIT"
] | null | null | null |
testCases/200325-smFinals/smAGCbase2.ltd.py
|
thadhaines/PSLTDSim
|
1bc598f3733c1369c164f54249e5f7757e6bf466
|
[
"MIT"
] | null | null | null |
# LTD simulation models / perturbances
# Attribute name case sensitive.
# Commented and empty lines are ignored during parsing.
# Double quoted variable names in model parameters also ignored
CTRLtimeScale = 60*60 # minutes
# Perturbances
mirror.sysPerturbances = [
# AGC steps
#'gen 2 2 : step Pm 2 -150 rel',
'gen 5 : step Pm 2 -150 rel',
# ramp non-gov gens
#'gen 2 2 : ramp Pm 600 2700 150 rel', # 45 min ramp up
#'gen 2 2 : ramp Pm 3900 2700 -150 rel', # 45 min ramp down
#'gen 5 : ramp Pm 600 2700 300 rel', # 45 min ramp up
#'gen 5 : ramp Pm 3900 2700 -300 rel', # 45 min ramp down
]
#mirror.NoiseAgent = ltd.perturbance.LoadNoiseAgent(mirror, 0.03, True)
# Balancing Authorities
mirror.sysBA = {
'BA1':{
'Area':1,
'B': "1.0 : perload", # MW/0.1 Hz
'AGCActionTime': 30.00, # seconds
'ACEgain' : 0.0,
'AGCType':'TLB : 0', # Tie-Line Bias
'UseAreaDroop' : False,
'AreaDroop' : 0.05,
'IncludeIACE' : True,
'IACEconditional': False,
        'IACEwindow' : 15, # seconds - size of window; 0 disables windowing
'IACEscale' : 1/5,
'IACEdeadband' : 0, # Hz
'ACEFiltering': 'PI : 0.04 0.0001',
'AGCDeadband' : None, # MW? -> not implemented
'GovDeadbandType' : 'none', # step, None, ramp, nldroop
'GovDeadband' : .036, # Hz
'GovAlpha' : 0.016, # Hz - for nldroop
'GovBeta' : 0.036, # Hz - for nldroop
'CtrlGens': ['gen 1 : 0.5 : rampA',
'gen 2 1 : 0.5 : rampA',
]
},
'BA2':{
'Area':2,
'B': "1.0 : perload", # MW/0.1 Hz
'AGCActionTime': 45.00, # seconds
'ACEgain' : 0.0,
'AGCType':'TLB : 0', # Tie-Line Bias
'UseAreaDroop' : False,
'AreaDroop' : 0.05,
'IncludeIACE' : True,
'IACEconditional': False,
        'IACEwindow' : 15, # seconds - size of window; 0 disables windowing
'IACEscale' : 1/5,
'IACEdeadband' : 0, # Hz
'ACEFiltering': 'PI : 0.04 0.0001',
'AGCDeadband' : None, # MW? -> not implemented
'GovDeadbandType' : 'none', # step, None, ramp, nldroop
'GovDeadband' : .036, # Hz
'GovAlpha' : 0.016, # Hz - for nldroop
'GovBeta' : 0.036, # Hz - for nldroop
'CtrlGens': ['gen 3 : 1.0 : rampA',]
},
}
"""
# Definite Time Controller Definitions
mirror.DTCdict = {
'bus8caps' : {
'RefAgents' : {
'ra1' : 'bus 8 : Vm',
'ra2' : 'branch 8 9 1 : Qbr', # branches defined from, to, ckID
        },# end Reference Agents
'TarAgents' : {
'tar1' : 'shunt 8 2 : St',
'tar2' : 'shunt 8 3 : St',
'tar3' : 'shunt 8 4 : St',
'tar4' : 'shunt 8 5 : St',
'tar5' : 'shunt 8 6 : St',
}, # end Target Agents
'Timers' : {
'set' :{ # set shunts
'logic' : "(ra1 < 1.0) or (ra2 < -26)",
'actTime' : 30, # seconds of true logic before act
'act' : "anyOFFTar = 1", # set any target off target = 1
},# end set
'reset' :{ # reset shunts
'logic' : "(ra1 > 1.04) or (ra2 > 26)",
'actTime' : 30, # seconds of true logic before act
'act' : "anyONTar = 0", # set any target On target = 0
},# end reset
'hold' : 90, # minimum time between actions
}, # end timers
},# end bus8caps
'bus9caps' : {
'RefAgents' : {
'ra1' : 'bus 9 : Vm',
'ra2' : 'branch 8 9 1 : Qbr', # branches defined from, to, ckID
        },# end Reference Agents
'TarAgents' : {
'tar1' : 'shunt 9 2 : St',
'tar2' : 'shunt 9 3 : St',
'tar3' : 'shunt 9 4 : St',
'tar4' : 'shunt 9 5 : St',
'tar5' : 'shunt 9 6 : St',
}, # end Target Agents
'Timers' : {
'set' :{ # set shunts
'logic' : "(ra1 < 1.0) or (ra2 > 13.3)",
'actTime' : 80, # seconds of true logic before act
'act' : "anyOFFTar = 1", # set any target off target = 1
},# end set
'reset' :{ # reset shunts
'logic' : "(ra1 > 1.04) or (ra2 < -13.3)",
'actTime' : 80, # seconds of true logic before act
'act' : "anyONTar = 0", # set any target On target = 0
},# end reset
'hold' : 120, # minimum time between actions
}, # end timers
},# end bus8caps
}# end DTCdict
# Load and Generation Cycle Agents
mirror.sysGenerationControl = {
'BPATDispatch' : {
'Area': 1,
'startTime' : 2,
'timeScale' : CTRLtimeScale,
'rampType' : 'per', # relative percent change
'CtrlGens': [
"gen 1 : 0.5",
"gen 2 1 : 0.5",
],
# Data from: 12/11/2019 PACE
'forcast' : [
            #(time , Percent change from previous value)
(0, 0.0),
(1, 5.8),
(2, 8.8),
(3, 9.9),
(4, 4.0),
],
}, #end of generation controller def
'CAISODispatch' : {
'Area': 2,
'startTime' : 2,
'timeScale' : CTRLtimeScale,
'rampType' : 'per', # relative percent change
'CtrlGens': [
"gen 4 : 1.0",
],
# Data from: 12/11/2019 PACE
'forcast' : [
            #(time , Percent change from previous value)
(0, 0.0),
(1, 0.7),
(2, 7.5),
(3, 11.2),
(4, 4.4),
],
}, #end of generation controller def
}
mirror.sysLoadControl = {
'BPATDemand' : {
'Area': 1,
'startTime' : 2,
'timeScale' : CTRLtimeScale,
'rampType' : 'per', # relative percent change
# Data from: 12/11/2019 BPAT
'demand' : [
            #(time , Percent change from previous value)
(0, 0.000),
(1, 3.2),
(2, 8.2),
(3, 9.3),
(4, 3.8),
] ,
}, # end of demand agent def
'CAISODemand' : {
'Area': 2,
'startTime' : 2,
'timeScale' : CTRLtimeScale,
'rampType' : 'per', # relative percent change
# Data from: 12/11/2019 CAISO
'demand' : [
            #(time , Percent change from previous value)
(0, 0.000),
(1, 3.0),
(2, 7.0),
(3, 10.5),
(4, 4.4),
] ,
},# end of demand load control definition
}# end of load control definitions
"""
| 33.133005
| 75
| 0.462385
|
3e918677d155a868d64c61b537bedcb78bcce47a
| 43,757
|
py
|
Python
|
mlflow/projects/__init__.py
|
cestum/mlflow
|
c1b95a9c0021b1512a429bfe4bc75183bb0df9ef
|
[
"Apache-2.0"
] | null | null | null |
mlflow/projects/__init__.py
|
cestum/mlflow
|
c1b95a9c0021b1512a429bfe4bc75183bb0df9ef
|
[
"Apache-2.0"
] | null | null | null |
mlflow/projects/__init__.py
|
cestum/mlflow
|
c1b95a9c0021b1512a429bfe4bc75183bb0df9ef
|
[
"Apache-2.0"
] | null | null | null |
"""
The ``mlflow.projects`` module provides an API for running MLflow projects locally or remotely.
"""
from __future__ import print_function
from distutils import dir_util
import hashlib
import json
import yaml
import os
import sys
import re
import shutil
from six.moves import urllib
import subprocess
import tempfile
import logging
import posixpath
import docker
import mlflow.tracking as tracking
import mlflow.tracking.fluent as fluent
from mlflow.projects.submitted_run import LocalSubmittedRun, SubmittedRun
from mlflow.projects import _project_spec
from mlflow.exceptions import ExecutionException, MlflowException
from mlflow.entities import RunStatus, SourceType
from mlflow.tracking.fluent import _get_experiment_id
from mlflow.tracking.context.default_context import _get_user
from mlflow.tracking.context.git_context import _get_git_commit
import mlflow.projects.databricks
from mlflow.utils import process
from mlflow.store.local_artifact_repo import LocalArtifactRepository
from mlflow.store.s3_artifact_repo import S3ArtifactRepository
from mlflow.store.azure_blob_artifact_repo import AzureBlobArtifactRepository
from mlflow.store.gcs_artifact_repo import GCSArtifactRepository
from mlflow.store.hdfs_artifact_repo import HdfsArtifactRepository
from mlflow.store.artifact_repository_registry import get_artifact_repository
from mlflow.utils.file_utils import path_to_local_sqlite_uri, path_to_local_file_uri, \
get_local_path_or_none
from mlflow.utils.mlflow_tags import MLFLOW_PROJECT_ENV, MLFLOW_DOCKER_IMAGE_URI, \
MLFLOW_DOCKER_IMAGE_ID, MLFLOW_USER, MLFLOW_SOURCE_NAME, MLFLOW_SOURCE_TYPE, \
MLFLOW_GIT_COMMIT, MLFLOW_GIT_REPO_URL, MLFLOW_GIT_BRANCH, LEGACY_MLFLOW_GIT_REPO_URL, \
LEGACY_MLFLOW_GIT_BRANCH_NAME, MLFLOW_PROJECT_ENTRY_POINT, MLFLOW_PARENT_RUN_ID, \
MLFLOW_PROJECT_BACKEND
from mlflow.utils import databricks_utils, file_utils
# TODO: this should be restricted to just Git repos and not S3 and stuff like that
_GIT_URI_REGEX = re.compile(r"^[^/]*:")
_FILE_URI_REGEX = re.compile(r"^file://.+")
_ZIP_URI_REGEX = re.compile(r".+\.zip$")
# Environment variable indicating a path to a conda installation. MLflow will default to running
# "conda" if unset
MLFLOW_CONDA_HOME = "MLFLOW_CONDA_HOME"
_GENERATED_DOCKERFILE_NAME = "Dockerfile.mlflow-autogenerated"
_PROJECT_TAR_ARCHIVE_NAME = "mlflow-project-docker-build-context"
_MLFLOW_DOCKER_TRACKING_DIR_PATH = "/mlflow/tmp/mlruns"
_MLFLOW_DOCKER_WORKDIR_PATH = "/mlflow/projects/code/"
_logger = logging.getLogger(__name__)
def _resolve_experiment_id(experiment_name=None, experiment_id=None):
"""
Resolve experiment.
    Verifies that only one of the two is specified - both cannot be selected.
If ``experiment_name`` is provided and does not exist, an experiment
of that name is created and its id is returned.
:param experiment_name: Name of experiment under which to launch the run.
:param experiment_id: ID of experiment under which to launch the run.
:return: str
"""
if experiment_name and experiment_id:
raise MlflowException("Specify only one of 'experiment_name' or 'experiment_id'.")
if experiment_id:
return str(experiment_id)
if experiment_name:
client = tracking.MlflowClient()
exp = client.get_experiment_by_name(experiment_name)
if exp:
return exp.experiment_id
else:
print("INFO: '{}' does not exist. Creating a new experiment".format(experiment_name))
return client.create_experiment(experiment_name)
return _get_experiment_id()
def _run(uri, experiment_id, entry_point="main", version=None, parameters=None,
backend=None, backend_config=None, use_conda=True,
storage_dir=None, synchronous=True, run_id=None):
"""
Helper that delegates to the project-running method corresponding to the passed-in backend.
Returns a ``SubmittedRun`` corresponding to the project run.
"""
parameters = parameters or {}
work_dir = _fetch_project(uri=uri, force_tempdir=False, version=version)
project = _project_spec.load_project(work_dir)
_validate_execution_environment(project, backend)
project.get_entry_point(entry_point)._validate_parameters(parameters)
if run_id:
active_run = tracking.MlflowClient().get_run(run_id)
else:
active_run = _create_run(uri, experiment_id, work_dir, entry_point)
# Consolidate parameters for logging.
# `storage_dir` is `None` since we want to log actual path not downloaded local path
entry_point_obj = project.get_entry_point(entry_point)
final_params, extra_params = entry_point_obj.compute_parameters(parameters, storage_dir=None)
for key, value in (list(final_params.items()) + list(extra_params.items())):
tracking.MlflowClient().log_param(active_run.info.run_id, key, value)
repo_url = _get_git_repo_url(work_dir)
if repo_url is not None:
for tag in [MLFLOW_GIT_REPO_URL, LEGACY_MLFLOW_GIT_REPO_URL]:
tracking.MlflowClient().set_tag(active_run.info.run_id, tag, repo_url)
# Add branch name tag if a branch is specified through -version
if _is_valid_branch_name(work_dir, version):
for tag in [MLFLOW_GIT_BRANCH, LEGACY_MLFLOW_GIT_BRANCH_NAME]:
tracking.MlflowClient().set_tag(active_run.info.run_id, tag, version)
if backend == "databricks":
tracking.MlflowClient().set_tag(active_run.info.run_id, MLFLOW_PROJECT_BACKEND,
"databricks")
from mlflow.projects.databricks import run_databricks
return run_databricks(
remote_run=active_run,
uri=uri, entry_point=entry_point, work_dir=work_dir, parameters=parameters,
experiment_id=experiment_id, cluster_spec=backend_config)
elif backend == "local" or backend is None:
command = []
command_separator = " "
# If a docker_env attribute is defined in MLproject then it takes precedence over conda yaml
# environments, so the project will be executed inside a docker container.
if project.docker_env:
tracking.MlflowClient().set_tag(active_run.info.run_id, MLFLOW_PROJECT_ENV,
"docker")
tracking.MlflowClient().set_tag(active_run.info.run_id, MLFLOW_PROJECT_BACKEND,
"local")
_validate_docker_env(project)
_validate_docker_installation()
image = _build_docker_image(work_dir=work_dir,
repository_uri=project.name,
base_image=project.docker_env.get('image'),
run_id=active_run.info.run_id)
command += _get_docker_command(
image=image,
active_run=active_run,
options=project.docker_env.get('options'))
# Synchronously create a conda environment (even though this may take some time)
# to avoid failures due to multiple concurrent attempts to create the same conda env.
elif use_conda:
tracking.MlflowClient().set_tag(active_run.info.run_id, MLFLOW_PROJECT_ENV, "conda")
tracking.MlflowClient().set_tag(active_run.info.run_id, MLFLOW_PROJECT_BACKEND, "local")
command_separator = " && "
conda_env_name = _get_or_create_conda_env(project.conda_env_path)
command += _get_conda_command(conda_env_name)
# In synchronous mode, run the entry point command in a blocking fashion, sending status
# updates to the tracking server when finished. Note that the run state may not be
# persisted to the tracking server if interrupted
if synchronous:
command += _get_entry_point_command(project, entry_point, parameters, storage_dir)
command = command_separator.join(command)
return _run_entry_point(command, work_dir, experiment_id,
run_id=active_run.info.run_id)
# Otherwise, invoke `mlflow run` in a subprocess
return _invoke_mlflow_run_subprocess(
work_dir=work_dir, entry_point=entry_point, parameters=parameters,
experiment_id=experiment_id,
use_conda=use_conda, storage_dir=storage_dir, run_id=active_run.info.run_id)
elif backend == "kubernetes":
from mlflow.projects import kubernetes as kb
tracking.MlflowClient().set_tag(active_run.info.run_id, MLFLOW_PROJECT_ENV, "docker")
tracking.MlflowClient().set_tag(active_run.info.run_id, MLFLOW_PROJECT_BACKEND,
"kubernetes")
_validate_docker_env(project)
_validate_docker_installation()
kube_config = _parse_kubernetes_config(backend_config)
image = _build_docker_image(work_dir=work_dir,
repository_uri=kube_config["repository-uri"],
base_image=project.docker_env.get('image'),
run_id=active_run.info.run_id)
image_digest = kb.push_image_to_registry(image.tags[0])
submitted_run = kb.run_kubernetes_job(project.name,
active_run,
image.tags[0],
image_digest,
_get_entry_point_command(project, entry_point,
parameters, storage_dir),
_get_run_env_vars(
run_id=active_run.info.run_uuid,
experiment_id=active_run.info.experiment_id),
kube_config['kube-context'],
kube_config['kube-job-template'])
return submitted_run
supported_backends = ["local", "databricks", "kubernetes"]
raise ExecutionException("Got unsupported execution mode %s. Supported "
"values: %s" % (backend, supported_backends))
def run(uri, entry_point="main", version=None, parameters=None,
experiment_name=None, experiment_id=None,
backend=None, backend_config=None, use_conda=True,
storage_dir=None, synchronous=True, run_id=None):
"""
Run an MLflow project. The project can be local or stored at a Git URI.
    You can run the project locally or remotely on a Databricks cluster.
For information on using this method in chained workflows, see `Building Multistep Workflows
<../projects.html#building-multistep-workflows>`_.
:raises ``ExecutionException``: If a run launched in blocking mode is unsuccessful.
:param uri: URI of project to run. A local filesystem path
or a Git repository URI (e.g. https://github.com/mlflow/mlflow-example)
pointing to a project directory containing an MLproject file.
:param entry_point: Entry point to run within the project. If no entry point with the specified
name is found, runs the project file ``entry_point`` as a script,
using "python" to run ``.py`` files and the default shell (specified by
environment variable ``$SHELL``) to run ``.sh`` files.
:param version: For Git-based projects, either a commit hash or a branch name.
:param experiment_name: Name of experiment under which to launch the run.
:param experiment_id: ID of experiment under which to launch the run.
:param backend: Execution backend for the run: "local", "databricks", or "kubernetes"
(experimental). If running against Databricks, will run against a Databricks
workspace determined as follows: if a Databricks tracking URI of the form
``databricks://profile`` has been set (e.g. by setting the
MLFLOW_TRACKING_URI environment variable), will run against the workspace
specified by <profile>. Otherwise, runs against the workspace specified by
the default Databricks CLI profile.
:param backend_config: A dictionary, or a path to a JSON file (must end in '.json'), which will
be passed as config to the backend. The exact content which should be
provided is different for each execution backend and is documented
at https://www.mlflow.org/docs/latest/projects.html.
:param use_conda: If True (the default), create a new Conda environment for the run and
install project dependencies within that environment. Otherwise, run the
project in the current environment without installing any project
dependencies.
:param storage_dir: Used only if ``backend`` is "local". MLflow downloads artifacts from
distributed URIs passed to parameters of type ``path`` to subdirectories of
``storage_dir``.
:param synchronous: Whether to block while waiting for a run to complete. Defaults to True.
Note that if ``synchronous`` is False and ``backend`` is "local", this
method will return, but the current process will block when exiting until
the local run completes. If the current process is interrupted, any
asynchronous runs launched via this method will be terminated.
:param run_id: Note: this argument is used internally by the MLflow project APIs and should
not be specified. If specified, the run ID will be used instead of
creating a new run.
:return: :py:class:`mlflow.projects.SubmittedRun` exposing information (e.g. run ID)
about the launched run.
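
    Example (illustrative; the entry point name and ``alpha`` parameter below are
    hypothetical and assume a project whose MLproject file defines them)::

        import mlflow.projects

        submitted = mlflow.projects.run(
            uri="https://github.com/mlflow/mlflow-example",  # or a local path
            entry_point="main",
            parameters={"alpha": "0.5"},
            use_conda=False,  # run in the current environment
        )
        print(submitted.run_id, submitted.get_status())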
"""
cluster_spec_dict = backend_config
    if (backend_config and not isinstance(backend_config, dict)
            and os.path.splitext(backend_config)[-1] == ".json"):
with open(backend_config, 'r') as handle:
try:
cluster_spec_dict = json.load(handle)
except ValueError:
_logger.error(
"Error when attempting to load and parse JSON cluster spec from file %s",
backend_config)
raise
if backend == "databricks":
mlflow.projects.databricks.before_run_validations(mlflow.get_tracking_uri(), backend_config)
experiment_id = _resolve_experiment_id(experiment_name=experiment_name,
experiment_id=experiment_id)
submitted_run_obj = _run(
uri=uri, experiment_id=experiment_id, entry_point=entry_point, version=version,
parameters=parameters, backend=backend, backend_config=cluster_spec_dict,
use_conda=use_conda, storage_dir=storage_dir, synchronous=synchronous, run_id=run_id)
if synchronous:
_wait_for(submitted_run_obj)
return submitted_run_obj
def _wait_for(submitted_run_obj):
"""Wait on the passed-in submitted run, reporting its status to the tracking server."""
run_id = submitted_run_obj.run_id
active_run = None
# Note: there's a small chance we fail to report the run's status to the tracking server if
# we're interrupted before we reach the try block below
try:
active_run = tracking.MlflowClient().get_run(run_id) if run_id is not None else None
if submitted_run_obj.wait():
_logger.info("=== Run (ID '%s') succeeded ===", run_id)
_maybe_set_run_terminated(active_run, "FINISHED")
else:
_maybe_set_run_terminated(active_run, "FAILED")
raise ExecutionException("Run (ID '%s') failed" % run_id)
except KeyboardInterrupt:
_logger.error("=== Run (ID '%s') interrupted, cancelling run ===", run_id)
submitted_run_obj.cancel()
_maybe_set_run_terminated(active_run, "FAILED")
raise
def _parse_subdirectory(uri):
# Parses a uri and returns the uri and subdirectory as separate values.
# Uses '#' as a delimiter.
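    # e.g. "https://github.com/mlflow/mlflow-example#subdir" parses into
    # ("https://github.com/mlflow/mlflow-example", "subdir") (illustrative).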
subdirectory = ''
parsed_uri = uri
if '#' in uri:
subdirectory = uri[uri.find('#') + 1:]
parsed_uri = uri[:uri.find('#')]
if subdirectory and '.' in subdirectory:
raise ExecutionException("'.' is not allowed in project subdirectory paths.")
return parsed_uri, subdirectory
def _get_storage_dir(storage_dir):
if storage_dir is not None and not os.path.exists(storage_dir):
os.makedirs(storage_dir)
return tempfile.mkdtemp(dir=storage_dir)
def _get_git_repo_url(work_dir):
from git import Repo
from git.exc import GitCommandError, InvalidGitRepositoryError
try:
repo = Repo(work_dir, search_parent_directories=True)
remote_urls = [remote.url for remote in repo.remotes]
if len(remote_urls) == 0:
return None
except GitCommandError:
return None
except InvalidGitRepositoryError:
return None
return remote_urls[0]
def _expand_uri(uri):
if _is_local_uri(uri):
return os.path.abspath(uri)
return uri
def _is_file_uri(uri):
"""Returns True if the passed-in URI is a file:// URI."""
return _FILE_URI_REGEX.match(uri)
def _is_local_uri(uri):
"""Returns True if the passed-in URI should be interpreted as a path on the local filesystem."""
return not _GIT_URI_REGEX.match(uri)
def _is_zip_uri(uri):
"""Returns True if the passed-in URI points to a ZIP file."""
return _ZIP_URI_REGEX.match(uri)
def _is_valid_branch_name(work_dir, version):
"""
Returns True if the ``version`` is the name of a branch in a Git project.
``work_dir`` must be the working directory in a git repo.
"""
if version is not None:
from git import Repo
from git.exc import GitCommandError
repo = Repo(work_dir, search_parent_directories=True)
try:
return repo.git.rev_parse("--verify", "refs/heads/%s" % version) != ''
except GitCommandError:
return False
return False
def _fetch_project(uri, force_tempdir, version=None):
"""
Fetch a project into a local directory, returning the path to the local project directory.
:param force_tempdir: If True, will fetch the project into a temporary directory. Otherwise,
will fetch ZIP or Git projects into a temporary directory but simply
return the path of local projects (i.e. perform a no-op for local
projects).
"""
parsed_uri, subdirectory = _parse_subdirectory(uri)
use_temp_dst_dir = force_tempdir or _is_zip_uri(parsed_uri) or not _is_local_uri(parsed_uri)
dst_dir = tempfile.mkdtemp() if use_temp_dst_dir else parsed_uri
if use_temp_dst_dir:
_logger.info("=== Fetching project from %s into %s ===", uri, dst_dir)
if _is_zip_uri(parsed_uri):
if _is_file_uri(parsed_uri):
parsed_file_uri = urllib.parse.urlparse(urllib.parse.unquote(parsed_uri))
parsed_uri = os.path.join(parsed_file_uri.netloc, parsed_file_uri.path)
_unzip_repo(zip_file=(
parsed_uri if _is_local_uri(parsed_uri) else _fetch_zip_repo(parsed_uri)),
dst_dir=dst_dir)
elif _is_local_uri(uri):
if version is not None:
raise ExecutionException("Setting a version is only supported for Git project URIs")
if use_temp_dst_dir:
dir_util.copy_tree(src=parsed_uri, dst=dst_dir)
else:
assert _GIT_URI_REGEX.match(parsed_uri), "Non-local URI %s should be a Git URI" % parsed_uri
_fetch_git_repo(parsed_uri, version, dst_dir)
res = os.path.abspath(os.path.join(dst_dir, subdirectory))
if not os.path.exists(res):
raise ExecutionException("Could not find subdirectory %s of %s" % (subdirectory, dst_dir))
return res
def _unzip_repo(zip_file, dst_dir):
import zipfile
with zipfile.ZipFile(zip_file) as zip_in:
zip_in.extractall(dst_dir)
def _fetch_zip_repo(uri):
import requests
from io import BytesIO
# TODO (dbczumar): Replace HTTP resolution via ``requests.get`` with an invocation of
# ```mlflow.data.download_uri()`` when the API supports the same set of available stores as
# the artifact repository (Azure, FTP, etc). See the following issue:
# https://github.com/mlflow/mlflow/issues/763.
response = requests.get(uri)
try:
response.raise_for_status()
except requests.HTTPError as error:
raise ExecutionException("Unable to retrieve ZIP file. Reason: %s" % str(error))
return BytesIO(response.content)
def _fetch_git_repo(uri, version, dst_dir):
"""
Clone the git repo at ``uri`` into ``dst_dir``, checking out commit ``version`` (or defaulting
to the head commit of the repository's master branch if version is unspecified).
Assumes authentication parameters are specified by the environment, e.g. by a Git credential
helper.
"""
# We defer importing git until the last moment, because the import requires that the git
    # executable is available on the PATH, so we only want to fail if we actually need it.
import git
repo = git.Repo.init(dst_dir)
origin = repo.create_remote("origin", uri)
origin.fetch()
if version is not None:
try:
repo.git.checkout(version)
except git.exc.GitCommandError as e:
raise ExecutionException("Unable to checkout version '%s' of git repo %s"
"- please ensure that the version exists in the repo. "
"Error: %s" % (version, uri, e))
else:
repo.create_head("master", origin.refs.master)
repo.heads.master.checkout()
def _get_conda_env_name(conda_env_path, env_id=None):
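    # Name the environment after a SHA1 of the YAML contents (plus the optional
    # env_id), e.g. "mlflow-<40-char sha1 hex>", so identical environments are reused.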
conda_env_contents = open(conda_env_path).read() if conda_env_path else ""
if env_id:
conda_env_contents += env_id
return "mlflow-%s" % hashlib.sha1(conda_env_contents.encode("utf-8")).hexdigest()
def _get_conda_bin_executable(executable_name):
"""
Return path to the specified executable, assumed to be discoverable within the 'bin'
subdirectory of a conda installation.
The conda home directory (expected to contain a 'bin' subdirectory) is configurable via the
``mlflow.projects.MLFLOW_CONDA_HOME`` environment variable. If
``mlflow.projects.MLFLOW_CONDA_HOME`` is unspecified, this method simply returns the passed-in
executable name.
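
    For example, with ``MLFLOW_CONDA_HOME=/opt/conda`` (a hypothetical install
    location), ``_get_conda_bin_executable("conda")`` returns ``/opt/conda/bin/conda``.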
"""
conda_home = os.environ.get(MLFLOW_CONDA_HOME)
if conda_home:
return os.path.join(conda_home, "bin/%s" % executable_name)
# Use CONDA_EXE as per https://github.com/conda/conda/issues/7126
if "CONDA_EXE" in os.environ:
conda_bin_dir = os.path.dirname(os.environ["CONDA_EXE"])
return os.path.join(conda_bin_dir, executable_name)
return executable_name
def _get_or_create_conda_env(conda_env_path, env_id=None):
"""
    Given a conda environment YAML file, creates a conda environment containing its
    dependencies if such an environment doesn't already exist. Returns the name of the
    conda environment.
:param conda_env_path: Path to a conda yaml file.
:param env_id: Optional string that is added to the contents of the yaml file before
calculating the hash. It can be used to distinguish environments that have the
same conda dependencies but are supposed to be different based on the context.
For example, when serving the model we may install additional dependencies to the
environment after the environment has been activated.
"""
conda_path = _get_conda_bin_executable("conda")
try:
process.exec_cmd([conda_path, "--help"], throw_on_error=False)
except EnvironmentError:
raise ExecutionException("Could not find Conda executable at {0}. "
"Ensure Conda is installed as per the instructions "
"at https://conda.io/docs/user-guide/install/index.html. You can "
"also configure MLflow to look for a specific Conda executable "
"by setting the {1} environment variable to the path of the Conda "
"executable".format(conda_path, MLFLOW_CONDA_HOME))
(_, stdout, _) = process.exec_cmd([conda_path, "env", "list", "--json"])
env_names = [os.path.basename(env) for env in json.loads(stdout)['envs']]
project_env_name = _get_conda_env_name(conda_env_path, env_id)
if project_env_name not in env_names:
_logger.info('=== Creating conda environment %s ===', project_env_name)
if conda_env_path:
process.exec_cmd([conda_path, "env", "create", "-n", project_env_name, "--file",
conda_env_path], stream_output=True)
else:
process.exec_cmd(
[conda_path, "create", "-n", project_env_name, "python"], stream_output=True)
return project_env_name
def _maybe_set_run_terminated(active_run, status):
"""
If the passed-in active run is defined and still running (i.e. hasn't already been terminated
within user code), mark it as terminated with the passed-in status.
"""
if active_run is None:
return
run_id = active_run.info.run_id
cur_status = tracking.MlflowClient().get_run(run_id).info.status
if RunStatus.is_terminated(cur_status):
return
tracking.MlflowClient().set_terminated(run_id, status)
def _get_entry_point_command(project, entry_point, parameters, storage_dir):
"""
Returns the shell command to execute in order to run the specified entry point.
:param project: Project containing the target entry point
:param entry_point: Entry point to run
:param parameters: Parameters (dictionary) for the entry point command
:param storage_dir: Base local directory to use for downloading remote artifacts passed to
arguments of type 'path'. If None, a temporary base directory is used.
"""
storage_dir_for_run = _get_storage_dir(storage_dir)
_logger.info(
"=== Created directory %s for downloading remote URIs passed to arguments of"
" type 'path' ===",
storage_dir_for_run)
commands = []
commands.append(
project.get_entry_point(entry_point).compute_command(parameters, storage_dir_for_run))
return commands
def _run_entry_point(command, work_dir, experiment_id, run_id):
"""
Run an entry point command in a subprocess, returning a SubmittedRun that can be used to
query the run's status.
:param command: Entry point command to run
:param work_dir: Working directory in which to run the command
:param run_id: MLflow run ID associated with the entry point execution.
"""
env = os.environ.copy()
env.update(_get_run_env_vars(run_id, experiment_id))
_logger.info("=== Running command '%s' in run with ID '%s' === ", command, run_id)
    # On non-Windows systems (os.name != 'nt'), run the command through bash so
    # that shell constructs in the entry point command are honored; on Windows,
    # invoke the command directly.
if os.name != "nt":
process = subprocess.Popen(["bash", "-c", command], close_fds=True, cwd=work_dir, env=env)
else:
process = subprocess.Popen(command, close_fds=True, cwd=work_dir, env=env)
return LocalSubmittedRun(run_id, process)
def _build_mlflow_run_cmd(
uri, entry_point, storage_dir, use_conda, run_id, parameters):
"""
Build and return an array containing an ``mlflow run`` command that can be invoked to locally
run the project at the specified URI.
"""
mlflow_run_arr = ["mlflow", "run", uri, "-e", entry_point, "--run-id", run_id]
if storage_dir is not None:
mlflow_run_arr.extend(["--storage-dir", storage_dir])
if not use_conda:
mlflow_run_arr.append("--no-conda")
for key, value in parameters.items():
mlflow_run_arr.extend(["-P", "%s=%s" % (key, value)])
return mlflow_run_arr
def _run_mlflow_run_cmd(mlflow_run_arr, env_map):
"""
Invoke ``mlflow run`` in a subprocess, which in turn runs the entry point in a child process.
    Returns a handle to the subprocess (``Popen``) launched to invoke ``mlflow run``.
"""
final_env = os.environ.copy()
final_env.update(env_map)
# Launch `mlflow run` command as the leader of its own process group so that we can do a
# best-effort cleanup of all its descendant processes if needed
if sys.platform == "win32":
return subprocess.Popen(
mlflow_run_arr, env=final_env, universal_newlines=True,
creationflags=subprocess.CREATE_NEW_PROCESS_GROUP)
else:
return subprocess.Popen(
mlflow_run_arr, env=final_env, universal_newlines=True, preexec_fn=os.setsid)
def _create_run(uri, experiment_id, work_dir, entry_point):
"""
Create a ``Run`` against the current MLflow tracking server, logging metadata (e.g. the URI,
entry point, and parameters of the project) about the run. Return an ``ActiveRun`` that can be
used to report additional data about the run (metrics/params) to the tracking server.
"""
if _is_local_uri(uri):
source_name = tracking.utils._get_git_url_if_present(_expand_uri(uri))
else:
source_name = _expand_uri(uri)
source_version = _get_git_commit(work_dir)
existing_run = fluent.active_run()
if existing_run:
parent_run_id = existing_run.info.run_id
else:
parent_run_id = None
tags = {
MLFLOW_USER: _get_user(),
MLFLOW_SOURCE_NAME: source_name,
MLFLOW_SOURCE_TYPE: SourceType.to_string(SourceType.PROJECT),
MLFLOW_PROJECT_ENTRY_POINT: entry_point
}
if source_version is not None:
tags[MLFLOW_GIT_COMMIT] = source_version
if parent_run_id is not None:
tags[MLFLOW_PARENT_RUN_ID] = parent_run_id
active_run = tracking.MlflowClient().create_run(experiment_id=experiment_id, tags=tags)
return active_run
def _get_run_env_vars(run_id, experiment_id):
"""
Returns a dictionary of environment variable key-value pairs to set in subprocess launched
to run MLflow projects.
"""
return {
tracking._RUN_ID_ENV_VAR: run_id,
tracking._TRACKING_URI_ENV_VAR: tracking.get_tracking_uri(),
tracking._EXPERIMENT_ID_ENV_VAR: str(experiment_id),
}
def _invoke_mlflow_run_subprocess(
work_dir, entry_point, parameters, experiment_id, use_conda, storage_dir, run_id):
"""
Run an MLflow project asynchronously by invoking ``mlflow run`` in a subprocess, returning
a SubmittedRun that can be used to query run status.
"""
_logger.info("=== Asynchronously launching MLflow run with ID %s ===", run_id)
mlflow_run_arr = _build_mlflow_run_cmd(
uri=work_dir, entry_point=entry_point, storage_dir=storage_dir, use_conda=use_conda,
run_id=run_id, parameters=parameters)
mlflow_run_subprocess = _run_mlflow_run_cmd(
mlflow_run_arr, _get_run_env_vars(run_id, experiment_id))
return LocalSubmittedRun(run_id, mlflow_run_subprocess)
def _get_conda_command(conda_env_name):
# Checking for newer conda versions
if 'CONDA_EXE' in os.environ or 'MLFLOW_CONDA_HOME' in os.environ:
conda_path = _get_conda_bin_executable("conda")
activate_conda_env = ['source ' + os.path.dirname(conda_path) +
'/../etc/profile.d/conda.sh']
activate_conda_env += ["conda activate {0} 1>&2".format(conda_env_name)]
else:
activate_path = _get_conda_bin_executable("activate")
        # On non-Windows systems (os.name != 'nt'), source the activate script;
        # on Windows, invoke activation through the conda executable instead.
if os.name != "nt":
return ["source %s %s 1>&2" % (activate_path, conda_env_name)]
else:
return ["conda %s %s 1>&2" % (activate_path, conda_env_name)]
return activate_conda_env
def _validate_execution_environment(project, backend):
if project.docker_env and backend == "databricks":
raise ExecutionException(
"Running docker-based projects on Databricks is not yet supported.")
def _get_local_uri_or_none(uri):
if uri == "databricks":
return None, None
parsed_uri = urllib.parse.urlparse(uri)
if not parsed_uri.netloc and parsed_uri.scheme in ("", "file", "sqlite"):
path = urllib.request.url2pathname(parsed_uri.path)
if parsed_uri.scheme == "sqlite":
uri = path_to_local_sqlite_uri(_MLFLOW_DOCKER_TRACKING_DIR_PATH)
else:
uri = path_to_local_file_uri(_MLFLOW_DOCKER_TRACKING_DIR_PATH)
return path, uri
else:
return None, None
def _get_docker_command(image, active_run, options=None):
    options = options or []  # avoid the mutable-default pitfall; also handles None
docker_path = "docker"
cmd = [docker_path, "run", "--rm"]
env_vars = _get_run_env_vars(run_id=active_run.info.run_id,
experiment_id=active_run.info.experiment_id)
tracking_uri = tracking.get_tracking_uri()
tracking_cmds, tracking_envs = _get_docker_tracking_cmd_and_envs(tracking_uri)
artifact_cmds, artifact_envs = \
_get_docker_artifact_storage_cmd_and_envs(active_run.info.artifact_uri)
cmd += tracking_cmds + artifact_cmds
env_vars.update(tracking_envs)
env_vars.update(artifact_envs)
for key, value in env_vars.items():
cmd += ["-e", "{key}={value}".format(key=key, value=value)]
for opt in options:
cmd += ["{flag}".format(flag=opt['flag']), "{value}".format(value=opt['value'])]
cmd += [image.tags[0]]
return cmd
def _validate_docker_installation():
"""
    Verify that Docker is installed on the host machine.
"""
try:
docker_path = "docker"
process.exec_cmd([docker_path, "--help"], throw_on_error=False)
except EnvironmentError:
raise ExecutionException("Could not find Docker executable. "
"Ensure Docker is installed as per the instructions "
"at https://docs.docker.com/install/overview/.")
def _validate_docker_env(project):
if not project.name:
raise ExecutionException("Project name in MLProject must be specified when using docker "
"for image tagging.")
if not project.docker_env.get('image'):
raise ExecutionException("Project with docker environment must specify the docker image "
"to use via an 'image' field under the 'docker_env' field.")
def _parse_kubernetes_config(backend_config):
"""
    Parse and validate the Kubernetes backend config, loading the job template
    YAML referenced by 'kube-job-template-path'. Returns the resulting config dict.
"""
if not backend_config:
raise ExecutionException("Backend_config file not found.")
kube_config = backend_config.copy()
    if 'kube-job-template-path' not in backend_config:
raise ExecutionException("'kube-job-template-path' attribute must be specified in "
"backend_config.")
kube_job_template = backend_config['kube-job-template-path']
if os.path.exists(kube_job_template):
with open(kube_job_template, 'r') as job_template:
yaml_obj = yaml.safe_load(job_template.read())
kube_job_template = yaml_obj
kube_config['kube-job-template'] = kube_job_template
else:
raise ExecutionException("Could not find 'kube-job-template-path': {}".format(
kube_job_template))
    if 'kube-context' not in backend_config:
raise ExecutionException("Could not find kube-context in backend_config.")
    if 'repository-uri' not in backend_config:
raise ExecutionException("Could not find 'repository-uri' in backend_config.")
return kube_config
def _create_docker_build_ctx(work_dir, dockerfile_contents):
"""
Creates build context tarfile containing Dockerfile and project code, returning path to tarfile
"""
directory = tempfile.mkdtemp()
try:
dst_path = os.path.join(directory, "mlflow-project-contents")
shutil.copytree(src=work_dir, dst=dst_path)
with open(os.path.join(dst_path, _GENERATED_DOCKERFILE_NAME), "w") as handle:
handle.write(dockerfile_contents)
_, result_path = tempfile.mkstemp()
file_utils.make_tarfile(
output_filename=result_path,
source_dir=dst_path, archive_name=_PROJECT_TAR_ARCHIVE_NAME)
finally:
shutil.rmtree(directory)
return result_path
def _build_docker_image(work_dir, repository_uri, base_image, run_id):
"""
Build a docker image containing the project in `work_dir`, using the base image.
"""
image_uri = _get_docker_image_uri(repository_uri=repository_uri, work_dir=work_dir)
dockerfile = (
"FROM {imagename}\n"
"COPY {build_context_path}/ {workdir}\n"
"WORKDIR {workdir}\n"
).format(imagename=base_image,
build_context_path=_PROJECT_TAR_ARCHIVE_NAME,
workdir=_MLFLOW_DOCKER_WORKDIR_PATH)
build_ctx_path = _create_docker_build_ctx(work_dir, dockerfile)
with open(build_ctx_path, 'rb') as docker_build_ctx:
_logger.info("=== Building docker image %s ===", image_uri)
client = docker.from_env()
image, _ = client.images.build(
tag=image_uri, forcerm=True,
dockerfile=posixpath.join(_PROJECT_TAR_ARCHIVE_NAME, _GENERATED_DOCKERFILE_NAME),
fileobj=docker_build_ctx, custom_context=True, encoding="gzip")
try:
os.remove(build_ctx_path)
except Exception: # pylint: disable=broad-except
_logger.info("Temporary docker context file %s was not deleted.", build_ctx_path)
tracking.MlflowClient().set_tag(run_id,
MLFLOW_DOCKER_IMAGE_URI,
image_uri)
tracking.MlflowClient().set_tag(run_id,
MLFLOW_DOCKER_IMAGE_ID,
image.id)
return image
def _get_docker_image_uri(repository_uri, work_dir):
"""
Returns an appropriate Docker image URI for a project based on the git hash of the specified
working directory.
:param repository_uri: The URI of the Docker repository with which to tag the image. The
repository URI is used as the prefix of the image URI.
:param work_dir: Path to the working directory in which to search for a git commit hash
"""
repository_uri = repository_uri if repository_uri else "docker-project"
# Optionally include first 7 digits of git SHA in tag name, if available.
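    # e.g. repository_uri="my-registry/my-project" with commit "a1b2c3d4..." tags
    # the image "my-registry/my-project:a1b2c3d" (names here are illustrative).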
git_commit = _get_git_commit(work_dir)
version_string = ":" + git_commit[:7] if git_commit else ""
return repository_uri + version_string
def _get_local_artifact_cmd_and_envs(artifact_repo):
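    # Bind-mount the local artifact directory into the container at the matching
    # path, e.g. ["-v", "/abs/path/mlruns:/mlflow/projects/code/mlruns"]
    # (paths illustrative); no extra environment variables are needed.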
artifact_dir = artifact_repo.artifact_dir
container_path = artifact_dir
if not os.path.isabs(container_path):
container_path = os.path.join(_MLFLOW_DOCKER_WORKDIR_PATH, container_path)
container_path = os.path.normpath(container_path)
abs_artifact_dir = os.path.abspath(artifact_dir)
return ["-v", "%s:%s" % (abs_artifact_dir, container_path)], {}
def _get_s3_artifact_cmd_and_envs(artifact_repo):
# pylint: disable=unused-argument
aws_path = posixpath.expanduser("~/.aws")
volumes = []
if posixpath.exists(aws_path):
volumes = ["-v", "%s:%s" % (str(aws_path), "/.aws")]
envs = {
"AWS_SECRET_ACCESS_KEY": os.environ.get("AWS_SECRET_ACCESS_KEY"),
"AWS_ACCESS_KEY_ID": os.environ.get("AWS_ACCESS_KEY_ID"),
"MLFLOW_S3_ENDPOINT_URL": os.environ.get("MLFLOW_S3_ENDPOINT_URL")
}
envs = dict((k, v) for k, v in envs.items() if v is not None)
return volumes, envs
def _get_azure_blob_artifact_cmd_and_envs(artifact_repo):
# pylint: disable=unused-argument
envs = {
"AZURE_STORAGE_CONNECTION_STRING": os.environ.get("AZURE_STORAGE_CONNECTION_STRING"),
"AZURE_STORAGE_ACCESS_KEY": os.environ.get("AZURE_STORAGE_ACCESS_KEY")
}
envs = dict((k, v) for k, v in envs.items() if v is not None)
return [], envs
def _get_gcs_artifact_cmd_and_envs(artifact_repo):
# pylint: disable=unused-argument
cmds = []
envs = {}
if "GOOGLE_APPLICATION_CREDENTIALS" in os.environ:
credentials_path = os.environ["GOOGLE_APPLICATION_CREDENTIALS"]
cmds = ["-v", "{}:/.gcs".format(credentials_path)]
envs["GOOGLE_APPLICATION_CREDENTIALS"] = "/.gcs"
return cmds, envs
def _get_hdfs_artifact_cmd_and_envs(artifact_repo):
# pylint: disable=unused-argument
cmds = []
envs = {
"MLFLOW_HDFS_DRIVER": os.environ.get("MLFLOW_HDFS_DRIVER"),
"MLFLOW_KERBEROS_TICKET_CACHE": os.environ.get("MLFLOW_KERBEROS_TICKET_CACHE"),
"MLFLOW_KERBEROS_USER": os.environ.get("MLFLOW_KERBEROS_USER"),
"MLFLOW_PYARROW_EXTRA_CONF": os.environ.get("MLFLOW_PYARROW_EXTRA_CONF")
}
envs = dict((k, v) for k, v in envs.items() if v is not None)
if "MLFLOW_KERBEROS_TICKET_CACHE" in envs:
ticket_cache = envs["MLFLOW_KERBEROS_TICKET_CACHE"]
cmds = ["-v", "{}:{}".format(ticket_cache, ticket_cache)]
return cmds, envs
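# Maps each artifact repository class to the helper that builds its docker
# volume-mount arguments and credential environment variables.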
_artifact_storages = {
LocalArtifactRepository: _get_local_artifact_cmd_and_envs,
S3ArtifactRepository: _get_s3_artifact_cmd_and_envs,
AzureBlobArtifactRepository: _get_azure_blob_artifact_cmd_and_envs,
HdfsArtifactRepository: _get_hdfs_artifact_cmd_and_envs,
GCSArtifactRepository: _get_gcs_artifact_cmd_and_envs,
}
def _get_docker_artifact_storage_cmd_and_envs(artifact_uri):
artifact_repo = get_artifact_repository(artifact_uri)
_get_cmd_and_envs = _artifact_storages.get(type(artifact_repo))
if _get_cmd_and_envs is not None:
return _get_cmd_and_envs(artifact_repo)
else:
return [], {}
def _get_docker_tracking_cmd_and_envs(tracking_uri):
cmds = []
env_vars = dict()
local_path, container_tracking_uri = _get_local_uri_or_none(tracking_uri)
if local_path is not None:
cmds = ["-v", "%s:%s" % (local_path, _MLFLOW_DOCKER_TRACKING_DIR_PATH)]
env_vars[tracking._TRACKING_URI_ENV_VAR] = container_tracking_uri
if tracking.utils._is_databricks_uri(tracking_uri):
db_profile = mlflow.tracking.utils.get_db_profile_from_uri(tracking_uri)
config = databricks_utils.get_databricks_host_creds(db_profile)
# We set these via environment variables so that only the current profile is exposed, rather
# than all profiles in ~/.databrickscfg; maybe better would be to mount the necessary
# part of ~/.databrickscfg into the container
env_vars[tracking._TRACKING_URI_ENV_VAR] = 'databricks'
env_vars['DATABRICKS_HOST'] = config.host
if config.username:
env_vars['DATABRICKS_USERNAME'] = config.username
if config.password:
env_vars['DATABRICKS_PASSWORD'] = config.password
if config.token:
env_vars['DATABRICKS_TOKEN'] = config.token
if config.ignore_tls_verification:
env_vars['DATABRICKS_INSECURE'] = config.ignore_tls_verification
return cmds, env_vars
__all__ = [
"run",
"SubmittedRun"
]
| 45.297101
| 100
| 0.678589
|
c1a73811329626b6966f9908741ec5b7a3e1e790
| 15,662
|
py
|
Python
|
cirq-google/cirq_google/serialization/op_deserializer_test.py
|
dabacon/Cirq
|
54286063f679d67501ff1b905cd16b879feaae27
|
[
"Apache-2.0"
] | 3,326
|
2018-07-18T23:17:21.000Z
|
2022-03-29T22:28:24.000Z
|
cirq-google/cirq_google/serialization/op_deserializer_test.py
|
bradyb/Cirq
|
610b0d4ea3a7862169610797266734c844ddcc1f
|
[
"Apache-2.0"
] | 3,443
|
2018-07-18T21:07:28.000Z
|
2022-03-31T20:23:21.000Z
|
cirq-google/cirq_google/serialization/op_deserializer_test.py
|
bradyb/Cirq
|
610b0d4ea3a7862169610797266734c844ddcc1f
|
[
"Apache-2.0"
] | 865
|
2018-07-18T23:30:24.000Z
|
2022-03-30T11:43:23.000Z
|
# Copyright 2019 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Dict, List
import pytest
import sympy
from google.protobuf import json_format
import cirq
import cirq_google as cg
from cirq_google.api import v2
DEFAULT_TOKEN = 'test_tag'
def op_proto(json_dict: Dict) -> v2.program_pb2.Operation:
op = v2.program_pb2.Operation()
json_format.ParseDict(json_dict, op)
return op
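# e.g. op_proto({'gate': {'id': 'my_gate'}, 'qubits': [{'id': '1_2'}]}) builds an
# Operation proto from a plain dict via json_format.ParseDict (illustrative).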
@cirq.value_equality
class GateWithAttribute(cirq.SingleQubitGate):
def __init__(self, val, not_req=None):
self.val = val
self.not_req = not_req
def _value_equality_values_(self):
return (self.val,)
def base_deserializer():
return cg.GateOpDeserializer(
serialized_gate_id='my_gate',
gate_constructor=GateWithAttribute,
args=[
cg.DeserializingArg(
serialized_name='my_val',
constructor_arg_name='val',
)
],
)
TEST_CASES = [
(float, 1.0, {'arg_value': {'float_value': 1.0}}),
(str, 'abc', {'arg_value': {'string_value': 'abc'}}),
(float, 1, {'arg_value': {'float_value': 1.0}}),
(List[bool], [True, False], {'arg_value': {'bool_values': {'values': [True, False]}}}),
(sympy.Symbol, sympy.Symbol('x'), {'symbol': 'x'}),
(
float,
sympy.Symbol('x') - sympy.Symbol('y'),
{
'func': {
'type': 'add',
'args': [
{'symbol': 'x'},
{
'func': {
'type': 'mul',
'args': [{'arg_value': {'float_value': -1.0}}, {'symbol': 'y'}],
}
},
],
}
},
),
]
@pytest.mark.parametrize(('val_type', 'val', 'arg_value'), TEST_CASES)
def test_from_proto(val_type, val, arg_value):
deserializer = base_deserializer()
serialized = op_proto(
{'gate': {'id': 'my_gate'}, 'args': {'my_val': arg_value}, 'qubits': [{'id': '1_2'}]}
)
q = cirq.GridQubit(1, 2)
result = deserializer.from_proto(serialized, arg_function_language='linear')
assert result == GateWithAttribute(val)(q)
def test_from_proto_required_missing():
deserializer = base_deserializer()
serialized = op_proto(
{
'gate': {'id': 'my_gate'},
'args': {'not_my_val': {'arg_value': {'float_value': 0.125}}},
'qubits': [{'id': '1_2'}],
}
)
with pytest.raises(Exception, match='my_val'):
deserializer.from_proto(serialized)
def test_from_proto_unknown_function():
deserializer = base_deserializer()
serialized = op_proto(
{
'gate': {'id': 'my_gate'},
'args': {
'my_val': {
'func': {
'type': 'UNKNOWN_OPERATION',
'args': [
{'symbol': 'x'},
{'arg_value': {'float_value': -1.0}},
],
}
}
},
'qubits': [{'id': '1_2'}],
}
)
with pytest.raises(ValueError, match='Unrecognized function type'):
_ = deserializer.from_proto(serialized)
def test_from_proto_value_type_not_recognized():
deserializer = base_deserializer()
serialized = op_proto(
{
'gate': {'id': 'my_gate'},
'args': {
'my_val': {
'arg_value': {},
}
},
'qubits': [{'id': '1_2'}],
}
)
with pytest.raises(ValueError, match='Unrecognized value type'):
_ = deserializer.from_proto(serialized)
def test_from_proto_function_argument_not_set():
deserializer = base_deserializer()
serialized = op_proto(
{
'gate': {'id': 'my_gate'},
'args': {
'my_val': {
'func': {
'type': 'mul',
'args': [
{'symbol': 'x'},
{},
],
}
}
},
'qubits': [{'id': '1_2'}],
}
)
with pytest.raises(ValueError, match='A multiplication argument is missing'):
_ = deserializer.from_proto(serialized, arg_function_language='linear')
def test_from_proto_value_func():
deserializer = cg.GateOpDeserializer(
serialized_gate_id='my_gate',
gate_constructor=GateWithAttribute,
args=[
cg.DeserializingArg(
serialized_name='my_val', constructor_arg_name='val', value_func=lambda x: x + 1
)
],
)
serialized = op_proto(
{
'gate': {'id': 'my_gate'},
'args': {'my_val': {'arg_value': {'float_value': 0.125}}},
'qubits': [{'id': '1_2'}],
}
)
q = cirq.GridQubit(1, 2)
result = deserializer.from_proto(serialized)
assert result == GateWithAttribute(1.125)(q)
def test_from_proto_not_required_ok():
deserializer = cg.GateOpDeserializer(
serialized_gate_id='my_gate',
gate_constructor=GateWithAttribute,
args=[
cg.DeserializingArg(
serialized_name='my_val',
constructor_arg_name='val',
),
cg.DeserializingArg(
serialized_name='not_req', constructor_arg_name='not_req', required=False
),
],
)
serialized = op_proto(
{
'gate': {'id': 'my_gate'},
'args': {'my_val': {'arg_value': {'float_value': 0.125}}},
'qubits': [{'id': '1_2'}],
}
)
q = cirq.GridQubit(1, 2)
result = deserializer.from_proto(serialized)
assert result == GateWithAttribute(0.125)(q)
def test_from_proto_missing_required_arg():
deserializer = cg.GateOpDeserializer(
serialized_gate_id='my_gate',
gate_constructor=GateWithAttribute,
args=[
cg.DeserializingArg(
serialized_name='my_val',
constructor_arg_name='val',
),
cg.DeserializingArg(
serialized_name='not_req', constructor_arg_name='not_req', required=False
),
],
)
serialized = op_proto(
{
'gate': {'id': 'my_gate'},
'args': {'not_req': {'arg_value': {'float_value': 0.125}}},
'qubits': [{'id': '1_2'}],
}
)
with pytest.raises(ValueError):
deserializer.from_proto(serialized)
def test_from_proto_required_arg_not_assigned():
deserializer = cg.GateOpDeserializer(
serialized_gate_id='my_gate',
gate_constructor=GateWithAttribute,
args=[
cg.DeserializingArg(
serialized_name='my_val',
constructor_arg_name='val',
),
cg.DeserializingArg(
serialized_name='not_req', constructor_arg_name='not_req', required=False
),
],
)
serialized = op_proto(
{'gate': {'id': 'my_gate'}, 'args': {'my_val': {}}, 'qubits': [{'id': '1_2'}]}
)
with pytest.raises(ValueError):
deserializer.from_proto(serialized)
def test_defaults():
deserializer = cg.GateOpDeserializer(
serialized_gate_id='my_gate',
gate_constructor=GateWithAttribute,
args=[
cg.DeserializingArg(serialized_name='my_val', constructor_arg_name='val', default=1.0),
cg.DeserializingArg(
serialized_name='not_req',
constructor_arg_name='not_req',
default='hello',
required=False,
),
],
)
serialized = op_proto({'gate': {'id': 'my_gate'}, 'args': {}, 'qubits': [{'id': '1_2'}]})
g = GateWithAttribute(1.0)
g.not_req = 'hello'
assert deserializer.from_proto(serialized) == g(cirq.GridQubit(1, 2))
def test_token():
deserializer = base_deserializer()
serialized = op_proto(
{
'gate': {'id': 'my_gate'},
'args': {'my_val': {'arg_value': {'float_value': 1.25}}},
'qubits': [{'id': '1_2'}],
'token_value': 'abc123',
}
)
op = GateWithAttribute(1.25)(cirq.GridQubit(1, 2))
op = op.with_tags(cg.CalibrationTag('abc123'))
assert deserializer.from_proto(serialized) == op
def test_token_with_references():
deserializer = base_deserializer()
serialized = op_proto(
{
'gate': {'id': 'my_gate'},
'args': {'my_val': {'arg_value': {'float_value': 1.25}}},
'qubits': [{'id': '1_2'}],
'token_constant_index': 1,
}
)
op = GateWithAttribute(1.25)(cirq.GridQubit(1, 2))
op = op.with_tags(cg.CalibrationTag('abc123'))
constants = []
constant = v2.program_pb2.Constant()
constant.string_value = 'my_token'
constants.append(constant)
constant = v2.program_pb2.Constant()
constant.string_value = 'abc123'
constants.append(constant)
assert deserializer.from_proto(serialized, constants=constants) == op
with pytest.raises(ValueError, match='Proto has references to constants table'):
deserializer.from_proto(serialized)
def default_circuit_proto():
op1 = v2.program_pb2.Operation()
op1.gate.id = 'x_pow'
op1.args['half_turns'].arg_value.string_value = 'k'
op1.qubits.add().id = '1_1'
op2 = v2.program_pb2.Operation()
op2.gate.id = 'x_pow'
op2.args['half_turns'].arg_value.float_value = 1.0
op2.qubits.add().id = '1_2'
op2.token_constant_index = 0
return v2.program_pb2.Circuit(
scheduling_strategy=v2.program_pb2.Circuit.MOMENT_BY_MOMENT,
moments=[
v2.program_pb2.Moment(
operations=[op1, op2],
),
],
)
def default_circuit():
return cirq.FrozenCircuit(
cirq.X(cirq.GridQubit(1, 1)) ** sympy.Symbol('k'),
cirq.X(cirq.GridQubit(1, 2)).with_tags(DEFAULT_TOKEN),
cirq.measure(cirq.GridQubit(1, 1), key='m'),
)
def test_circuit_op_from_proto_errors():
deserializer = cg.CircuitOpDeserializer()
serialized = v2.program_pb2.CircuitOperation(circuit_constant_index=1)
constants = [
v2.program_pb2.Constant(string_value=DEFAULT_TOKEN),
v2.program_pb2.Constant(circuit_value=default_circuit_proto()),
]
deserialized_constants = [DEFAULT_TOKEN, default_circuit()]
with pytest.raises(ValueError, match='CircuitOp deserialization requires a constants list'):
deserializer.from_proto(serialized)
with pytest.raises(ValueError, match='CircuitOp deserialization requires a constants list'):
deserializer.from_proto(serialized, constants=constants)
with pytest.raises(ValueError, match='CircuitOp deserialization requires a constants list'):
deserializer.from_proto(serialized, deserialized_constants=deserialized_constants)
bad_deserialized_constants = [DEFAULT_TOKEN]
with pytest.raises(ValueError, match='does not appear in the deserialized_constants list'):
deserializer.from_proto(
serialized, constants=constants, deserialized_constants=bad_deserialized_constants
)
bad_deserialized_constants = [DEFAULT_TOKEN, 2]
with pytest.raises(ValueError, match='Constant at index 1 was expected to be a circuit'):
deserializer.from_proto(
serialized, constants=constants, deserialized_constants=bad_deserialized_constants
)
def test_circuit_op_arg_key_errors():
deserializer = cg.CircuitOpDeserializer()
arg_map = v2.program_pb2.ArgMapping()
p1 = arg_map.entries.add()
p1.key.arg_value.float_value = 1.0
p1.value.arg_value.float_value = 2.0
serialized = v2.program_pb2.CircuitOperation(circuit_constant_index=1, arg_map=arg_map)
constants = [
v2.program_pb2.Constant(string_value=DEFAULT_TOKEN),
v2.program_pb2.Constant(circuit_value=default_circuit_proto()),
]
deserialized_constants = [DEFAULT_TOKEN, default_circuit()]
with pytest.raises(ValueError, match='Invalid key parameter type'):
deserializer.from_proto(
serialized, constants=constants, deserialized_constants=deserialized_constants
)
def test_circuit_op_arg_val_errors():
deserializer = cg.CircuitOpDeserializer()
arg_map = v2.program_pb2.ArgMapping()
p1 = arg_map.entries.add()
p1.key.arg_value.string_value = 'k'
p1.value.arg_value.bool_values.values.extend([True, False])
serialized = v2.program_pb2.CircuitOperation(circuit_constant_index=1, arg_map=arg_map)
constants = [
v2.program_pb2.Constant(string_value=DEFAULT_TOKEN),
v2.program_pb2.Constant(circuit_value=default_circuit_proto()),
]
deserialized_constants = [DEFAULT_TOKEN, default_circuit()]
with pytest.raises(ValueError, match='Invalid value parameter type'):
deserializer.from_proto(
serialized, constants=constants, deserialized_constants=deserialized_constants
)
@pytest.mark.parametrize('repetitions', [1, 5, ['a', 'b', 'c']])
def test_circuit_op_from_proto(repetitions):
deserializer = cg.CircuitOpDeserializer()
repetition_spec = v2.program_pb2.RepetitionSpecification()
if isinstance(repetitions, int):
repetition_ids = None
repetition_spec.repetition_count = repetitions
else:
repetition_ids = repetitions
repetitions = len(repetition_ids)
for rep_id in repetition_ids:
repetition_spec.repetition_ids.ids.append(rep_id)
qubit_map = v2.program_pb2.QubitMapping()
q_p1 = qubit_map.entries.add()
q_p1.key.id = '1_1'
q_p1.value.id = '1_2'
measurement_key_map = v2.program_pb2.MeasurementKeyMapping()
meas_p1 = measurement_key_map.entries.add()
meas_p1.key.string_key = 'm'
meas_p1.value.string_key = 'results'
arg_map = v2.program_pb2.ArgMapping()
arg_p1 = arg_map.entries.add()
arg_p1.key.arg_value.string_value = 'k'
arg_p1.value.arg_value.float_value = 1.0
serialized = v2.program_pb2.CircuitOperation(
circuit_constant_index=1,
repetition_specification=repetition_spec,
qubit_map=qubit_map,
measurement_key_map=measurement_key_map,
arg_map=arg_map,
)
constants = [
v2.program_pb2.Constant(string_value=DEFAULT_TOKEN),
v2.program_pb2.Constant(circuit_value=default_circuit_proto()),
]
deserialized_constants = [DEFAULT_TOKEN, default_circuit()]
actual = deserializer.from_proto(
serialized, constants=constants, deserialized_constants=deserialized_constants
)
expected = cirq.CircuitOperation(
circuit=default_circuit(),
qubit_map={cirq.GridQubit(1, 1): cirq.GridQubit(1, 2)},
measurement_key_map={'m': 'results'},
param_resolver={'k': 1.0},
repetitions=repetitions,
repetition_ids=repetition_ids,
)
assert actual == expected
| 32.292784
| 99
| 0.60452
|
57c69f3ec04ec723d218635d045965b273833a7f
| 8,961
|
py
|
Python
|
cinder/tests/unit/api/contrib/test_volume_manage.py
|
HybridF5/cinder
|
dbbe27f850b7a5ec8e614401442a28ee67bdad0c
|
[
"Apache-2.0"
] | null | null | null |
cinder/tests/unit/api/contrib/test_volume_manage.py
|
HybridF5/cinder
|
dbbe27f850b7a5ec8e614401442a28ee67bdad0c
|
[
"Apache-2.0"
] | 1
|
2021-03-21T11:38:29.000Z
|
2021-03-21T11:38:29.000Z
|
cinder/tests/unit/api/contrib/test_volume_manage.py
|
HybridF5/cinder
|
dbbe27f850b7a5ec8e614401442a28ee67bdad0c
|
[
"Apache-2.0"
] | 1
|
2021-03-21T11:37:47.000Z
|
2021-03-21T11:37:47.000Z
|
# Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_serialization import jsonutils
import webob
from cinder import context
from cinder import exception
from cinder import test
from cinder.tests.unit.api import fakes
from cinder.tests.unit import fake_volume
def app():
# no auth, just let environ['cinder.context'] pass through
api = fakes.router.APIRouter()
mapper = fakes.urlmap.URLMap()
mapper['/v2'] = api
return mapper
def db_service_get_by_host_and_topic(context, host, topic):
"""Replacement for db.service_get_by_host_and_topic.
We stub the db.service_get_by_host_and_topic method to return something
for a specific host, and raise an exception for anything else. We don't
    use the returned data (the code under test just uses the call to check for
    the existence of a host, so the content returned doesn't matter).
"""
if host == 'host_ok':
return {}
raise exception.ServiceNotFound(service_id=host)
# Some of the tests check that volume types are correctly validated during a
# volume manage operation. This data structure represents an existing volume
# type.
fake_vt = {'id': 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
'name': 'good_fakevt'}
def vt_get_volume_type_by_name(context, name):
"""Replacement for cinder.volume.volume_types.get_volume_type_by_name.
Overrides cinder.volume.volume_types.get_volume_type_by_name to return
the volume type based on inspection of our fake structure, rather than
going to the Cinder DB.
"""
if name == fake_vt['name']:
return fake_vt
raise exception.VolumeTypeNotFoundByName(volume_type_name=name)
def vt_get_volume_type(context, vt_id):
"""Replacement for cinder.volume.volume_types.get_volume_type.
Overrides cinder.volume.volume_types.get_volume_type to return the
volume type based on inspection of our fake structure, rather than going
to the Cinder DB.
"""
if vt_id == fake_vt['id']:
return fake_vt
raise exception.VolumeTypeNotFound(volume_type_id=vt_id)
def api_manage(*args, **kwargs):
"""Replacement for cinder.volume.api.API.manage_existing.
Overrides cinder.volume.api.API.manage_existing to return some fake volume
    data structure, rather than initiating a real manage operation.
Note that we don't try to replicate any passed-in information (e.g. name,
volume type) in the returned structure.
"""
ctx = context.RequestContext('admin', 'fake', True)
vol = {
'status': 'creating',
'display_name': 'fake_name',
'availability_zone': 'nova',
'tenant_id': 'fake',
'id': 'ffffffff-0000-ffff-0000-ffffffffffff',
'volume_type': None,
'snapshot_id': None,
'user_id': 'fake',
'size': 0,
'attach_status': 'detached',
'volume_type_id': None}
return fake_volume.fake_volume_obj(ctx, **vol)
@mock.patch('cinder.db.service_get_by_host_and_topic',
db_service_get_by_host_and_topic)
@mock.patch('cinder.volume.volume_types.get_volume_type_by_name',
vt_get_volume_type_by_name)
@mock.patch('cinder.volume.volume_types.get_volume_type',
vt_get_volume_type)
class VolumeManageTest(test.TestCase):
"""Test cases for cinder/api/contrib/volume_manage.py
The API extension adds a POST /os-volume-manage API that is passed a cinder
host name, and a driver-specific reference parameter. If everything
is passed correctly, then the cinder.volume.api.API.manage_existing method
is invoked to manage an existing storage object on the host.
In this set of test cases, we are ensuring that the code correctly parses
the request structure and raises the correct exceptions when things are not
right, and calls down into cinder.volume.api.API.manage_existing with the
correct arguments.
"""
def setUp(self):
super(VolumeManageTest, self).setUp()
def _get_resp(self, body):
"""Helper to execute an os-volume-manage API call."""
req = webob.Request.blank('/v2/fake/os-volume-manage')
req.method = 'POST'
req.headers['Content-Type'] = 'application/json'
req.environ['cinder.context'] = context.RequestContext('admin',
'fake',
True)
req.body = jsonutils.dump_as_bytes(body)
res = req.get_response(app())
return res
@mock.patch('cinder.volume.api.API.manage_existing', wraps=api_manage)
@mock.patch(
'cinder.api.openstack.wsgi.Controller.validate_name_and_description')
def test_manage_volume_ok(self, mock_validate, mock_api_manage):
"""Test successful manage volume execution.
Tests for correct operation when valid arguments are passed in the
request body. We ensure that cinder.volume.api.API.manage_existing got
called with the correct arguments, and that we return the correct HTTP
code to the caller.
"""
body = {'volume': {'host': 'host_ok',
'ref': 'fake_ref'}}
res = self._get_resp(body)
self.assertEqual(202, res.status_int, res)
# Check that the manage API was called with the correct arguments.
self.assertEqual(1, mock_api_manage.call_count)
args = mock_api_manage.call_args[0]
self.assertEqual(args[1], body['volume']['host'])
self.assertEqual(args[2], body['volume']['ref'])
self.assertTrue(mock_validate.called)
def test_manage_volume_missing_host(self):
"""Test correct failure when host is not specified."""
body = {'volume': {'ref': 'fake_ref'}}
res = self._get_resp(body)
self.assertEqual(400, res.status_int)
def test_manage_volume_missing_ref(self):
"""Test correct failure when the ref is not specified."""
body = {'volume': {'host': 'host_ok'}}
res = self._get_resp(body)
self.assertEqual(400, res.status_int)
@mock.patch('cinder.volume.api.API.manage_existing', api_manage)
@mock.patch(
'cinder.api.openstack.wsgi.Controller.validate_name_and_description')
def test_manage_volume_volume_type_by_uuid(self, mock_validate):
"""Tests for correct operation when a volume type is specified by ID.
We wrap cinder.volume.api.API.manage_existing so that managing is not
actually attempted.
"""
body = {'volume': {'host': 'host_ok',
'ref': 'fake_ref',
'volume_type':
'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'}}
res = self._get_resp(body)
self.assertEqual(202, res.status_int, res)
self.assertTrue(mock_validate.called)
@mock.patch('cinder.volume.api.API.manage_existing', api_manage)
@mock.patch(
'cinder.api.openstack.wsgi.Controller.validate_name_and_description')
def test_manage_volume_volume_type_by_name(self, mock_validate):
"""Tests for correct operation when a volume type is specified by name.
We wrap cinder.volume.api.API.manage_existing so that managing is not
actually attempted.
"""
body = {'volume': {'host': 'host_ok',
'ref': 'fake_ref',
'volume_type': 'good_fakevt'}}
res = self._get_resp(body)
self.assertEqual(202, res.status_int, res)
self.assertTrue(mock_validate.called)
def test_manage_volume_bad_volume_type_by_uuid(self):
"""Test failure on nonexistent volume type specified by ID."""
body = {'volume': {'host': 'host_ok',
'ref': 'fake_ref',
'volume_type':
'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb'}}
res = self._get_resp(body)
self.assertEqual(404, res.status_int, res)
def test_manage_volume_bad_volume_type_by_name(self):
"""Test failure on nonexistent volume type specified by name."""
body = {'volume': {'host': 'host_ok',
'ref': 'fake_ref',
'volume_type': 'bad_fakevt'}}
res = self._get_resp(body)
self.assertEqual(404, res.status_int, res)
| 39.650442
| 79
| 0.659971
|
9bd003f3b68839d1cdcf3e2e6f690745e2a11efb
| 2,334
|
py
|
Python
|
QAMAS/_build/jupyter_execute/Abstract.py
|
ebenjaminrandall/QAMAS_book
|
8a6b78fbf3564ce313380619900f2de5fcbe9035
|
[
"MIT"
] | 1
|
2021-05-18T00:57:56.000Z
|
2021-05-18T00:57:56.000Z
|
QAMAS/_build/jupyter_execute/Abstract.py
|
ebenjaminrandall/QAMAS_book
|
8a6b78fbf3564ce313380619900f2de5fcbe9035
|
[
"MIT"
] | null | null | null |
QAMAS/_build/jupyter_execute/Abstract.py
|
ebenjaminrandall/QAMAS_book
|
8a6b78fbf3564ce313380619900f2de5fcbe9035
|
[
"MIT"
] | 1
|
2021-02-05T21:07:49.000Z
|
2021-02-05T21:07:49.000Z
|
#!/usr/bin/env python
# coding: utf-8
# # Title Page
#
# ## Authors
#
# E. Benjamin Randall$^{1}$, Marcus Hock$^{2}$, Rachel Lopez$^{1}$, Bahador Marzban$^{1}$, Collin Marshall$^{1}$, Daniel A. Beard$^{1*}$
#
# $^{1}$ *Department of Molecular and Integrative Physiology, University of Michigan, Ann Arbor, MI*
#
# $^{2}$ *Department of Bioengineering, University of California at San Diego, San Diego, CA*
#
# *Corresponding author
#
# *Email addresses*: ebrandal@umich.edu (E.B. Randall), m1hock@eng.ucsd.edu (M. Hock), ralopez@umich.edu (R. Lopez), bmarzban@umich.edu (B. Marzban), colmar@umich.edu (C. Marshall), beardda@umich.edu (D.A. Beard).
#
#
# ## Abstract
#
# We present a computational framework for analyzing and simulating mitochondrial ATP synthesis using basic thermodynamic and kinetic principles. The framework invokes detailed descriptions of the thermodynamic driving forces associated with the processes of the electron transport chain, mitochondrial ATP synthetase, and phosphate and adenine nucleotide transporters. Assembling models of these discrete processes into an integrated model of mitochondrial ATP synthesis, we illustrate how to analyze and simulate in vitro respirometry experiments and how models identified from in vitro experimental data effectively explain cardiac respiratory control in vivo. Computer codes for these analyses are embedded as Python scripts in a Jupyter Book to facilitate easy adoption and modification of the concepts developed here. This accessible framework may also prove useful in supporting educational applications. All source code is available at <a href="https://beards-lab.github.io/QAMAS_book/">https://beards-lab.github.io/QAMAS_book/</a>.
#
#
# ## Highlights
#
# - A kinetic and thermodynamic framework for mitochondrial energetics is developed.
# - The framework is applied to simulate ATP synthesis and respiratory control.
# - We illustrate how respiratory control in vitro translates to energetics in vivo.
# - Computer codes are available at DOI: 10.5281/zenodo.4919564.
#
#
# ## Funding
#
# This work supported by NIH grant HL144657.
#
# In[ ]:
#
# ```{toctree}
# :hidden:
# :titlesonly:
#
#
# Abbreviations
# Introduction
# Principles
# BuildingModel
# InVitroModel
# InVivoModel
# Summary
# References
# ```
#
| 39.559322
| 1,046
| 0.74593
|
e1ef4888014a5e621f0436d3c0ac387b0d4df652
| 3,417
|
py
|
Python
|
scripts/costmap.py
|
JMU-CS354/move_base_354
|
bcbfe38c6aaa235d88b4a1483db84592362b9311
|
[
"BSD-2-Clause"
] | 1
|
2021-12-14T15:42:31.000Z
|
2021-12-14T15:42:31.000Z
|
scripts/costmap.py
|
JMU-CS354/move_base_354
|
bcbfe38c6aaa235d88b4a1483db84592362b9311
|
[
"BSD-2-Clause"
] | null | null | null |
scripts/costmap.py
|
JMU-CS354/move_base_354
|
bcbfe38c6aaa235d88b4a1483db84592362b9311
|
[
"BSD-2-Clause"
] | null | null | null |
#!/usr/bin/env python
"""Node for generating a costmap for global planning.
Subscribed Topics:
map - (nav_msgs/OccupancyGrid) Standard ROS occupancy grid
Published Topics:
costmap - (nav_msgs/OccupancyGrid)
Costmap cell values may be:
-1 - unknown
100 - blocked
1-99 - cost to traverse
Parameters:
robot_radius - (float, default: .18)
Radius of the robot for the purpose of obstacle dilation.
~sigma - (float, default: .1)
Parameter determining fall-off rate of cost penalty for passing
near blocked cells. Larger values result in more conservative
navigation.
"""
import numpy as np
import rospy
import map_utils
import cv2
from nav_msgs.msg import OccupancyGrid
class CostmapMaker(object):
"""
Node for creating a costmap from an occupancy grid.
"""
def __init__(self):
""" Initialize the particle. """
rospy.init_node('costmap_node')
rospy.Subscriber("map", OccupancyGrid, self.map_callback)
self.costmap_pub = rospy.Publisher('costmap', OccupancyGrid,
latch=True, queue_size=10)
self.robot_radius = rospy.get_param('robot_radius', .18)
self.sigma = rospy.get_param('~sigma', .1)
rospy.spin()
def map_callback(self, map_msg):
""" Create and publish the costmap. """
world_map = map_utils.Map(map_msg)
cost_map = map_utils.Map(map_msg)
# first inflate the obstacles...
dilation_size = int(self.robot_radius / world_map.resolution) * 2 + 1
rospy.loginfo("{}".format(dilation_size))
kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (dilation_size,
dilation_size))
grid_occupied = np.zeros(world_map.grid.shape)
occupied_indices = world_map.grid == 100
grid_occupied[occupied_indices] = 1
rospy.loginfo("dilating costmap...")
grid_dilated = cv2.dilate(grid_occupied, kernel)
        # fully dilated grid: mark every cell within the robot radius of an
        # obstacle as blocked
        cost_map.grid[grid_dilated == 1] = 100
rospy.loginfo('building KDTree')
from sklearn.neighbors import KDTree
occupied_points = []
all_positions = []
for i in range(cost_map.grid.shape[0]):
for j in range(cost_map.grid.shape[1]):
all_positions.append(cost_map.cell_position(i, j))
if cost_map.grid[i, j] == 100:
occupied_points.append(cost_map.cell_position(i, j))
kdt = KDTree(occupied_points)
dists = kdt.query(all_positions, k=1)[0][:]
probs = np.exp(-(dists**2) / (2 * self.sigma**2))
dist_costs = probs.reshape(cost_map.grid.shape) * 100
dist_costs = np.array(dist_costs, dtype='int8')
indices = np.logical_and(dist_costs > cost_map.grid,
dist_costs > 0)
cost_map.grid[indices] = dist_costs[indices]
# no cell should have zero cost...
cost_map.grid[cost_map.grid == 0] = 1
# import matplotlib.pyplot as plt
# plt.imshow(world_map.grid,interpolation='none')
# plt.show()
rospy.loginfo("publishing costmap...")
self.costmap_pub.publish(cost_map.to_message())
if __name__ == "__main__":
CostmapMaker()
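# A small standalone sketch (plain numpy, reusing the import above; no ROS
# required) of the cost falloff computed in map_callback:
# cost = 100 * exp(-d^2 / (2 * sigma^2)), where d is the distance to the
# nearest blocked cell. Larger sigma keeps costs high farther from obstacles,
# which is why larger values give more conservative navigation.
def falloff_cost(distance, sigma=0.1):
    """Cost on the 1-100 scale for a cell `distance` meters from the nearest obstacle."""
    return 100.0 * np.exp(-(distance ** 2) / (2.0 * sigma ** 2))

# falloff_cost(0.0) == 100.0; falloff_cost(0.1, sigma=0.1) ~= 60.65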
| 32.855769
| 78
| 0.617793
|
74b7642069d3d964caaf90503651baf03cd404d8
| 7,058
|
py
|
Python
|
src/application-insights/azext_applicationinsights/vendored_sdks/mgmt_applicationinsights/v2017_10_01/models/_models_py3.py
|
Mannan2812/azure-cli-extensions
|
e2b34efe23795f6db9c59100534a40f0813c3d95
|
[
"MIT"
] | 8
|
2021-01-13T23:44:08.000Z
|
2021-03-17T10:13:36.000Z
|
src/application-insights/azext_applicationinsights/vendored_sdks/mgmt_applicationinsights/v2017_10_01/models/_models_py3.py
|
Mannan2812/azure-cli-extensions
|
e2b34efe23795f6db9c59100534a40f0813c3d95
|
[
"MIT"
] | 226
|
2019-07-24T07:57:21.000Z
|
2019-10-15T01:07:24.000Z
|
src/application-insights/azext_applicationinsights/vendored_sdks/mgmt_applicationinsights/v2017_10_01/models/_models_py3.py
|
Mannan2812/azure-cli-extensions
|
e2b34efe23795f6db9c59100534a40f0813c3d95
|
[
"MIT"
] | 5
|
2020-05-09T17:47:09.000Z
|
2020-10-01T19:52:06.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
from msrest.exceptions import HttpOperationError
class Resource(Model):
"""An Azure resource.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Azure resource Id.
:vartype id: str
:ivar name: Azure resource name.
:vartype name: str
:ivar type: Azure resource type.
:vartype type: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(self, **kwargs) -> None:
super(Resource, self).__init__(**kwargs)
self.id = None
self.name = None
self.type = None
class ApplicationInsightsComponentPricingPlan(Resource):
"""An Application Insights component pricing plan.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Azure resource Id.
:vartype id: str
:ivar name: Azure resource name.
:vartype name: str
:ivar type: Azure resource type.
:vartype type: str
:param plan_type: Pricing Plan Type Name.
:type plan_type: str
:param cap: Daily data volume cap in GB.
:type cap: float
:ivar reset_hour: Daily data volume cap UTC reset hour.
:vartype reset_hour: int
:param warning_threshold: Reserved, not used for now.
:type warning_threshold: int
:param stop_send_notification_when_hit_threshold: Reserved, not used for
now.
:type stop_send_notification_when_hit_threshold: bool
:param stop_send_notification_when_hit_cap: Do not send a notification
email when the daily data volume cap is met.
:type stop_send_notification_when_hit_cap: bool
:ivar max_history_cap: Maximum daily data volume cap that the user can set
for this component.
:vartype max_history_cap: float
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'reset_hour': {'readonly': True},
'max_history_cap': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'plan_type': {'key': 'properties.planType', 'type': 'str'},
'cap': {'key': 'properties.cap', 'type': 'float'},
'reset_hour': {'key': 'properties.resetHour', 'type': 'int'},
'warning_threshold': {'key': 'properties.warningThreshold', 'type': 'int'},
'stop_send_notification_when_hit_threshold': {'key': 'properties.stopSendNotificationWhenHitThreshold', 'type': 'bool'},
'stop_send_notification_when_hit_cap': {'key': 'properties.stopSendNotificationWhenHitCap', 'type': 'bool'},
'max_history_cap': {'key': 'properties.maxHistoryCap', 'type': 'float'},
}
def __init__(self, *, plan_type: str=None, cap: float=None, warning_threshold: int=None, stop_send_notification_when_hit_threshold: bool=None, stop_send_notification_when_hit_cap: bool=None, **kwargs) -> None:
super(ApplicationInsightsComponentPricingPlan, self).__init__(**kwargs)
self.plan_type = plan_type
self.cap = cap
self.reset_hour = None
self.warning_threshold = warning_threshold
self.stop_send_notification_when_hit_threshold = stop_send_notification_when_hit_threshold
self.stop_send_notification_when_hit_cap = stop_send_notification_when_hit_cap
self.max_history_cap = None
class CloudError(Model):
"""An error response from the Batch service.
:param error: Cloud error body.
:type error:
~azure.mgmt.applicationinsights.v2017_10_01.models.CloudErrorBody
"""
_attribute_map = {
'error': {'key': 'error', 'type': 'CloudErrorBody'},
}
def __init__(self, *, error=None, **kwargs) -> None:
super(CloudError, self).__init__(**kwargs)
self.error = error
class CloudErrorException(HttpOperationError):
"""Server responsed with exception of type: 'CloudError'.
:param deserialize: A deserializer
:param response: Server response to be deserialized.
"""
def __init__(self, deserialize, response, *args):
super(CloudErrorException, self).__init__(deserialize, response, 'CloudError', *args)
class CloudErrorBody(Model):
"""An error response from the Batch service.
:param code: An identifier for the error. Codes are invariant and are
intended to be consumed programmatically.
:type code: str
:param message: A message describing the error, intended to be suitable
for display in a user interface.
:type message: str
:param target: The target of the particular error. For example, the name
of the property in error.
:type target: str
:param details: A list of additional details about the error.
:type details:
list[~azure.mgmt.applicationinsights.v2017_10_01.models.CloudErrorBody]
"""
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
'details': {'key': 'details', 'type': '[CloudErrorBody]'},
}
def __init__(self, *, code: str=None, message: str=None, target: str=None, details=None, **kwargs) -> None:
super(CloudErrorBody, self).__init__(**kwargs)
self.code = code
self.message = message
self.target = target
self.details = details
class EASubscriptionMigrationDate(Model):
"""Subscription migrate date information properties.
:param is_grand_fatherable_subscription: Is subscription in the grand
fatherable subscription list.
:type is_grand_fatherable_subscription: bool
:param opted_in_date: Time to start using new pricing model.
:type opted_in_date: datetime
"""
_attribute_map = {
'is_grand_fatherable_subscription': {'key': 'isGrandFatherableSubscription', 'type': 'bool'},
'opted_in_date': {'key': 'optedInDate', 'type': 'iso-8601'},
}
def __init__(self, *, is_grand_fatherable_subscription: bool=None, opted_in_date=None, **kwargs) -> None:
super(EASubscriptionMigrationDate, self).__init__(**kwargs)
self.is_grand_fatherable_subscription = is_grand_fatherable_subscription
self.opted_in_date = opted_in_date
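# Usage sketch (illustrative values, not part of the generated module). The
# keyword-only constructors above accept only the writable fields; read-only
# fields such as reset_hour and max_history_cap are populated by the server.
#
#     plan = ApplicationInsightsComponentPricingPlan(
#         plan_type='Basic',   # hypothetical plan name
#         cap=100.0,           # daily data volume cap in GB
#         stop_send_notification_when_hit_cap=True,
#     )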
| 36.760417
| 213
| 0.652026
|
4391d333c1f54ab5eb24c2e37b21dedd39be1947
| 4,997
|
py
|
Python
|
BaseTools/Source/Python/GenFds/DataSection.py
|
kokweich/slimbootloader
|
6fd1141c75a33894e3a7937dbc55859e4a6dacae
|
[
"BSD-2-Clause-NetBSD",
"PSF-2.0",
"BSD-2-Clause",
"Apache-2.0",
"MIT",
"BSD-2-Clause-Patent"
] | 9
|
2021-07-26T17:02:51.000Z
|
2021-12-30T10:49:46.000Z
|
BaseTools/Source/Python/GenFds/DataSection.py
|
kokweich/slimbootloader
|
6fd1141c75a33894e3a7937dbc55859e4a6dacae
|
[
"BSD-2-Clause-NetBSD",
"PSF-2.0",
"BSD-2-Clause",
"Apache-2.0",
"MIT",
"BSD-2-Clause-Patent"
] | null | null | null |
BaseTools/Source/Python/GenFds/DataSection.py
|
kokweich/slimbootloader
|
6fd1141c75a33894e3a7937dbc55859e4a6dacae
|
[
"BSD-2-Clause-NetBSD",
"PSF-2.0",
"BSD-2-Clause",
"Apache-2.0",
"MIT",
"BSD-2-Clause-Patent"
] | null | null | null |
## @file
# process data section generation
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
from . import Section
from .GenFdsGlobalVariable import GenFdsGlobalVariable
import subprocess
from .Ffs import SectionSuffix
import Common.LongFilePathOs as os
from CommonDataClass.FdfClass import DataSectionClassObject
from Common.Misc import PeImageClass
from Common.LongFilePathSupport import CopyLongFilePath
from Common.DataType import *
## generate data section
#
#
class DataSection (DataSectionClassObject):
## The constructor
#
# @param self The object pointer
#
def __init__(self):
DataSectionClassObject.__init__(self)
## GenSection() method
#
# Generate compressed section
#
# @param self The object pointer
# @param OutputPath Where to place output file
# @param ModuleName Which module this section belongs to
# @param SecNum Index of section
# @param KeyStringList Filter for inputs of section generation
# @param FfsInf FfsInfStatement object that contains this section data
# @param Dict dictionary contains macro and its value
# @retval tuple (Generated file name list, section alignment)
#
def GenSection(self, OutputPath, ModuleName, SecNum, keyStringList, FfsFile = None, Dict = None, IsMakefile = False):
#
# Prepare the parameter of GenSection
#
if Dict is None:
Dict = {}
if FfsFile is not None:
self.SectFileName = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.SectFileName)
self.SectFileName = GenFdsGlobalVariable.MacroExtend(self.SectFileName, Dict, FfsFile.CurrentArch)
else:
self.SectFileName = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.SectFileName)
self.SectFileName = GenFdsGlobalVariable.MacroExtend(self.SectFileName, Dict)
"""Check Section file exist or not !"""
if not os.path.exists(self.SectFileName):
self.SectFileName = os.path.join (GenFdsGlobalVariable.WorkSpaceDir,
self.SectFileName)
"""Copy Map file to Ffs output"""
Filename = GenFdsGlobalVariable.MacroExtend(self.SectFileName)
if Filename[(len(Filename)-4):] == '.efi':
MapFile = Filename.replace('.efi', '.map')
CopyMapFile = os.path.join(OutputPath, ModuleName + '.map')
if IsMakefile:
if GenFdsGlobalVariable.CopyList == []:
GenFdsGlobalVariable.CopyList = [(MapFile, CopyMapFile)]
else:
GenFdsGlobalVariable.CopyList.append((MapFile, CopyMapFile))
else:
if os.path.exists(MapFile):
if not os.path.exists(CopyMapFile) or (os.path.getmtime(MapFile) > os.path.getmtime(CopyMapFile)):
CopyLongFilePath(MapFile, CopyMapFile)
#Get PE Section alignment when align is set to AUTO
if self.Alignment == 'Auto' and self.SecType in (BINARY_FILE_TYPE_TE, BINARY_FILE_TYPE_PE32):
self.Alignment = "0"
NoStrip = True
if self.SecType in (BINARY_FILE_TYPE_TE, BINARY_FILE_TYPE_PE32):
if self.KeepReloc is not None:
NoStrip = self.KeepReloc
if not NoStrip:
FileBeforeStrip = os.path.join(OutputPath, ModuleName + '.efi')
if not os.path.exists(FileBeforeStrip) or \
(os.path.getmtime(self.SectFileName) > os.path.getmtime(FileBeforeStrip)):
CopyLongFilePath(self.SectFileName, FileBeforeStrip)
StrippedFile = os.path.join(OutputPath, ModuleName + '.stripped')
GenFdsGlobalVariable.GenerateFirmwareImage(
StrippedFile,
[GenFdsGlobalVariable.MacroExtend(self.SectFileName, Dict)],
Strip=True,
IsMakefile = IsMakefile
)
self.SectFileName = StrippedFile
if self.SecType == BINARY_FILE_TYPE_TE:
TeFile = os.path.join( OutputPath, ModuleName + 'Te.raw')
GenFdsGlobalVariable.GenerateFirmwareImage(
TeFile,
[GenFdsGlobalVariable.MacroExtend(self.SectFileName, Dict)],
Type='te',
IsMakefile = IsMakefile
)
self.SectFileName = TeFile
OutputFile = os.path.join (OutputPath, ModuleName + SUP_MODULE_SEC + SecNum + SectionSuffix.get(self.SecType))
OutputFile = os.path.normpath(OutputFile)
GenFdsGlobalVariable.GenerateSection(OutputFile, [self.SectFileName], Section.Section.SectionType.get(self.SecType), IsMakefile = IsMakefile)
FileList = [OutputFile]
return FileList, self.Alignment
| 42.347458
| 149
| 0.638583
|
9d71d438b9ca1d48c2d321ace5be6b017ac88641
| 5,467
|
py
|
Python
|
lazyxml/parser.py
|
druagoon/lazyxml
|
b4cc363c565fe800219ebdf03cdfc47ae3cc16d7
|
[
"MIT"
] | 7
|
2015-06-14T12:25:13.000Z
|
2020-04-23T23:12:02.000Z
|
lazyxml/parser.py
|
druagoon/lazyxml
|
b4cc363c565fe800219ebdf03cdfc47ae3cc16d7
|
[
"MIT"
] | 1
|
2019-05-17T14:37:13.000Z
|
2020-11-18T17:26:55.000Z
|
lazyxml/parser.py
|
druagoon/lazyxml
|
b4cc363c565fe800219ebdf03cdfc47ae3cc16d7
|
[
"MIT"
] | 11
|
2017-10-07T00:39:42.000Z
|
2021-02-25T09:04:53.000Z
|
# -*- coding: utf-8 -*-
import collections
try:
import xml.etree.cElementTree as ET
except ImportError:
import xml.etree.ElementTree as ET
from . import utils
from .consts import Default, Regex
class Parser(object):
"""Simple xml parser
"""
def __init__(self, encoding=None, unescape=False, strip_root=True,
strip_attr=True, strip=True, errors='strict'):
"""Constructor for Parser, with sensible defaults.
        :param str encoding: xml content encoding. If not set, it is guessed from the xml header declaration when possible.
        :param bool unescape: unescape xml html entity characters. Defaults to ``False``.
        :param bool strip_root: strip the root tag. Defaults to ``True``.
        :param bool strip_attr: strip tag attributes. Defaults to ``True``.
        :param bool strip: strip whitespace. Defaults to ``True``.
        :param string errors: xml content decode error handling scheme. Defaults to ``strict``.
"""
self.__encoding = encoding
self.__unescape = unescape
self.__strip_root = strip_root
self.__strip_attr = strip_attr
self.__strip = strip
self.__errors = errors
def xml2dict(self, content):
"""Convert xml content to dict.
.. warning::
**DEPRECATED:** :meth:`xml2dict` is deprecated. Please use :meth:`xml2object` instead.
.. deprecated:: 1.2
"""
return self.xml2object(content)
def xml2object(self, content):
"""Convert xml content to python object.
:param content: xml content
:rtype: dict
.. versionadded:: 1.2
"""
content = self.xml_filter(content)
element = ET.fromstring(content)
tree = self.parse(element) if self.__strip_attr else self.parse_full(element)
if not self.__strip_root:
node = self.get_node(element)
if not self.__strip_attr:
tree['attrs'] = node['attr']
return {node['tag']: tree}
return tree
def xml_filter(self, content):
"""Filter and preprocess xml content
:param content: xml content
:rtype: str
"""
content = utils.strip_whitespace(content, True) if self.__strip else content.strip()
if not self.__encoding:
self.__encoding = self.guess_xml_encoding(content) or Default.ENCODING
if self.__encoding.lower() != Default.ENCODING:
content = self.strip_xml_header(content.decode(self.__encoding, errors=self.__errors))
if self.__unescape:
content = utils.html_entity_decode(content)
return content
@staticmethod
def guess_xml_encoding(content):
"""Guess encoding from xml header declaration.
:param content: xml content
:rtype: str or None
"""
matchobj = Regex.XML_ENCODING.match(content)
return matchobj and matchobj.group(1).lower()
@staticmethod
def strip_xml_header(content):
"""Strip xml header
:param content: xml content
:rtype: str
"""
return Regex.XML_HEADER.sub('', content)
@classmethod
def parse(cls, element):
"""Parse xml element.
:param element: an :class:`~xml.etree.ElementTree.Element` instance
:rtype: dict
"""
values = {}
for child in element:
node = cls.get_node(child)
subs = cls.parse(child)
value = subs or node['value']
if node['tag'] not in values:
values[node['tag']] = value
else:
if not isinstance(values[node['tag']], list):
values[node['tag']] = [values.pop(node['tag'])]
values[node['tag']].append(value)
return values
@classmethod
def parse_full(cls, element):
"""Parse xml element include the node attributes.
:param element: an :class:`~xml.etree.ElementTree.Element` instance
:rtype: dict
.. versionadded:: 1.2.1
"""
values = collections.defaultdict(dict)
for child in element:
node = cls.get_node(child)
subs = cls.parse_full(child)
value = subs or {'values': node['value']}
value['attrs'] = node['attr']
if node['tag'] not in values['values']:
values['values'][node['tag']] = value
else:
if not isinstance(values['values'][node['tag']], list):
values['values'][node['tag']] = [values['values'].pop(node['tag'])]
values['values'][node['tag']].append(value)
return values
@classmethod
def get_node(cls, element):
"""Get node info.
Parse element and get the element tag info. Include tag name, value, attribute, namespace.
:param element: an :class:`~xml.etree.ElementTree.Element` instance
:rtype: dict
"""
ns, tag = cls.split_namespace(element.tag)
return {
'tag': tag,
'value': (element.text or '').strip(),
'attr': element.attrib,
'namespace': ns
}
@staticmethod
def split_namespace(tag):
"""Split tag namespace.
:param tag: tag name
:return: a pair of (namespace, tag)
:rtype: tuple
"""
matchobj = Regex.XML_NS.search(tag)
return matchobj.groups() if matchobj else ('', tag)
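# A minimal usage sketch (the XML string is made up for illustration).
# Repeated sibling tags collapse into a list in Parser.parse, and the root
# tag is stripped by default (strip_root=True).
if __name__ == '__main__':
    parser = Parser()
    print(parser.xml2object('<root><a>1</a><a>2</a><b>x</b></root>'))
    # -> {'a': ['1', '2'], 'b': 'x'}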
| 32.349112
| 110
| 0.582404
|
12b9ce8fe2aac5d1dfd0383555bd3a87a347b91f
| 281
|
py
|
Python
|
example_package/logger/read_content.py
|
shadhini/py_pckg_tut
|
37f0dd9f093c4f088590adf64899f231f16dfc77
|
[
"MIT"
] | null | null | null |
example_package/logger/read_content.py
|
shadhini/py_pckg_tut
|
37f0dd9f093c4f088590adf64899f231f16dfc77
|
[
"MIT"
] | null | null | null |
example_package/logger/read_content.py
|
shadhini/py_pckg_tut
|
37f0dd9f093c4f088590adf64899f231f16dfc77
|
[
"MIT"
] | null | null | null |
import logging
import logging.config
import yaml
import pkgutil
def read_content():
data = pkgutil.get_data(__name__, "logger_config.yaml")
print("data:", repr(data))
text = pkgutil.get_data(__name__, "logger_config.yaml").decode()
print("text:", repr(text))
| 17.5625
| 68
| 0.704626
|
ed021f9d38a5e5aca4e7819e52e90e75d7d6bcd9
| 4,365
|
py
|
Python
|
contrib/seeds/generate-seeds.py
|
UFO-ETL/ufo
|
e85dde0c8b12c1bf3357003afb77ea85b1476d6b
|
[
"MIT"
] | null | null | null |
contrib/seeds/generate-seeds.py
|
UFO-ETL/ufo
|
e85dde0c8b12c1bf3357003afb77ea85b1476d6b
|
[
"MIT"
] | null | null | null |
contrib/seeds/generate-seeds.py
|
UFO-ETL/ufo
|
e85dde0c8b12c1bf3357003afb77ea85b1476d6b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) 2014-2017 Wladimir J. van der Laan
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Script to generate list of seed nodes for chainparams.cpp.
This script expects two text files in the directory that is passed as an
argument:
nodes_main.txt
nodes_test.txt
These files must consist of lines in the format
<ip>
<ip>:<port>
[<ipv6>]
[<ipv6>]:<port>
<onion>.onion
0xDDBBCCAA (IPv4 little-endian old pnSeeds format)
The output will be two data structures with the peers in binary format:
static SeedSpec6 pnSeed6_main[]={
...
}
static SeedSpec6 pnSeed6_test[]={
...
}
These should be pasted into `src/chainparamsseeds.h`.
'''
from base64 import b32decode
from binascii import a2b_hex
import sys
import os
import re
# ipv4 in ipv6 prefix
pchIPv4 = bytearray([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff])
# tor-specific ipv6 prefix
pchOnionCat = bytearray([0xFD,0x87,0xD8,0x7E,0xEB,0x43])
def name_to_ipv6(addr):
if len(addr)>6 and addr.endswith('.onion'):
vchAddr = b32decode(addr[0:-6], True)
if len(vchAddr) != 16-len(pchOnionCat):
raise ValueError('Invalid onion %s' % vchAddr)
return pchOnionCat + vchAddr
elif '.' in addr: # IPv4
return pchIPv4 + bytearray((int(x) for x in addr.split('.')))
elif ':' in addr: # IPv6
sub = [[], []] # prefix, suffix
x = 0
addr = addr.split(':')
for i,comp in enumerate(addr):
if comp == '':
if i == 0 or i == (len(addr)-1): # skip empty component at beginning or end
continue
x += 1 # :: skips to suffix
assert(x < 2)
else: # two bytes per component
val = int(comp, 16)
sub[x].append(val >> 8)
sub[x].append(val & 0xff)
nullbytes = 16 - len(sub[0]) - len(sub[1])
assert((x == 0 and nullbytes == 0) or (x == 1 and nullbytes > 0))
return bytearray(sub[0] + ([0] * nullbytes) + sub[1])
elif addr.startswith('0x'): # IPv4-in-little-endian
return pchIPv4 + bytearray(reversed(a2b_hex(addr[2:])))
else:
raise ValueError('Could not parse address %s' % addr)
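# Worked examples for name_to_ipv6 (illustrative comments, not in the
# original script):
#   name_to_ipv6('1.2.3.4')     -> pchIPv4 + bytearray([1, 2, 3, 4])
#   name_to_ipv6('::1')         -> 15 zero bytes followed by 0x01
#   name_to_ipv6('0x0100007f')  -> pchIPv4 + bytearray([0x7f, 0x00, 0x00, 0x01]),
#                                  i.e. 127.0.0.1 from the little-endian form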
def parse_spec(s, defaultport):
match = re.match(r'\[([0-9a-fA-F:]+)\](?::([0-9]+))?$', s)
if match: # ipv6
host = match.group(1)
port = match.group(2)
elif s.count(':') > 1: # ipv6, no port
host = s
port = ''
else:
(host,_,port) = s.partition(':')
if not port:
port = defaultport
else:
port = int(port)
host = name_to_ipv6(host)
return (host,port)
def process_nodes(g, f, structname, defaultport):
g.write('static SeedSpec6 %s[] = {\n' % structname)
first = True
for line in f:
comment = line.find('#')
if comment != -1:
line = line[0:comment]
line = line.strip()
if not line:
continue
if not first:
g.write(',\n')
first = False
(host,port) = parse_spec(line, defaultport)
hoststr = ','.join(('0x%02x' % b) for b in host)
g.write(' {{%s}, %i}' % (hoststr, port))
g.write('\n};\n')
def main():
if len(sys.argv)<2:
print(('Usage: %s <path_to_nodes_txt>' % sys.argv[0]), file=sys.stderr)
sys.exit(1)
g = sys.stdout
indir = sys.argv[1]
g.write('#ifndef UFO_CHAINPARAMSSEEDS_H\n')
g.write('#define UFO_CHAINPARAMSSEEDS_H\n')
g.write('/**\n')
g.write(' * List of fixed seed nodes for the UFO network\n')
g.write(' * AUTOGENERATED by contrib/seeds/generate-seeds.py\n')
g.write(' *\n')
g.write(' * Each line contains a 16-byte IPv6 address and a port.\n')
g.write(' * IPv4 as well as onion addresses are wrapped inside an IPv6 address accordingly.\n')
g.write(' */\n')
with open(os.path.join(indir,'nodes_main.txt'), 'r', encoding="utf8") as f:
process_nodes(g, f, 'pnSeed6_main', 8333)
g.write('\n')
with open(os.path.join(indir,'nodes_test.txt'), 'r', encoding="utf8") as f:
process_nodes(g, f, 'pnSeed6_test', 18333)
g.write('#endif // UFO_CHAINPARAMSSEEDS_H\n')
if __name__ == '__main__':
main()
| 31.402878
| 99
| 0.582131
|
5e892ecc0aa86d3638f6129dca48b0af2b282daa
| 6,202
|
py
|
Python
|
simple_history/registry_tests/tests.py
|
YuriFarias741/django-simple-history
|
eb25a378927e01454f423776e01184593a7b0647
|
[
"BSD-3-Clause"
] | null | null | null |
simple_history/registry_tests/tests.py
|
YuriFarias741/django-simple-history
|
eb25a378927e01454f423776e01184593a7b0647
|
[
"BSD-3-Clause"
] | null | null | null |
simple_history/registry_tests/tests.py
|
YuriFarias741/django-simple-history
|
eb25a378927e01454f423776e01184593a7b0647
|
[
"BSD-3-Clause"
] | null | null | null |
from __future__ import unicode_literals
import unittest
import uuid
from datetime import datetime, timedelta
from django.apps import apps
from django.contrib.auth import get_user_model
from django.core import management
from django.test import TestCase
from io import StringIO
from simple_history import exceptions, register
from ..tests.models import (Choice, InheritTracking1, InheritTracking2,
InheritTracking3, InheritTracking4, Poll,
Restaurant, TrackedAbstractBaseA,
TrackedAbstractBaseB, TrackedWithAbstractBase,
TrackedWithConcreteBase, UserAccessorDefault,
UserAccessorOverride, UUIDRegisterModel, Voter)
get_model = apps.get_model
User = get_user_model()
today = datetime(2021, 1, 1, 10, 0)
tomorrow = today + timedelta(days=1)
yesterday = today - timedelta(days=1)
class RegisterTest(TestCase):
def test_register_no_args(self):
self.assertEqual(len(Choice.history.all()), 0)
poll = Poll.objects.create(pub_date=today)
choice = Choice.objects.create(poll=poll, votes=0)
self.assertEqual(len(choice.history.all()), 1)
def test_register_separate_app(self):
def get_history(model):
return model.history
self.assertRaises(AttributeError, get_history, User)
self.assertEqual(len(User.histories.all()), 0)
user = User.objects.create(username='bob', password='pass')
self.assertEqual(len(User.histories.all()), 1)
self.assertEqual(len(user.histories.all()), 1)
def test_reregister(self):
with self.assertRaises(exceptions.MultipleRegistrationsError):
register(Restaurant, manager_name='again')
    def test_register_custom_records(self):
self.assertEqual(len(Voter.history.all()), 0)
poll = Poll.objects.create(pub_date=today)
choice = Choice.objects.create(poll=poll, votes=0)
user = User.objects.create(username='voter')
voter = Voter.objects.create(choice=choice, user=user)
self.assertEqual(len(voter.history.all()), 1)
expected = 'Voter object changed by None as of '
self.assertEqual(expected,
str(voter.history.all()[0])[:len(expected)])
def test_register_history_id_field(self):
self.assertEqual(len(UUIDRegisterModel.history.all()), 0)
entry = UUIDRegisterModel.objects.create()
self.assertEqual(len(entry.history.all()), 1)
history = entry.history.all()[0]
self.assertTrue(isinstance(history.history_id, uuid.UUID))
class TestUserAccessor(unittest.TestCase):
def test_accessor_default(self):
register(UserAccessorDefault)
assert not hasattr(User, 'historicaluseraccessordefault_set')
def test_accessor_override(self):
register(UserAccessorOverride,
user_related_name='my_history_model_accessor')
assert hasattr(User, 'my_history_model_accessor')
class TestInheritedModule(TestCase):
def test_using_app_label(self):
try:
from ..tests.models import HistoricalConcreteExternal
except ImportError:
self.fail("HistoricalConcreteExternal is in wrong module")
def test_default(self):
try:
from ..tests.models import HistoricalConcreteExternal2
except ImportError:
self.fail("HistoricalConcreteExternal2 is in wrong module")
class TestTrackingInheritance(TestCase):
def test_tracked_abstract_base(self):
self.assertEqual(
[
f.attname
for f in TrackedWithAbstractBase.history.model._meta.fields
],
[
'id', 'history_id',
'history_change_reason', 'history_date', 'history_user_id',
'history_type',
],
)
def test_tracked_concrete_base(self):
self.assertEqual(
[
f.attname
for f in TrackedWithConcreteBase.history.model._meta.fields
],
[
'id', 'trackedconcretebase_ptr_id', 'history_id',
'history_change_reason', 'history_date', 'history_user_id',
'history_type',
],
)
def test_multiple_tracked_bases(self):
with self.assertRaises(exceptions.MultipleRegistrationsError):
class TrackedWithMultipleAbstractBases(
TrackedAbstractBaseA, TrackedAbstractBaseB):
pass
def test_tracked_abstract_and_untracked_concrete_base(self):
self.assertEqual(
[f.attname for f in InheritTracking1.history.model._meta.fields],
[
'id', 'untrackedconcretebase_ptr_id', 'history_id',
'history_change_reason', 'history_date',
'history_user_id', 'history_type',
],
)
def test_indirect_tracked_abstract_base(self):
self.assertEqual(
[f.attname for f in InheritTracking2.history.model._meta.fields],
[
'id', 'baseinherittracking2_ptr_id', 'history_id',
'history_change_reason', 'history_date',
'history_user_id', 'history_type',
],
)
def test_indirect_tracked_concrete_base(self):
self.assertEqual(
[f.attname for f in InheritTracking3.history.model._meta.fields],
[
'id', 'baseinherittracking3_ptr_id', 'history_id',
'history_change_reason', 'history_date',
'history_user_id', 'history_type',
],
)
def test_registering_with_tracked_abstract_base(self):
with self.assertRaises(exceptions.MultipleRegistrationsError):
register(InheritTracking4)
class TestMigrate(TestCase):
def test_makemigration_command(self):
management.call_command(
'makemigrations', 'migration_test_app', stdout=StringIO())
def test_migrate_command(self):
management.call_command(
'migrate', 'migration_test_app', fake=True, stdout=StringIO())
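# A minimal registration sketch (hypothetical model, not part of this test
# module) showing the register() API exercised above: history tracking is
# attached after the class definition, optionally renaming the accessor that
# is added to the user model.
#
#     from simple_history import register
#
#     class Book(models.Model):          # hypothetical model
#         title = models.CharField(max_length=100)
#
#     register(Book, manager_name='history',
#              user_related_name='book_history_entries')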
| 35.849711
| 77
| 0.6406
|
2215c96973d198eb0af7ef9887326436aadc95c6
| 3,251
|
py
|
Python
|
optimade/client/cli.py
|
materialsproject/optimade
|
638f3fc9b203603141032a814e42bc23bec1bd87
|
[
"MIT"
] | null | null | null |
optimade/client/cli.py
|
materialsproject/optimade
|
638f3fc9b203603141032a814e42bc23bec1bd87
|
[
"MIT"
] | null | null | null |
optimade/client/cli.py
|
materialsproject/optimade
|
638f3fc9b203603141032a814e42bc23bec1bd87
|
[
"MIT"
] | null | null | null |
import sys
import json
import pathlib
import click
import rich
from optimade.client.client import OptimadeClient
__all__ = ("_get",)
@click.command("optimade-get")
@click.option(
"--filter",
default=[""],
help="Filter to apply to OPTIMADE API. Default is an empty filter.",
multiple=True,
)
@click.option("--use-async/--no-async", default=True, help="Use asyncio or not")
@click.option(
"--max-results-per-provider",
default=10,
help="Set the maximum number of results to download from any single provider, where -1 or 0 indicate unlimited results.",
)
@click.option(
"--output-file",
default=None,
help="Write the results to a JSON file at this location.",
)
@click.option(
"--count/--no-count",
default=False,
help="Count the results of the filter rather than downloading them.",
)
@click.option(
"--endpoint",
default="structures",
help="The endpoint to query.",
)
@click.option(
"--sort",
default=None,
help="A field by which to sort the query results.",
)
@click.option(
"--response-fields",
default=None,
help="A string of comma-separated response fields to request.",
)
@click.option(
"--pretty-print",
is_flag=True,
help="Pretty print the JSON results.",
)
@click.argument("base-url", default=None, nargs=-1)
def get(
use_async,
filter,
base_url,
max_results_per_provider,
output_file,
count,
response_fields,
sort,
endpoint,
pretty_print,
):
return _get(
use_async,
filter,
base_url,
max_results_per_provider,
output_file,
count,
response_fields,
sort,
endpoint,
pretty_print,
)
def _get(
use_async,
filter,
base_url,
max_results_per_provider,
output_file,
count,
response_fields,
sort,
endpoint,
pretty_print,
):
if output_file:
output_file_path = pathlib.Path(output_file)
try:
output_file_path.touch(exist_ok=False)
except FileExistsError:
raise SystemExit(
f"Desired output file {output_file} already exists, not overwriting."
)
client = OptimadeClient(
base_urls=base_url,
use_async=use_async,
max_results_per_provider=max_results_per_provider,
)
if response_fields:
response_fields = response_fields.split(",")
try:
if count:
for f in filter:
client.count(f, endpoint=endpoint)
results = client.count_results
else:
for f in filter:
client.get(
f, endpoint=endpoint, sort=sort, response_fields=response_fields
)
results = client.all_results
except RuntimeError:
sys.exit(1)
if not output_file:
if pretty_print:
rich.print_json(data=results, indent=2, default=lambda _: _.dict())
else:
sys.stdout.write(json.dumps(results, indent=2, default=lambda _: _.dict()))
if output_file:
with open(output_file, "w") as f:
json.dump(results, f, indent=2, default=lambda _: _.dict())
if __name__ == "__main__":
get()
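# Example invocations (illustrative; the base URL is hypothetical):
#
#   optimade-get --count --filter 'elements HAS "Ag"' https://example.org/optimade
#   optimade-get --output-file results.json --response-fields id,elements \
#       https://example.org/optimade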
| 23.388489
| 125
| 0.613657
|
3ddbe3da18760df9ba6ecaf3962eba18e92664a0
| 908
|
py
|
Python
|
Space_Invaders/classes/Game/Sprites/BaseObject.py
|
Jh123x/Orbital
|
6f8f2da4fd26ef1d77c0c6183230c3a5e6bf0bb9
|
[
"MIT"
] | 4
|
2020-05-15T11:17:09.000Z
|
2020-06-30T01:11:41.000Z
|
Space_Invaders/classes/Game/Sprites/BaseObject.py
|
Jh123x/Orbital
|
6f8f2da4fd26ef1d77c0c6183230c3a5e6bf0bb9
|
[
"MIT"
] | 10
|
2020-05-16T10:45:32.000Z
|
2020-07-27T07:17:02.000Z
|
Space_Invaders/classes/Game/Sprites/BaseObject.py
|
Jh123x/Orbital
|
6f8f2da4fd26ef1d77c0c6183230c3a5e6bf0bb9
|
[
"MIT"
] | null | null | null |
import pygame
class BaseObject(pygame.sprite.Sprite):
def __init__(self, initial_x: int, initial_y: int, debug: bool = False):
"""Constructor for the object class in the game"""
# Store the coordinates
self.x = initial_x
self.y = initial_y
self.initial_x = initial_x
self.initial_y = initial_y
self.changed = True
self.debug = debug
# Call the superclass
super().__init__()
def get_coord(self) -> tuple:
"""Get the coordinates of the object"""
return (self.x, self.y)
def get_x(self) -> int:
"""Get the x coord of the obj"""
return self.x
def get_y(self) -> int:
"""Get the y coord of the obj"""
return self.y
def set_coord(self, position):
"""Set the coordinates of the base object"""
self.x = position[0]
self.y = position[1]
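# A minimal usage sketch (standalone; sprite groups and rendering are handled
# elsewhere in the game):
if __name__ == "__main__":
    obj = BaseObject(10, 20)
    print(obj.get_coord())           # (10, 20)
    obj.set_coord((30, 40))
    print(obj.get_x(), obj.get_y())  # 30 40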
| 25.942857
| 76
| 0.579295
|
0783139068be470d97d9688bc20052609d80cc8c
| 5,681
|
py
|
Python
|
sanic/static.py
|
cmcaine/sanic
|
076cf51fb295e7624968db766fd613aa3948289d
|
[
"MIT"
] | 1
|
2018-08-30T06:30:11.000Z
|
2018-08-30T06:30:11.000Z
|
sanic/static.py
|
cmcaine/sanic
|
076cf51fb295e7624968db766fd613aa3948289d
|
[
"MIT"
] | 1
|
2018-10-07T15:15:28.000Z
|
2018-10-07T15:15:28.000Z
|
sanic/static.py
|
cmcaine/sanic
|
076cf51fb295e7624968db766fd613aa3948289d
|
[
"MIT"
] | null | null | null |
from mimetypes import guess_type
from os import path
from re import sub
from time import strftime, gmtime
from urllib.parse import unquote
from aiofiles.os import stat
from sanic.exceptions import (
ContentRangeError,
FileNotFound,
HeaderNotFound,
InvalidUsage,
)
from sanic.handlers import ContentRangeHandler
from sanic.response import file, file_stream, HTTPResponse
def register(app, uri, file_or_directory, pattern,
use_modified_since, use_content_range,
stream_large_files, name='static', host=None,
strict_slashes=None, content_type=None):
# TODO: Though sanic is not a file server, I feel like we should at least
# make a good effort here. Modified-since is nice, but we could
# also look into etags, expires, and caching
"""
Register a static directory handler with Sanic by adding a route to the
router and registering a handler.
:param app: Sanic
:param file_or_directory: File or directory path to serve from
:param uri: URL to serve from
:param pattern: regular expression used to match files in the URL
:param use_modified_since: If true, send file modified time, and return
not modified if the browser's matches the
server's
:param use_content_range: If true, process header for range requests
and sends the file part that is requested
:param stream_large_files: If true, use the file_stream() handler rather
than the file() handler to send the file
If this is an integer, this represents the
threshold size to switch to file_stream()
:param name: user defined name used for url_for
:param content_type: user defined content type for header
"""
# If we're not trying to match a file directly,
# serve from the folder
if not path.isfile(file_or_directory):
uri += '<file_uri:' + pattern + '>'
async def _handler(request, file_uri=None):
# Using this to determine if the URL is trying to break out of the path
# served. os.path.realpath seems to be very slow
if file_uri and '../' in file_uri:
raise InvalidUsage("Invalid URL")
# Merge served directory and requested file if provided
# Strip all / that in the beginning of the URL to help prevent python
# from herping a derp and treating the uri as an absolute path
root_path = file_path = file_or_directory
if file_uri:
file_path = path.join(
file_or_directory, sub('^[/]*', '', file_uri))
# URL decode the path sent by the browser otherwise we won't be able to
# match filenames which got encoded (filenames with spaces etc)
file_path = path.abspath(unquote(file_path))
if not file_path.startswith(path.abspath(unquote(root_path))):
raise FileNotFound('File not found',
path=file_or_directory,
relative_url=file_uri)
try:
headers = {}
# Check if the client has been sent this file before
# and it has not been modified since
stats = None
if use_modified_since:
stats = await stat(file_path)
modified_since = strftime(
'%a, %d %b %Y %H:%M:%S GMT', gmtime(stats.st_mtime))
if request.headers.get('If-Modified-Since') == modified_since:
return HTTPResponse(status=304)
headers['Last-Modified'] = modified_since
            _range = None
            if use_content_range:
if not stats:
stats = await stat(file_path)
headers['Accept-Ranges'] = 'bytes'
headers['Content-Length'] = str(stats.st_size)
if request.method != 'HEAD':
try:
_range = ContentRangeHandler(request, stats)
except HeaderNotFound:
pass
else:
del headers['Content-Length']
for key, value in _range.headers.items():
headers[key] = value
headers['Content-Type'] = content_type \
or guess_type(file_path)[0] or 'text/plain'
if request.method == 'HEAD':
return HTTPResponse(headers=headers)
else:
if stream_large_files:
if isinstance(stream_large_files, int):
threshold = stream_large_files
else:
threshold = 1024 * 1024
if not stats:
stats = await stat(file_path)
if stats.st_size >= threshold:
return await file_stream(file_path, headers=headers,
_range=_range)
return await file(file_path, headers=headers, _range=_range)
except ContentRangeError:
raise
except Exception:
raise FileNotFound('File not found',
path=file_or_directory,
relative_url=file_uri)
# special prefix for static files
if not name.startswith('_static_'):
name = '_static_{}'.format(name)
app.route(uri, methods=['GET', 'HEAD'], name=name, host=host,
strict_slashes=strict_slashes)(_handler)
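# A minimal wiring sketch (the app and paths are hypothetical); the arguments
# mirror the register() signature above:
#
#     from sanic import Sanic
#
#     app = Sanic(__name__)
#     register(app, '/static', './static', pattern=r'/?.+',
#              use_modified_since=True, use_content_range=False,
#              stream_large_files=False)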
| 44.03876
| 79
| 0.574195
|
3c90bf0520d823ac69c32754657e6046fc474a2e
| 3,684
|
py
|
Python
|
pychron/pipeline/editors/script_editor.py
|
ael-noblegas/pychron
|
6ebbbb1f66a614972b62b7a9be4c784ae61b5d62
|
[
"Apache-2.0"
] | null | null | null |
pychron/pipeline/editors/script_editor.py
|
ael-noblegas/pychron
|
6ebbbb1f66a614972b62b7a9be4c784ae61b5d62
|
[
"Apache-2.0"
] | 80
|
2018-07-17T20:10:20.000Z
|
2021-08-17T15:38:24.000Z
|
pychron/pipeline/editors/script_editor.py
|
UManPychron/pychron
|
b84c9fd70072f9cbda30abe2c471e64fe3dd75d8
|
[
"Apache-2.0"
] | null | null | null |
# ===============================================================================
# Copyright 2019 ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
import io
from traits.api import Instance, Button, Str
from traitsui.api import View, Readonly, UItem, TextEditor, Tabbed, VGroup, HGroup
from pychron.core.file_listener import FileListener
from pychron.core.pychron_traits import BorderVGroup
from pychron.core.ui.code_editor import PyScriptCodeEditor
from pychron.core.ui.custom_label_editor import CustomLabel
from pychron.envisage.icon_button_editor import icon_button_editor
from pychron.envisage.tasks.base_editor import BaseTraitsEditor
from pychron.pyscripts.pipeline_pyscript import PipelinePyScript
class PipelinePyScriptEditor(BaseTraitsEditor):
script = Instance(PipelinePyScript)
execute_button = Button
file_listener = None
exception_trace = Str
output = Str
def destroy(self):
super(PipelinePyScriptEditor, self).destroy()
if self.file_listener:
self.file_listener.stop()
def init(self, path, auto_execute):
self.file_listener = FileListener(path=path, callback=self._refresh_from_disk)
script = self.script
script.display_state = 'not run'
script.bootstrap()
if auto_execute:
self._execute()
def _execute(self):
script = self.script
output = io.StringIO()
import sys
oout = sys.stdout
sys.stdout = output
        try:
            script.execute(test=False, bootstrap=False)
            if script.execution_error:
                script.display_state = 'failed'
                self.exception_trace = script.exception_trace
            else:
                script.display_state = 'completed'
                self.exception_trace = ''

            self.output = output.getvalue()
        finally:
            # restore stdout even if the script raises, so logging keeps working
            output.close()
            sys.stdout = oout
def _refresh_from_disk(self):
self.script.bootstrap()
def _execute_button_fired(self):
self._execute()
def traits_view(self):
error_grp = BorderVGroup(Readonly('object.script.execution_error'),
BorderVGroup(UItem('exception_trace', style='custom',
editor=TextEditor(read_only=True)),
label='Exception'),
label='Errors')
output_grp = VGroup(BorderVGroup(UItem('output', style='custom', editor=TextEditor(read_only=True)),
label='StdOut'),
error_grp,
label='Output')
main_grp = VGroup(HGroup(icon_button_editor('execute_button', 'start'),
CustomLabel('object.script.display_state')),
UItem('object.script.text', style='custom',
editor=PyScriptCodeEditor()),
label='Main')
v = View(Tabbed(main_grp, output_grp))
return v
# ============= EOF =============================================
| 36.84
| 108
| 0.595005
|
4a7088566e44fec7883be2e0f762dea7302a437d
| 2,168
|
py
|
Python
|
services/workshop/crapi/shop/tests.py
|
mathew-jose/crAPI
|
7b4b0a9ab32df65e2c11fb1cb1741845ef1fa882
|
[
"Apache-2.0"
] | null | null | null |
services/workshop/crapi/shop/tests.py
|
mathew-jose/crAPI
|
7b4b0a9ab32df65e2c11fb1cb1741845ef1fa882
|
[
"Apache-2.0"
] | null | null | null |
services/workshop/crapi/shop/tests.py
|
mathew-jose/crAPI
|
7b4b0a9ab32df65e2c11fb1cb1741845ef1fa882
|
[
"Apache-2.0"
] | null | null | null |
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
contains all the test cases related to shop management
"""
from django.test import TestCase, Client
from utils.jwt import get_jwt
from utils.sample_data import get_sample_mechanic_data
from user.models import User
class ProductTestCase(TestCase):
"""
contains all the test cases related to Products
Attributes:
client: Client object used for testing
mechanic: sample mechanic sign up request body
user: dummy mechanic object
auth_headers: Auth headers for dummy mechanic
"""
def setUp(self):
"""
stores a sample request body for mechanic signup
creates a dummy mechanic corresponding auth tokens
:return: None
"""
self.client = Client()
self.mechanic = get_sample_mechanic_data()
self.client.post('/api/mechanic/signup', self.mechanic, content_type="application/json")
self.user = User.objects.get(email=self.mechanic['email'])
jwt_token = get_jwt(self.user)
self.auth_headers = {'HTTP_AUTHORIZATION': 'Bearer ' + jwt_token}
def test_add_products(self):
"""
creates a dummy product with add_product api with an image
should get a valid response saying product created
:return: None
"""
product_details = {
'name': 'test_Seat',
'price': 10,
'image_url': 'https://4.imimg.com/data4/NI/WE/MY-19393581/ciaz-car-seat-cover-500x500.jpg',
}
res = self.client.post('/api/shop/products', product_details, **self.auth_headers)
self.assertEqual(res.status_code, 200)
| 36.133333
| 103
| 0.680812
|
b8c0333f4bfad8843030ee47e6c3570b0e7f1a36
| 3,975
|
py
|
Python
|
mmdnn/conversion/_script/extractModel.py
|
kmader/MMdnn
|
f62a33a7d6834680537693c7fdc7e90e1b2382ef
|
[
"MIT"
] | 3,442
|
2017-11-20T08:39:51.000Z
|
2019-05-06T10:51:19.000Z
|
mmdnn/conversion/_script/extractModel.py
|
ocjosen/MMdnn
|
aa7e5bbce58afcd925781d56cab76ce1b8f6f6a3
|
[
"MIT"
] | 430
|
2017-11-29T04:21:48.000Z
|
2019-05-06T05:37:37.000Z
|
mmdnn/conversion/_script/extractModel.py
|
ocjosen/MMdnn
|
aa7e5bbce58afcd925781d56cab76ce1b8f6f6a3
|
[
"MIT"
] | 683
|
2017-11-20T08:50:34.000Z
|
2019-05-04T04:25:14.000Z
|
#----------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
#----------------------------------------------------------------------------------------------
from six import text_type as _text_type
def generate_label(predict, label_file, offset):
import os
if not os.path.exists(label_file):
return predict
with open(label_file, 'r') as f:
labels = [l.rstrip() for l in f]
ret = []
for i, j in predict:
ret.append((labels[i - offset], i, j))
return ret
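# Illustrative behavior (hypothetical values): with predict = [(282, 0.91), (283, 0.05)]
# and offset = 1, the function returns [(labels[281], 282, 0.91), (labels[282], 283, 0.05)],
# pairing each class index with its text label; if label_file does not exist, the
# (index, score) pairs are returned unchanged.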
def extract_model(args):
if args.framework == 'caffe':
from mmdnn.conversion.examples.caffe.extractor import caffe_extractor
extractor = caffe_extractor()
elif args.framework == 'keras':
from mmdnn.conversion.examples.keras.extractor import keras_extractor
extractor = keras_extractor()
elif args.framework == 'tensorflow' or args.framework == 'tf':
from mmdnn.conversion.examples.tensorflow.extractor import tensorflow_extractor
extractor = tensorflow_extractor()
elif args.framework == 'mxnet':
from mmdnn.conversion.examples.mxnet.extractor import mxnet_extractor
extractor = mxnet_extractor()
elif args.framework == 'cntk':
from mmdnn.conversion.examples.cntk.extractor import cntk_extractor
extractor = cntk_extractor()
elif args.framework == 'pytorch':
from mmdnn.conversion.examples.pytorch.extractor import pytorch_extractor
extractor = pytorch_extractor()
elif args.framework == 'darknet':
from mmdnn.conversion.examples.darknet.extractor import darknet_extractor
extractor = darknet_extractor()
elif args.framework == 'coreml':
from mmdnn.conversion.examples.coreml.extractor import coreml_extractor
extractor = coreml_extractor()
else:
raise ValueError("Unknown framework [{}].".format(args.framework))
files = extractor.download(args.network, args.path)
if files and args.image:
predict = extractor.inference(args.network, files, args.path, args.image)
        if isinstance(predict, list):
print(predict)
else:
if predict.ndim == 1:
if predict.shape[0] == 1001:
offset = 1
else:
offset = 0
top_indices = predict.argsort()[-5:][::-1]
predict = [(i, predict[i]) for i in top_indices]
predict = generate_label(predict, args.label, offset)
                for line in predict:
                    print(line)
            else:
                print(predict.shape)
                print(predict)
def _main():
import argparse
parser = argparse.ArgumentParser(description='Extract pre-trained models for frameworks.')
parser.add_argument(
'--framework', '-f',
type=_text_type,
required=True,
choices=["caffe", "cntk", "mxnet", "keras", "tensorflow", 'tf', 'pytorch', 'darknet', 'coreml'],
help="Framework name")
parser.add_argument(
'--network', '-n',
type=_text_type,
default=None,
        help='Path to the model network file of the external tool (e.g. caffe prototxt, keras json)')
parser.add_argument(
'-i', '--image',
type=_text_type, help='Test Image Path')
parser.add_argument(
'--path', '-p', '-o',
type=_text_type,
default='./',
help='Path to save the pre-trained model files (e.g keras h5)')
parser.add_argument(
'-l', '--label',
type=_text_type,
default='mmdnn/conversion/examples/data/imagenet_1000.txt',
help='Path of label.')
args = parser.parse_args()
extract_model(args)
if __name__ == '__main__':
_main()
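# Example invocation (hypothetical network name and paths):
#   python extractModel.py -f tensorflow -n resnet_v2_152 -i ./test.jpg -p ./models/
# This downloads the pre-trained weights into ./models/ and, because an image is given,
# prints the top-5 (label, index, score) predictions for it.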
| 31.299213
| 104
| 0.589937
|
d957b7a3ee5017257ab48e877537efe17e83c25c
| 2,224
|
py
|
Python
|
tests/test_distance.py
|
dyuri/repacolors
|
4556efeb262529dde4586dad78ac7ff64d4dedf5
|
[
"MIT"
] | 1
|
2020-02-29T17:05:06.000Z
|
2020-02-29T17:05:06.000Z
|
tests/test_distance.py
|
dyuri/repacolors
|
4556efeb262529dde4586dad78ac7ff64d4dedf5
|
[
"MIT"
] | null | null | null |
tests/test_distance.py
|
dyuri/repacolors
|
4556efeb262529dde4586dad78ac7ff64d4dedf5
|
[
"MIT"
] | null | null | null |
from repacolors.distance import *
import random
def eq(rgb1, rgb2):
h1 = rgb2hex(rgb1, True)
h2 = rgb2hex(rgb2, True)
return h1 == h2
def test_distance_same():
for _ in range(100):
c = RGBTuple(random.random(), random.random(), random.random())
assert distance(c, c) == 0
def test_distance_not_same():
for _ in range(100):
c1 = RGBTuple(random.random(), random.random(), random.random())
c2 = RGBTuple(random.random(), random.random(), random.random())
if c1 != c2:
assert distance(c1, c2) > 0
def test_distance_triangle():
for _ in range(100):
c1 = RGBTuple(random.random(), random.random(), random.random())
c2 = RGBTuple(random.random(), random.random(), random.random())
c3 = RGBTuple(random.random(), random.random(), random.random())
d12 = distance(c1, c2)
d23 = distance(c2, c3)
d13 = distance(c1, c3)
assert int((d12 + d23) * 1000) >= int(d13 * 1000)
def test_distance_cie94_same():
for _ in range(100):
c = LabTuple(100 * random.random(), random.random() - .5, random.random() - .5)
assert distance_cie94(c, c) == 0
def test_distance_cie94_not_same():
for _ in range(100):
c1 = LabTuple(100 * random.random(), random.random() - .5, random.random() - .5)
c2 = LabTuple(100 * random.random(), random.random() - .5, random.random() - .5)
if c1 != c2:
assert distance_cie94(c1, c2) > 0
def test_distance_cie94_triangle():
for _ in range(100):
c1 = LabTuple(100 * random.random(), random.random() - .5, random.random() - .5)
c2 = LabTuple(100 * random.random(), random.random() - .5, random.random() - .5)
c3 = LabTuple(100 * random.random(), random.random() - .5, random.random() - .5)
d12 = distance_cie94(c1, c2)
d23 = distance_cie94(c2, c3)
d13 = distance_cie94(c1, c3)
assert int((d12 + d23) * 1000) >= int(d13 * 1000)
def test_distance_hue():
assert .1999 < distance_hue(.1, .3) < .20001
assert -.1999 > distance_hue(.3, .1) > -.20001
assert -.1999 > distance_hue(.1, .9) > -.20001
assert .1999 < distance_hue(.9, .1) < .20001
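# Note on the hue assertions above: distance_hue appears to return the *signed* shortest
# arc between two hues on the unit circle, so going from 0.1 to 0.9 wraps through 0
# (roughly -0.2) rather than taking the long way around (+0.8).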
| 32.231884
| 88
| 0.593076
|
4e5a27ce62156889e0e07c8f37e5eab32230fd24
| 26
|
py
|
Python
|
windmill/utils/__init__.py
|
bhavaniravi/windmill
|
0bae5c34652d8366f6fff08ff7879d24a76c91b5
|
[
"Apache-2.0"
] | 1
|
2020-05-06T18:53:40.000Z
|
2020-05-06T18:53:40.000Z
|
windmill/utils/__init__.py
|
bhavaniravi/windmill
|
0bae5c34652d8366f6fff08ff7879d24a76c91b5
|
[
"Apache-2.0"
] | null | null | null |
windmill/utils/__init__.py
|
bhavaniravi/windmill
|
0bae5c34652d8366f6fff08ff7879d24a76c91b5
|
[
"Apache-2.0"
] | null | null | null |
from . import class_parser
| 26
| 26
| 0.846154
|
6a3eb176fa43002bdd9aec3a52d1c29b3d809f51
| 44,126
|
py
|
Python
|
src/MatchingProcessor_old.py
|
Charamba/Cross-Ratio-Arrays-Shape-Descriptor
|
b252814f54bb11c2519a1bd4d0be3f524faba901
|
[
"Unlicense"
] | 2
|
2021-09-05T15:50:02.000Z
|
2022-01-05T03:10:50.000Z
|
src/MatchingProcessor_old.py
|
Charamba/Cross-Ratio-Arrays-Shape-Descriptor
|
b252814f54bb11c2519a1bd4d0be3f524faba901
|
[
"Unlicense"
] | null | null | null |
src/MatchingProcessor_old.py
|
Charamba/Cross-Ratio-Arrays-Shape-Descriptor
|
b252814f54bb11c2519a1bd4d0be3f524faba901
|
[
"Unlicense"
] | null | null | null |
from ShapeDescriptor import *
from CrossFeature import *
from DTW import *
from DistanceMatchingMatrix import *
from Pearson import *
import itertools
PEARSON_THRESHOLD = 0.0
CR5_DISTANCE_PERCENTUAL_TOL = 0.1
CR5_MATCHING_TOL = 0.65
def removeDuplicityVerticesMatching(verticesPairs, distances):
verticesPairsDict = {}
for i, vPair in enumerate(verticesPairs):
(v1, v2) = vPair
new_d = distances[i]
if v2 in verticesPairsDict:
(old_v1, old_v2, old_d) = verticesPairsDict[v2]
if new_d < old_d:
verticesPairsDict[v2] = (v1, v2, new_d)
else:
verticesPairsDict[v2] = (v1, v2, new_d)
verticeValues = [(v[0], v[1], v[2]) for v in verticesPairsDict.values()]
verticeValues = sorted([(v1, v2, v3) for (v1, v2, v3) in verticeValues], key=lambda t: t[0])
verticePairs = [(v[0], v[1]) for v in verticeValues]
verticeDistances = [(v[2]) for v in verticeValues]
return (verticePairs, verticeDistances)
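# Worked example (hypothetical input): verticesPairs = [(0, 2), (1, 2)] with
# distances = [0.5, 0.3] both map to test vertex 2, so only the closer pair survives
# and the function returns ([(1, 2)], [0.3]).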
def removeDuplicityRaysMatching(templateRays):
raysPairsDict = {}
for ray in templateRays:
(s1, s2) = (ray.s, ray.bestMatchingRayPair.s)
new_d = ray.bestMatchingDistance
if s2 in raysPairsDict:
old_ray = raysPairsDict[s2]
old_d = old_ray.bestMatchingDistance
if new_d < old_d:
raysPairsDict[s2] = ray
else:
raysPairsDict[s2] = ray
# verticeValues = [(v[0], v[1], v[2]) for r in raysPairsDict.values()]
# verticeValues = sorted([(v1, v2, v3) for (v1, v2, v3) in verticeValues], key=lambda t: t[0])
# verticePairs = [(v[0], v[1]) for v in verticeValues]
# verticeDistances = [(v[2]) for v in verticeValues]
# return (verticePairs, verticeDistances)
return raysPairsDict.values()
def vector_metric(sigma, a, r):
x = 1-a
y = 1-r
return sigma*math.sqrt(x*x + y*y)
def vector_metric_simetric(sigma, a, b):
x = 1-a
y = 1-b
return sigma*math.sqrt(x*x + y*y)/math.sqrt(2)
def vector_metric_assimetric(sigma, a, b, r):
x = 1-a
y = 1-b
z = 1-r
return sigma*math.sqrt(x*x + y*y + z*z)/math.sqrt(3)
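# Worked example for vector_metric_assimetric (arbitrary numbers): with sigma = 2,
# a = 0.8, b = 0.6, r = 0.9 the residual vector is (0.2, 0.4, 0.1), its norm is
# sqrt(0.04 + 0.16 + 0.01) ~= 0.458, and the result is 2 * 0.458 / sqrt(3) ~= 0.53.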
def atenuation_function(mi, rho):
return mi*math.exp(-rho)
def gravity_distance(global_distance, rho, M, m, n):
d = global_distance
d2 = d*d
return d2/((rho*(n/M)*(n/m)))/(n*n)
class MatchingProcessor:
def __init__(self, templateDescriptor, testDescriptor):
self.templateDescriptor = templateDescriptor
self.testDescriptor = testDescriptor
def compareByPencils(self, symetric_shape=False):
        # 1. Iterate over every pencil of the template image
        # 2. Compare the pencils
        # 3. Pick the one with the highest percentage of matches
distanceMatrix = []
spectrePairs = []
distanceValues = []
mTemplateRays = []
mTestRays = []
matching_result = False
totalRays = 0
distance = 0
matchingVerticesPairs = []
n = len(self.templateDescriptor.pencils)
m = len(self.testDescriptor.pencils)
nRays_template = 300 + n
nRays_test = 300 + m
#print("nRays_template = ", nRays_template)
#print("nRays_test = ", nRays_test)
nRays_Max = max(nRays_template, nRays_test)
for template_idx, pencil in enumerate(self.templateDescriptor.pencils):
(test_idx, distance, templateRays, testRays) = self.findBestMatchPencil(pencil, self.testDescriptor.pencils)
if distance != float('inf'):
matchingVerticesPairs.append((template_idx, test_idx))
distanceValues.append(distance)
mTemplateRays += templateRays
mTestRays += testRays
(matchingVerticesPairs, distanceValues) = removeDuplicityVerticesMatching(matchingVerticesPairs, distanceValues)
#print("vertices: ", matchingVerticesPairs)
#print("distances: ", distanceValues)
matchingVerticesPairsPoints = []
        distanceObject = sum(distanceValues) # Symmetric shape
cr5dist = float('inf')
cr5distance_tol = 0
cr5MatchingPercent = 0
pCoef = 0
new_pCoef = 0
Distance = float('inf') # DISTANCE MAX VALUE
#print("distance obj: ", distanceObject)
if len(distanceValues) != 0:
#Distance = distanceObject/len(distanceValues)
#print("average(dist): ", distanceObject/(len(distanceValues)))
            #print('Matching vertices Pairs: ', len(matchingVerticesPairs))
            if len(matchingVerticesPairs) > 1:
                #print('entered')
if symetric_shape:
(pCoef, inliersTestIndices) = self.calcPearsonCoefficient(matchingVerticesPairs, removeOutLiers_flag=False)
newMatchingVerticesPairs = []
for (ti, qi) in matchingVerticesPairs:
if qi in inliersTestIndices:
t_vertex = self.templateDescriptor.hullVertices[ti]
q_vertex = self.testDescriptor.hullVertices[qi]
matchingVerticesPairsPoints.append((t_vertex, q_vertex))
newMatchingVerticesPairs = matchingVerticesPairs
a = len(newMatchingVerticesPairs)/n
b = len(newMatchingVerticesPairs)/m
r = 1.0
sigma = distanceObject/len(distanceValues)
print("a = ", a)
print("b = ", b)
print("sigma = ", sigma)
Distance = vector_metric_simetric(sigma, a, b)
#print("vector_metric_simetric = ", Distance)
if a > 0:
matching_result = True
# if abs(pCoef) >= PEARSON_THRESHOLD or symetric_shape:
# templateVertices = self.templateDescriptor.hullVertices
# testVertices = self.testDescriptor.hullVertices
if not(symetric_shape):
templateVertices = self.templateDescriptor.hullVertices
testVertices = self.testDescriptor.hullVertices
(pCoef, inliersTestIndices) = self.calcPearsonCoefficient(matchingVerticesPairs, removeOutLiers_flag=False)
matchingVerticesPairsPoints, newMatchingVerticesPairs = self.compareCrossRatioVertices_combinations(templateVertices, testVertices, matchingVerticesPairs, inliersTestIndices)
print("old_p_coef = ", pCoef)
if newMatchingVerticesPairs:
(new_pCoef, new_inliersTestIndices) = self.calcPearsonCoefficient(newMatchingVerticesPairs, removeOutLiers_flag=True)
print("NEW PEARSON COEFFICIENT: ", new_pCoef)
#---- Calculando novos valores de distancia DTW entre espectros dos vertices que sobraram
oldMatchingVerticesPairs_oldValues = zip(matchingVerticesPairs, distanceValues)
new_values = []
for oldPair, oldValue in zip(matchingVerticesPairs, distanceValues):
if oldPair in newMatchingVerticesPairs:
new_values.append(oldValue)
new_dist_obj = sum(new_values)
#print("NEW Distance Object:", new_dist_obj)
new_average = new_dist_obj/len(new_values)
vertexes_matches = len(new_values)
#print("NEW Average Dist:", new_average)
Distance = atenuation_function(new_average, new_pCoef)
#gDist = gravity_distance(new_average, new_pCoef, n, m, vertexes_matches)
#print("gravit_distance = ",gDist)
# ------------------
sigma = new_average
a = len(new_values)/n
b = len(new_values)/m
r = new_pCoef
print("sigma = ", sigma)
print("a = ", a)
print("b = ", b)
print("r = ", r)
Distance = vector_metric_assimetric(sigma, a, b, r)
#print("vector_metric_assimetric = ", Distance)
#gravit_distance_force = (new_pCoef*(float(vertexes_matches)/n)*(float(vertexes_matches)/m))/(new_average*new_average)
#print("gravit_distance = ", 1.0/gravit_distance_force)
# mi = new_average/nRays_Max
# print("mi = ", mi)
# lamb = 0.7
# global_average_distance = lamb*(1.0-new_pCoef) + (1.0 - lamb)*mi
# print("global_distance: ", global_average_distance)
# Distance = global_average_distance
if matchingVerticesPairsPoints:
matching_result = True
return (matching_result, new_pCoef, mTemplateRays, mTestRays, matchingVerticesPairsPoints, Distance)
# def compareByPencils_old(self):
# #1 Percorrer todos os pencils da imagem template
# #2 Comparar pencils
# #3 Escolher o que possui maior percentual de matches
# distanceMatrix = []
# spectrePairs = []
# distanceValues = []
# mTemplateRays = []
# mTestRays = []
# # pencil_template = self.templateDescriptor.pencils[0]
# # pencil_test = self.testDescriptor.pencils[0]
# matching_result = False
# totalRays = 0
# distance = 0
# matchingVerticesPairs = []
# n = len(self.templateDescriptor.pencils)
# m = len(self.testDescriptor.pencils)
# for template_idx, pencil in enumerate(self.templateDescriptor.pencils):
# (test_idx, distance, templateRays, testRays) = self.findBestMatchPencil(pencil, self.testDescriptor.pencils)
# if distance != float('inf'):
# matchingVerticesPairs.append((template_idx, test_idx))
# distanceValues.append(distance)
# mTemplateRays += templateRays
# mTestRays += testRays
# # print("### ANTES")
# # print("vertices: ", matchingVerticesPairs)
# # print("distances: ", distanceValues)
# # print("### DEPOIS")
# (matchingVerticesPairs, distanceValues) = removeDuplicityVerticesMatching(matchingVerticesPairs, distanceValues)
# print("vertices: ", matchingVerticesPairs)
# print("distances: ", distanceValues)
# matchingVerticesPairsPoints = []
# distanceObject = sum(distanceValues)
# cr5dist = float('inf')
# cr5distance_tol = 0
# cr5MatchingPercent = 0
# pCoef = 0
# print("distance obj: ", distanceObject)
# if len(distanceValues) != 0:
# print("average(dist): ", distanceObject/(len(distanceValues)))
# if len(matchingVerticesPairs) > 1:
# (pCoef, inliersTestIndices) = self.calcPearsonCoefficient(matchingVerticesPairs)
# for (ti, qi) in matchingVerticesPairs:
# if qi in inliersTestIndices:
# t_vertex = self.templateDescriptor.hullVertices[ti]
# q_vertex = self.testDescriptor.hullVertices[qi]
# matchingVerticesPairsPoints.append((t_vertex, q_vertex))
# if abs(pCoef) >= PEARSON_THRESHOLD:
# templateVertices = self.templateDescriptor.hullVertices
# testVertices = self.testDescriptor.hullVertices
# cr5dist, cr5distance_tol, cr5vectorTemplate, cr5vectorTest = self.compareCrossRatioVertices(templateVertices, testVertices, matchingVerticesPairs, inliersTestIndices)
# cr5dist_, cr5distance_tol_, cr5vectorTemplate_, cr5vectorTest_ = self.compareCrossRatioVertices_(templateVertices, testVertices, matchingVerticesPairs, inliersTestIndices)
# cr5dist_2, cr5distance_tol_2, cr5vectorTemplate_2, cr5vectorTest_2 = self.compareCrossRatioVertices_2(templateVertices, testVertices, matchingVerticesPairs, distanceValues, inliersTestIndices)
# cr5dist_c, cr5distance_tol_c, cr5vectorTemplate_c, cr5vectorTest_c, matchingVerticesPairsPoints = self.compareCrossRatioVertices_combinations(templateVertices, testVertices, matchingVerticesPairs, inliersTestIndices)
# cr5MatchingPercent = compareCellPerCell(cr5vectorTemplate, cr5vectorTest, CR5_DISTANCE_PERCENTUAL_TOL)
# cr5MatchingPercent_ = compareCellPerCell(cr5vectorTemplate_, cr5vectorTest_, CR5_DISTANCE_PERCENTUAL_TOL)
# cr5MatchingPercent_2 = compareCellPerCell(cr5vectorTemplate_2, cr5vectorTest_2, CR5_DISTANCE_PERCENTUAL_TOL)
# cr5MatchingPercent_c = compareCellPerCell(cr5vectorTemplate_c, cr5vectorTest_c, CR5_DISTANCE_PERCENTUAL_TOL)
# print("cr5MatchingPercent = ", cr5MatchingPercent)
# print("cr5MatchingPercent_ = ", cr5MatchingPercent_)
# print("cr5MatchingPercent_2 = ", cr5MatchingPercent_2)
# print("cr5MatchingPercent_c = ", cr5MatchingPercent_c)
# if cr5MatchingPercent_ > cr5MatchingPercent:
# (cr5dist, cr5distance_tol, cr5vectorTemplate, cr5vectorTest) = (cr5dist_, cr5distance_tol_, cr5vectorTemplate_, cr5vectorTest_)
# cr5MatchingPercent = cr5MatchingPercent_
# # print("## PEARSON COEFFICIENT = ", pCoef)
# # print("## FIVE CROSS RATIO DISTANCE = ", dist)
# # print("## CrossRatio5 Matching Percent = ", cr5MatchingPercent)
# if cr5dist < cr5distance_tol or cr5MatchingPercent > CR5_MATCHING_TOL:
# matching_result = True
# return (matching_result, pCoef, cr5dist, cr5distance_tol, cr5MatchingPercent, mTemplateRays, mTestRays, matchingVerticesPairsPoints)
def calcPearsonCoefficient(self, verticesPairs, removeOutLiers_flag=False):
#Y = getVerticesIndices(verticesPairs)
(x0, y0) = verticesPairs[0]
Y = [y for (x, y) in verticesPairs]
Y = shifftSignalToMinValue(Y)
#print("Y: ", Y)
# X = range(0, len(Y))
# ----- remove outliers
points = []
for i, y in enumerate(Y):
points.append(R2_Point(i, y))
inliers = points
if removeOutLiers_flag:
            inliers = removeOutLiers(points) # drop this step or keep it?
newY = []
X = []
pCoef = 0
if inliers: # by Ransac
newY = [p.y for p in inliers]
            #newY = Y # <--- stopped here
X = [x for (x, y) in verticesPairs if y in newY]#list(range(0, len(newY)))
#print("tau: ", X)
#print("Q: ", newY)
minX = min(X)
pCoef = abs(pearsonCoefficient(X, newY))
return (pCoef, newY)
        else: # by median filter (in case RANSAC fails!)
X = [x for (x, y) in verticesPairs if y in Y]#list(range(0, len(Y)))
minX = min(X)
#print("tau: ", X)
#print("Q: ", Y)
pCoef = abs(pearsonCoefficient(X, Y))
return (pCoef, Y)
newY1 = median1DFilter(Y, growing=True)
X = [x for (x, y) in verticesPairs if y in newY1]#list(range(0, len(newY1)))
minX = min(X)
pCoef1 = abs(pearsonCoefficient(X, newY1))
newY2 = median1DFilter(Y, growing=False)
X = [x for (x, y) in verticesPairs if y in newY2]#list(range(0, len(newY2)))
minX = min(X)
pCoef2 = abs(pearsonCoefficient(X, newY2))
if pCoef1 > pCoef2:
pCoef = pCoef1
newY = newY1
else:
pCoef = pCoef2
newY = newY2
#------------------
if len(newY) == 0:
newY = Y
#X = list(range(0, len(newY)))
#print("newY: ", newY)
#print("pCoef = ", pCoef)
return (pCoef, newY)
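    # Sketch of the intended behavior (assuming shifftSignalToMinValue leaves an
    # already-min-first index sequence unchanged): for perfectly ordered matches such
    # as verticesPairs = [(0, 3), (1, 4), (2, 5)], X = [0, 1, 2] and Y = [3, 4, 5] are
    # both linear, so the returned |Pearson| coefficient is 1.0.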
def compareCrossRatioVertices_2(self, templateVertices, testVertices, verticePairs, distanceValues, inliersTestIndices=[]):
#Y = getVerticesIndices(verticesPairs)
Y = [y for (x, y) in verticePairs] # get test vertice indices
if inliersTestIndices:
Y = inliersTestIndices and Y
Y = Y and inliersTestIndices
Y = shifftSignalToMinValue(Y)
W = Y
#W = removePeaksAndDepressionsValues(Y)
(newPairs, newDistances) = filteredVerticePairs(W, verticePairs, distanceValues)
pairs_distances = sorted(list(zip(newPairs, newDistances)), key=lambda x:x[1])
newPairs = [p for (p,d) in pairs_distances]
newDistances = [d for (p,d) in pairs_distances]
print("newDistances = ", newDistances)
newTemplateVertices = [templateVertices[x] for (x, y) in newPairs]
newTestVertices = [testVertices[y] for (x, y) in newPairs]
cr5vectorTemplate = invariant5CrossRatioFilter(newTemplateVertices)
cr5vectorTest = invariant5CrossRatioFilter(newTestVertices)
#print("cr5vectorTemplate: ", cr5vectorTemplate)
#print("cr5vectorTest: ", cr5vectorTest)
if len(cr5vectorTemplate) != 0:
distance = calcDistanceVector(cr5vectorTemplate, cr5vectorTest)
else:
distance = float('Inf')
distance_tol = calcDistanceVector(cr5vectorTemplate, [0]*len(cr5vectorTemplate))*CR5_DISTANCE_PERCENTUAL_TOL
return distance, distance_tol, cr5vectorTemplate, cr5vectorTest
def compareCrossRatioVertices_(self, templateVertices, testVertices, verticePairs, inliersTestIndices=[]):
#Y = getVerticesIndices(verticesPairs)
Y = [y for (x, y) in verticePairs] # get test vertice indices
if inliersTestIndices:
Y = inliersTestIndices and Y
Y = Y and inliersTestIndices
Y = shifftSignalToMinValue(Y)
W = Y
#W = removePeaksAndDepressionsValues(Y)
newPairs = filteredVerticePairs(W, verticePairs)
newTemplateVertices = [templateVertices[x] for (x, y) in newPairs]
newTestVertices = [testVertices[y] for (x, y) in newPairs]
cr5vectorTemplate = invariant5CrossRatioFilter(newTemplateVertices)
cr5vectorTest = invariant5CrossRatioFilter(newTestVertices)
#print("cr5vectorTemplate: ", cr5vectorTemplate)
#print("cr5vectorTest: ", cr5vectorTest)
if len(cr5vectorTemplate) != 0:
distance = calcDistanceVector(cr5vectorTemplate, cr5vectorTest)
else:
distance = float('Inf')
distance_tol = calcDistanceVector(cr5vectorTemplate, [0]*len(cr5vectorTemplate))*CR5_DISTANCE_PERCENTUAL_TOL
return distance, distance_tol, cr5vectorTemplate, cr5vectorTest
def compareCrossRatioVertices_combinations(self, templateVertices, testVertices, verticeIndicesPairs, inliersTestIndices=[]):
Y = [y for (x, y) in verticeIndicesPairs] # get test vertice indices
if inliersTestIndices:
Y = inliersTestIndices and Y
Y = Y and inliersTestIndices
Y = shifftSignalToMinValue(Y)
W = Y
newIndxsPairs = filteredVerticePairs(W, verticeIndicesPairs)
T = templateVertices
Q = testVertices
combinations_of_pairs = itertools.combinations(newIndxsPairs, 5)
correctPairsIndices = []
wrongPairsIndices = []
        # search for the first combination of reliable point pairs
for _5pairs in combinations_of_pairs:
((a1, a2), (b1, b2), (c1, c2), (d1, d2), (e1, e2)) = _5pairs
val1 = crossRatio5(T[a1], T[b1], T[c1], T[d1], T[e1])
val2 = crossRatio5(Q[a2], Q[b2], Q[c2], Q[d2], Q[e2])
if val1 >= 1E-4 and val2 >= 1E-4:
if abs(val1-val2) <= val1*CR5_DISTANCE_PERCENTUAL_TOL:
correctPairsIndices = [(a1,a2), (b1,b2), (c1,c2), (d1,d2), (e1,e2)]
for (i, j) in correctPairsIndices:
idx = newIndxsPairs.index((i, j))
del newIndxsPairs[idx]
break
totalPairs = len(newIndxsPairs)
if newIndxsPairs == []:
wrongPairsIndices = newIndxsPairs
while not(newIndxsPairs == []):#len(correctPairsIndices) + len(wrongPairsIndices) < totalPairs:
(x1, x2) = newIndxsPairs[0]
isWrong = True
for comb in itertools.combinations(correctPairsIndices, 4):
((a1, a2), (b1, b2), (c1, c2), (d1, d2)) = comb
val1 = crossRatio5(T[a1], T[b1], T[c1], T[d1], T[x1])
val2 = crossRatio5(Q[a2], Q[b2], Q[c2], Q[d2], Q[x2])
#if not(val1 == 0 or val2 == 0):
#if val1 >= 1E-4 and val2 >= 1E-4:
if abs(val1-val2) <= val1*CR5_DISTANCE_PERCENTUAL_TOL:
correctPairsIndices.append((x1, x2))
isWrong = False
break
if isWrong:
wrongPairsIndices.append((x1, x2))
del newIndxsPairs[0]
n_correct = len(correctPairsIndices)
n_wrong = len(wrongPairsIndices)
#print("len(correctPairsIndices): ", n_correct)
#print("len(wrongPairsIndices): ", n_wrong)
#print("# -> percentual vertices inliers: ", n_correct/(n_correct + n_wrong))
        #correctPairsIndices += wrongPairsIndices # delete later
newTemplateVertices = [templateVertices[x] for (x, y) in correctPairsIndices]
newTestVertices = [ testVertices[y] for (x, y) in correctPairsIndices]
matchingVerticesPairsPoints = list(zip(newTemplateVertices, newTestVertices))
#print("correctPairsIndices = ", correctPairsIndices)
        # Fix the order of correctPairsIndices after possible index shifts (workaround)
correctPairsIndices = [(x, y) for (x, y) in verticeIndicesPairs if (x, y) in correctPairsIndices]
return matchingVerticesPairsPoints, correctPairsIndices
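    # In short, the method above is a RANSAC-like consensus search: it seeds the correct
    # set with the first 5 vertex pairs whose five-point cross-ratios agree within
    # CR5_DISTANCE_PERCENTUAL_TOL, then accepts each remaining pair if some combination
    # of 4 already-accepted pairs plus the candidate also agrees.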
def compareCrossRatioVertices(self, templateVertices, testVertices, verticePairs, inliersTestIndices=[]):
#Y = getVerticesIndices(verticesPairs)
Y = [y for (x, y) in verticePairs] # get test vertice indices
if inliersTestIndices:
Y = inliersTestIndices and Y
Y = Y and inliersTestIndices
Y = shifftSignalToMinValue(Y)
W = Y
#W = removePeaksAndDepressionsValues(Y)
newPairs = filteredVerticePairs(W, verticePairs)
Xindices = sorted([x for (x, y) in newPairs])
Yindices = [y for (x, y) in newPairs]
minIdx = Yindices.index(min(Yindices))
Yindices = sorted(Yindices[:minIdx]) + sorted(Yindices[minIdx:])
newTemplateVertices = [templateVertices[x] for x in Xindices]
newTestVertices = [testVertices[y] for y in Yindices]
cr5vectorTemplate = invariant5CrossRatioFilter(newTemplateVertices)
cr5vectorTest = invariant5CrossRatioFilter(newTestVertices)
#print("cr5vectorTemplate: ", cr5vectorTemplate)
#print("cr5vectorTest: ", cr5vectorTest)
if len(cr5vectorTemplate) != 0:
distance = calcDistanceVector(cr5vectorTemplate, cr5vectorTest)
else:
distance = float('Inf')
distance_tol = calcDistanceVector(cr5vectorTemplate, [0]*len(cr5vectorTemplate))*CR5_DISTANCE_PERCENTUAL_TOL
return distance, distance_tol, cr5vectorTemplate, cr5vectorTest
def compareFanBeamExtremePointsCR5(self, templateRays):
testRays = [r.bestMatchingRayPair for r in templateRays]
templateV0 = templateRays[0].edgePoints[0]
testV0 = testRays[0].edgePoints[0]
templateExtremePoints = [r.edgePoints[-1] for r in templateRays]
testExtremePoints = [r.edgePoints[-1] for r in testRays]
cr5vectorTemplate = invariant5CrossRatioFilter(templateExtremePoints)
cr5vectorTest = invariant5CrossRatioFilter(testExtremePoints)
if len(cr5vectorTemplate) != 0:
distance = calcDistanceVector(cr5vectorTemplate, cr5vectorTest)
else:
distance = float('Inf')
return distance
def generateSimpleTopologySpectre(self, matchedRaysList):
spectre = []
if len(matchedRaysList) > 0:
matchedRaysList = list(set(matchedRaysList))
sortRays = sorted(matchedRaysList, key=lambda r: r.s)
spectre_idx = [r.s for r in sortRays]
for ray in sortRays:
spectre.append(ray.numberOfEdgePoints)
return spectre
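    # Example (hypothetical rays): given matched rays at s = 3, 1, 2 carrying 4, 2 and 6
    # edge points respectively, sorting by s yields the spectre [2, 6, 4].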
def generateKeySpectres(self, templateRays, testRays):
templateSpectre = []
testSpectre = []
keyValue = 2
for i in range(len(templateRays)):
templateRays[i].key = keyValue
templateRays[i].bestMatchingRayPair.key = keyValue
templateSpectre.append(keyValue)
keyValue += 1
bestTestRayPairs = [templateRay.bestMatchingRayPair for templateRay in templateRays]
for testRay in testRays:
if testRay in bestTestRayPairs:
i = bestTestRayPairs.index(testRay)
testSpectre.append(bestTestRayPairs[i].key)
return (templateSpectre, testSpectre)
def generateTopologySpectre(self, matchedRaysList, groundValue=0, maxLengthVector=None):
spectre = []
if len(matchedRaysList) > 0:
matchedRaysList = list(set(matchedRaysList))
sortRays = sorted(matchedRaysList, key=lambda r: r.s)
spectre_idx = [r.s for r in sortRays]
for i in range(maxLengthVector):
if i in spectre_idx:
[ray] = [r for r in sortRays if r.s==i]
spectre.append(ray.numberOfEdgePoints)
else:
spectre.append(groundValue)
for ray in sortRays:
spectre.append(ray.numberOfEdgePoints)
return spectre
def generateTopologySpectre_deprecated2(self, matchedRaysList, groundValue=0, maxLengthVector=None):
maxGapPercent = 0.2
maxGapLen = maxLengthVector*maxGapPercent
spectre = [groundValue]*maxLengthVector
if len(matchedRaysList) > 0:
matchedRaysList = list(set(matchedRaysList))
sortRays = sorted(matchedRaysList, key=lambda r: r.s)
spectre_idx = [r.s for r in sortRays]
s_max = max(spectre_idx)
if maxLengthVector == None:
maxLengthVector = max(spectre_idx)
#print("spectre_idx: ", spectre_idx)
# sortRaysDict = {}
# for ray in sortRays:
# sortRaysDict[ray.s] = ray
#spectre = [groundValue]*maxLengthVector
# lastValue = 0
# for i in range(0, len(spectre)):
# if i in sortRaysDict:
# lastValue = sortRaysDict[i].numberOfEdgePoints
# spectre[i] = lastValue
for ray in sortRays:
#spectre[ray.s] = ray.numberOfEdgePoints
extremeCR = crossRatio(ray.edgePoints[0],ray.edgePoints[1], ray.edgePoints[2], ray.edgePoints[-1])
#spectre[ray.s] = sum(ray.crossRatioVector)#/extremeCR
spectre.append(ray.numberOfEdgePoints)
            # signal rectification (filling gaps between nearby rays)
for i in range(0, len(sortRays)):
if i+1 < len(sortRays):
ray1 = sortRays[i]
ray2 = sortRays[i+1]
s1 = ray1.s
s2 = ray2.s
if abs(s2 - s1) <= maxGapLen:
for s in range(s1+1, s2):
                            spectre[s] = spectre[s1] # repeat the value
# lastValue = groundValue
# for s in range(0, s_max):
# if spectre[s] == groundValue:
# spectre[s] = lastValue
# else:
# lastValue = spectre[s]
#print(spectre)
return spectre
def findBestMatchPencil(self, pencil, otherPencils):
# return spectres
(best_distance, best_matchedTemplateRays, best_matchedTestRays) = (float('inf'), [], [])
bestIdx = 0
for i, other in enumerate(otherPencils):
#print("test pencil i = ", i)
(distance, matchedTemplateRays, matchedTestRays) = self.comparePencils(pencil, other)
#print("(i, d) = (%d, %3.2f)" %(i, distance))
if distance < best_distance:
#print("SWAP best distance!")
bestIdx = i
best_distance = distance
best_matchedTemplateRays = matchedTemplateRays
best_matchedTestRays = matchedTestRays
#print("best idx = ", bestIdx)
return (bestIdx, best_distance, best_matchedTemplateRays, best_matchedTestRays)
def comparePencils(self, pencil1, pencil2):
        # Get the rays of pencil 1
        # Use the topology of those rays to retrieve the list of rays with the same topology
rays1 = pencil1.getValues()
for i in range(0, len(rays1)):
rays1[i].cleanMatchingVariables()
rays2 = pencil2.getValues()
totalRays1 = len(rays1)
totalRays2 = len(rays2)
totalRays = min(totalRays1, totalRays2)
# print("len1 = ", totalRays1)
# print("len2 = ", totalRays2)
totalTemplateRays = len(rays1)
templateMatchs = [0]*totalTemplateRays
matchedTemplateRays = []
matchedTestRays = []
#matchPairsRays = []
for i, templateRay in enumerate(rays1):
for testRay in pencil2.access(templateRay.numberOfEdgePoints):
#if templateRay.isMatch(testRay):
if templateRay.isMatching(testRay):
matchedTemplateRays.append(templateRay)
#matchedTestRays.append(testRay)
templateMatchs[i] = 1
#matchPairsRays.append((templateRay.s, testRay.s))
templateRaysIndices = []
testRaysIndices = []
matchedTemplateRays = removeDuplicityRaysMatching(matchedTemplateRays)
matchedTemplateRays = list(set(matchedTemplateRays))
rays = matchedTemplateRays
matchedTemplateRays = sorted(rays, key=lambda r: r.s)
        #CR5_distance = self.compareFanBeamExtremePointsCR5(matchedTemplateRays) # remove
raysIndicePairs = []
distances = []
# Pearson
for mTemplateRay in matchedTemplateRays:
matchedTestRays.append(mTemplateRay.bestMatchingRayPair)
# (s1, s2) = (mTemplateRay.s, mTemplateRay.bestMatchingRayPair.s)
# templateRaysIndices.append(s1)
# testRaysIndices.append(s2)
# # raysIndicePairs.append(s1,s2)
# # distances.append(mTemplateRay.bestMatchingDistance)
sumMatchedTemplateRays = sum(templateMatchs)
# # print("templateRaysIndices = ", templateRaysIndices)
# # print("testRaysIndices = ", testRaysIndices)
# p = pearsonCoefficient(templateRaysIndices, testRaysIndices)
#print(">>>> p = ", p)
if sumMatchedTemplateRays == 0:
return (float('inf'), matchedTemplateRays, matchedTestRays)
percentualMatch = 0
if totalTemplateRays != 0:
percentualMatch = sumMatchedTemplateRays/totalTemplateRays
mTemplateRays = list(set(matchedTemplateRays))
mTestRays = list(set(matchedTestRays))
        # USING SIMPLIFIED SHIFT, DTW and "label" spectra
templateSpectre = self.generateSimpleTopologySpectre(mTemplateRays)
testSpectre = self.generateSimpleTopologySpectre(mTestRays)
#(templateSpectre, testSpectre) = self.generateKeySpectres(mTemplateRays, mTestRays)
n = len(mTemplateRays)
costBinFunct = costFunctionBinary()
dtw = DTW(templateSpectre, testSpectre, costBinFunct)
n = len(mTemplateRays)
dtw_dist = dtw.distance()
error_distance = totalRays - n
error_distance += dtw_dist#sum([1 for a,b in zip(templateSpectre,testSpectre) if a==b])
distance_normalized = error_distance/n
        # reversing the query spectrum
testSpectreReversed = testSpectre
testSpectreReversed.reverse()
dtw = DTW(templateSpectre, testSpectreReversed, costBinFunct)
n = len(mTemplateRays)
dtw_dist = dtw.distance()
error_distance = totalRays - n
error_distance += dtw_dist#sum([1 for a,b in zip(templateSpectre,testSpectreReversed) if a==b])
distance_normalized_reversed = error_distance/n
distance_normalized = min(distance_normalized, distance_normalized_reversed)
        # USING DTW and the TOPOLOGY SPECTRUM
# templateSpectre = self.generateTopologySpectre(mTemplateRays, maxLengthVector=totalRays1)
# testSpectre = self.generateTopologySpectre(mTestRays, groundValue=1, maxLengthVector=totalRays2)
# costBinFunct = costFunctionBinary()
# dtw = DTW(templateSpectre, testSpectre, costBinFunct)
# n = len(mTemplateRays)
# distance_normalized = dtw.distance()
#print("dtw normalized = ", distance_normalized)
#print("CR5 distance = ", CR5_distance)
#return ((1-abs(p)), matchedTemplateRays, matchedTestRays)
#return (distance_normalized*(1-abs(p)), matchedTemplateRays, matchedTestRays)
return (distance_normalized, matchedTemplateRays, matchedTestRays)
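    # The returned distance mixes two terms: the number of rays that found no match
    # (totalRays - n) and the DTW cost between the matched-ray topology spectra (the
    # query spectrum is also tried reversed, to tolerate mirrored contours), normalized
    # by the number of matched rays; lower is better.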
def comparePencils_deprecated(self, pencil1, pencil2):
        # Get the rays of pencil 1
        # Use the topology of those rays to retrieve the list of rays with the same topology
rays1 = pencil1.getValues()
rays2 = pencil2.getValues()
totalRays1 = len(rays1)
totalRays2 = len(rays2)
# print("len1 = ", totalRays1)
# print("len2 = ", totalRays2)
totalTemplateRays = len(rays1)
templateMatchs = [0]*totalTemplateRays
matchedTemplateRays = []
matchedTestRays = []
matchPairsRays = []
for i, templateRay in enumerate(rays1):
for testRay in pencil2.access(templateRay.numberOfEdgePoints):
if templateRay.isMatch(testRay):
matchedTemplateRays.append(templateRay)
matchedTestRays.append(testRay)
templateMatchs[i] = 1
#else:
sumMatchedTemplateRays = sum(templateMatchs)
if sumMatchedTemplateRays == 0:
return (float('inf'), matchedTemplateRays, matchedTestRays)
percentualMatch = 0
if totalTemplateRays != 0:
percentualMatch = sumMatchedTemplateRays/totalTemplateRays
mTemplateRays = list(set(matchedTemplateRays))
mTestRays = list(set(matchedTestRays))
templateSpectre = self.generateTopologySpectre(mTemplateRays, maxLengthVector=totalRays1)
testSpectre = self.generateTopologySpectre(mTestRays, groundValue=1, maxLengthVector=totalRays2)
costBinFunct = costFunctionBinary()
dtw = DTW(templateSpectre, testSpectre, costBinFunct)
#print("dtw.distance() = ", dtw.distance())
distance_normalized = dtw.distance()/len(mTemplateRays)
#print("dtw normalized = ", distance_normalized)
return (distance_normalized, matchedTemplateRays, matchedTestRays)
def compare(self, estimateVanishPoint=False):
matchedWhitePixels = self.templateDescriptor.whitePixelsImage
vanishPoints = []
matchedTemplateRays = []
matchedTestRays = []
badTemplRays = []
badTestRays = []
totalComp = 0
distanceError = 0
templateMatchs = [0]*len(self.templateDescriptor.raysTable.getValues())
#testMatchs = [0]*len(self.testDescriptor.rays)
matchByProjections = {}
for i, templateRay in enumerate(self.templateDescriptor.raysTable.getValues()):
            matchFlag = False
#for testRay in enumerate(self.testDescriptor.rays):
for testRay in self.testDescriptor.raysTable.access(templateRay.numberOfEdgePoints):
totalComp += 1
if templateRay.isMatch(testRay):
matchedTemplateRays.append(templateRay)
matchedTestRays.append(testRay)
for wp in templateRay.whitePixels:
matchedWhitePixels[wp] = 1
if estimateVanishPoint:
testRay.estimateVanishPoints(templateRay)
vanishPoints.append(testRay.getVanishPoint())
templateMatchs[i] = 1
#testMatchs[j] = 1
distanceError += calcDistanceVector(templateRay.crossRatioVector, testRay.crossRatioVector)
                    matchFlag = True
# if testRay.numberOfEdgePoints % 2 == 0:
# if testRay.estimateVanishPoints(templateRay):
# vanishPoints.append(testRay.getVanishPoint())
# matchedTemplateRays.append(templateRay)
# matchedTestRays.append(testRay)
# else:
# badTemplRays.append(templateRay)
# badTestRays.append(testRay)
            if matchFlag:
theta = templateRay.theta
if matchByProjections.get(theta) is None:
matchByProjections[theta] = 1
else:
matchByProjections[theta] += 1
sumMatchedTemplateRays = sum(templateMatchs)
#sumMatchedTestRays = sum(testMatchs)
#print("matchedTemplateRays[0].s = ", matchedTemplateRays[0].s)
matchedTemplateRays = list(set(matchedTemplateRays))
sortRays = sorted(matchedTemplateRays, key=lambda r: r.s)
# SPECTRES
print("Matched Template Rays Topology")
for ray1 in sortRays:
print(ray1.numberOfEdgePoints)
print("Matched Template Rays")
for ray1 in sortRays:
print(sum(ray1.crossRatioVector))
# ---------------------------------------------------------------
matchedTestRays = list(set(matchedTestRays))
sortRays = sorted(matchedTestRays, key=lambda r: r.s)
print("Matched Test Rays Topology")
for ray2 in sortRays:
print(ray2.numberOfEdgePoints)
print("Matched Test Rays")
for ray2 in sortRays:
print(sum(ray2.crossRatioVector))
print("### MATCHING STATISTIC ###")
print("# Template Rays: ", len(self.templateDescriptor.raysTable.getValues()))
print("# Test Rays: ", len(self.testDescriptor.raysTable.getValues()))
print("# Comparations: ", totalComp)
print("# Matched Template Rays : ", sumMatchedTemplateRays)
#print("# Matched Test Rays : ", sumMatchedTestRays)
if len(self.templateDescriptor.raysTable.getValues()):
print("Percentual template match: %3.2f%%" %(sumMatchedTemplateRays*100/len(self.templateDescriptor.raysTable.getValues())))
# if len(self.testDescriptor.rays):
# print("Percentual test match: %3.2f%%" %(sumMatchedTestRays*100/len(self.testDescriptor.rays)))
print("Distance Error: ", distanceError)
print("---------------------------------")
matchedWhitePixelsValues = matchedWhitePixels.values()
totalWhitePixels = len(matchedWhitePixelsValues)
sumMatchedPixels = sum(matchedWhitePixelsValues)
print("White pixels in template image: ", totalWhitePixels)
print("White matched pixels in template image: ", sumMatchedPixels)
if totalWhitePixels > 0:
percentual = (sumMatchedPixels*100)/totalWhitePixels
print("Percentual matched pixels: <__ %3.2f %%__>" %(percentual))
print("Match by Projections: ")
# matchByProjections = {}
# for templRay in matchedTemplateRays:
# theta = templRay.theta
# if matchByProjections.get(theta) is None:
# matchByProjections[theta] = 1
# else:
# matchByProjections[theta] += 1
print("len(matchByProjections) = ", len(matchByProjections.items()))
print("(theta, percentual match)")
for (theta, count) in matchByProjections.items():
percentual = count*100/sumMatchedTemplateRays
print("(%f, %d) ---- %3.2f" %(theta, count, percentual))
return (matchedTemplateRays, matchedTestRays, badTemplRays, badTestRays, vanishPoints)
def compareByCrossFeatures(self):
matchedTemplateRays = []
matchedTestRays = []
# -----------------------
vanishPoints = []
matchedtemplateFeatures = []
matchedtestFeatures = []
badTemplRays = []
badtestFeatures = []
totalComp = 0
distanceError = 0
templateMatchsFeatures = [0]*len(self.templateDescriptor.crossFeatures)
testMatchsFeatures = [0]*len(self.testDescriptor.crossFeatures)
templateMatchsRaysCounter = {}
testMatchRaysCounter = {}
templateEdgePointsCounter = {}
templateJunctionPointsCounter = {}
for i, templateFeature in enumerate(self.templateDescriptor.crossFeatures):
            matchFlag = False
for j, testFeature in enumerate(self.testDescriptor.crossFeatures):
totalComp += 1
if templateFeature.isMatch(testFeature):
#if templateFeature.compareRays(testFeature):
matchedtemplateFeatures.append(templateFeature)
matchedtestFeatures.append(testFeature)
# ------------
matchedTemplateRays.append(templateFeature.ray1)
matchedTemplateRays.append(templateFeature.ray2)
matchedTestRays.append(testFeature.ray1)
matchedTestRays.append(testFeature.ray2)
(s1, t1) = templateFeature.ray1.getPolarCoordinate()
(s2, t2) = templateFeature.ray2.getPolarCoordinate()
templateMatchsRaysCounter[(s1, t1)] = 1
templateMatchsRaysCounter[(s2, t2)] = 1
templateEdgePointsCounter[(s1, t1)] = templateFeature.ray1.numberOfEdgePoints - 1
templateEdgePointsCounter[(s2, t2)] = templateFeature.ray2.numberOfEdgePoints - 1
                    if templateJunctionPointsCounter.get((s1, t1)) is not None:
                        templateJunctionPointsCounter[(s1, t1)] = templateJunctionPointsCounter.get((s1, t1)) + 1
                    else:
                        templateJunctionPointsCounter[(s1, t1)] = 1
                    if templateJunctionPointsCounter.get((s2, t2)) is not None:
                        templateJunctionPointsCounter[(s2, t2)] = templateJunctionPointsCounter.get((s2, t2)) + 1
                    else:
                        templateJunctionPointsCounter[(s2, t2)] = 1
templateMatchsFeatures[i] = 1
testMatchsFeatures[j] = 1
sumMatchedtemplateFeatures = sum(templateMatchsFeatures)
sumMatchedtestFeatures = sum(testMatchsFeatures)
print("### MATCHING STATISTIC ###")
print("* FEATURES:")
print("# Template Features: ", len(self.templateDescriptor.crossFeatures))
print("# Test Features: ", len(self.testDescriptor.crossFeatures))
print("# Comparations: ", totalComp)
print("# Matched Template Features : ", sumMatchedtemplateFeatures)
print("# Matched Test Features : ", sumMatchedtestFeatures)
if len(self.templateDescriptor.crossFeatures) > 0:
print("Percentual template match: < %3.2f%% >" %(sumMatchedtemplateFeatures*100/len(self.templateDescriptor.crossFeatures)))
if len(self.testDescriptor.crossFeatures) > 0:
print("Percentual test match: %3.2f%%" %(sumMatchedtestFeatures*100/len(self.testDescriptor.crossFeatures)))
print("---------------------------------")
print("* RAYS:")
nTemplateRaysMatch = sum(templateMatchsRaysCounter.values())
totalTemplateRays = len(self.templateDescriptor.rays)
print(" Matched Template Rays: ", nTemplateRaysMatch)
if totalTemplateRays > 0:
print("Percentual template ray match: < %3.2f%% >" %(nTemplateRaysMatch*100/totalTemplateRays))
print("---------------------------------")
print("* EDGE POINTS + JUNCTION POINTS:")
matchedEdgePoints = sum(templateEdgePointsCounter.values())
matchedJunctionPoints = sum(templateJunctionPointsCounter.values())
matchedPoints = matchedEdgePoints + matchedJunctionPoints
totalTemplatePoints = self.templateDescriptor.numberOfEdgePoints + self.templateDescriptor.numberOfJunctionPoints
print("Matched Edge points: ", matchedEdgePoints)
print("Matched Junction points: ", matchedJunctionPoints)
print("Matched Points: ", matchedPoints)
if totalTemplateRays > 0:
print("Percentual match edge points : %3.2f%%" %(matchedEdgePoints*100/self.templateDescriptor.numberOfEdgePoints))
if self.templateDescriptor.numberOfJunctionPoints > 0:
print("Percentual match junction points : %3.2f%%" %(matchedJunctionPoints*100/self.templateDescriptor.numberOfJunctionPoints))
print("Percentual match points : < %3.2f%% >" %(matchedPoints*100/totalTemplatePoints))
return (matchedTemplateRays, matchedTestRays, badTemplRays, badtestFeatures, vanishPoints)
def compareByTripleCrossFeatures(self):
matchedTemplateRays = []
matchedTestRays = []
# -----------------------
vanishPoints = []
matchedtemplateFeatures = []
matchedtestFeatures = []
badTemplRays = []
badtestFeatures = []
totalComp = 0
distanceError = 0
templateFeatures = self.templateDescriptor.tripleCrossFeaturesTable.getValues()
testFeatures = self.testDescriptor.tripleCrossFeaturesTable.getValues()
templateMatchsFeatures = [0]*len(templateFeatures)
testMatchsFeatures = [0]*len(testFeatures)
templateMatchsRaysCounter = {}
testMatchRaysCounter = {}
templateEdgePointsCounter = {}
templateJunctionPointsCounter = {}
for i, templateFeature in enumerate(templateFeatures):
            matchFlag = False
key = templateFeature.topoKey
for j, testFeature in enumerate(self.testDescriptor.tripleCrossFeaturesTable.access(key)):
totalComp += 1
if templateFeature.isMatch(testFeature):
# print("5-Points Cross Ratio:")
# print(templateFeature.fivePointsCrossRatioVector)
# print(testFeature.fivePointsCrossRatioVector)
#if templateFeature.compareRays(testFeature):
matchedtemplateFeatures.append(templateFeature)
matchedtestFeatures.append(testFeature)
# ------------
matchedTemplateRays.append(templateFeature.ray1)
matchedTemplateRays.append(templateFeature.ray2)
matchedTemplateRays.append(templateFeature.ray3)
matchedTestRays.append(testFeature.ray1)
matchedTestRays.append(testFeature.ray2)
matchedTestRays.append(testFeature.ray3)
(s1, t1) = templateFeature.ray1.getPolarCoordinate()
(s2, t2) = templateFeature.ray2.getPolarCoordinate()
(s3, t3) = templateFeature.ray3.getPolarCoordinate()
templateMatchsRaysCounter[(s1, t1)] = 1
templateMatchsRaysCounter[(s2, t2)] = 1
templateMatchsRaysCounter[(s3, t3)] = 1
templateEdgePointsCounter[(s1, t1)] = templateFeature.ray1.numberOfEdgePoints - 2
templateEdgePointsCounter[(s2, t2)] = templateFeature.ray2.numberOfEdgePoints - 2
templateEdgePointsCounter[(s3, t3)] = templateFeature.ray3.numberOfEdgePoints - 2
# if templateJunctionPointsCounter.get(s1, t1) is not None:
# templateJunctionPointsCounter[(s1, t1)] = templateJunctionPointsCounter.get(s1, t1) + 1
# else:
# templateJunctionPointsCounter[(s1, t1)] = 1
# if templateJunctionPointsCounter.get(s2, t2) is not None:
# templateJunctionPointsCounter[(s2, t2)] = templateJunctionPointsCounter.get(s1, t1) + 1
# else:
# templateJunctionPointsCounter[(s2, t2)] = 1
# if templateJunctionPointsCounter.get(s3, t3) is not None:
# templateJunctionPointsCounter[(s3, t3)] = templateJunctionPointsCounter.get(s1, t1) + 1
# else:
# templateJunctionPointsCounter[(s3, t3)] = 1
templateMatchsFeatures[i] = 1
                    testMatchsFeatures[j] = 1 # <==== WRONG (fix): j indexes the filtered bucket, not the full testFeatures list
sumMatchedtemplateFeatures = sum(templateMatchsFeatures)
sumMatchedtestFeatures = sum(testMatchsFeatures)
print("### MATCHING STATISTIC ###")
print("* TRIPLE CROSS FEATURES:")
print("# Template Features: ", len(templateFeatures))
print("# Test Features: ", len(testFeatures))
print("# Comparations: ", totalComp)
print("# Matched Template Features : ", sumMatchedtemplateFeatures)
print("# Matched Test Features : ", sumMatchedtestFeatures)
if len(templateFeatures) > 0:
print("Percentual template match: < %3.2f%% >" %(sumMatchedtemplateFeatures*100/len(templateFeatures)))
if len(testFeatures) > 0:
print("Percentual test match: %3.2f%%" %(sumMatchedtestFeatures*100/len(testFeatures)))
print("---------------------------------")
print("* RAYS:")
nTemplateRaysMatch = sum(templateMatchsRaysCounter.values())
totalTemplateRays = len(self.templateDescriptor.raysTable.getValues())
print(" Matched Template Rays: ", nTemplateRaysMatch)
if totalTemplateRays > 0:
print("Percentual template ray match: < %3.2f%% >" %(nTemplateRaysMatch*100/totalTemplateRays))
print("---------------------------------")
print("* EDGE POINTS + JUNCTION POINTS:")
matchedEdgePoints = sum(templateEdgePointsCounter.values())
#matchedJunctionPoints = sum(templateJunctionPointsCounter.values())
#matchedPoints = matchedEdgePoints + matchedJunctionPoints
#totalTemplatePoints = self.templateDescriptor.numberOfEdgePoints + self.templateDescriptor.numberOfJunctionPoints
print("Matched Edge points: ", matchedEdgePoints)
#print("Matched Junction points: ", matchedJunctionPoints)
#print("Matched Points: ", matchedPoints)
if totalTemplateRays > 0:
print("Percentual match edge points : %3.2f%%" %(matchedEdgePoints*100/self.templateDescriptor.numberOfEdgePoints))
# if self.templateDescriptor.numberOfJunctionPoints > 0:
# print("Percentual match junction points : %3.2f%%" %(matchedJunctionPoints*100/self.templateDescriptor.numberOfJunctionPoints))
#print("Percentual match points : < %3.2f%% >" %(matchedPoints*100/totalTemplatePoints))
return (matchedTemplateRays, matchedTestRays, badTemplRays, badtestFeatures, vanishPoints)
| 35.471061
| 223
| 0.722567
|
927dc85c0a971df3dd42578929f8e613388cbab9
| 24,577
|
py
|
Python
|
tabnet/tabnet.py
|
zhenglinghan/tf-TabNet
|
185455b1eb4810f5c98f7712eaca7c93cb769e9d
|
[
"MIT"
] | 2
|
2020-08-08T14:39:14.000Z
|
2020-09-12T11:08:56.000Z
|
tabnet/tabnet.py
|
zhenglinghan/tf-TabNet
|
185455b1eb4810f5c98f7712eaca7c93cb769e9d
|
[
"MIT"
] | null | null | null |
tabnet/tabnet.py
|
zhenglinghan/tf-TabNet
|
185455b1eb4810f5c98f7712eaca7c93cb769e9d
|
[
"MIT"
] | null | null | null |
import tensorflow as tf
from tabnet.custom_objects import glu, sparsemax, GroupNormalization
class TransformBlock(tf.keras.Model):
def __init__(self, features,
norm_type,
momentum=0.9,
groups=2,
virtual_batch_size=None,
**kwargs):
super(TransformBlock, self).__init__(**kwargs)
self.features = features
self.norm_type = norm_type
self.momentum = momentum
self.groups = groups
self.virtual_batch_size = virtual_batch_size
self.transform = tf.keras.layers.Dense(self.features, use_bias=False)
if norm_type == 'batch':
self.bn = tf.keras.layers.BatchNormalization(axis=-1, momentum=momentum,
virtual_batch_size=virtual_batch_size)
else:
self.bn = GroupNormalization(axis=-1, groups=self.groups)
def call(self, inputs, training=None):
x = self.transform(inputs)
x = self.bn(x, training=training)
return x
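# Usage sketch (illustrative, not part of the original module): a TransformBlock is a
# Dense(features, use_bias=False) layer followed by batch or group normalization, so
#   block = TransformBlock(128, norm_type='group', groups=2)
#   y = block(tf.zeros([8, 32]))  # y.shape == (8, 128)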
class TabNet(tf.keras.Model):
def __init__(self, feature_columns,
feature_dim=64,
output_dim=64,
num_features=None,
num_decision_steps=5,
relaxation_factor=1.5,
sparsity_coefficient=1e-5,
norm_type='group',
batch_momentum=0.98,
virtual_batch_size=None,
num_groups=2,
epsilon=1e-5,
**kwargs):
"""
Tensorflow 2.0 implementation of [TabNet: Attentive Interpretable Tabular Learning](https://arxiv.org/abs/1908.07442)
# Hyper Parameter Tuning (Excerpt from the paper)
We consider datasets ranging from ∼10K to ∼10M training points, with varying degrees of fitting
difficulty. TabNet obtains high performance for all with a few general principles on hyperparameter
selection:
- Most datasets yield the best results for Nsteps ∈ [3, 10]. Typically, larger datasets and
more complex tasks require a larger Nsteps. A very high value of Nsteps may suffer from
overfitting and yield poor generalization.
- Adjustment of the values of Nd and Na is the most efficient way of obtaining a trade-off
between performance and complexity. Nd = Na is a reasonable choice for most datasets. A
very high value of Nd and Na may suffer from overfitting and yield poor generalization.
- An optimal choice of γ can have a major role on the overall performance. Typically a larger
Nsteps value favors for a larger γ.
- A large batch size is beneficial for performance - if the memory constraints permit, as large
as 1-10 % of the total training dataset size is suggested. The virtual batch size is typically
much smaller than the batch size.
- Initially large learning rate is important, which should be gradually decayed until convergence.
Args:
feature_columns: The Tensorflow feature columns for the dataset.
feature_dim (N_a): Dimensionality of the hidden representation in feature
transformation block. Each layer first maps the representation to a
2*feature_dim-dimensional output and half of it is used to determine the
nonlinearity of the GLU activation where the other half is used as an
input to GLU, and eventually feature_dim-dimensional output is
transferred to the next layer.
output_dim (N_d): Dimensionality of the outputs of each decision step, which is
later mapped to the final classification or regression output.
num_features: The number of input features (i.e the number of columns for
tabular data assuming each feature is represented with 1 dimension).
num_decision_steps(N_steps): Number of sequential decision steps.
relaxation_factor (gamma): Relaxation factor that promotes the reuse of each
feature at different decision steps. When it is 1, a feature is enforced
to be used only at one decision step and as it increases, more
flexibility is provided to use a feature at multiple decision steps.
sparsity_coefficient (lambda_sparse): Strength of the sparsity regularization.
Sparsity may provide a favorable inductive bias for convergence to
higher accuracy for some datasets where most of the input features are redundant.
norm_type: Type of normalization to perform for the model. Can be either
'batch' or 'group'. 'group' is the default.
batch_momentum: Momentum in ghost batch normalization.
virtual_batch_size: Virtual batch size in ghost batch normalization. The
overall batch size should be an integer multiple of virtual_batch_size.
num_groups: Number of groups used for group normalization.
epsilon: A small number for numerical stability of the entropy calculations.
"""
super(TabNet, self).__init__(**kwargs)
# Input checks
if feature_columns is not None:
            if not isinstance(feature_columns, (list, tuple)):
                raise ValueError("`feature_columns` must be a list or a tuple.")
            if len(feature_columns) == 0:
                raise ValueError("`feature_columns` must contain at least 1 tf.feature_column!")
if num_features is None:
num_features = len(feature_columns)
else:
num_features = int(num_features)
else:
if num_features is None:
raise ValueError("If `feature_columns` is None, then `num_features` cannot be None.")
feature_dim = int(feature_dim)
output_dim = int(output_dim)
num_decision_steps = int(num_decision_steps)
relaxation_factor = float(relaxation_factor)
sparsity_coefficient = float(sparsity_coefficient)
batch_momentum = float(batch_momentum)
num_groups = max(1, int(num_groups))
epsilon = float(epsilon)
        if relaxation_factor < 0.:
            raise ValueError("`relaxation_factor` cannot be negative!")
        if sparsity_coefficient < 0.:
            raise ValueError("`sparsity_coefficient` cannot be negative!")
if virtual_batch_size is not None:
virtual_batch_size = int(virtual_batch_size)
if norm_type not in ['batch', 'group']:
raise ValueError("`norm_type` must be either `batch` or `group`")
self.feature_columns = feature_columns
self.num_features = num_features
self.feature_dim = feature_dim
self.output_dim = output_dim
self.num_decision_steps = num_decision_steps
self.relaxation_factor = relaxation_factor
self.sparsity_coefficient = sparsity_coefficient
self.norm_type = norm_type
self.batch_momentum = batch_momentum
self.virtual_batch_size = virtual_batch_size
self.num_groups = num_groups
self.epsilon = epsilon
if self.feature_columns is not None:
self.input_features = tf.keras.layers.DenseFeatures(feature_columns)
if self.norm_type == 'batch':
self.input_bn = tf.keras.layers.BatchNormalization(axis=-1, momentum=batch_momentum)
else:
self.input_bn = GroupNormalization(axis=-1, groups=self.num_groups)
else:
self.input_features = None
self.input_bn = None
        # Shared and step-dependent feature transform blocks; TransformBlock takes
        # (features, norm_type, momentum, groups, virtual_batch_size).
        self.transform_f1 = TransformBlock(2 * self.feature_dim, self.norm_type,
                                           self.batch_momentum, self.num_groups,
                                           self.virtual_batch_size)
        self.transform_f2 = TransformBlock(2 * self.feature_dim, self.norm_type,
                                           self.batch_momentum, self.num_groups,
                                           self.virtual_batch_size)
        self.transform_f3 = TransformBlock(2 * self.feature_dim, self.norm_type,
                                           self.batch_momentum, self.num_groups,
                                           self.virtual_batch_size)
        self.transform_f4 = TransformBlock(2 * self.feature_dim, self.norm_type,
                                           self.batch_momentum, self.num_groups,
                                           self.virtual_batch_size)
        self.transform_coef = TransformBlock(self.num_features, self.norm_type,
                                             self.batch_momentum, self.num_groups,
                                             self.virtual_batch_size)
self._step_feature_selection_masks = None
self._step_aggregate_feature_selection_mask = None
def call(self, inputs, training=None):
if self.input_features is not None:
features = self.input_features(inputs)
features = self.input_bn(features, training=training)
else:
features = inputs
batch_size = tf.shape(features)[0]
self._step_feature_selection_masks = []
self._step_aggregate_feature_selection_mask = None
# Initializes decision-step dependent variables.
output_aggregated = tf.zeros([batch_size, self.output_dim])
masked_features = features
mask_values = tf.zeros([batch_size, self.num_features])
aggregated_mask_values = tf.zeros([batch_size, self.num_features])
        complementary_aggregated_mask_values = tf.ones(
            [batch_size, self.num_features])
total_entropy = 0.0
entropy_loss = 0.
for ni in range(self.num_decision_steps):
# Feature transformer with two shared and two decision step dependent
# blocks is used below.
transform_f1 = self.transform_f1(masked_features, training=training)
transform_f1 = glu(transform_f1, self.feature_dim)
transform_f2 = self.transform_f2(transform_f1, training=training)
transform_f2 = (glu(transform_f2, self.feature_dim) +
transform_f1) * tf.math.sqrt(0.5)
transform_f3 = self.transform_f3(transform_f2, training=training)
transform_f3 = (glu(transform_f3, self.feature_dim) +
transform_f2) * tf.math.sqrt(0.5)
transform_f4 = self.transform_f4(transform_f3, training=training)
transform_f4 = (glu(transform_f4, self.feature_dim) +
transform_f3) * tf.math.sqrt(0.5)
if (ni > 0):
decision_out = tf.nn.relu(transform_f4[:, :self.output_dim])
# Decision aggregation.
output_aggregated += decision_out
# Aggregated masks are used for visualization of the
# feature importance attributes.
scale_agg = tf.reduce_sum(decision_out, axis=1, keepdims=True)
scale_agg = scale_agg / tf.cast(self.num_decision_steps - 1, tf.float32)
aggregated_mask_values += mask_values * scale_agg
features_for_coef = (transform_f4[:, self.output_dim:])
if (ni < (self.num_decision_steps - 1)):
# Determines the feature masks via linear and nonlinear
# transformations, taking into account of aggregated feature use.
mask_values = self.transform_coef(features_for_coef, training=training)
                mask_values *= complementary_aggregated_mask_values
mask_values = sparsemax(mask_values, axis=-1)
# Relaxation factor controls the amount of reuse of features between
# different decision blocks and updated with the values of
# coefficients.
                complementary_aggregated_mask_values *= (
                    self.relaxation_factor - mask_values)
# Entropy is used to penalize the amount of sparsity in feature
# selection.
total_entropy += tf.reduce_mean(
tf.reduce_sum(
-mask_values * tf.math.log(mask_values + self.epsilon), axis=1)) / (
tf.cast(self.num_decision_steps - 1, tf.float32))
# Add entropy loss
entropy_loss = total_entropy
# Feature selection.
masked_features = tf.multiply(mask_values, features)
# Visualization of the feature selection mask at decision step ni
# tf.summary.image(
# "Mask for step" + str(ni),
# tf.expand_dims(tf.expand_dims(mask_values, 0), 3),
# max_outputs=1)
mask_at_step_i = tf.expand_dims(tf.expand_dims(mask_values, 0), 3)
self._step_feature_selection_masks.append(mask_at_step_i)
else:
# This branch is needed for correct compilation by tf.autograph
entropy_loss = 0.
# Adds the loss automatically
self.add_loss(self.sparsity_coefficient * entropy_loss)
# Visualization of the aggregated feature importances
# tf.summary.image(
# "Aggregated mask",
# tf.expand_dims(tf.expand_dims(aggregated_mask_values, 0), 3),
# max_outputs=1)
agg_mask = tf.expand_dims(tf.expand_dims(aggregated_mask_values, 0), 3)
self._step_aggregate_feature_selection_mask = agg_mask
return output_aggregated
@property
def feature_selection_masks(self):
return self._step_feature_selection_masks
@property
def aggregate_feature_selection_mask(self):
return self._step_aggregate_feature_selection_mask
class TabNetClassifier(tf.keras.Model):
def __init__(self, feature_columns,
num_classes,
num_features=None,
feature_dim=64,
output_dim=64,
num_decision_steps=5,
relaxation_factor=1.5,
sparsity_coefficient=1e-5,
norm_type='group',
batch_momentum=0.98,
virtual_batch_size=None,
num_groups=1,
epsilon=1e-5,
**kwargs):
"""
Tensorflow 2.0 implementation of [TabNet: Attentive Interpretable Tabular Learning](https://arxiv.org/abs/1908.07442)
# Hyper Parameter Tuning (Excerpt from the paper)
We consider datasets ranging from ∼10K to ∼10M training points, with varying degrees of fitting
difficulty. TabNet obtains high performance for all with a few general principles on hyperparameter
selection:
- Most datasets yield the best results for Nsteps ∈ [3, 10]. Typically, larger datasets and
more complex tasks require a larger Nsteps. A very high value of Nsteps may suffer from
overfitting and yield poor generalization.
- Adjustment of the values of Nd and Na is the most efficient way of obtaining a trade-off
between performance and complexity. Nd = Na is a reasonable choice for most datasets. A
very high value of Nd and Na may suffer from overfitting and yield poor generalization.
- An optimal choice of γ can have a major role on the overall performance. Typically a larger
Nsteps value favors for a larger γ.
- A large batch size is beneficial for performance - if the memory constraints permit, as large
as 1-10 % of the total training dataset size is suggested. The virtual batch size is typically
much smaller than the batch size.
- Initially large learning rate is important, which should be gradually decayed until convergence.
Args:
feature_columns: The Tensorflow feature columns for the dataset.
num_classes: Number of classes.
feature_dim (N_a): Dimensionality of the hidden representation in feature
transformation block. Each layer first maps the representation to a
2*feature_dim-dimensional output and half of it is used to determine the
nonlinearity of the GLU activation where the other half is used as an
input to GLU, and eventually feature_dim-dimensional output is
transferred to the next layer.
output_dim (N_d): Dimensionality of the outputs of each decision step, which is
later mapped to the final classification or regression output.
num_features: The number of input features (i.e the number of columns for
tabular data assuming each feature is represented with 1 dimension).
num_decision_steps(N_steps): Number of sequential decision steps.
relaxation_factor (gamma): Relaxation factor that promotes the reuse of each
feature at different decision steps. When it is 1, a feature is enforced
to be used only at one decision step and as it increases, more
flexibility is provided to use a feature at multiple decision steps.
sparsity_coefficient (lambda_sparse): Strength of the sparsity regularization.
Sparsity may provide a favorable inductive bias for convergence to
higher accuracy for some datasets where most of the input features are redundant.
            norm_type: Type of normalization to perform for the model. Can be either
                'batch' or 'group'. 'group' is the default.
batch_momentum: Momentum in ghost batch normalization.
virtual_batch_size: Virtual batch size in ghost batch normalization. The
overall batch size should be an integer multiple of virtual_batch_size.
num_groups: Number of groups used for group normalization.
epsilon: A small number for numerical stability of the entropy calculations.
"""
super(TabNetClassifier, self).__init__(**kwargs)
self.num_classes = num_classes
self.tabnet = TabNet(feature_columns=feature_columns,
num_features=num_features,
feature_dim=feature_dim,
output_dim=output_dim,
num_decision_steps=num_decision_steps,
relaxation_factor=relaxation_factor,
sparsity_coefficient=sparsity_coefficient,
norm_type=norm_type,
batch_momentum=batch_momentum,
virtual_batch_size=virtual_batch_size,
num_groups=num_groups,
epsilon=epsilon,
**kwargs)
self.clf = tf.keras.layers.Dense(num_classes, activation='softmax', use_bias=False)
def call(self, inputs, training=None):
self.activations = self.tabnet(inputs, training=training)
out = self.clf(self.activations)
return out
class TabNetRegressor(tf.keras.Model):
def __init__(self, feature_columns,
num_regressors,
num_features=None,
feature_dim=64,
output_dim=64,
num_decision_steps=5,
relaxation_factor=1.5,
sparsity_coefficient=1e-5,
norm_type='group',
batch_momentum=0.98,
virtual_batch_size=None,
num_groups=1,
epsilon=1e-5,
**kwargs):
"""
Tensorflow 2.0 implementation of [TabNet: Attentive Interpretable Tabular Learning](https://arxiv.org/abs/1908.07442)
# Hyper Parameter Tuning (Excerpt from the paper)
We consider datasets ranging from ∼10K to ∼10M training points, with varying degrees of fitting
difficulty. TabNet obtains high performance for all with a few general principles on hyperparameter
selection:
- Most datasets yield the best results for Nsteps ∈ [3, 10]. Typically, larger datasets and
more complex tasks require a larger Nsteps. A very high value of Nsteps may suffer from
overfitting and yield poor generalization.
- Adjustment of the values of Nd and Na is the most efficient way of obtaining a trade-off
between performance and complexity. Nd = Na is a reasonable choice for most datasets. A
very high value of Nd and Na may suffer from overfitting and yield poor generalization.
- An optimal choice of γ can have a major role on the overall performance. Typically a larger
Nsteps value favors for a larger γ.
- A large batch size is beneficial for performance - if the memory constraints permit, as large
as 1-10 % of the total training dataset size is suggested. The virtual batch size is typically
much smaller than the batch size.
- Initially large learning rate is important, which should be gradually decayed until convergence.
Args:
feature_columns: The Tensorflow feature columns for the dataset.
num_regressors: Number of regression variables.
feature_dim (N_a): Dimensionality of the hidden representation in feature
transformation block. Each layer first maps the representation to a
2*feature_dim-dimensional output and half of it is used to determine the
nonlinearity of the GLU activation where the other half is used as an
input to GLU, and eventually feature_dim-dimensional output is
transferred to the next layer.
output_dim (N_d): Dimensionality of the outputs of each decision step, which is
later mapped to the final classification or regression output.
num_features: The number of input features (i.e the number of columns for
tabular data assuming each feature is represented with 1 dimension).
num_decision_steps(N_steps): Number of sequential decision steps.
relaxation_factor (gamma): Relaxation factor that promotes the reuse of each
feature at different decision steps. When it is 1, a feature is enforced
to be used only at one decision step and as it increases, more
flexibility is provided to use a feature at multiple decision steps.
sparsity_coefficient (lambda_sparse): Strength of the sparsity regularization.
Sparsity may provide a favorable inductive bias for convergence to
higher accuracy for some datasets where most of the input features are redundant.
            norm_type: Type of normalization to perform for the model. Can be either
                'batch' or 'group'. 'group' is the default.
batch_momentum: Momentum in ghost batch normalization.
virtual_batch_size: Virtual batch size in ghost batch normalization. The
overall batch size should be an integer multiple of virtual_batch_size.
num_groups: Number of groups used for group normalization.
epsilon: A small number for numerical stability of the entropy calculations.
"""
super(TabNetRegressor, self).__init__(**kwargs)
self.num_regressors = num_regressors
self.tabnet = TabNet(feature_columns=feature_columns,
num_features=num_features,
feature_dim=feature_dim,
output_dim=output_dim,
num_decision_steps=num_decision_steps,
relaxation_factor=relaxation_factor,
sparsity_coefficient=sparsity_coefficient,
norm_type=norm_type,
batch_momentum=batch_momentum,
virtual_batch_size=virtual_batch_size,
num_groups=num_groups,
epsilon=epsilon,
**kwargs)
self.regressor = tf.keras.layers.Dense(num_regressors, use_bias=False)
def call(self, inputs, training=None):
self.activations = self.tabnet(inputs, training=training)
out = self.regressor(self.activations)
return out
# Aliases
TabNetClassification = TabNetClassifier
TabNetRegression = TabNetRegressor
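# --- Added usage sketch (not part of the original module) ---------------------
# A minimal, hypothetical example of wiring TabNetClassifier to numeric feature
# columns. The column names ('a', 'b', 'c'), the random data and every
# hyperparameter value below are illustrative assumptions, not recommendations.
def _demo_tabnet_classifier():
    import numpy as np
    names = ('a', 'b', 'c')
    feature_columns = [tf.feature_column.numeric_column(name) for name in names]
    model = TabNetClassifier(feature_columns, num_classes=2, num_features=3,
                             feature_dim=8, output_dim=8, num_decision_steps=3)
    model.compile(optimizer='adam', loss='sparse_categorical_crossentropy')
    data = {name: np.random.rand(32, 1).astype('float32') for name in names}
    labels = np.random.randint(0, 2, size=(32,))
    model.fit(data, labels, epochs=1, verbose=0)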
| 49.450704 | 125 | 0.629776 |
bf8bd542a57b3b4d3f225ceab81bad1a36daef8e | 3,557 | py | Python |
qualifier/deploy/cohorte-home/repo/herald/transports/xmpp/directory.py | isandlaTech/cohorte-devtools | 9ba9021369188d2f0ad5c845ef242fd5a7097b57 | ["Apache-2.0"] | 1 | 2017-03-04T14:37:15.000Z | 2017-03-04T14:37:15.000Z |
qualifier/deploy/cohorte-home/repo/herald/transports/xmpp/directory.py | isandlaTech/cohorte-devtools | 9ba9021369188d2f0ad5c845ef242fd5a7097b57 | ["Apache-2.0"] | 4 | 2017-08-21T08:17:14.000Z | 2018-03-02T13:51:43.000Z |
qualifier/deploy/cohorte-home/repo/herald/transports/xmpp/directory.py | isandlaTech/cohorte-devtools | 9ba9021369188d2f0ad5c845ef242fd5a7097b57 | ["Apache-2.0"] | null | null | null |
#!/usr/bin/python
# -- Content-Encoding: UTF-8 --
"""
Herald XMPP transport directory
:author: Thomas Calmant
:copyright: Copyright 2014, isandlaTech
:license: Apache License 2.0
:version: 1.0.1
:status: Alpha
..
Copyright 2014 isandlaTech
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# Bundle version
import herald.version
__version__ = herald.version.__version__
# ------------------------------------------------------------------------------
# Herald XMPP
from . import SERVICE_XMPP_DIRECTORY, ACCESS_ID
from .beans import XMPPAccess
# Herald
import herald
# Standard library
import logging
from pelix.ipopo.decorators import ComponentFactory, Requires, Provides, \
Property, Validate, Invalidate, Instantiate
# ------------------------------------------------------------------------------
_logger = logging.getLogger(__name__)
# ------------------------------------------------------------------------------
@ComponentFactory('herald-xmpp-directory-factory')
@Requires('_directory', herald.SERVICE_DIRECTORY)
@Property('_access_id', herald.PROP_ACCESS_ID, ACCESS_ID)
@Provides((herald.SERVICE_TRANSPORT_DIRECTORY, SERVICE_XMPP_DIRECTORY))
@Instantiate('herald-xmpp-directory')
class XMPPDirectory(object):
"""
XMPP Directory for Herald
"""
def __init__(self):
"""
Sets up the transport directory
"""
# Herald Core Directory
self._directory = None
self._access_id = ACCESS_ID
# JID -> Peer bean
self._jid_peer = {}
# Group name -> XMPP room JID
self._groups = {}
@Validate
def _validate(self, _):
"""
Component validated
"""
self._jid_peer.clear()
self._groups.clear()
@Invalidate
def _invalidate(self, _):
"""
Component invalidated
"""
self._jid_peer.clear()
self._groups.clear()
def load_access(self, data):
"""
Loads a dumped access
:param data: Result of a call to XmppAccess.dump()
:return: An XMPPAccess bean
"""
return XMPPAccess(data)
def peer_access_set(self, peer, data):
"""
The access to the given peer matching our access ID has been set
:param peer: The Peer bean
:param data: The peer access data, previously loaded with load_access()
"""
if peer.uid != self._directory.local_uid:
self._jid_peer[data.jid] = peer
def peer_access_unset(self, _, data):
"""
The access to the given peer matching our access ID has been removed
:param _: The Peer bean
:param data: The peer access data
"""
try:
del self._jid_peer[data.jid]
except KeyError:
pass
def from_jid(self, jid):
"""
Returns the peer associated to the given JID
:param jid: The (full) JID of a peer
:return: A peer bean
:raise KeyError: Unknown JID
"""
return self._jid_peer[jid]
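# --- Added usage sketch (not part of the original bundle) ---------------------
# A hypothetical illustration of the JID -> Peer bookkeeping. _FakePeer,
# _FakeAccess and the JID string are stand-ins invented for this sketch; in a
# running framework the Peer beans and access data come from the Herald core
# directory and iPOPO injection.
def _demo_directory():
    class _FakePeer(object):
        uid = "peer-uid-1"

    class _FakeAccess(object):
        jid = "peer@example.com/herald"

    directory = XMPPDirectory()
    directory._directory = type("FakeCoreDirectory", (), {"local_uid": "local"})()
    directory.peer_access_set(_FakePeer(), _FakeAccess())
    assert directory.from_jid("peer@example.com/herald").uid == "peer-uid-1"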
| 26.744361 | 80 | 0.603317 |
1522ca25cfce2926cdc72c63a03561bf0cb63994 | 42,003 | py | Python |
include/scons/src/engine/SCons/Taskmaster.py | SWEN-712/screen-reader-brandonp728 | e30c25ad2d10ce632fac0548696a61a872328f59 | ["bzip2-1.0.6"] | null | null | null |
include/scons/src/engine/SCons/Taskmaster.py | SWEN-712/screen-reader-brandonp728 | e30c25ad2d10ce632fac0548696a61a872328f59 | ["bzip2-1.0.6"] | 4 | 2019-04-11T16:27:45.000Z | 2019-04-11T23:56:30.000Z |
include/scons/src/engine/SCons/Taskmaster.py | SWEN-712/screen-reader-brandonp728 | e30c25ad2d10ce632fac0548696a61a872328f59 | ["bzip2-1.0.6"] | 1 | 2019-06-12T20:30:04.000Z | 2019-06-12T20:30:04.000Z |
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from __future__ import print_function
import sys
__doc__ = """
Generic Taskmaster module for the SCons build engine.
=====================================================
This module contains the primary interface(s) between a wrapping user
interface and the SCons build engine. There are two key classes here:
Taskmaster
----------
This is the main engine for walking the dependency graph and
calling things to decide what does or doesn't need to be built.
Task
----
This is the base class for allowing a wrapping interface to
decide what does or doesn't actually need to be done. The
intention is for a wrapping interface to subclass this as
appropriate for different types of behavior it may need.
The canonical example is the SCons native Python interface,
which has Task subclasses that handle its specific behavior,
like printing "'foo' is up to date" when a top-level target
doesn't need to be built, and handling the -c option by removing
targets as its "build" action. There is also a separate subclass
for suppressing this output when the -q option is used.
The Taskmaster instantiates a Task object for each (set of)
target(s) that it decides need to be evaluated and/or built.
"""
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
from itertools import chain
import operator
import traceback
import SCons.Errors
import SCons.Node
import SCons.Util
import SCons.Warnings
StateString = SCons.Node.StateString
NODE_NO_STATE = SCons.Node.no_state
NODE_PENDING = SCons.Node.pending
NODE_EXECUTING = SCons.Node.executing
NODE_UP_TO_DATE = SCons.Node.up_to_date
NODE_EXECUTED = SCons.Node.executed
NODE_FAILED = SCons.Node.failed
print_prepare = 0 # set by option --debug=prepare
# A subsystem for recording stats about how different Nodes are handled by
# the main Taskmaster loop. There's no external control here (no need for
# a --debug= option); enable it by changing the value of CollectStats.
CollectStats = None
class Stats(object):
"""
A simple class for holding statistics about the disposition of a
Node by the Taskmaster. If we're collecting statistics, each Node
processed by the Taskmaster gets one of these attached, in which case
the Taskmaster records its decision each time it processes the Node.
(Ideally, that's just once per Node.)
"""
def __init__(self):
"""
Instantiates a Taskmaster.Stats object, initializing all
appropriate counters to zero.
"""
self.considered = 0
self.already_handled = 0
self.problem = 0
self.child_failed = 0
self.not_built = 0
self.side_effects = 0
self.build = 0
StatsNodes = []
fmt = "%(considered)3d "\
"%(already_handled)3d " \
"%(problem)3d " \
"%(child_failed)3d " \
"%(not_built)3d " \
"%(side_effects)3d " \
"%(build)3d "
def dump_stats():
for n in sorted(StatsNodes, key=lambda a: str(a)):
print((fmt % n.attributes.stats.__dict__) + str(n))
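# --- Added note (illustrative, not in the original file) ----------------------
# There is no command-line switch for the stats subsystem above; a wrapping
# script would enable it before the build and print the table afterwards:
#
#     import SCons.Taskmaster
#     SCons.Taskmaster.CollectStats = 1
#     ...                       # run the build
#     SCons.Taskmaster.dump_stats()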
class Task(object):
"""
Default SCons build engine task.
This controls the interaction of the actual building of node
and the rest of the engine.
This is expected to handle all of the normally-customizable
aspects of controlling a build, so any given application
*should* be able to do what it wants by sub-classing this
class and overriding methods as appropriate. If an application
needs to customize something by sub-classing Taskmaster (or
some other build engine class), we should first try to migrate
that functionality into this class.
Note that it's generally a good idea for sub-classes to call
these methods explicitly to update state, etc., rather than
roll their own interaction with Taskmaster from scratch.
"""
def __init__(self, tm, targets, top, node):
self.tm = tm
self.targets = targets
self.top = top
self.node = node
self.exc_clear()
def trace_message(self, method, node, description='node'):
fmt = '%-20s %s %s\n'
return fmt % (method + ':', description, self.tm.trace_node(node))
def display(self, message):
"""
Hook to allow the calling interface to display a message.
This hook gets called as part of preparing a task for execution
(that is, a Node to be built). As part of figuring out what Node
should be built next, the actual target list may be altered,
along with a message describing the alteration. The calling
interface can subclass Task and provide a concrete implementation
of this method to see those messages.
"""
pass
def prepare(self):
"""
Called just before the task is executed.
This is mainly intended to give the target Nodes a chance to
unlink underlying files and make all necessary directories before
the Action is actually called to build the targets.
"""
global print_prepare
T = self.tm.trace
if T: T.write(self.trace_message(u'Task.prepare()', self.node))
# Now that it's the appropriate time, give the TaskMaster a
# chance to raise any exceptions it encountered while preparing
# this task.
self.exception_raise()
if self.tm.message:
self.display(self.tm.message)
self.tm.message = None
# Let the targets take care of any necessary preparations.
# This includes verifying that all of the necessary sources
# and dependencies exist, removing the target file(s), etc.
#
# As of April 2008, the get_executor().prepare() method makes
# sure that all of the aggregate sources necessary to build this
# Task's target(s) exist in one up-front check. The individual
# target t.prepare() methods check that each target's explicit
# or implicit dependencies exists, and also initialize the
# .sconsign info.
executor = self.targets[0].get_executor()
if executor is None:
return
executor.prepare()
for t in executor.get_action_targets():
if print_prepare:
print("Preparing target %s..."%t)
for s in t.side_effects:
print("...with side-effect %s..."%s)
t.prepare()
for s in t.side_effects:
if print_prepare:
print("...Preparing side-effect %s..."%s)
s.prepare()
def get_target(self):
"""Fetch the target being built or updated by this task.
"""
return self.node
def needs_execute(self):
# TODO(deprecate): "return True" is the old default behavior;
# change it to NotImplementedError (after running through the
# Deprecation Cycle) so the desired behavior is explicitly
# determined by which concrete subclass is used.
#raise NotImplementedError
msg = ('Taskmaster.Task is an abstract base class; instead of\n'
'\tusing it directly, '
'derive from it and override the abstract methods.')
SCons.Warnings.warn(SCons.Warnings.TaskmasterNeedsExecuteWarning, msg)
return True
def execute(self):
"""
Called to execute the task.
This method is called from multiple threads in a parallel build,
so only do thread safe stuff here. Do thread unsafe stuff in
prepare(), executed() or failed().
"""
T = self.tm.trace
if T: T.write(self.trace_message(u'Task.execute()', self.node))
try:
cached_targets = []
for t in self.targets:
if not t.retrieve_from_cache():
break
cached_targets.append(t)
if len(cached_targets) < len(self.targets):
# Remove targets before building. It's possible that we
# partially retrieved targets from the cache, leaving
# them in read-only mode. That might cause the command
# to fail.
#
for t in cached_targets:
try:
t.fs.unlink(t.get_internal_path())
except (IOError, OSError):
pass
self.targets[0].build()
else:
for t in cached_targets:
t.cached = 1
except SystemExit:
exc_value = sys.exc_info()[1]
raise SCons.Errors.ExplicitExit(self.targets[0], exc_value.code)
except SCons.Errors.UserError:
raise
except SCons.Errors.BuildError:
raise
except Exception as e:
buildError = SCons.Errors.convert_to_BuildError(e)
buildError.node = self.targets[0]
buildError.exc_info = sys.exc_info()
raise buildError
def executed_without_callbacks(self):
"""
Called when the task has been successfully executed
and the Taskmaster instance doesn't want to call
the Node's callback methods.
"""
T = self.tm.trace
if T: T.write(self.trace_message('Task.executed_without_callbacks()',
self.node))
for t in self.targets:
if t.get_state() == NODE_EXECUTING:
for side_effect in t.side_effects:
side_effect.set_state(NODE_NO_STATE)
t.set_state(NODE_EXECUTED)
def executed_with_callbacks(self):
"""
Called when the task has been successfully executed and
the Taskmaster instance wants to call the Node's callback
methods.
This may have been a do-nothing operation (to preserve build
order), so we must check the node's state before deciding whether
it was "built", in which case we call the appropriate Node method.
In any event, we always call "visited()", which will handle any
post-visit actions that must take place regardless of whether
or not the target was an actual built target or a source Node.
"""
global print_prepare
T = self.tm.trace
if T: T.write(self.trace_message('Task.executed_with_callbacks()',
self.node))
for t in self.targets:
if t.get_state() == NODE_EXECUTING:
for side_effect in t.side_effects:
side_effect.set_state(NODE_NO_STATE)
t.set_state(NODE_EXECUTED)
if not t.cached:
t.push_to_cache()
t.built()
t.visited()
if (not print_prepare and
(not hasattr(self, 'options') or not self.options.debug_includes)):
t.release_target_info()
else:
t.visited()
executed = executed_with_callbacks
def failed(self):
"""
Default action when a task fails: stop the build.
Note: Although this function is normally invoked on nodes in
the executing state, it might also be invoked on up-to-date
nodes when using Configure().
"""
self.fail_stop()
def fail_stop(self):
"""
Explicit stop-the-build failure.
This sets failure status on the target nodes and all of
their dependent parent nodes.
Note: Although this function is normally invoked on nodes in
the executing state, it might also be invoked on up-to-date
nodes when using Configure().
"""
T = self.tm.trace
if T: T.write(self.trace_message('Task.failed_stop()', self.node))
        # Invoke will_not_build() to clean up the pending children
        # list.
self.tm.will_not_build(self.targets, lambda n: n.set_state(NODE_FAILED))
# Tell the taskmaster to not start any new tasks
self.tm.stop()
# We're stopping because of a build failure, but give the
# calling Task class a chance to postprocess() the top-level
# target under which the build failure occurred.
self.targets = [self.tm.current_top]
self.top = 1
def fail_continue(self):
"""
Explicit continue-the-build failure.
This sets failure status on the target nodes and all of
their dependent parent nodes.
Note: Although this function is normally invoked on nodes in
the executing state, it might also be invoked on up-to-date
nodes when using Configure().
"""
T = self.tm.trace
if T: T.write(self.trace_message('Task.failed_continue()', self.node))
self.tm.will_not_build(self.targets, lambda n: n.set_state(NODE_FAILED))
def make_ready_all(self):
"""
Marks all targets in a task ready for execution.
This is used when the interface needs every target Node to be
visited--the canonical example being the "scons -c" option.
"""
T = self.tm.trace
if T: T.write(self.trace_message('Task.make_ready_all()', self.node))
self.out_of_date = self.targets[:]
for t in self.targets:
t.disambiguate().set_state(NODE_EXECUTING)
for s in t.side_effects:
# add disambiguate here to mirror the call on targets above
s.disambiguate().set_state(NODE_EXECUTING)
def make_ready_current(self):
"""
Marks all targets in a task ready for execution if any target
is not current.
This is the default behavior for building only what's necessary.
"""
global print_prepare
T = self.tm.trace
if T: T.write(self.trace_message(u'Task.make_ready_current()',
self.node))
self.out_of_date = []
needs_executing = False
for t in self.targets:
try:
t.disambiguate().make_ready()
is_up_to_date = not t.has_builder() or \
(not t.always_build and t.is_up_to_date())
except EnvironmentError as e:
raise SCons.Errors.BuildError(node=t, errstr=e.strerror, filename=e.filename)
if not is_up_to_date:
self.out_of_date.append(t)
needs_executing = True
if needs_executing:
for t in self.targets:
t.set_state(NODE_EXECUTING)
for s in t.side_effects:
# add disambiguate here to mirror the call on targets in first loop above
s.disambiguate().set_state(NODE_EXECUTING)
else:
for t in self.targets:
# We must invoke visited() to ensure that the node
# information has been computed before allowing the
# parent nodes to execute. (That could occur in a
# parallel build...)
t.visited()
t.set_state(NODE_UP_TO_DATE)
if (not print_prepare and
(not hasattr(self, 'options') or not self.options.debug_includes)):
t.release_target_info()
make_ready = make_ready_current
def postprocess(self):
"""
Post-processes a task after it's been executed.
This examines all the targets just built (or not, we don't care
if the build was successful, or even if there was no build
because everything was up-to-date) to see if they have any
waiting parent Nodes, or Nodes waiting on a common side effect,
that can be put back on the candidates list.
"""
T = self.tm.trace
if T: T.write(self.trace_message(u'Task.postprocess()', self.node))
# We may have built multiple targets, some of which may have
# common parents waiting for this build. Count up how many
# targets each parent was waiting for so we can subtract the
# values later, and so we *don't* put waiting side-effect Nodes
# back on the candidates list if the Node is also a waiting
# parent.
targets = set(self.targets)
pending_children = self.tm.pending_children
parents = {}
for t in targets:
# A node can only be in the pending_children set if it has
# some waiting_parents.
if t.waiting_parents:
if T: T.write(self.trace_message(u'Task.postprocess()',
t,
'removing'))
pending_children.discard(t)
for p in t.waiting_parents:
parents[p] = parents.get(p, 0) + 1
t.waiting_parents = set()
for t in targets:
if t.side_effects is not None:
for s in t.side_effects:
if s.get_state() == NODE_EXECUTING:
s.set_state(NODE_NO_STATE)
                    # The side-effects may have been transferred to
                    # NODE_NO_STATE by executed_with{,out}_callbacks, but were
                    # not taken out of the waiting parents/pending children
                    # data structures. Check for that now.
if s.get_state() == NODE_NO_STATE and s.waiting_parents:
pending_children.discard(s)
for p in s.waiting_parents:
parents[p] = parents.get(p, 0) + 1
s.waiting_parents = set()
for p in s.waiting_s_e:
if p.ref_count == 0:
self.tm.candidates.append(p)
for p, subtract in parents.items():
p.ref_count = p.ref_count - subtract
if T: T.write(self.trace_message(u'Task.postprocess()',
p,
'adjusted parent ref count'))
if p.ref_count == 0:
self.tm.candidates.append(p)
for t in targets:
t.postprocess()
# Exception handling subsystem.
#
# Exceptions that occur while walking the DAG or examining Nodes
# must be raised, but must be raised at an appropriate time and in
# a controlled manner so we can, if necessary, recover gracefully,
# possibly write out signature information for Nodes we've updated,
# etc. This is done by having the Taskmaster tell us about the
# exception, and letting
def exc_info(self):
"""
Returns info about a recorded exception.
"""
return self.exception
def exc_clear(self):
"""
Clears any recorded exception.
This also changes the "exception_raise" attribute to point
to the appropriate do-nothing method.
"""
self.exception = (None, None, None)
self.exception_raise = self._no_exception_to_raise
def exception_set(self, exception=None):
"""
Records an exception to be raised at the appropriate time.
This also changes the "exception_raise" attribute to point
        to the method that will, in fact, raise the exception.
"""
if not exception:
exception = sys.exc_info()
self.exception = exception
self.exception_raise = self._exception_raise
def _no_exception_to_raise(self):
pass
def _exception_raise(self):
"""
Raises a pending exception that was recorded while getting a
Task ready for execution.
"""
exc = self.exc_info()[:]
try:
exc_type, exc_value, exc_traceback = exc
except ValueError:
exc_type, exc_value = exc
exc_traceback = None
# raise exc_type(exc_value).with_traceback(exc_traceback)
if sys.version_info[0] == 2:
exec("raise exc_type, exc_value, exc_traceback")
else: # sys.version_info[0] == 3:
if isinstance(exc_value, Exception): #hasattr(exc_value, 'with_traceback'):
# If exc_value is an exception, then just reraise
exec("raise exc_value.with_traceback(exc_traceback)")
else:
# else we'll create an exception using the value and raise that
exec("raise exc_type(exc_value).with_traceback(exc_traceback)")
# raise e.__class__, e.__class__(e), sys.exc_info()[2]
# exec("raise exc_type(exc_value).with_traceback(exc_traceback)")
class AlwaysTask(Task):
def needs_execute(self):
"""
Always returns True (indicating this Task should always
be executed).
Subclasses that need this behavior (as opposed to the default
of only executing Nodes that are out of date w.r.t. their
dependencies) can use this as follows:
class MyTaskSubclass(SCons.Taskmaster.Task):
                needs_execute = SCons.Taskmaster.AlwaysTask.needs_execute
"""
return True
class OutOfDateTask(Task):
def needs_execute(self):
"""
Returns True (indicating this Task should be executed) if this
Task's target state indicates it needs executing, which has
already been determined by an earlier up-to-date check.
"""
return self.targets[0].get_state() == SCons.Node.executing
def find_cycle(stack, visited):
if stack[-1] in visited:
return None
visited.add(stack[-1])
for n in stack[-1].waiting_parents:
stack.append(n)
if stack[0] == stack[-1]:
return stack
if find_cycle(stack, visited):
return stack
stack.pop()
return None
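# --- Added usage sketch (illustrative, not in the original file) --------------
# find_cycle() follows waiting_parents links and returns the stack once it
# arrives back at the starting node. A minimal stand-in node class shows this:
def _demo_find_cycle():
    class _Node(object):
        def __init__(self):
            self.waiting_parents = set()
    a, b = _Node(), _Node()
    a.waiting_parents.add(b)
    b.waiting_parents.add(a)          # a -> b -> a is a cycle
    assert find_cycle([a], set()) is not None
    c = _Node()                       # a lone node has no cycle
    assert find_cycle([c], set()) is None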
class Taskmaster(object):
"""
The Taskmaster for walking the dependency DAG.
"""
def __init__(self, targets=[], tasker=None, order=None, trace=None):
self.original_top = targets
self.top_targets_left = targets[:]
self.top_targets_left.reverse()
self.candidates = []
if tasker is None:
tasker = OutOfDateTask
self.tasker = tasker
if not order:
order = lambda l: l
self.order = order
self.message = None
self.trace = trace
self.next_candidate = self.find_next_candidate
self.pending_children = set()
def find_next_candidate(self):
"""
Returns the next candidate Node for (potential) evaluation.
The candidate list (really a stack) initially consists of all of
the top-level (command line) targets provided when the Taskmaster
was initialized. While we walk the DAG, visiting Nodes, all the
children that haven't finished processing get pushed on to the
candidate list. Each child can then be popped and examined in
turn for whether *their* children are all up-to-date, in which
case a Task will be created for their actual evaluation and
potential building.
Here is where we also allow candidate Nodes to alter the list of
Nodes that should be examined. This is used, for example, when
invoking SCons in a source directory. A source directory Node can
return its corresponding build directory Node, essentially saying,
"Hey, you really need to build this thing over here instead."
"""
try:
return self.candidates.pop()
except IndexError:
pass
try:
node = self.top_targets_left.pop()
except IndexError:
return None
self.current_top = node
alt, message = node.alter_targets()
if alt:
self.message = message
self.candidates.append(node)
self.candidates.extend(self.order(alt))
node = self.candidates.pop()
return node
def no_next_candidate(self):
"""
Stops Taskmaster processing by not returning a next candidate.
        Note that we have to clean up the Taskmaster candidate list
        because the cycle detection depends on the fact that all nodes
        have been processed somehow.
"""
while self.candidates:
candidates = self.candidates
self.candidates = []
self.will_not_build(candidates)
return None
def _validate_pending_children(self):
"""
Validate the content of the pending_children set. Assert if an
internal error is found.
This function is used strictly for debugging the taskmaster by
checking that no invariants are violated. It is not used in
normal operation.
The pending_children set is used to detect cycles in the
dependency graph. We call a "pending child" a child that is
found in the "pending" state when checking the dependencies of
its parent node.
A pending child can occur when the Taskmaster completes a loop
through a cycle. For example, let's imagine a graph made of
three nodes (A, B and C) making a cycle. The evaluation starts
at node A. The Taskmaster first considers whether node A's
child B is up-to-date. Then, recursively, node B needs to
check whether node C is up-to-date. This leaves us with a
dependency graph looking like::
Next candidate \
\
Node A (Pending) --> Node B(Pending) --> Node C (NoState)
^ |
| |
+-------------------------------------+
Now, when the Taskmaster examines the Node C's child Node A,
it finds that Node A is in the "pending" state. Therefore,
Node A is a pending child of node C.
        Pending children indicate that the Taskmaster has potentially
        looped back through a cycle. We say potentially because it could
        also occur when a DAG is evaluated in parallel. For example,
consider the following graph::
Node A (Pending) --> Node B(Pending) --> Node C (Pending) --> ...
| ^
| |
+----------> Node D (NoState) --------+
/
Next candidate /
The Taskmaster first evaluates the nodes A, B, and C and
starts building some children of node C. Assuming, that the
maximum parallel level has not been reached, the Taskmaster
will examine Node D. It will find that Node C is a pending
child of Node D.
In summary, evaluating a graph with a cycle will always
involve a pending child at one point. A pending child might
indicate either a cycle or a diamond-shaped DAG. Only a
        fraction of the nodes ends up being a "pending child" of
another node. This keeps the pending_children set small in
practice.
We can differentiate between the two cases if we wait until
the end of the build. At this point, all the pending children
nodes due to a diamond-shaped DAG will have been properly
built (or will have failed to build). But, the pending
children involved in a cycle will still be in the pending
state.
The taskmaster removes nodes from the pending_children set as
soon as a pending_children node moves out of the pending
state. This also helps to keep the pending_children set small.
"""
for n in self.pending_children:
assert n.state in (NODE_PENDING, NODE_EXECUTING), \
(str(n), StateString[n.state])
assert len(n.waiting_parents) != 0, (str(n), len(n.waiting_parents))
for p in n.waiting_parents:
assert p.ref_count > 0, (str(n), str(p), p.ref_count)
def trace_message(self, message):
return 'Taskmaster: %s\n' % message
def trace_node(self, node):
return '<%-10s %-3s %s>' % (StateString[node.get_state()],
node.ref_count,
repr(str(node)))
def _find_next_ready_node(self):
"""
Finds the next node that is ready to be built.
This is *the* main guts of the DAG walk. We loop through the
list of candidates, looking for something that has no un-built
children (i.e., that is a leaf Node or has dependencies that are
all leaf Nodes or up-to-date). Candidate Nodes are re-scanned
(both the target Node itself and its sources, which are always
scanned in the context of a given target) to discover implicit
dependencies. A Node that must wait for some children to be
built will be put back on the candidates list after the children
have finished building. A Node that has been put back on the
candidates list in this way may have itself (or its sources)
re-scanned, in order to handle generated header files (e.g.) and
the implicit dependencies therein.
Note that this method does not do any signature calculation or
up-to-date check itself. All of that is handled by the Task
class. This is purely concerned with the dependency graph walk.
"""
self.ready_exc = None
T = self.trace
if T: T.write(SCons.Util.UnicodeType('\n') + self.trace_message('Looking for a node to evaluate'))
while True:
node = self.next_candidate()
if node is None:
if T: T.write(self.trace_message('No candidate anymore.') + u'\n')
return None
node = node.disambiguate()
state = node.get_state()
# For debugging only:
#
# try:
# self._validate_pending_children()
# except:
# self.ready_exc = sys.exc_info()
# return node
if CollectStats:
if not hasattr(node.attributes, 'stats'):
node.attributes.stats = Stats()
StatsNodes.append(node)
S = node.attributes.stats
S.considered = S.considered + 1
else:
S = None
if T: T.write(self.trace_message(u' Considering node %s and its children:' % self.trace_node(node)))
if state == NODE_NO_STATE:
# Mark this node as being on the execution stack:
node.set_state(NODE_PENDING)
elif state > NODE_PENDING:
# Skip this node if it has already been evaluated:
if S: S.already_handled = S.already_handled + 1
if T: T.write(self.trace_message(u' already handled (executed)'))
continue
executor = node.get_executor()
try:
children = executor.get_all_children()
except SystemExit:
exc_value = sys.exc_info()[1]
e = SCons.Errors.ExplicitExit(node, exc_value.code)
self.ready_exc = (SCons.Errors.ExplicitExit, e)
if T: T.write(self.trace_message(' SystemExit'))
return node
except Exception as e:
# We had a problem just trying to figure out the
# children (like a child couldn't be linked in to a
# VariantDir, or a Scanner threw something). Arrange to
# raise the exception when the Task is "executed."
self.ready_exc = sys.exc_info()
if S: S.problem = S.problem + 1
if T: T.write(self.trace_message(' exception %s while scanning children.\n' % e))
return node
children_not_visited = []
children_pending = set()
children_not_ready = []
children_failed = False
for child in chain(executor.get_all_prerequisites(), children):
childstate = child.get_state()
if T: T.write(self.trace_message(u' ' + self.trace_node(child)))
if childstate == NODE_NO_STATE:
children_not_visited.append(child)
elif childstate == NODE_PENDING:
children_pending.add(child)
elif childstate == NODE_FAILED:
children_failed = True
if childstate <= NODE_EXECUTING:
children_not_ready.append(child)
# These nodes have not even been visited yet. Add
# them to the list so that on some next pass we can
# take a stab at evaluating them (or their children).
children_not_visited.reverse()
self.candidates.extend(self.order(children_not_visited))
# if T and children_not_visited:
# T.write(self.trace_message(' adding to candidates: %s' % map(str, children_not_visited)))
# T.write(self.trace_message(' candidates now: %s\n' % map(str, self.candidates)))
# Skip this node if any of its children have failed.
#
# This catches the case where we're descending a top-level
# target and one of our children failed while trying to be
# built by a *previous* descent of an earlier top-level
# target.
#
            # It can also occur if a node is reused in multiple
            # targets. The first descent goes through one of the
            # targets; the next descent occurs through the other target.
#
# Note that we can only have failed_children if the
# --keep-going flag was used, because without it the build
            # will stop before diving into the other branch.
#
# Note that even if one of the children fails, we still
# added the other children to the list of candidate nodes
# to keep on building (--keep-going).
if children_failed:
for n in executor.get_action_targets():
n.set_state(NODE_FAILED)
if S: S.child_failed = S.child_failed + 1
if T: T.write(self.trace_message('****** %s\n' % self.trace_node(node)))
continue
if children_not_ready:
for child in children_not_ready:
# We're waiting on one or more derived targets
# that have not yet finished building.
if S: S.not_built = S.not_built + 1
# Add this node to the waiting parents lists of
# anything we're waiting on, with a reference
# count so we can be put back on the list for
# re-evaluation when they've all finished.
node.ref_count = node.ref_count + child.add_to_waiting_parents(node)
if T: T.write(self.trace_message(u' adjusted ref count: %s, child %s' %
(self.trace_node(node), repr(str(child)))))
if T:
for pc in children_pending:
T.write(self.trace_message(' adding %s to the pending children set\n' %
self.trace_node(pc)))
self.pending_children = self.pending_children | children_pending
continue
# Skip this node if it has side-effects that are
# currently being built:
wait_side_effects = False
for se in executor.get_action_side_effects():
if se.get_state() == NODE_EXECUTING:
se.add_to_waiting_s_e(node)
wait_side_effects = True
if wait_side_effects:
if S: S.side_effects = S.side_effects + 1
continue
# The default when we've gotten through all of the checks above:
# this node is ready to be built.
if S: S.build = S.build + 1
if T: T.write(self.trace_message(u'Evaluating %s\n' %
self.trace_node(node)))
# For debugging only:
#
# try:
# self._validate_pending_children()
# except:
# self.ready_exc = sys.exc_info()
# return node
return node
return None
def next_task(self):
"""
Returns the next task to be executed.
This simply asks for the next Node to be evaluated, and then wraps
it in the specific Task subclass with which we were initialized.
"""
node = self._find_next_ready_node()
if node is None:
return None
executor = node.get_executor()
if executor is None:
return None
tlist = executor.get_all_targets()
task = self.tasker(self, tlist, node in self.original_top, node)
try:
task.make_ready()
        except Exception as e:
# We had a problem just trying to get this task ready (like
# a child couldn't be linked to a VariantDir when deciding
# whether this node is current). Arrange to raise the
# exception when the Task is "executed."
self.ready_exc = sys.exc_info()
if self.ready_exc:
task.exception_set(self.ready_exc)
self.ready_exc = None
return task
def will_not_build(self, nodes, node_func=lambda n: None):
"""
        Perform clean-up for nodes that will never be built. Invokes
        a user defined function on all of these nodes (including all
        of their parents).
"""
T = self.trace
pending_children = self.pending_children
to_visit = set(nodes)
pending_children = pending_children - to_visit
if T:
for n in nodes:
T.write(self.trace_message(' removing node %s from the pending children set\n' %
self.trace_node(n)))
try:
while len(to_visit):
node = to_visit.pop()
node_func(node)
# Prune recursion by flushing the waiting children
# list immediately.
parents = node.waiting_parents
node.waiting_parents = set()
to_visit = to_visit | parents
pending_children = pending_children - parents
for p in parents:
p.ref_count = p.ref_count - 1
if T: T.write(self.trace_message(' removing parent %s from the pending children set\n' %
self.trace_node(p)))
except KeyError:
# The container to_visit has been emptied.
pass
        # We have to stick the pending_children set back into the
        # taskmaster because Python 1.5.2 compatibility does not
        # allow us to use in-place updates
self.pending_children = pending_children
def stop(self):
"""
Stops the current build completely.
"""
self.next_candidate = self.no_next_candidate
def cleanup(self):
"""
Check for dependency cycles.
"""
if not self.pending_children:
return
nclist = [(n, find_cycle([n], set())) for n in self.pending_children]
genuine_cycles = [
node for node,cycle in nclist
if cycle or node.get_state() != NODE_EXECUTED
]
if not genuine_cycles:
# All of the "cycles" found were single nodes in EXECUTED state,
# which is to say, they really weren't cycles. Just return.
return
desc = 'Found dependency cycle(s):\n'
for node, cycle in nclist:
if cycle:
desc = desc + " " + " -> ".join(map(str, cycle)) + "\n"
else:
desc = desc + \
" Internal Error: no cycle found for node %s (%s) in state %s\n" % \
(node, repr(node), StateString[node.get_state()])
raise SCons.Errors.UserError(desc)
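# --- Added usage sketch (illustrative, not in the original file) --------------
# The Taskmaster is normally driven by SCons.Job; the canonical serial loop
# looks roughly like this (error handling elided, shape approximate):
#
#     tm = Taskmaster(targets)
#     while True:
#         task = tm.next_task()
#         if task is None:
#             break
#         try:
#             task.prepare()
#             if task.needs_execute():
#                 task.execute()
#         except Exception:
#             task.failed()
#         else:
#             task.executed()
#         task.postprocess()
#     tm.cleanup()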
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| 38.855689 | 115 | 0.587982 |
3828f70b7f5ae05655f477d9ebe2dd6dff1c08ee | 1,315 | py | Python |
accounts/templatetags/avatar.py | lesspointless/Shakal-NG | eee491af94527228735c2bca7644605effd74b37 | ["MIT"] | null | null | null |
accounts/templatetags/avatar.py | lesspointless/Shakal-NG | eee491af94527228735c2bca7644605effd74b37 | ["MIT"] | null | null | null |
accounts/templatetags/avatar.py | lesspointless/Shakal-NG | eee491af94527228735c2bca7644605effd74b37 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import hashlib
from django.conf import settings
from django.utils.html import escape
from django_jinja import library
from accounts.utils import generated_avatar
from autoimagefield.utils import thumbnail
GRAVATAR_URL_PREFIX = getattr(settings, "GRAVATAR_URL_PREFIX", "//www.gravatar.com/")
GRAVATAR_DEFAULT_IMAGE = getattr(settings, "GRAVATAR_DEFAULT_IMAGE", "")
GRAVATAR_DEFAULT_SIZE = getattr(settings, "GRAVATAR_DEFAULT_SIZE", 200)
@library.global_function
def gravatar_for_email(email, size=GRAVATAR_DEFAULT_SIZE):
url = "%savatar/%s/?s=%s&default=%s" % (GRAVATAR_URL_PREFIX, hashlib.md5(bytes(email.encode('utf-8'))).hexdigest(), str(size), GRAVATAR_DEFAULT_IMAGE)
return escape(url)
@library.global_function
def avatar_for_user(user, size=GRAVATAR_DEFAULT_SIZE):
if user.avatar:
avatar = thumbnail(user.avatar, size=(size, size), crop=True)
if avatar:
return avatar.url
return gravatar_for_email(user.email, size)
@library.global_function
def prefetch_avatars_for_ip(*object_lists):
icon_cache = {}
for object_list in object_lists:
for obj in object_list:
ip = obj.ip_address
if ip:
                if ip not in icon_cache:
icon_cache[ip] = generated_avatar(ip)
obj.ip_address_avatar = icon_cache[ip]
return ''
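# --- Added usage sketch (illustrative, not in the original file) --------------
# django_jinja exposes the globals above to Jinja templates, where they would
# typically be called like this (template snippet, names hypothetical):
#
#     <img src="{{ avatar_for_user(user, 64) }}" alt="avatar">
#     <img src="{{ gravatar_for_email('someone@example.com') }}" alt="avatar">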
| 27.978723 | 151 | 0.772624 |
d4b8178d1543edaf5fb9cd0a68ea5b917c93d09e | 1,691 | py | Python |
fae2/abouts/urls.py | opena11y/oaa-fae2 | 3c5a54552e219f9c210b73b4ae6c6d71daf70605 | ["Apache-2.0"] | 39 | 2016-01-08T23:54:42.000Z | 2021-11-03T17:35:54.000Z |
fae2/abouts/urls.py | opena11y/oaa-fae2 | 3c5a54552e219f9c210b73b4ae6c6d71daf70605 | ["Apache-2.0"] | 66 | 2016-01-08T23:50:29.000Z | 2021-06-10T17:28:07.000Z |
fae2/abouts/urls.py | opena11y/oaa-fae2 | 3c5a54552e219f9c210b73b4ae6c6d71daf70605 | ["Apache-2.0"] | 19 | 2015-12-10T22:21:12.000Z | 2020-05-01T06:39:34.000Z |
"""
Copyright 2014-2016 University of Illinois
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
File: abouts/urls.py
Author: Jon Gunderson
"""
# abouts/urls.py
from __future__ import absolute_import
from django.conf.urls import url
from .views import ConceptsTermsView
from .views import DisclaimerView
from .views import OverviewView
from .views import PrivacyView
from .views import ReportIssuesView
from .views import SharingView
from .views import VersionsView
from .views import FAQView
from .views import VPATView
urlpatterns = [
url(r'^$', OverviewView.as_view(), name='overview'),
url(r'^concepts/$', ConceptsTermsView.as_view(), name='concepts_terms'),
url(r'^disclaimer/$', DisclaimerView.as_view(), name='disclaimer'),
url(r'^issues/$', ReportIssuesView.as_view(), name='report_issues'),
url(r'^privacy/$', PrivacyView.as_view(), name='privacy'),
url(r'^sharing/$', SharingView.as_view(), name='sharing'),
url(r'^versions/$', VersionsView.as_view(), name='versions'),
url(r'^faqs/$', FAQView.as_view(), name='faqs'),
url(r'^vpat/$', VPATView.as_view(), name='vpat'),
]
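# --- Added note (illustrative, not in the original file) ----------------------
# Once this URLconf is included by the project, the views can be reversed by
# name; the prefix below depends on how the project mounts this module:
#
#     from django.urls import reverse
#     reverse('overview')   # -> '<prefix>/'
#     reverse('faqs')       # -> '<prefix>/faqs/'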
| 35.978723 | 79 | 0.702543 |
3c075633d0c838fdb466244862fb59d2b5eaef5f | 10,912 | py | Python |
general/mutils.py | achinta/CategoricalNF | d8717a037e8f13641e9d9a89abf66fba38e23f91 | ["MIT"] | 47 | 2020-06-20T10:00:39.000Z | 2022-03-08T13:41:45.000Z |
general/mutils.py | achinta/CategoricalNF | d8717a037e8f13641e9d9a89abf66fba38e23f91 | ["MIT"] | 1 | 2020-07-05T20:58:19.000Z | 2020-09-08T12:45:29.000Z |
general/mutils.py | achinta/CategoricalNF | d8717a037e8f13641e9d9a89abf66fba38e23f91 | ["MIT"] | 10 | 2020-07-05T17:18:40.000Z | 2022-01-07T03:17:40.000Z |
import torch
import torch.nn as nn
import argparse
import random
import numpy as np
import datetime
import os
import sys
import pickle
from glob import glob
sys.path.append("../")
from general.radam import RAdam, AdamW
PARAM_CONFIG_FILE = "param_config.pik"
####################
## OUTPUT CONTROL ##
####################
# 0 => Full debug
# 1 => Reduced output
# 2 => No output at all (recommended on cluster)
DEBUG_LEVEL = 0
def set_debug_level(level):
global DEBUG_LEVEL
DEBUG_LEVEL = level
def debug_level():
global DEBUG_LEVEL
return DEBUG_LEVEL
class Tracker:
def __init__(self, exp_decay=1.0):
self.val_sum = 0.0
self.counter = 0
self.exp_decay = exp_decay
def add(self, val):
self.val_sum = self.val_sum * self.exp_decay + val
self.counter = self.counter * self.exp_decay + 1
def get_mean(self, reset=False):
if self.counter <= 0:
mean = 0
else:
mean = self.val_sum / self.counter
if reset:
self.reset()
return mean
def reset(self):
self.val_sum = 0.0
self.counter = 0
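# --- Added usage sketch (illustrative, not in the original file) --------------
# With the default exp_decay=1.0 the Tracker keeps a plain arithmetic mean;
# an exp_decay < 1 turns it into an exponentially weighted running mean.
def _demo_tracker():
    t = Tracker()
    for v in (1.0, 2.0, 3.0):
        t.add(v)
    assert t.get_mean(reset=True) == 2.0
    assert t.get_mean() == 0          # counter was reset above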
###################
## MODEL LOADING ##
###################
def load_model(checkpoint_path, model=None, optimizer=None, lr_scheduler=None, load_best_model=False, warn_unloaded_weights=True):
# Determine the checkpoint file to load
if os.path.isdir(checkpoint_path):
checkpoint_files = sorted(glob(os.path.join(checkpoint_path, "*.tar")))
if len(checkpoint_files) == 0:
print("No checkpoint files found at", checkpoint_path)
return dict()
checkpoint_file = checkpoint_files[-1]
else:
checkpoint_file = checkpoint_path
# Loading checkpoint
print("Loading checkpoint \"" + str(checkpoint_file) + "\"")
if torch.cuda.is_available():
checkpoint = torch.load(checkpoint_file)
else:
checkpoint = torch.load(checkpoint_file, map_location='cpu')
# If best model should be loaded, look for it if checkpoint_path is a directory
if os.path.isdir(checkpoint_path) and load_best_model:
if os.path.isfile(checkpoint["best_save_dict"]["file"]):
print("Load best model!")
return load_model(checkpoint["best_save_dict"]["file"], model=model, optimizer=optimizer, lr_scheduler=lr_scheduler, load_best_model=False)
else:
print("[!] WARNING: Best save dict file is listed as \"%s\", but file could not been found. Using default one..." % checkpoint["best_save_dict"]["file"])
# Load the model parameters
if model is not None:
pretrained_model_dict = {key: val for key, val in checkpoint['model_state_dict'].items()}
model_dict = model.state_dict()
unchanged_keys = [key for key in model_dict.keys() if key not in pretrained_model_dict.keys()]
        if warn_unloaded_weights and len(unchanged_keys) != 0: # Parameters in this list may have been missed when saving
print("[#] WARNING: Some weights have been left unchanged by the loading of the model: " + str(unchanged_keys))
model_dict.update(pretrained_model_dict)
model.load_state_dict(model_dict)
# Load the state and parameters of the optimizer
if optimizer is not None and 'optimizer_state_dict' in checkpoint:
optimizer.load_state_dict(checkpoint['optimizer_state_dict'])
# Load the state of the learning rate scheduler
if lr_scheduler is not None and 'scheduler_state_dict' in checkpoint:
lr_scheduler.load_state_dict(checkpoint['scheduler_state_dict'])
# Load the additional parameters that were saved in the dict
add_param_dict = dict()
for key, val in checkpoint.items():
if "state_dict" not in key:
add_param_dict[key] = val
return add_param_dict
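# --- Added usage sketch (illustrative, not in the original file) --------------
# Typical restore flow; MyModel, the checkpoint directory and the "iteration"
# key are hypothetical placeholders:
#
#     model = MyModel().to(get_device())
#     optimizer = torch.optim.Adam(model.parameters())
#     extra = load_model("checkpoints/", model=model, optimizer=optimizer,
#                        load_best_model=True)
#     start_iter = extra.get("iteration", 0)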
def load_model_from_args(args, model_function, checkpoint_path=None, load_best_model=False):
model_params, optimizer_params = args_to_params(args)
model = model_function(model_params).to(get_device())
if checkpoint_path is not None:
load_model(checkpoint_path, model=model, load_best_model=load_best_model)
return model
def load_args(checkpoint_path):
if os.path.isfile(checkpoint_path):
checkpoint_path = checkpoint_path.rsplit("/",1)[0]
param_file_path = os.path.join(checkpoint_path, PARAM_CONFIG_FILE)
    if not os.path.exists(param_file_path):
        print("[!] ERROR: Could not find parameter config file: " + str(param_file_path))
        sys.exit(1)
with open(param_file_path, "rb") as f:
print("Loading parameter configuration from \"" + str(param_file_path) + "\"")
args = pickle.load(f)
return args
def general_args_to_params(args, model_params=None):
optimizer_params = {
"optimizer": args.optimizer,
"learning_rate": args.learning_rate,
"weight_decay": args.weight_decay,
"lr_decay_factor": args.lr_decay_factor,
"lr_decay_step": args.lr_decay_step,
"lr_minimum": args.lr_minimum,
"momentum": args.momentum,
"beta1": args.beta1,
"beta2": args.beta2,
"warmup": args.warmup
}
# Set seed
np.random.seed(args.seed)
random.seed(args.seed)
torch.manual_seed(args.seed)
    if torch.cuda.is_available():
torch.cuda.manual_seed(args.seed)
torch.cuda.manual_seed_all(args.seed)
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False
return model_params, optimizer_params
OPTIMIZER_SGD = 0
OPTIMIZER_ADAM = 1
OPTIMIZER_ADAMAX = 2
OPTIMIZER_RMSPROP = 3
OPTIMIZER_RADAM = 4
OPTIMIZER_ADAM_WARMUP = 5
def create_optimizer_from_args(parameters_to_optimize, optimizer_params):
if optimizer_params["optimizer"] == OPTIMIZER_SGD:
optimizer = torch.optim.SGD(parameters_to_optimize,
lr=optimizer_params["learning_rate"],
weight_decay=optimizer_params["weight_decay"],
momentum=optimizer_params["momentum"])
elif optimizer_params["optimizer"] == OPTIMIZER_ADAM:
optimizer = torch.optim.Adam(parameters_to_optimize,
lr=optimizer_params["learning_rate"],
betas=(optimizer_params["beta1"], optimizer_params["beta2"]),
weight_decay=optimizer_params["weight_decay"])
elif optimizer_params["optimizer"] == OPTIMIZER_ADAMAX:
optimizer = torch.optim.Adamax(parameters_to_optimize,
lr=optimizer_params["learning_rate"],
weight_decay=optimizer_params["weight_decay"])
elif optimizer_params["optimizer"] == OPTIMIZER_RMSPROP:
optimizer = torch.optim.RMSprop(parameters_to_optimize,
lr=optimizer_params["learning_rate"],
weight_decay=optimizer_params["weight_decay"])
elif optimizer_params["optimizer"] == OPTIMIZER_RADAM:
optimizer = RAdam(parameters_to_optimize,
lr=optimizer_params["learning_rate"],
betas=(optimizer_params["beta1"], optimizer_params["beta2"]),
weight_decay=optimizer_params["weight_decay"])
elif optimizer_params["optimizer"] == OPTIMIZER_ADAM_WARMUP:
optimizer = AdamW(parameters_to_optimize,
lr=optimizer_params["learning_rate"],
weight_decay=optimizer_params["weight_decay"],
betas=(optimizer_params["beta1"], optimizer_params["beta2"]),
warmup=optimizer_params["warmup"])
else:
print("[!] ERROR: Unknown optimizer: " + str(optimizer_params["optimizer"]))
sys.exit(1)
return optimizer
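# Hedged usage sketch (my addition): a minimal optimizer_params dict containing
# only the keys that the OPTIMIZER_ADAM branch above actually reads.
# opt_params = {"optimizer": OPTIMIZER_ADAM, "learning_rate": 1e-3,
#               "beta1": 0.9, "beta2": 0.999, "weight_decay": 0.0}
# optimizer = create_optimizer_from_args(model.parameters(), opt_params)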
def get_param_val(param_dict, key, default_val=None, allow_default=True, error_location="", warning_if_default=True):
if key in param_dict:
return param_dict[key]
elif allow_default:
if warning_if_default:
print("[#] WARNING: Using default value %s for key %s" % (str(default_val), str(key)))
return default_val
else:
assert False, "[!] ERROR (%s): could not find key \"%s\" in the dictionary although it is required." % (error_location, str(key))
def append_in_dict(val_dict, key, new_val):
if key not in val_dict:
val_dict[key] = list()
val_dict[key].append(new_val)
####################################
## VISUALIZATION WITH TENSORBOARD ##
####################################
def write_dict_to_tensorboard(writer, val_dict, base_name, iteration):
for name, val in val_dict.items():
if isinstance(val, dict):
write_dict_to_tensorboard(writer, val, base_name=base_name+"/"+name, iteration=iteration)
elif isinstance(val, (list, np.ndarray)):
continue
elif isinstance(val, (int, float)):
writer.add_scalar(base_name + "/" + name, val, iteration)
else:
if debug_level() == 0:
print("Skipping output \""+str(name) + "\" of value " + str(val) + "(%s)" % (val.__class__.__name__))
###############################
## WRAPPER FOR DATA PARALLEL ##
###############################
class WrappedDataParallel(nn.DataParallel):
def __getattr__(self, name):
try:
return super().__getattr__(name)
except AttributeError:
return getattr(self.module, name)
def get_device():
return torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu")
def one_hot(x, num_classes, dtype=torch.float32):
if isinstance(x, np.ndarray):
x_onehot = np.zeros(x.shape + (num_classes,), dtype=np.float32)
x_onehot[np.arange(x.shape[0]), x] = 1.0
elif isinstance(x, torch.Tensor):
assert torch.max(x) < num_classes, "[!] ERROR: One-hot input has larger entries (%s) than classes (%i)" % (str(torch.max(x)), num_classes)
x_onehot = x.new_zeros(x.shape + (num_classes,), dtype=dtype)
x_onehot.scatter_(-1, x.unsqueeze(dim=-1), 1)
else:
print("[!] ERROR: Unknown object given for one-hot conversion:", x)
sys.exit(1)
return x_onehot
def _create_length_mask(length, max_len=None, dtype=torch.float32):
if max_len is None:
max_len = length.max()
mask = (torch.arange(max_len, device=length.device).view(1, max_len) < length.unsqueeze(dim=-1)).to(dtype=dtype)
return mask
def create_transformer_mask(length, max_len=None, dtype=torch.float32):
mask = _create_length_mask(length=length, max_len=max_len, dtype=torch.bool)
mask = ~mask  # Negate the mask: positions that should be masked need True, all others False
# mask = mask.masked_fill(mask == 0, float('-inf')).masked_fill(mask == 1, float(0.0))
return mask
def create_channel_mask(length, max_len=None, dtype=torch.float32):
mask = _create_length_mask(length=length, max_len=max_len, dtype=dtype)
mask = mask.unsqueeze(dim=-1) # Unsqueeze over channels
return mask
def create_T_one_hot(length, dataset_max_len, dtype=torch.float32):
if length is None:
print("Length", length)
print("Dataset max len", dataset_max_len)
max_batch_len = length.max()
assert max_batch_len <= dataset_max_len, "[!] ERROR - T_one_hot: Max batch size (%s) was larger than given dataset max length (%s)" % (str(max_batch_len.item()), str(dataset_max_len))
time_range = torch.arange(max_batch_len, device=length.device).view(1, max_batch_len).expand(length.size(0),-1)
length_onehot_pos = one_hot(x=time_range, num_classes=dataset_max_len, dtype=dtype)
inv_time_range = (length.unsqueeze(dim=-1)-1) - time_range
length_mask = (inv_time_range>=0.0).float()
inv_time_range = inv_time_range.clamp(min=0.0)
length_onehot_neg = one_hot(x=inv_time_range, num_classes=dataset_max_len, dtype=dtype)
length_onehot = torch.cat([length_onehot_pos, length_onehot_neg], dim=-1)
length_onehot = length_onehot * length_mask.unsqueeze(dim=-1)
return length_onehot
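# --- Hedged demo (my addition, assumes a CPU-only run) ---
# one_hot maps a (batch,) index tensor to (batch, num_classes);
# create_channel_mask yields a (batch, max_len, 1) 0/1 mask;
# create_transformer_mask marks padded positions with True.
if __name__ == "__main__":
    lengths = torch.tensor([2, 4, 3])
    print(one_hot(torch.tensor([0, 2, 1]), num_classes=3))
    print(create_channel_mask(lengths).shape)   # torch.Size([3, 4, 1])
    print(create_transformer_mask(lengths))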
| 35.894737
| 184
| 0.726815
|
09cc5dd62c7c8b77d3a708404e071797f9a82def
| 873
|
py
|
Python
|
iqfeed/__init__.py
|
dangerzone/iqfeed
|
296a35c0df97ccd97829425028178d04ba12259b
|
[
"Apache-2.0"
] | 26
|
2016-07-06T07:16:15.000Z
|
2021-02-28T22:34:24.000Z
|
iqfeed/__init__.py
|
dangerzone/iqfeed
|
296a35c0df97ccd97829425028178d04ba12259b
|
[
"Apache-2.0"
] | 2
|
2016-11-07T21:29:05.000Z
|
2020-01-18T05:36:21.000Z
|
iqfeed/__init__.py
|
dangerzone/iqfeed
|
296a35c0df97ccd97829425028178d04ba12259b
|
[
"Apache-2.0"
] | 16
|
2016-07-13T04:13:05.000Z
|
2020-10-03T18:12:22.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from .download import get_bars
from .main import main
__all__ = ['get_bars', 'main']
| 37.956522
| 62
| 0.767468
|
d972fcbaf3cac98c49e5d09857f1ec6ee7b0afd6
| 571
|
py
|
Python
|
Python/PythonCrashCourse2ndEdition/6-9_favoriteplaces.py
|
awakun/LearningPython
|
578f9290c8065df37ade49abe4b0ab4e6b35a1bd
|
[
"MIT"
] | null | null | null |
Python/PythonCrashCourse2ndEdition/6-9_favoriteplaces.py
|
awakun/LearningPython
|
578f9290c8065df37ade49abe4b0ab4e6b35a1bd
|
[
"MIT"
] | null | null | null |
Python/PythonCrashCourse2ndEdition/6-9_favoriteplaces.py
|
awakun/LearningPython
|
578f9290c8065df37ade49abe4b0ab4e6b35a1bd
|
[
"MIT"
] | null | null | null |
favorite_places = {
'Dave': ['Seattle', 'Crystal Mountain'],
'Lizzy': 'Baltimore',
'Taylor': ['Philadelphia', 'Tokyo', 'San Francisco']
}
for name, places in favorite_places.items():
# isinstance checks whether or not an object is of a certain type
# doing this to see if places is a single string
if isinstance(places, str):
print(f"{name} likes {places}!")
else:
# join() joins strings, call it on the delimiter(s) you want with the list as the argument
print(f'{name} likes all these places: {", ".join(places)}!')
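# A minimal variant (my addition, not part of the exercise): normalize each
# value to a list first, so the loop body needs no isinstance() check.
for name, places in favorite_places.items():
    places_list = places if isinstance(places, list) else [places]
    print(f'{name} likes: {", ".join(places_list)}')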
| 38.066667
| 98
| 0.642732
|
ea979e89d228da33a6eb1ec60177f6d9a26d57ce
| 1,090
|
py
|
Python
|
01_Data Structures and Algorithms/15_filtering_sequence_elements.py
|
raghukrishnamoorthy/Python-Cookbook
|
d4547c05b8f0d21ebbcc06b3833d226156f993f0
|
[
"MIT"
] | null | null | null |
01_Data Structures and Algorithms/15_filtering_sequence_elements.py
|
raghukrishnamoorthy/Python-Cookbook
|
d4547c05b8f0d21ebbcc06b3833d226156f993f0
|
[
"MIT"
] | null | null | null |
01_Data Structures and Algorithms/15_filtering_sequence_elements.py
|
raghukrishnamoorthy/Python-Cookbook
|
d4547c05b8f0d21ebbcc06b3833d226156f993f0
|
[
"MIT"
] | null | null | null |
# Filter a sequence using some criteria
mylist = [1, 4, -5, 10, -7, 2, 3, -1]
print([n for n in mylist if n < 2])
# If original sequence is large
pos = (n for n in mylist if n < 2)
for x in pos:
print(x)
# Use separate function when filter logic is complicated
values = ['1', '2', '-3', '-', '4', 'N/A', '5']
def is_int(val):
try:
x = int(val)
return True
except ValueError:
return False
ivals = filter(is_int, values)
for item in ivals:
print(item)
myList = list(filter(is_int, values))  # re-run the filter: the iterator above is already exhausted
print(myList)
# List Comprehensions
mylist = [1, 4, -5, 10, -7, 2, 3, -1]
import math
print([math.sqrt(n) for n in mylist if n > 0])
#if and else in list comprehension
clip_neg = [n if n > 0 else 0 for n in mylist]
addresses = [
'5412 N CLARK',
'5148 N CLARK',
'5800 E 58TH',
'2122 N CLARK',
'5645 N RAVENSWOOD',
'1060 W ADDISON',
'4801 N BROADWAY',
'1039 W GRANVILLE',
]
# Iterable and selector
from itertools import compress
counts = [0, 3, 10, 4, 1, 7, 6, 1]
more5 = [n > 5 for n in counts]
filtered_list = list(compress(addresses, more5))
print(filtered_list)
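# Hedged extra example (my addition): itertools.filterfalse is the complement
# of filter(), keeping the items the predicate rejects.
from itertools import filterfalse
print(list(filterfalse(is_int, values)))  # ['-', 'N/A']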
| 16.515152
| 56
| 0.62844
|
5afb329039665b806dab3851d99eb17c49c9f5fe
| 1,822
|
py
|
Python
|
tutorials/KNIX-General-Tutorial/resize/resize.py
|
JoshTDN03/knix
|
def52081509526529f9d772775ba6848cc022aea
|
[
"Apache-2.0"
] | 167
|
2020-04-20T22:16:29.000Z
|
2022-03-15T22:53:43.000Z
|
tutorials/KNIX-General-Tutorial/resize/resize.py
|
JoshTDN03/knix
|
def52081509526529f9d772775ba6848cc022aea
|
[
"Apache-2.0"
] | 98
|
2020-05-07T03:34:44.000Z
|
2022-01-04T21:30:49.000Z
|
tutorials/KNIX-General-Tutorial/resize/resize.py
|
JoshTDN03/knix
|
def52081509526529f9d772775ba6848cc022aea
|
[
"Apache-2.0"
] | 20
|
2020-04-29T14:45:29.000Z
|
2021-09-26T09:51:04.000Z
|
# Copyright 2020 The KNIX Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json, base64, io
from PIL import Image
# pre-requisite (PIL module should be downloaded in the current folder)
# pip3 install pillow -t .
# OR
# docker run -it --rm -u $(id -u):$(id -g) -v $(pwd):/temp -w /temp python:3.6 pip3 install pillow -t .
# zip -r ../resize.zip .
def handle(event, context):
filename = event['Filename']
print('resize ' + filename)
img = io.BytesIO(base64.b64decode(event['EncodedFile'])) # Read bytes from input
with Image.open(img) as image:
image.thumbnail(tuple(x/2 for x in image.size)) # Resize using PIL
buf = io.BytesIO()
image.save(buf, format=image.format) # Store bytes in a buffer
resized_name = filename+'_resize.jpg'
if context is not None:
context.put(resized_name, base64.b64encode(buf.getvalue()).decode()) # Write buffer to KNIX key-value store
print(resized_name + ' written to datalayer')
else:
with open(filename+'_resize.jpg', 'wb') as f:
f.write(buf.getvalue())
print(resized_name + ' written to local filesystem')
event['Resized'] = filename+'_resize.jpg' # Return the name of the resize file
event['EncodedFile'] = ''
return event
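# Hedged local smoke test (my addition, not part of the tutorial): build the
# event dict that handle() expects from a file on disk and run it without a
# KNIX context, so the resized image is written to the local filesystem.
# 'sample.jpg' is a placeholder path.
if __name__ == '__main__':
    with open('sample.jpg', 'rb') as f:
        event = {'Filename': 'sample', 'EncodedFile': base64.b64encode(f.read()).decode()}
    print(handle(event, None))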
| 39.608696
| 114
| 0.67124
|
807d66d239467aaeed76b87a8b02b80fc0db4eca
| 1,578
|
py
|
Python
|
app/modules/notice_content/model.py
|
DsCodeStudio98m0f/DTia-Nugrahab
|
218ef140727849206fe76f42f43a5a231cfda9d9
|
[
"Apache-2.0"
] | 16
|
2020-03-26T13:21:15.000Z
|
2021-07-25T18:49:05.000Z
|
app/modules/notice_content/model.py
|
DsCodeStudio98m0f/DTia-Nugrahab
|
218ef140727849206fe76f42f43a5a231cfda9d9
|
[
"Apache-2.0"
] | 13
|
2020-03-24T18:19:48.000Z
|
2022-03-12T00:18:59.000Z
|
app/modules/notice_content/model.py
|
DsCodeStudio98m0f/DTia-Nugrahab
|
218ef140727849206fe76f42f43a5a231cfda9d9
|
[
"Apache-2.0"
] | 14
|
2020-03-31T01:02:38.000Z
|
2021-09-17T01:10:36.000Z
|
"""
Notice Content database models
PROJECT: BaoAI Backend
AUTHOR: henry <703264459@qq.com>
WEBSITE: http://www.baoai.co
COPYRIGHT: Copyright © 2016-2020 广州源宝网络有限公司 Guangzhou Yuanbao Network Co., Ltd. ( http://www.ybao.org )
LICENSE: Apache-2.0
CREATEDATE: 2019-11-30 02:22:26
"""
from app import db
from sqlalchemy.schema import FetchedValue
from app.common.mixin import *
class Notice_content(TableMixin, db.Model):
"""
Notice Content database model.
"""
__tablename__ = 'notice_content'
title = db.Column(db.String(255), nullable=False, unique=False, index=True)
icon = db.Column(db.String(100), nullable=True, unique=False, index=False, default='fa fa-circle-o')
content = db.Column(db.Text(), nullable=True, unique=False, index=False)
receiver = db.Column(db.Integer(), nullable=True, unique=False, index=False)
module = db.Column(db.String(100), nullable=True, unique=False, index=False)
reference = db.Column(db.String(255), nullable=True, unique=False, index=False)
reference_params = db.Column(db.String(255), nullable=True, unique=False, index=False)
status = db.Column(db.Boolean(), nullable=False, unique=False, index=True, default=True)
weight = db.Column(db.Integer(), nullable=True, unique=False, index=False, default='0')
def __repr__(self):
return (
"<{class_name}("
"id={self.id}, "
"title=\"{self.title}\""
")>".format(
class_name=self.__class__.__name__,
self=self
)
)
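# Hedged usage sketch (my addition): assumes a configured Flask-SQLAlchemy
# session and an active application context; `id` is inherited from TableMixin.
#
#   notice = Notice_content(title='Welcome', content='<p>Hello</p>', receiver=1)
#   db.session.add(notice)
#   db.session.commit()
#   latest = Notice_content.query.order_by(Notice_content.weight.desc()).first()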
| 35.863636
| 104
| 0.65526
|
5dea14955655da3660d9ef8ba7b6e7ff22532ff0
| 598
|
py
|
Python
|
speechclas/data_utils.py
|
deephdc/speech-to-text-tf
|
e4269aad722efafa2cfa3174e335da6ecd36c4e5
|
[
"Apache-2.0"
] | 1
|
2020-02-03T16:55:25.000Z
|
2020-02-03T16:55:25.000Z
|
speechclas/data_utils.py
|
deephdc/speech-to-text-tf
|
e4269aad722efafa2cfa3174e335da6ecd36c4e5
|
[
"Apache-2.0"
] | null | null | null |
speechclas/data_utils.py
|
deephdc/speech-to-text-tf
|
e4269aad722efafa2cfa3174e335da6ecd36c4e5
|
[
"Apache-2.0"
] | 1
|
2019-07-31T12:51:25.000Z
|
2019-07-31T12:51:25.000Z
|
"""
Miscellaneous functions manage data.
Date: September 2018
Author: Ignacio Heredia
Email: iheredia@ifca.unican.es
Github: ignacioheredia
"""
import subprocess
import warnings
def mount_nextcloud(frompath, topath):
"""
Mount a NextCloud folder on your local machine or vice versa.
"""
command = ['rclone', 'copy', frompath, topath]
result = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output, error = result.communicate()
if error:
warnings.warn("Error while mounting NextCloud: {}".format(error))
return output, error
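if __name__ == "__main__":
    # Hedged demo (my addition): "remote:folder" must exist in your rclone
    # config; this only shells out to rclone and surfaces errors as warnings.
    output, error = mount_nextcloud("remote:folder", "./data")
    print(output, error)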
| 23
| 86
| 0.714047
|
bcbb3b7498ce310675eca04be0dd27fc40521a23
| 4,305
|
py
|
Python
|
model.py
|
neilsonxia/chineseocr
|
269360773e8100a84bbbcc6f773d2a00a540a691
|
[
"MIT"
] | null | null | null |
model.py
|
neilsonxia/chineseocr
|
269360773e8100a84bbbcc6f773d2a00a540a691
|
[
"MIT"
] | null | null | null |
model.py
|
neilsonxia/chineseocr
|
269360773e8100a84bbbcc6f773d2a00a540a691
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import time
import cv2
import numpy as np
from PIL import Image
from glob import glob
from crnn.crnn import crnnOcr as crnnOcr
from text.detector.detectors import TextDetector
from apphelper.image import get_boxes,letterbox_image
from config import opencvFlag,GPU,IMGSIZE
from text.opencv_dnn_detect import angle_detect  ## text orientation detection, supports dnn/tensorflow
from apphelper.image import estimate_skew_angle ,rotate_cut_img,xy_rotate_box,sort_box,box_rotate,solve
if opencvFlag=='opencv':
from text import opencv_dnn_detect as detect ##opencv dnn model for darknet
elif opencvFlag=='darknet':
from text import darknet_detect as detect
else:
## keras-based text detection
from text import keras_detect as detect
def text_detect(img,
MAX_HORIZONTAL_GAP=30,
MIN_V_OVERLAPS=0.6,
MIN_SIZE_SIM=0.6,
TEXT_PROPOSALS_MIN_SCORE=0.7,
TEXT_PROPOSALS_NMS_THRESH=0.3,
TEXT_LINE_NMS_THRESH = 0.3,
MIN_RATIO=1.0,
LINE_MIN_SCORE=0.8,
TEXT_PROPOSALS_WIDTH=5,
MIN_NUM_PROPOSALS=1,
):
boxes, scores = detect.text_detect(np.array(img))
boxes = np.array(boxes,dtype=np.float32)
scores = np.array(scores,dtype=np.float32)
textdetector = TextDetector(MAX_HORIZONTAL_GAP,MIN_V_OVERLAPS,MIN_SIZE_SIM)
shape = img.size[::-1]
boxes = textdetector.detect(boxes,
scores[:, np.newaxis],
shape,
TEXT_PROPOSALS_MIN_SCORE,
TEXT_PROPOSALS_NMS_THRESH,
TEXT_LINE_NMS_THRESH,
MIN_RATIO,
LINE_MIN_SCORE,
TEXT_PROPOSALS_WIDTH,
MIN_NUM_PROPOSALS)
text_recs = get_boxes(boxes)
newBox = []
rx = 1
ry = 1
for box in text_recs:
x1,y1 = (box[0],box[1])
x2,y2 = (box[2],box[3])
x3,y3 = (box[6],box[7])
x4,y4 = (box[4],box[5])
newBox.append([x1*rx,y1*ry,x2*rx,y2*ry,x3*rx,y3*ry,x4*rx,y4*ry])
return newBox
def crnnRec(im,boxes,leftAdjust=False,rightAdjust=False,alph=0.2,f=1.0):
"""
CRNN model, OCR recognition
@@model,
@@converter,
@@im: Array
@@text_recs: text boxes
@@ifIm: whether to output the img corresponding to each box
"""
results = []
im = Image.fromarray(im)
for index,box in enumerate(boxes):
degree,w,h,cx,cy = solve(box)
partImg,newW,newH = rotate_cut_img(im,degree,box,w,h,leftAdjust,rightAdjust,alph)
newBox = xy_rotate_box(cx,cy,newW,newH,degree)
partImg_ = partImg.convert('L')
simPred = crnnOcr(partImg_)  ## recognized text
if simPred.strip()!=u'':
results.append({'cx':cx*f,'cy':cy*f,'text':simPred,'w':newW*f,'h':newH*f,'degree':degree*180.0/np.pi})
return results
def eval_angle(im,detectAngle=False,ifadjustDegree=True):
"""
Estimate the skew angle of the image
@@param:im
@@param:ifadjustDegree  adjust the text recognition result (deskew)
@@param:detectAngle  whether to detect text orientation
"""
angle = 0
degree=0.0
img = np.array(im)
if detectAngle:
angle = angle_detect(img=np.copy(img))##文字朝向检测
if angle==90:
im = im.transpose(Image.ROTATE_90)
elif angle==180:
im = im.transpose(Image.ROTATE_180)
elif angle==270:
im = im.transpose(Image.ROTATE_270)
img = np.array(im)
if ifadjustDegree:
degree = estimate_skew_angle(np.array(im.convert('L')))
return angle,degree,im.rotate(degree)
def model(img,detectAngle=False,config={},leftAdjust=False,rightAdjust=False,alph=0.2,ifadjustDegree=False):
"""
@@param:img,
@@param:ifadjustDegree  adjust the skew angle for text recognition
@@param:detectAngle, whether to detect text orientation
"""
angle,degree,img = eval_angle(img,detectAngle=detectAngle,ifadjustDegree=ifadjustDegree)
if opencvFlag!='keras':
img,f =letterbox_image(img, IMGSIZE)
else:
f=1.0  ## fixes the mismatch between box coordinates and the original image
config['img'] = img
text_recs = text_detect(**config)
newBox = sort_box(text_recs)
result = crnnRec(np.array(img),newBox,leftAdjust,rightAdjust,alph,1.0/f)
return img,result,angle
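if __name__ == '__main__':
    # Hedged end-to-end sketch (my addition): detection plus CRNN recognition
    # on a single image; 'test.jpg' is a placeholder path.
    im = Image.open('test.jpg').convert('RGB')
    img, result, angle = model(im, detectAngle=True, config={}, leftAdjust=True, rightAdjust=True)
    for line in result:
        print(line['text'])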
| 30.531915
| 114
| 0.601626
|
f10e0fc75d7e3b8f9112326e72881f81b392c614
| 14,559
|
py
|
Python
|
homeassistant/components/recorder/__init__.py
|
spacesuitdiver/home-assistant
|
9ce4755f8ae18309dd28910ee7ff519fc90d46f1
|
[
"Apache-2.0"
] | 2
|
2019-02-22T06:44:24.000Z
|
2019-02-22T10:58:16.000Z
|
homeassistant/components/recorder/__init__.py
|
spacesuitdiver/home-assistant
|
9ce4755f8ae18309dd28910ee7ff519fc90d46f1
|
[
"Apache-2.0"
] | null | null | null |
homeassistant/components/recorder/__init__.py
|
spacesuitdiver/home-assistant
|
9ce4755f8ae18309dd28910ee7ff519fc90d46f1
|
[
"Apache-2.0"
] | 1
|
2019-02-07T11:50:04.000Z
|
2019-02-07T11:50:04.000Z
|
"""
Support for recording details.
Component that records all events and state changes. Allows other components
to query this database.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/recorder/
"""
import asyncio
from collections import namedtuple
import concurrent.futures
from datetime import datetime, timedelta
import logging
import queue
import threading
import time
from typing import Any, Dict, Optional # noqa: F401
import voluptuous as vol
from homeassistant.const import (
ATTR_ENTITY_ID, CONF_DOMAINS, CONF_ENTITIES, CONF_EXCLUDE, CONF_INCLUDE,
EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP, EVENT_STATE_CHANGED,
EVENT_TIME_CHANGED, MATCH_ALL)
from homeassistant.core import CoreState, HomeAssistant, callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entityfilter import generate_filter
from homeassistant.helpers.typing import ConfigType
import homeassistant.util.dt as dt_util
from homeassistant.loader import bind_hass
from . import migration, purge
from .const import DATA_INSTANCE
from .util import session_scope
REQUIREMENTS = ['sqlalchemy==1.2.5']
_LOGGER = logging.getLogger(__name__)
DOMAIN = 'recorder'
SERVICE_PURGE = 'purge'
ATTR_KEEP_DAYS = 'keep_days'
ATTR_REPACK = 'repack'
SERVICE_PURGE_SCHEMA = vol.Schema({
vol.Optional(ATTR_KEEP_DAYS):
vol.All(vol.Coerce(int), vol.Range(min=0)),
vol.Optional(ATTR_REPACK, default=False): cv.boolean
})
DEFAULT_URL = 'sqlite:///{hass_config_path}'
DEFAULT_DB_FILE = 'home-assistant_v2.db'
CONF_DB_URL = 'db_url'
CONF_PURGE_KEEP_DAYS = 'purge_keep_days'
CONF_PURGE_INTERVAL = 'purge_interval'
CONF_EVENT_TYPES = 'event_types'
CONNECT_RETRY_WAIT = 3
FILTER_SCHEMA = vol.Schema({
vol.Optional(CONF_EXCLUDE, default={}): vol.Schema({
vol.Optional(CONF_DOMAINS): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_ENTITIES): cv.entity_ids,
vol.Optional(CONF_EVENT_TYPES): vol.All(cv.ensure_list, [cv.string]),
}),
vol.Optional(CONF_INCLUDE, default={}): vol.Schema({
vol.Optional(CONF_DOMAINS): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_ENTITIES): cv.entity_ids,
})
})
CONFIG_SCHEMA = vol.Schema({
DOMAIN: FILTER_SCHEMA.extend({
vol.Optional(CONF_PURGE_KEEP_DAYS, default=10):
vol.All(vol.Coerce(int), vol.Range(min=1)),
vol.Optional(CONF_PURGE_INTERVAL, default=1):
vol.All(vol.Coerce(int), vol.Range(min=0)),
vol.Optional(CONF_DB_URL): cv.string,
})
}, extra=vol.ALLOW_EXTRA)
@bind_hass
async def wait_connection_ready(hass):
"""Wait till the connection is ready."""
return await hass.data[DATA_INSTANCE].async_db_ready
def run_information(hass, point_in_time: Optional[datetime] = None):
"""Return information about current run.
There is also the run that covers point_in_time.
"""
from . import models
ins = hass.data[DATA_INSTANCE]
recorder_runs = models.RecorderRuns
if point_in_time is None or point_in_time > ins.recording_start:
return ins.run_info
with session_scope(hass=hass) as session:
res = session.query(recorder_runs).filter(
(recorder_runs.start < point_in_time) &
(recorder_runs.end > point_in_time)).first()
if res:
session.expunge(res)
return res
@asyncio.coroutine
def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the recorder."""
conf = config.get(DOMAIN, {})
keep_days = conf.get(CONF_PURGE_KEEP_DAYS)
purge_interval = conf.get(CONF_PURGE_INTERVAL)
db_url = conf.get(CONF_DB_URL, None)
if not db_url:
db_url = DEFAULT_URL.format(
hass_config_path=hass.config.path(DEFAULT_DB_FILE))
include = conf.get(CONF_INCLUDE, {})
exclude = conf.get(CONF_EXCLUDE, {})
instance = hass.data[DATA_INSTANCE] = Recorder(
hass=hass, keep_days=keep_days, purge_interval=purge_interval,
uri=db_url, include=include, exclude=exclude)
instance.async_initialize()
instance.start()
@asyncio.coroutine
def async_handle_purge_service(service):
"""Handle calls to the purge service."""
instance.do_adhoc_purge(**service.data)
hass.services.async_register(
DOMAIN, SERVICE_PURGE, async_handle_purge_service,
schema=SERVICE_PURGE_SCHEMA)
return (yield from instance.async_db_ready)
PurgeTask = namedtuple('PurgeTask', ['keep_days', 'repack'])
class Recorder(threading.Thread):
"""A threaded recorder class."""
def __init__(self, hass: HomeAssistant, keep_days: int,
purge_interval: int, uri: str,
include: Dict, exclude: Dict) -> None:
"""Initialize the recorder."""
threading.Thread.__init__(self, name='Recorder')
self.hass = hass
self.keep_days = keep_days
self.purge_interval = purge_interval
self.queue = queue.Queue() # type: Any
self.recording_start = dt_util.utcnow()
self.db_url = uri
self.async_db_ready = asyncio.Future(loop=hass.loop)
self.engine = None # type: Any
self.run_info = None # type: Any
self.entity_filter = generate_filter(
include.get(CONF_DOMAINS, []), include.get(CONF_ENTITIES, []),
exclude.get(CONF_DOMAINS, []), exclude.get(CONF_ENTITIES, []))
self.exclude_t = exclude.get(CONF_EVENT_TYPES, [])
self.get_session = None
@callback
def async_initialize(self):
"""Initialize the recorder."""
self.hass.bus.async_listen(MATCH_ALL, self.event_listener)
def do_adhoc_purge(self, **kwargs):
"""Trigger an adhoc purge retaining keep_days worth of data."""
keep_days = kwargs.get(ATTR_KEEP_DAYS, self.keep_days)
repack = kwargs.get(ATTR_REPACK)
self.queue.put(PurgeTask(keep_days, repack))
def run(self):
"""Start processing events to save."""
from .models import States, Events
from homeassistant.components import persistent_notification
from sqlalchemy import exc
tries = 1
connected = False
while not connected and tries <= 10:
if tries != 1:
time.sleep(CONNECT_RETRY_WAIT)
try:
self._setup_connection()
migration.migrate_schema(self)
self._setup_run()
connected = True
_LOGGER.debug("Connected to recorder database")
except Exception as err: # pylint: disable=broad-except
_LOGGER.error("Error during connection setup: %s (retrying "
"in %s seconds)", err, CONNECT_RETRY_WAIT)
tries += 1
if not connected:
@callback
def connection_failed():
"""Connect failed tasks."""
self.async_db_ready.set_result(False)
persistent_notification.async_create(
self.hass,
"The recorder could not start, please check the log",
"Recorder")
self.hass.add_job(connection_failed)
return
shutdown_task = object()
hass_started = concurrent.futures.Future()
@callback
def register():
"""Post connection initialize."""
self.async_db_ready.set_result(True)
def shutdown(event):
"""Shut down the Recorder."""
if not hass_started.done():
hass_started.set_result(shutdown_task)
self.queue.put(None)
self.join()
self.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, shutdown)
if self.hass.state == CoreState.running:
hass_started.set_result(None)
else:
@callback
def notify_hass_started(event):
"""Notify that hass has started."""
hass_started.set_result(None)
self.hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_START, notify_hass_started)
self.hass.add_job(register)
result = hass_started.result()
# If shutdown happened before Home Assistant finished starting
if result is shutdown_task:
return
# Start periodic purge
if self.keep_days and self.purge_interval:
@callback
def async_purge(now):
"""Trigger the purge and schedule the next run."""
self.queue.put(
PurgeTask(self.keep_days, repack=False))
self.hass.helpers.event.async_track_point_in_time(
async_purge, now + timedelta(days=self.purge_interval))
earliest = dt_util.utcnow() + timedelta(minutes=30)
run = latest = dt_util.utcnow() + \
timedelta(days=self.purge_interval)
with session_scope(session=self.get_session()) as session:
event = session.query(Events).first()
if event is not None:
session.expunge(event)
run = dt_util.as_utc(event.time_fired) + timedelta(
days=self.keep_days+self.purge_interval)
run = min(latest, max(run, earliest))
self.hass.helpers.event.track_point_in_time(async_purge, run)
while True:
event = self.queue.get()
if event is None:
self._close_run()
self._close_connection()
self.queue.task_done()
return
elif isinstance(event, PurgeTask):
purge.purge_old_data(self, event.keep_days, event.repack)
self.queue.task_done()
continue
elif event.event_type == EVENT_TIME_CHANGED:
self.queue.task_done()
continue
elif event.event_type in self.exclude_t:
self.queue.task_done()
continue
entity_id = event.data.get(ATTR_ENTITY_ID)
if entity_id is not None:
if not self.entity_filter(entity_id):
self.queue.task_done()
continue
tries = 1
updated = False
while not updated and tries <= 10:
if tries != 1:
time.sleep(CONNECT_RETRY_WAIT)
try:
with session_scope(session=self.get_session()) as session:
dbevent = Events.from_event(event)
session.add(dbevent)
session.flush()
if event.event_type == EVENT_STATE_CHANGED:
dbstate = States.from_event(event)
dbstate.event_id = dbevent.event_id
session.add(dbstate)
updated = True
except exc.OperationalError as err:
_LOGGER.error("Error in database connectivity: %s. "
"(retrying in %s seconds)", err,
CONNECT_RETRY_WAIT)
tries += 1
if not updated:
_LOGGER.error("Error in database update. Could not save "
"after %d tries. Giving up", tries)
self.queue.task_done()
@callback
def event_listener(self, event):
"""Listen for new events and put them in the process queue."""
self.queue.put(event)
def block_till_done(self):
"""Block till all events processed."""
self.queue.join()
def _setup_connection(self):
"""Ensure database is ready to fly."""
from sqlalchemy import create_engine, event
from sqlalchemy.engine import Engine
from sqlalchemy.orm import scoped_session
from sqlalchemy.orm import sessionmaker
from sqlite3 import Connection
from . import models
kwargs = {}
# pylint: disable=unused-variable
@event.listens_for(Engine, "connect")
def set_sqlite_pragma(dbapi_connection, connection_record):
"""Set sqlite's WAL mode."""
if isinstance(dbapi_connection, Connection):
old_isolation = dbapi_connection.isolation_level
dbapi_connection.isolation_level = None
cursor = dbapi_connection.cursor()
cursor.execute("PRAGMA journal_mode=WAL")
cursor.close()
dbapi_connection.isolation_level = old_isolation
if self.db_url == 'sqlite://' or ':memory:' in self.db_url:
from sqlalchemy.pool import StaticPool
kwargs['connect_args'] = {'check_same_thread': False}
kwargs['poolclass'] = StaticPool
kwargs['pool_reset_on_return'] = None
else:
kwargs['echo'] = False
if self.engine is not None:
self.engine.dispose()
self.engine = create_engine(self.db_url, **kwargs)
models.Base.metadata.create_all(self.engine)
self.get_session = scoped_session(sessionmaker(bind=self.engine))
def _close_connection(self):
"""Close the connection."""
self.engine.dispose()
self.engine = None
self.get_session = None
def _setup_run(self):
"""Log the start of the current run."""
from .models import RecorderRuns
with session_scope(session=self.get_session()) as session:
for run in session.query(RecorderRuns).filter_by(end=None):
run.closed_incorrect = True
run.end = self.recording_start
_LOGGER.warning("Ended unfinished session (id=%s from %s)",
run.run_id, run.start)
session.add(run)
self.run_info = RecorderRuns(
start=self.recording_start,
created=dt_util.utcnow()
)
session.add(self.run_info)
session.flush()
session.expunge(self.run_info)
def _close_run(self):
"""Save end time for current run."""
with session_scope(session=self.get_session()) as session:
self.run_info.end = dt_util.utcnow()
session.add(self.run_info)
self.run_info = None
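# Hedged configuration sketch (my addition): a YAML snippet that CONFIG_SCHEMA
# above would accept; the db_url value is a placeholder.
#
# recorder:
#   purge_keep_days: 5
#   purge_interval: 1
#   db_url: mysql://user:password@localhost/hass
#   exclude:
#     domains: [automation]
#     event_types: [call_service]
#   include:
#     entities: [sensor.outside_temperature]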
| 34.913669
| 79
| 0.611168
|
f01098d0ef900733c6c7fa7b4d2f54b2946d03a5
| 1,227
|
py
|
Python
|
stubs/micropython-v1_18-esp32/onewire.py
|
open-lv/air-pilot
|
6e5a5f1abf4cd230812665ea0353425a439bbbd6
|
[
"MIT"
] | 5
|
2021-09-23T14:10:06.000Z
|
2022-02-14T12:36:23.000Z
|
stubs/micropython-v1_18-esp32/onewire.py
|
open-lv/air-pilot
|
6e5a5f1abf4cd230812665ea0353425a439bbbd6
|
[
"MIT"
] | 25
|
2021-09-23T12:08:55.000Z
|
2022-03-20T14:51:03.000Z
|
stubs/micropython-v1_18-esp32/onewire.py
|
open-lv/air-pilot
|
6e5a5f1abf4cd230812665ea0353425a439bbbd6
|
[
"MIT"
] | 2
|
2021-09-23T11:40:26.000Z
|
2021-12-20T21:01:03.000Z
|
"""
Module: 'onewire' on micropython-v1.18-esp32
"""
# MCU: {'ver': 'v1.18', 'port': 'esp32', 'arch': 'xtensawin', 'sysname': 'esp32', 'release': '1.18.0', 'name': 'micropython', 'mpy': 10757, 'version': '1.18.0', 'machine': 'ESP32 module (spiram) with ESP32', 'build': '', 'nodename': 'esp32', 'platform': 'esp32', 'family': 'micropython'}
# Stubber: 1.5.4
from typing import Any
class OneWireError(Exception):
""""""
class OneWire:
""""""
def __init__(self, *argv, **kwargs) -> None:
""""""
...
def readinto(self, *args, **kwargs) -> Any:
...
def write(self, *args, **kwargs) -> Any:
...
def crc8(self, *args, **kwargs) -> Any:
...
def readbit(self, *args, **kwargs) -> Any:
...
def readbyte(self, *args, **kwargs) -> Any:
...
def reset(self, *args, **kwargs) -> Any:
...
def scan(self, *args, **kwargs) -> Any:
...
def writebit(self, *args, **kwargs) -> Any:
...
def writebyte(self, *args, **kwargs) -> Any:
...
SEARCH_ROM = 240 # type: int
MATCH_ROM = 85 # type: int
SKIP_ROM = 204 # type: int
def select_rom(self, *args, **kwargs) -> Any:
...
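# Hedged usage sketch (my addition, only runnable on a MicroPython board):
# assumes a 1-Wire device wired to GPIO 4 -- the pin number is a placeholder.
#
#   from machine import Pin
#   ow = OneWire(Pin(4))
#   ow.reset()
#   print(ow.scan())   # ROM codes of attached devices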
| 23.150943
| 287
| 0.509372
|
1f23ddc532e0a1b8394c1e3bd9927a92d06cba5f
| 40,116
|
py
|
Python
|
federatedml/tree/hetero/hetero_decision_tree_guest.py
|
chenwanqq/FATE
|
d7aafe6ce5faba0e70ba6bffc2d1fcfe22ba1329
|
[
"Apache-2.0"
] | 1
|
2021-06-14T02:13:50.000Z
|
2021-06-14T02:13:50.000Z
|
federatedml/tree/hetero/hetero_decision_tree_guest.py
|
chenwanqq/FATE
|
d7aafe6ce5faba0e70ba6bffc2d1fcfe22ba1329
|
[
"Apache-2.0"
] | 9
|
2020-11-13T18:59:35.000Z
|
2022-02-10T02:13:58.000Z
|
federatedml/tree/hetero/hetero_decision_tree_guest.py
|
UnreliableBuilder/Fate
|
10fca2e0aad8d29efa5302be77841d2f155b3185
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
#
#
################################################################################
# =============================================================================
# HeteroDecisionTreeGuest
# =============================================================================
import copy
import functools
from arch.api import session
from arch.api.utils import log_utils
from federatedml.feature.fate_element_type import NoneType
from federatedml.protobuf.generated.boosting_tree_model_meta_pb2 import CriterionMeta
from federatedml.protobuf.generated.boosting_tree_model_meta_pb2 import DecisionTreeModelMeta
from federatedml.protobuf.generated.boosting_tree_model_param_pb2 import DecisionTreeModelParam
from federatedml.transfer_variable.transfer_class.hetero_decision_tree_transfer_variable import \
HeteroDecisionTreeTransferVariable
from federatedml.tree import DecisionTree
from federatedml.tree import FeatureHistogram
from federatedml.tree import Node
from federatedml.tree import Splitter
from federatedml.util import consts
from federatedml.util.io_check import assert_io_num_rows_equal
LOGGER = log_utils.getLogger()
class HeteroDecisionTreeGuest(DecisionTree):
def __init__(self, tree_param):
LOGGER.info("hetero decision tree guest init!")
super(HeteroDecisionTreeGuest, self).__init__(tree_param)
self.splitter = Splitter(self.criterion_method, self.criterion_params, self.min_impurity_split,
self.min_sample_split, self.min_leaf_node)
self.data_bin = None
self.grad_and_hess = None
self.bin_split_points = None
self.bin_sparse_points = None
self.data_bin_with_node_dispatch = None
self.node_dispatch = None
self.infos = None
self.valid_features = None
self.encrypter = None
self.encrypted_mode_calculator = None
self.best_splitinfo_guest = None
self.tree_node_queue = None
self.cur_split_nodes = None
self.tree_ = []
self.tree_node_num = 0
self.split_maskdict = {}
self.missing_dir_maskdict = {}
self.transfer_inst = HeteroDecisionTreeTransferVariable()
self.predict_weights = None
self.host_party_idlist = []
self.runtime_idx = 0
self.sitename = consts.GUEST
self.feature_importances_ = {}
def set_flowid(self, flowid=0):
LOGGER.info("set flowid, flowid is {}".format(flowid))
self.transfer_inst.set_flowid(flowid)
def set_host_party_idlist(self, host_party_idlist):
self.host_party_idlist = host_party_idlist
def set_inputinfo(self, data_bin=None, grad_and_hess=None, bin_split_points=None, bin_sparse_points=None):
LOGGER.info("set input info")
self.data_bin = data_bin
self.grad_and_hess = grad_and_hess
self.bin_split_points = bin_split_points
self.bin_sparse_points = bin_sparse_points
def set_encrypter(self, encrypter):
LOGGER.info("set encrypter")
self.encrypter = encrypter
def set_encrypted_mode_calculator(self, encrypted_mode_calculator):
self.encrypted_mode_calculator = encrypted_mode_calculator
def encrypt(self, val):
return self.encrypter.encrypt(val)
def decrypt(self, val):
return self.encrypter.decrypt(val)
def encode(self, etype="feature_idx", val=None, nid=None):
if etype == "feature_idx":
return val
if etype == "feature_val":
self.split_maskdict[nid] = val
return None
if etype == "missing_dir":
self.missing_dir_maskdict[nid] = val
return None
raise TypeError("encode type %s is not support!" % (str(etype)))
@staticmethod
def decode(dtype="feature_idx", val=None, nid=None, split_maskdict=None, missing_dir_maskdict=None):
if dtype == "feature_idx":
return val
if dtype == "feature_val":
if nid in split_maskdict:
return split_maskdict[nid]
else:
raise ValueError("decode val %s cause error, can't reconize it!" % (str(val)))
if dtype == "missing_dir":
if nid in missing_dir_maskdict:
return missing_dir_maskdict[nid]
else:
raise ValueError("decode val %s cause error, can't reconize it!" % (str(val)))
return TypeError("decode type %s is not support!" % (str(dtype)))
def set_valid_features(self, valid_features=None):
LOGGER.info("set valid features")
self.valid_features = valid_features
def sync_encrypted_grad_and_hess(self):
LOGGER.info("send encrypted grad and hess to host")
encrypted_grad_and_hess = self.encrypt_grad_and_hess()
# LOGGER.debug("encrypted_grad_and_hess is {}".format(list(encrypted_grad_and_hess.collect())))
self.transfer_inst.encrypted_grad_and_hess.remote(encrypted_grad_and_hess,
role=consts.HOST,
idx=-1)
"""
federation.remote(obj=encrypted_grad_and_hess,
name=self.transfer_inst.encrypted_grad_and_hess.name,
tag=self.transfer_inst.generate_transferid(self.transfer_inst.encrypted_grad_and_hess),
role=consts.HOST,
idx=-1)
"""
def encrypt_grad_and_hess(self):
LOGGER.info("start to encrypt grad and hess")
encrypted_grad_and_hess = self.encrypted_mode_calculator.encrypt(self.grad_and_hess)
return encrypted_grad_and_hess
def get_grad_hess_sum(self, grad_and_hess_table):
LOGGER.info("calculate the sum of grad and hess")
grad, hess = grad_and_hess_table.reduce(
lambda value1, value2: (value1[0] + value2[0], value1[1] + value2[1]))
return grad, hess
def dispatch_all_node_to_root(self, root_id=0):
LOGGER.info("dispatch all node to root")
self.node_dispatch = self.data_bin.mapValues(lambda data_inst: (1, root_id))
def get_histograms(self, node_map={}):
LOGGER.info("start to get node histograms")
acc_histograms = FeatureHistogram.calculate_histogram(
self.data_bin_with_node_dispatch, self.grad_and_hess,
self.bin_split_points, self.bin_sparse_points,
self.valid_features, node_map,
self.use_missing, self.zero_as_missing)
return acc_histograms
def sync_tree_node_queue(self, tree_node_queue, dep=-1):
LOGGER.info("send tree node queue of depth {}".format(dep))
mask_tree_node_queue = copy.deepcopy(tree_node_queue)
for i in range(len(mask_tree_node_queue)):
mask_tree_node_queue[i] = Node(id=mask_tree_node_queue[i].id)
self.transfer_inst.tree_node_queue.remote(mask_tree_node_queue,
role=consts.HOST,
idx=-1,
suffix=(dep,))
"""
federation.remote(obj=mask_tree_node_queue,
name=self.transfer_inst.tree_node_queue.name,
tag=self.transfer_inst.generate_transferid(self.transfer_inst.tree_node_queue, dep),
role=consts.HOST,
idx=-1)
"""
def sync_node_positions(self, dep):
LOGGER.info("send node positions of depth {}".format(dep))
self.transfer_inst.node_positions.remote(self.node_dispatch,
role=consts.HOST,
idx=-1,
suffix=(dep,))
"""
federation.remote(obj=self.node_dispatch,
name=self.transfer_inst.node_positions.name,
tag=self.transfer_inst.generate_transferid(self.transfer_inst.node_positions, dep),
role=consts.HOST,
idx=-1)
"""
def sync_encrypted_splitinfo_host(self, dep=-1, batch=-1):
LOGGER.info("get encrypted splitinfo of depth {}, batch {}".format(dep, batch))
encrypted_splitinfo_host = self.transfer_inst.encrypted_splitinfo_host.get(idx=-1,
suffix=(dep, batch,))
ret = []
for obj in encrypted_splitinfo_host:
ret.append(obj.get_data())
"""
encrypted_splitinfo_host = federation.get(name=self.transfer_inst.encrypted_splitinfo_host.name,
tag=self.transfer_inst.generate_transferid(
self.transfer_inst.encrypted_splitinfo_host, dep, batch),
idx=-1)
"""
return ret
def sync_federated_best_splitinfo_host(self, federated_best_splitinfo_host, dep=-1, batch=-1, idx=-1):
LOGGER.info("send federated best splitinfo of depth {}, batch {}".format(dep, batch))
self.transfer_inst.federated_best_splitinfo_host.remote(federated_best_splitinfo_host,
role=consts.HOST,
idx=idx,
suffix=(dep, batch,))
"""
federation.remote(obj=federated_best_splitinfo_host,
name=self.transfer_inst.federated_best_splitinfo_host.name,
tag=self.transfer_inst.generate_transferid(self.transfer_inst.federated_best_splitinfo_host,
dep,
batch),
role=consts.HOST,
idx=idx)
"""
def find_host_split(self, value):
cur_split_node, encrypted_splitinfo_host = value
sum_grad = cur_split_node.sum_grad
sum_hess = cur_split_node.sum_hess
best_gain = self.min_impurity_split - consts.FLOAT_ZERO
best_idx = -1
for i in range(len(encrypted_splitinfo_host)):
sum_grad_l, sum_hess_l = encrypted_splitinfo_host[i]
sum_grad_l = self.decrypt(sum_grad_l)
sum_hess_l = self.decrypt(sum_hess_l)
sum_grad_r = sum_grad - sum_grad_l
sum_hess_r = sum_hess - sum_hess_l
gain = self.splitter.split_gain(sum_grad, sum_hess, sum_grad_l,
sum_hess_l, sum_grad_r, sum_hess_r)
if gain > self.min_impurity_split and gain > best_gain:
best_gain = gain
best_idx = i
encrypted_best_gain = self.encrypt(best_gain)
return best_idx, encrypted_best_gain, best_gain
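# Hedged note (my addition): with an XGBoost-style criterion, the gain that
# splitter.split_gain computes above is typically
#     gain = G_l^2 / (H_l + lambda) + G_r^2 / (H_r + lambda) - G^2 / (H + lambda)
# where G/H are the decrypted gradient/hessian sums for the left, right and
# parent nodes; the exact form depends on criterion_method/criterion_params.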
def federated_find_split(self, dep=-1, batch=-1):
LOGGER.info("federated find split of depth {}, batch {}".format(dep, batch))
encrypted_splitinfo_host = self.sync_encrypted_splitinfo_host(dep, batch)
for i in range(len(encrypted_splitinfo_host)):
init_gain = self.min_impurity_split - consts.FLOAT_ZERO
encrypted_init_gain = self.encrypter.encrypt(init_gain)
best_splitinfo_host = [[-1, encrypted_init_gain] for j in range(len(self.cur_split_nodes))]
best_gains = [init_gain for j in range(len(self.cur_split_nodes))]
max_nodes = max(len(encrypted_splitinfo_host[i][j]) for j in range(len(self.cur_split_nodes)))
for k in range(0, max_nodes, consts.MAX_FEDERATED_NODES):
batch_splitinfo_host = [encrypted_splitinfo[k: k + consts.MAX_FEDERATED_NODES] for encrypted_splitinfo
in encrypted_splitinfo_host[i]]
encrypted_splitinfo_host_table = session.parallelize(zip(self.cur_split_nodes, batch_splitinfo_host),
include_key=False,
partition=self.data_bin._partitions)
splitinfos = encrypted_splitinfo_host_table.mapValues(self.find_host_split).collect()
for _, splitinfo in splitinfos:
if best_splitinfo_host[_][0] == -1:
best_splitinfo_host[_] = list(splitinfo[:2])
best_gains[_] = splitinfo[2]
elif splitinfo[0] != -1 and splitinfo[2] > best_gains[_]:
best_splitinfo_host[_][0] = k + splitinfo[0]
best_splitinfo_host[_][1] = splitinfo[1]
best_gains[_] = splitinfo[2]
self.sync_federated_best_splitinfo_host(best_splitinfo_host, dep, batch, i)
def sync_final_split_host(self, dep=-1, batch=-1):
LOGGER.info("get host final splitinfo of depth {}, batch {}".format(dep, batch))
final_splitinfo_host = self.transfer_inst.final_splitinfo_host.get(idx=-1,
suffix=(dep, batch,))
"""
final_splitinfo_host = federation.get(name=self.transfer_inst.final_splitinfo_host.name,
tag=self.transfer_inst.generate_transferid(
self.transfer_inst.final_splitinfo_host, dep, batch),
idx=-1)
"""
return final_splitinfo_host
def find_best_split_guest_and_host(self, splitinfo_guest_host):
best_gain_host = self.decrypt(splitinfo_guest_host[1].gain)
best_gain_host_idx = 1
for i in range(1, len(splitinfo_guest_host)):
gain_host_i = self.decrypt(splitinfo_guest_host[i].gain)
if best_gain_host < gain_host_i:
best_gain_host = gain_host_i
best_gain_host_idx = i
if splitinfo_guest_host[0].gain >= best_gain_host - consts.FLOAT_ZERO:
best_splitinfo = splitinfo_guest_host[0]
else:
best_splitinfo = splitinfo_guest_host[best_gain_host_idx]
best_splitinfo.sum_grad = self.decrypt(best_splitinfo.sum_grad)
best_splitinfo.sum_hess = self.decrypt(best_splitinfo.sum_hess)
best_splitinfo.gain = best_gain_host
return best_splitinfo
def merge_splitinfo(self, splitinfo_guest, splitinfo_host):
LOGGER.info("merge splitinfo")
merge_infos = []
for i in range(len(splitinfo_guest)):
splitinfo = [splitinfo_guest[i]]
for j in range(len(splitinfo_host)):
splitinfo.append(splitinfo_host[j][i])
merge_infos.append(splitinfo)
splitinfo_guest_host_table = session.parallelize(merge_infos,
include_key=False,
partition=self.data_bin._partitions)
best_splitinfo_table = splitinfo_guest_host_table.mapValues(self.find_best_split_guest_and_host)
best_splitinfos = [None for i in range(len(merge_infos))]
for _, best_splitinfo in best_splitinfo_table.collect():
best_splitinfos[_] = best_splitinfo
# best_splitinfos = [best_splitinfo[1] for best_splitinfo in best_splitinfo_table.collect()]
return best_splitinfos
def update_feature_importance(self, splitinfo):
if self.feature_importance_type == "split":
inc = 1
elif self.feature_importance_type == "gain":
inc = splitinfo.gain
else:
raise ValueError("feature importance type {} not support yet".format(self.feature_importance_type))
sitename = splitinfo.sitename
fid = splitinfo.best_fid
if (sitename, fid) not in self.feature_importances_:
self.feature_importances_[(sitename, fid)] = 0
self.feature_importances_[(sitename, fid)] += inc
def update_tree_node_queue(self, splitinfos, max_depth_reach):
LOGGER.info("update tree node, splitlist length is {}, tree node queue size is {}".format(
len(splitinfos), len(self.tree_node_queue)))
new_tree_node_queue = []
for i in range(len(self.tree_node_queue)):
sum_grad = self.tree_node_queue[i].sum_grad
sum_hess = self.tree_node_queue[i].sum_hess
if max_depth_reach or splitinfos[i].gain <= \
self.min_impurity_split + consts.FLOAT_ZERO:
self.tree_node_queue[i].is_leaf = True
else:
self.tree_node_queue[i].left_nodeid = self.tree_node_num + 1
self.tree_node_queue[i].right_nodeid = self.tree_node_num + 2
self.tree_node_num += 2
left_node = Node(id=self.tree_node_queue[i].left_nodeid,
sitename=self.sitename,
sum_grad=splitinfos[i].sum_grad,
sum_hess=splitinfos[i].sum_hess,
weight=self.splitter.node_weight(splitinfos[i].sum_grad, splitinfos[i].sum_hess))
right_node = Node(id=self.tree_node_queue[i].right_nodeid,
sitename=self.sitename,
sum_grad=sum_grad - splitinfos[i].sum_grad,
sum_hess=sum_hess - splitinfos[i].sum_hess,
weight=self.splitter.node_weight( \
sum_grad - splitinfos[i].sum_grad,
sum_hess - splitinfos[i].sum_hess))
new_tree_node_queue.append(left_node)
new_tree_node_queue.append(right_node)
self.tree_node_queue[i].sitename = splitinfos[i].sitename
if self.tree_node_queue[i].sitename == self.sitename:
self.tree_node_queue[i].fid = self.encode("feature_idx", splitinfos[i].best_fid)
self.tree_node_queue[i].bid = self.encode("feature_val", splitinfos[i].best_bid,
self.tree_node_queue[i].id)
self.tree_node_queue[i].missing_dir = self.encode("missing_dir",
splitinfos[i].missing_dir,
self.tree_node_queue[i].id)
else:
self.tree_node_queue[i].fid = splitinfos[i].best_fid
self.tree_node_queue[i].bid = splitinfos[i].best_bid
self.update_feature_importance(splitinfos[i])
self.tree_.append(self.tree_node_queue[i])
self.tree_node_queue = new_tree_node_queue
@staticmethod
def dispatch_node(value, tree_=None, decoder=None, sitename=consts.GUEST,
split_maskdict=None, bin_sparse_points=None,
use_missing=False, zero_as_missing=False,
missing_dir_maskdict=None):
unleaf_state, nodeid = value[1]
if tree_[nodeid].is_leaf is True:
return tree_[nodeid].weight
else:
if tree_[nodeid].sitename == sitename:
fid = decoder("feature_idx", tree_[nodeid].fid, split_maskdict=split_maskdict)
bid = decoder("feature_val", tree_[nodeid].bid, nodeid, split_maskdict=split_maskdict)
if not use_missing:
if value[0].features.get_data(fid, bin_sparse_points[fid]) <= bid:
return 1, tree_[nodeid].left_nodeid
else:
return 1, tree_[nodeid].right_nodeid
else:
missing_dir = decoder("missing_dir", tree_[nodeid].missing_dir, nodeid,
missing_dir_maskdict=missing_dir_maskdict)
missing_val = False
if zero_as_missing:
if value[0].features.get_data(fid, None) is None or \
value[0].features.get_data(fid) == NoneType():
missing_val = True
elif use_missing and value[0].features.get_data(fid) == NoneType():
missing_val = True
if missing_val:
if missing_dir == 1:
return 1, tree_[nodeid].right_nodeid
else:
return 1, tree_[nodeid].left_nodeid
else:
LOGGER.debug("fid is {}, bid is {}, sitename is {}".format(fid, bid, sitename))
if value[0].features.get_data(fid, bin_sparse_points[fid]) <= bid:
return 1, tree_[nodeid].left_nodeid
else:
return 1, tree_[nodeid].right_nodeid
else:
return (1, tree_[nodeid].fid, tree_[nodeid].bid, tree_[nodeid].sitename,
nodeid, tree_[nodeid].left_nodeid, tree_[nodeid].right_nodeid)
def sync_dispatch_node_host(self, dispatch_guest_data, dep=-1):
LOGGER.info("send node to host to dispath, depth is {}".format(dep))
self.transfer_inst.dispatch_node_host.remote(dispatch_guest_data,
role=consts.HOST,
idx=-1,
suffix=(dep,))
"""
federation.remote(obj=dispatch_guest_data,
name=self.transfer_inst.dispatch_node_host.name,
tag=self.transfer_inst.generate_transferid(self.transfer_inst.dispatch_node_host, dep),
role=consts.HOST,
idx=-1)
"""
def sync_dispatch_node_host_result(self, dep=-1):
LOGGER.info("get host dispatch result, depth is {}".format(dep))
dispatch_node_host_result = self.transfer_inst.dispatch_node_host_result.get(idx=-1,
suffix=(dep,))
"""
dispatch_node_host_result = federation.get(name=self.transfer_inst.dispatch_node_host_result.name,
tag=self.transfer_inst.generate_transferid(
self.transfer_inst.dispatch_node_host_result, dep),
idx=-1)
"""
return dispatch_node_host_result
def redispatch_node(self, dep=-1, max_depth_reach=False):
LOGGER.info("redispatch node of depth {}".format(dep))
dispatch_node_method = functools.partial(self.dispatch_node,
tree_=self.tree_,
decoder=self.decode,
sitename=self.sitename,
split_maskdict=self.split_maskdict,
bin_sparse_points=self.bin_sparse_points,
use_missing=self.use_missing,
zero_as_missing=self.zero_as_missing,
missing_dir_maskdict=self.missing_dir_maskdict)
dispatch_guest_result = self.data_bin_with_node_dispatch.mapValues(dispatch_node_method)
tree_node_num = self.tree_node_num
LOGGER.info("remask dispatch node result of depth {}".format(dep))
dispatch_to_host_result = dispatch_guest_result.filter(
lambda key, value: isinstance(value, tuple) and len(value) > 2)
dispatch_guest_result = dispatch_guest_result.subtractByKey(dispatch_to_host_result)
leaf = dispatch_guest_result.filter(lambda key, value: isinstance(value, tuple) is False)
if self.predict_weights is None:
self.predict_weights = leaf
else:
self.predict_weights = self.predict_weights.union(leaf)
if max_depth_reach:
return
dispatch_guest_result = dispatch_guest_result.subtractByKey(leaf)
self.sync_dispatch_node_host(dispatch_to_host_result, dep)
dispatch_node_host_result = self.sync_dispatch_node_host_result(dep)
self.node_dispatch = None
for idx in range(len(dispatch_node_host_result)):
if self.node_dispatch is None:
self.node_dispatch = dispatch_node_host_result[idx]
else:
self.node_dispatch = self.node_dispatch.join(dispatch_node_host_result[idx], \
lambda unleaf_state_nodeid1, unleaf_state_nodeid2: \
unleaf_state_nodeid1 if len(
unleaf_state_nodeid1) == 2 else unleaf_state_nodeid2)
self.node_dispatch = self.node_dispatch.union(dispatch_guest_result)
def sync_tree(self):
LOGGER.info("sync tree to host")
self.transfer_inst.tree.remote(self.tree_,
role=consts.HOST,
idx=-1)
"""
federation.remote(obj=self.tree_,
name=self.transfer_inst.tree.name,
tag=self.transfer_inst.generate_transferid(self.transfer_inst.tree),
role=consts.HOST,
idx=-1)
"""
def convert_bin_to_real(self):
LOGGER.info("convert tree node bins to real value")
for i in range(len(self.tree_)):
if self.tree_[i].is_leaf is True:
continue
if self.tree_[i].sitename == self.sitename:
fid = self.decode("feature_idx", self.tree_[i].fid, split_maskdict=self.split_maskdict)
bid = self.decode("feature_val", self.tree_[i].bid, self.tree_[i].id, self.split_maskdict)
real_splitval = self.encode("feature_val", self.bin_split_points[fid][bid], self.tree_[i].id)
self.tree_[i].bid = real_splitval
def fit(self):
LOGGER.info("begin to fit guest decision tree")
self.sync_encrypted_grad_and_hess()
# LOGGER.debug("self.grad and hess is {}".format(list(self.grad_and_hess.collect())))
root_sum_grad, root_sum_hess = self.get_grad_hess_sum(self.grad_and_hess)
root_node = Node(id=0, sitename=self.sitename, sum_grad=root_sum_grad, sum_hess=root_sum_hess,
weight=self.splitter.node_weight(root_sum_grad, root_sum_hess))
self.tree_node_queue = [root_node]
self.dispatch_all_node_to_root()
for dep in range(self.max_depth):
LOGGER.info("start to fit depth {}, tree node queue size is {}".format(dep, len(self.tree_node_queue)))
self.sync_tree_node_queue(self.tree_node_queue, dep)
if len(self.tree_node_queue) == 0:
break
self.sync_node_positions(dep)
self.data_bin_with_node_dispatch = self.data_bin.join(self.node_dispatch,
lambda data_inst, dispatch_info: (
data_inst, dispatch_info))
batch = 0
splitinfos = []
for i in range(0, len(self.tree_node_queue), self.max_split_nodes):
self.cur_split_nodes = self.tree_node_queue[i: i + self.max_split_nodes]
node_map = {}
node_num = 0
for tree_node in self.cur_split_nodes:
node_map[tree_node.id] = node_num
node_num += 1
acc_histograms = self.get_histograms(node_map=node_map)
self.best_splitinfo_guest = self.splitter.find_split(acc_histograms, self.valid_features,
self.data_bin._partitions,
self.sitename,
self.use_missing, self.zero_as_missing)
self.federated_find_split(dep, batch)
final_splitinfo_host = self.sync_final_split_host(dep, batch)
cur_splitinfos = self.merge_splitinfo(self.best_splitinfo_guest, final_splitinfo_host)
splitinfos.extend(cur_splitinfos)
batch += 1
self.update_tree_node_queue(splitinfos, False)
self.redispatch_node(dep)
if self.tree_node_queue:
self.update_tree_node_queue([], True)
self.data_bin_with_node_dispatch = self.data_bin.join(self.node_dispatch,
lambda data_inst, dispatch_info: (
data_inst, dispatch_info))
self.redispatch_node(self.max_depth, max_depth_reach=True)
self.sync_tree()
self.convert_bin_to_real()
tree_ = self.tree_
LOGGER.info("tree node num is %d" % len(tree_))
LOGGER.info("end to fit guest decision tree")
@staticmethod
def traverse_tree(predict_state, data_inst, tree_=None,
decoder=None, sitename=consts.GUEST, split_maskdict=None,
use_missing=None, zero_as_missing=None, missing_dir_maskdict=None):
nid, tag = predict_state
while tree_[nid].sitename == sitename:
if tree_[nid].is_leaf is True:
return tree_[nid].weight
fid = decoder("feature_idx", tree_[nid].fid, split_maskdict=split_maskdict)
bid = decoder("feature_val", tree_[nid].bid, nid, split_maskdict=split_maskdict)
if use_missing:
missing_dir = decoder("missing_dir", 1, nid, missing_dir_maskdict=missing_dir_maskdict)
else:
missing_dir = 1
if use_missing and zero_as_missing:
missing_dir = decoder("missing_dir", 1, nid, missing_dir_maskdict=missing_dir_maskdict)
if data_inst.features.get_data(fid) == NoneType() or data_inst.features.get_data(fid, None) is None:
if missing_dir == 1:
nid = tree_[nid].right_nodeid
else:
nid = tree_[nid].left_nodeid
elif data_inst.features.get_data(fid) <= bid:
nid = tree_[nid].left_nodeid
else:
nid = tree_[nid].right_nodeid
elif data_inst.features.get_data(fid) == NoneType():
if missing_dir == 1:
nid = tree_[nid].right_nodeid
else:
nid = tree_[nid].left_nodeid
elif data_inst.features.get_data(fid, 0) <= bid:
nid = tree_[nid].left_nodeid
else:
nid = tree_[nid].right_nodeid
return nid, 1
def sync_predict_finish_tag(self, finish_tag, send_times):
LOGGER.info("send the {}-th predict finish tag {} to host".format(finish_tag, send_times))
self.transfer_inst.predict_finish_tag.remote(finish_tag,
role=consts.HOST,
idx=-1,
suffix=(send_times,))
"""
federation.remote(obj=finish_tag,
name=self.transfer_inst.predict_finish_tag.name,
tag=self.transfer_inst.generate_transferid(self.transfer_inst.predict_finish_tag, send_times),
role=consts.HOST,
idx=-1)
"""
def sync_predict_data(self, predict_data, send_times):
LOGGER.info("send predict data to host, sending times is {}".format(send_times))
self.transfer_inst.predict_data.remote(predict_data,
role=consts.HOST,
idx=-1,
suffix=(send_times,))
"""
federation.remote(obj=predict_data,
name=self.transfer_inst.predict_data.name,
tag=self.transfer_inst.generate_transferid(self.transfer_inst.predict_data, send_times),
role=consts.HOST,
idx=-1)
"""
def sync_data_predicted_by_host(self, send_times):
LOGGER.info("get predicted data by host, recv times is {}".format(send_times))
predict_data = self.transfer_inst.predict_data_by_host.get(idx=-1,
suffix=(send_times,))
"""
predict_data = federation.get(name=self.transfer_inst.predict_data_by_host.name,
tag=self.transfer_inst.generate_transferid(
self.transfer_inst.predict_data_by_host, send_times),
idx=-1)
"""
return predict_data
@assert_io_num_rows_equal
def predict(self, data_inst):
LOGGER.info("start to predict!")
predict_data = data_inst.mapValues(lambda data_inst: (0, 1))
site_host_send_times = 0
predict_result = None
while True:
traverse_tree = functools.partial(self.traverse_tree,
tree_=self.tree_,
decoder=self.decode,
sitename=self.sitename,
split_maskdict=self.split_maskdict,
use_missing=self.use_missing,
zero_as_missing=self.zero_as_missing,
missing_dir_maskdict=self.missing_dir_maskdict)
predict_data = predict_data.join(data_inst, traverse_tree)
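            # Samples that reached a guest leaf now carry a plain weight instead of
            # a (node_id, tag) tuple; split them off and keep iterating on the rest.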
            predict_leaf = predict_data.filter(lambda key, value: not isinstance(value, tuple))
if predict_result is None:
predict_result = predict_leaf
else:
predict_result = predict_result.union(predict_leaf)
predict_data = predict_data.subtractByKey(predict_leaf)
unleaf_node_count = predict_data.count()
if unleaf_node_count == 0:
self.sync_predict_finish_tag(True, site_host_send_times)
break
self.sync_predict_finish_tag(False, site_host_send_times)
self.sync_predict_data(predict_data, site_host_send_times)
predict_data_host = self.sync_data_predicted_by_host(site_host_send_times)
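            # Merge the host-side traversal results, preferring whichever state has
            # already finished (tag == 0).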
for i in range(len(predict_data_host)):
                predict_data = predict_data.join(predict_data_host[i],
                                                 lambda state1_nodeid1, state2_nodeid2:
                                                 state1_nodeid1 if state1_nodeid1[1] == 0 else state2_nodeid2)
site_host_send_times += 1
LOGGER.info("predict finish!")
return predict_result
def get_model_meta(self):
model_meta = DecisionTreeModelMeta()
model_meta.criterion_meta.CopyFrom(CriterionMeta(criterion_method=self.criterion_method,
criterion_param=self.criterion_params))
model_meta.max_depth = self.max_depth
model_meta.min_sample_split = self.min_sample_split
model_meta.min_impurity_split = self.min_impurity_split
model_meta.min_leaf_node = self.min_leaf_node
model_meta.use_missing = self.use_missing
model_meta.zero_as_missing = self.zero_as_missing
return model_meta
def set_model_meta(self, model_meta):
self.max_depth = model_meta.max_depth
self.min_sample_split = model_meta.min_sample_split
self.min_impurity_split = model_meta.min_impurity_split
self.min_leaf_node = model_meta.min_leaf_node
self.criterion_method = model_meta.criterion_meta.criterion_method
self.criterion_params = list(model_meta.criterion_meta.criterion_param)
self.use_missing = model_meta.use_missing
self.zero_as_missing = model_meta.zero_as_missing
def get_model_param(self):
model_param = DecisionTreeModelParam()
for node in self.tree_:
model_param.tree_.add(id=node.id,
sitename=node.sitename,
fid=node.fid,
bid=node.bid,
weight=node.weight,
is_leaf=node.is_leaf,
left_nodeid=node.left_nodeid,
right_nodeid=node.right_nodeid,
missing_dir=node.missing_dir)
LOGGER.debug("missing_dir is {}, sitename is {}, is_leaf is {}".format(node.missing_dir, node.sitename,
node.is_leaf))
model_param.split_maskdict.update(self.split_maskdict)
model_param.missing_dir_maskdict.update(self.missing_dir_maskdict)
return model_param
def set_model_param(self, model_param):
self.tree_ = []
for node_param in model_param.tree_:
_node = Node(id=node_param.id,
sitename=node_param.sitename,
fid=node_param.fid,
bid=node_param.bid,
weight=node_param.weight,
is_leaf=node_param.is_leaf,
left_nodeid=node_param.left_nodeid,
right_nodeid=node_param.right_nodeid,
missing_dir=node_param.missing_dir)
self.tree_.append(_node)
self.split_maskdict = dict(model_param.split_maskdict)
self.missing_dir_maskdict = dict(model_param.missing_dir_maskdict)
def get_model(self):
model_meta = self.get_model_meta()
model_param = self.get_model_param()
return model_meta, model_param
def load_model(self, model_meta=None, model_param=None):
LOGGER.info("load tree model")
self.set_model_meta(model_meta)
self.set_model_param(model_param)
def get_feature_importance(self):
return self.feature_importances_
| 48.449275
| 122
| 0.56541
|
45ffca98fc0060786042051e93f5594abba7b8a7
| 2,234
|
py
|
Python
|
gym-foo/test_agent.py
|
hieunq95/keras-rl
|
d965ea951220b5ede5ea1e11fab7d7eb45a8c2c5
|
[
"MIT"
] | null | null | null |
gym-foo/test_agent.py
|
hieunq95/keras-rl
|
d965ea951220b5ede5ea1e11fab7d7eb45a8c2c5
|
[
"MIT"
] | null | null | null |
gym-foo/test_agent.py
|
hieunq95/keras-rl
|
d965ea951220b5ede5ea1e11fab7d7eb45a8c2c5
|
[
"MIT"
] | null | null | null |
# From dqn_cartpole.py
import numpy as np
import gym
import pylab
import xlsxwriter
from keras.models import Sequential
from keras.layers import Dense, Activation, Flatten
from keras.optimizers import Adam
from rl.agents.dqn import DQNAgent
from rl.policy import BoltzmannQPolicy, GreedyQPolicy, EpsGreedyQPolicy
from rl.memory import SequentialMemory
from mcml_processor import MCMLProcessor
from mcml_env import MCML
from my_policy import MyEpsGreedy, RandomPolicy, GreedyPolicy
from parameters import Parameters
from results_writer import MCMLWriter
parameters = Parameters()
workbook = xlsxwriter.Workbook(parameters.XLSX_PATH)
writer = MCMLWriter(workbook)
env = MCML(writer)
policy = MyEpsGreedy(environment=env, eps_max=0.9, eps_min=0,
eps_training=parameters.EPISODES_TRAINING, writer=writer)
# policy = RandomPolicy(env, writer)
# policy = GreedyPolicy(env, writer)
processor = MCMLProcessor()
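# Flatten the MultiDiscrete action space into one Discrete space of 4**k joint
# actions (this assumes each of the k sub-actions takes 4 possible values).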
nb_actions = 4 ** len(env.action_space.nvec)
# Next, we build a very simple model.
model = Sequential()
model.add(Flatten(input_shape=(1,) + env.observation_space.shape)) # input #input_shape = (1,) + (4,)
model.add(Dense(32))
model.add(Activation('relu'))
model.add(Dense(32))
model.add(Activation('relu'))
model.add(Dense(32))
model.add(Activation('relu'))
model.add(Dense(nb_actions)) # output
model.add(Activation('linear'))
print(model.summary())
# Finally, we configure and compile our agent. You can use every built-in Keras optimizer and
# even the metrics!
memory = SequentialMemory(limit=50000, window_length=1)
dqn = DQNAgent(model=model, nb_actions=nb_actions, memory=memory, nb_steps_warmup=100,
target_model_update=1e-2, policy=policy, enable_double_dqn=True, processor=processor)
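# In keras-rl, a target_model_update below 1 enables soft target updates
# (tau = 1e-2), and enable_double_dqn selects actions with the online network
# while evaluating them with the target network (Double DQN).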
dqn.compile(Adam(lr=1e-3), metrics=['mae'])
# Okay, now it's time to learn something! We visualize the training here for show, but this
# slows down training quite a lot. You can always safely abort the training prematurely using
# Ctrl + C.
# print(dqn.metrics_names[:])
dqn.fit(env, nb_steps=parameters.NB_STEPS, visualize=False, verbose=2, nb_max_episode_steps=None)
workbook.close()
# Finally, evaluate our algorithm for 5 episodes.
# dqn.test(env, nb_episodes=5, visualize=False)
| 32.852941
| 101
| 0.772605
|
71a78c76ae5b0dfb53ce28929bd83664fdb1e6d3
| 2,554
|
py
|
Python
|
twitch_project/facebook_users/views.py
|
tejesh95/livecoding
|
327114d67d613ef7300528d62b5165cd215bc1b4
|
[
"MIT"
] | null | null | null |
twitch_project/facebook_users/views.py
|
tejesh95/livecoding
|
327114d67d613ef7300528d62b5165cd215bc1b4
|
[
"MIT"
] | null | null | null |
twitch_project/facebook_users/views.py
|
tejesh95/livecoding
|
327114d67d613ef7300528d62b5165cd215bc1b4
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
from django.http import HttpResponseRedirect, HttpResponse
from .tasks import fetch_members_from_group, fetch_facebook_page_fans
import urllib2
import json
# Create your views here.
APP_ID = '818991478245556'
APP_SECRET = '2579d3f1a38b7f0ab5ea9d8c8c4bdfa5'
REDIRECT_URI = 'http://162.243.104.58:8004/facebook/login_success'
HOST = 'https://graph.facebook.com'
VERSION = 'v2.6'
# DO NOT use this access token, will automate fetching soon
access_token = 'EAALo3lAiiLQBACfDtgMkmDQiuZAzGjvpOBodrlWX5ctBq5yd9XKxSQNd2QMNHBCdc90uw5nYZBxW6LXXxKfirloZBJOweiJvDJAJbk2HqZAPvbcwZCkUEcIOhcgZBAt7XyOePJFVvNG0MGfTZC9otlPOB9G5LATRmsZD'
def fetch_user_token(request):
"""
invoke the login dialog to ask user for access permissions
"""
login_url = "https://www.facebook.com/dialog/oauth?client_id=" + APP_ID + "&redirect_uri=" + REDIRECT_URI
return HttpResponseRedirect(login_url)
def redirect_uri_view(request):
code = request.GET['code']
exchange_code_url = "https://graph.facebook.com/v2.6/oauth/access_token?client_id=" + \
APP_ID + "&redirect_uri=" + REDIRECT_URI + "&client_secret=" + APP_SECRET + "&code=" + code
# response = urllib2.urlopen(exchange_code_url)
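    # WARNING: redirecting the browser to this URL leaks APP_SECRET to the
    # client; the code-for-token exchange should happen server-side instead.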
return HttpResponseRedirect(exchange_code_url)
def search_groups(request, query=None):
if query is None:
return HttpResponse('Enter keywords to search for groups')
return render(request, 'group_search.html', context=None)
def fetch_facebook_group_members(request, group_id=None):
if group_id is None:
return HttpResponse('Enter group_id Example: /facebook/group/123456789 ')
api_end_point = HOST + '/' + VERSION + '/' + group_id + '/members' + '?access_token=' + access_token
try:
response = urllib2.urlopen(api_end_point).read()
except urllib2.URLError as e:
        print(e.reason)
return HttpResponseRedirect('/facebook/group/' + group_id)
members = json.loads(response)
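    # Note: this reads only the first page of results; the Graph API paginates
    # member lists, so a complete fetch would follow the paging cursors.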
fetch_members_from_group.delay(members, group_id)
return HttpResponse('DONE')
# def fetch_facebook_page_fans(request):
# page_id = '174179219354091'
# api_end_point = HOST + '/' + VERSION + '/' + page_id + '/likes' + '?access_token=' + access_token
# print(api_end_point)
# try:
# response = urllib2.urlopen(api_end_point).read()
# except urllib2.URLError as e:
# print(e.reason)
# fans = json.load(response)
# fetch_fans_of_page.delay(fans)
# return render(request, 'page_fans.html', context=None)
| 34.513514
| 182
| 0.727878
|
382a9aa33000f6cd741ca16ad8c61716b72e3a18
| 2,414
|
py
|
Python
|
pushservice/src/Controller/ListDir.py
|
Haehnchen/enigma2-plugins
|
23007eb0b78665cd3a2faf98d1d6145b4f0ada3f
|
[
"OLDAP-2.3"
] | 1
|
2020-01-27T22:53:56.000Z
|
2020-01-27T22:53:56.000Z
|
pushservice/src/Controller/ListDir.py
|
Haehnchen/enigma2-plugins
|
23007eb0b78665cd3a2faf98d1d6145b4f0ada3f
|
[
"OLDAP-2.3"
] | null | null | null |
pushservice/src/Controller/ListDir.py
|
Haehnchen/enigma2-plugins
|
23007eb0b78665cd3a2faf98d1d6145b4f0ada3f
|
[
"OLDAP-2.3"
] | null | null | null |
#######################################################################
#
# Push Service for Enigma-2
# Coded by betonme (c) 2012 <glaserfrank(at)gmail.com>
# Support: http://www.i-have-a-dreambox.com/wbb2/thread.php?threadid=167779
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
#######################################################################
# Config
from Components.config import ConfigYesNo, ConfigText, ConfigNumber, NoSave
# Plugin internal
from Plugins.Extensions.PushService.__init__ import _
from Plugins.Extensions.PushService.ControllerBase import ControllerBase
# Plugin specific
import os
# Constants
SUBJECT = _("List of Files")
class ListDir(ControllerBase):
ForceSingleInstance = True
def __init__(self):
# Is called on instance creation
ControllerBase.__init__(self)
        self.movielist = []
# Default configuration
self.setOption( 'path', NoSave(ConfigText( default = "/media/hdd/movie/", fixed_size = False )), _("Where to check") )
self.setOption( 'ext', NoSave(ConfigText( default = ".ts", fixed_size = False )), _("file extension") )
def run(self, callback, errback):
# At the end a plugin has to call one of the functions: callback or errback
# Callback should return with at least one of the parameter: Header, body (List of Files)
# If empty or none is returned, nothing will be sent
path = self.getValue('path')
ext = self.getValue('ext')
movielist = []
for file in os.listdir( path ):
if file.endswith( ext ):
movielist.append(file)
body = "The following files were found: \n" + "\n".join(movielist)
if movielist:
callback( SUBJECT, body )
else:
callback()
| 36.029851
| 140
| 0.594035
|
af159253f4d4e9b8392e7ff0619cad59a7effa21
| 28,482
|
py
|
Python
|
parcels/grid.py
|
CKehl/parcels
|
6e4d6a10157a9d1a7935b08f35808b874a1cdf58
|
[
"MIT"
] | 1
|
2020-03-20T10:22:21.000Z
|
2020-03-20T10:22:21.000Z
|
parcels/grid.py
|
CKehl/parcels
|
6e4d6a10157a9d1a7935b08f35808b874a1cdf58
|
[
"MIT"
] | null | null | null |
parcels/grid.py
|
CKehl/parcels
|
6e4d6a10157a9d1a7935b08f35808b874a1cdf58
|
[
"MIT"
] | null | null | null |
import functools
import sys
from ctypes import c_double
from ctypes import c_float
from ctypes import c_int
from ctypes import c_void_p
from ctypes import cast
from ctypes import POINTER
from ctypes import pointer
from ctypes import Structure
from enum import IntEnum
import numpy as np
import dask.array as da
#from memory_profiler import profile
from parcels.tools.converters import TimeConverter
from parcels.tools.loggers import logger
__all__ = ['GridCode', 'RectilinearZGrid', 'RectilinearSGrid', 'CurvilinearZGrid', 'CurvilinearSGrid', 'CGrid', 'Grid']
class GridCode(IntEnum):
RectilinearZGrid = 0
RectilinearSGrid = 1
CurvilinearZGrid = 2
CurvilinearSGrid = 3
class CGrid(Structure):
_fields_ = [('gtype', c_int),
('grid', c_void_p)]
class Grid(object):
"""Grid class that defines a (spatial and temporal) grid on which Fields are defined
"""
def __init__(self, lon, lat, time, time_origin, mesh):
self.lon = lon
self.lat = lat
self.time = np.zeros(1, dtype=np.float64) if time is None else time
if not self.lon.dtype == np.float32:
logger.warning_once("Casting lon data to np.float32")
self.lon = self.lon.astype(np.float32)
if not self.lat.dtype == np.float32:
logger.warning_once("Casting lat data to np.float32")
self.lat = self.lat.astype(np.float32)
if not self.time.dtype == np.float64:
assert isinstance(self.time[0], (np.integer, np.floating, float, int)), 'Time vector must be an array of int or floats'
logger.warning_once("Casting time data to np.float64")
self.time = self.time.astype(np.float64)
self.time_full = self.time # needed for deferred_loaded Fields
self.time_origin = TimeConverter() if time_origin is None else time_origin
assert isinstance(self.time_origin, TimeConverter), 'time_origin needs to be a TimeConverter object'
self.mesh = mesh
self.cstruct = None
self.cell_edge_sizes = {}
self.zonal_periodic = False
self.zonal_halo = 0
self.meridional_halo = 0
self.lat_flipped = False
self.defer_load = False
self.lonlat_minmax = np.array([np.nanmin(lon), np.nanmax(lon), np.nanmin(lat), np.nanmax(lat)], dtype=np.float32)
self.periods = 0
self.load_chunk = []
self.chunk_info = None
self._add_last_periodic_data_timestep = False
@staticmethod
def create_grid(lon, lat, depth, time, time_origin, mesh, **kwargs):
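        # Dispatch on dimensionality: 1D lon/lat arrays give a rectilinear grid,
        # 2D arrays a curvilinear one; a 1D (or absent) depth gives Z-levels,
        # while a multi-dimensional depth gives S-levels.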
if len(lon.shape) == 1:
if depth is None or len(depth.shape) == 1:
return RectilinearZGrid(lon, lat, depth, time, time_origin=time_origin, mesh=mesh, **kwargs)
else:
return RectilinearSGrid(lon, lat, depth, time, time_origin=time_origin, mesh=mesh, **kwargs)
else:
if depth is None or len(depth.shape) == 1:
return CurvilinearZGrid(lon, lat, depth, time, time_origin=time_origin, mesh=mesh, **kwargs)
else:
return CurvilinearSGrid(lon, lat, depth, time, time_origin=time_origin, mesh=mesh, **kwargs)
@property
def ctypes_struct(self):
        # This is unnecessary for the moment, but it could be useful when going to fully unstructured grids
self.cgrid = cast(pointer(self.child_ctypes_struct), c_void_p)
cstruct = CGrid(self.gtype, self.cgrid.value)
return cstruct
@property
def child_ctypes_struct(self):
"""Returns a ctypes struct object containing all relevant
pointers and sizes for this grid."""
class CStructuredGrid(Structure):
# z4d is only to have same cstruct as RectilinearSGrid
_fields_ = [('xdim', c_int), ('ydim', c_int), ('zdim', c_int),
('tdim', c_int), ('z4d', c_int),
('mesh_spherical', c_int), ('zonal_periodic', c_int),
('chunk_info', POINTER(c_int)),
('load_chunk', POINTER(c_int)),
('tfull_min', c_double), ('tfull_max', c_double), ('periods', POINTER(c_int)),
('lonlat_minmax', POINTER(c_float)),
('lon', POINTER(c_float)), ('lat', POINTER(c_float)),
('depth', POINTER(c_float)), ('time', POINTER(c_double))
]
# Create and populate the c-struct object
if not self.cstruct: # Not to point to the same grid various times if grid in various fields
if not isinstance(self.periods, c_int):
self.periods = c_int()
self.periods.value = 0
self.cstruct = CStructuredGrid(self.xdim, self.ydim, self.zdim,
self.tdim, self.z4d,
self.mesh == 'spherical', self.zonal_periodic,
(c_int * len(self.chunk_info))(*self.chunk_info),
self.load_chunk.ctypes.data_as(POINTER(c_int)),
self.time_full[0], self.time_full[-1], pointer(self.periods),
self.lonlat_minmax.ctypes.data_as(POINTER(c_float)),
self.lon.ctypes.data_as(POINTER(c_float)),
self.lat.ctypes.data_as(POINTER(c_float)),
self.depth.ctypes.data_as(POINTER(c_float)),
self.time.ctypes.data_as(POINTER(c_double)))
return self.cstruct
def lon_grid_to_target(self):
if self.lon_remapping:
self.lon = self.lon_remapping.to_target(self.lon)
def lon_grid_to_source(self):
if self.lon_remapping:
self.lon = self.lon_remapping.to_source(self.lon)
def lon_particle_to_target(self, lon):
if self.lon_remapping:
return self.lon_remapping.particle_to_target(lon)
return lon
def advancetime(self, grid_new):
assert isinstance(grid_new.time_origin, type(self.time_origin)), 'time_origin of new and old grids must be either both None or both a date'
if self.time_origin:
grid_new.time = grid_new.time + self.time_origin.reltime(grid_new.time_origin)
if len(grid_new.time) != 1:
raise RuntimeError('New FieldSet needs to have only one snapshot')
if grid_new.time > self.time[-1]: # forward in time, so appending at end
self.time = np.concatenate((self.time[1:], grid_new.time))
return 1
elif grid_new.time < self.time[0]: # backward in time, so prepending at start
self.time = np.concatenate((grid_new.time, self.time[:-1]))
return -1
else:
raise RuntimeError("Time of field_new in Field.advancetime() overlaps with times in old Field")
def check_zonal_periodic(self):
if self.zonal_periodic or self.mesh == 'flat':
return
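        # Treat the grid as zonally periodic when the longitude increments,
        # wrapped into (-180, 180], sum to (nearly) a full circle.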
dx = (self.lon[1:] - self.lon[:-1]) if len(self.lon.shape) == 1 else self.lon[0, 1:] - self.lon[0, :-1]
dx = np.where(dx < -180, dx+360, dx)
dx = np.where(dx > 180, dx-360, dx)
self.zonal_periodic = sum(dx) > 359.9
def add_Sdepth_periodic_halo(self, zonal, meridional, halosize):
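        # Extend the (3D or 4D) depth field with the same halo as lon/lat so
        # s-coordinate lookups remain valid inside the halo.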
if zonal:
if len(self.depth.shape) == 3:
self.depth = np.concatenate((self.depth[:, :, -halosize:], self.depth,
self.depth[:, :, 0:halosize]), axis=len(self.depth.shape) - 1)
assert self.depth.shape[2] == self.xdim, "Third dim must be x."
else:
self.depth = np.concatenate((self.depth[:, :, :, -halosize:], self.depth,
self.depth[:, :, :, 0:halosize]), axis=len(self.depth.shape) - 1)
assert self.depth.shape[3] == self.xdim, "Fourth dim must be x."
if meridional:
if len(self.depth.shape) == 3:
self.depth = np.concatenate((self.depth[:, -halosize:, :], self.depth,
self.depth[:, 0:halosize, :]), axis=len(self.depth.shape) - 2)
assert self.depth.shape[1] == self.ydim, "Second dim must be y."
else:
self.depth = np.concatenate((self.depth[:, :, -halosize:, :], self.depth,
self.depth[:, :, 0:halosize, :]), axis=len(self.depth.shape) - 2)
assert self.depth.shape[2] == self.ydim, "Third dim must be y."
#@profile
def computeTimeChunk(self, f, time, signdt):
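        # Maintain a rolling window of three snapshots (self.time) within the full
        # time series (self.time_full) and record whether the window moved.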
nextTime_loc = np.infty if signdt >= 0 else -np.infty
periods = self.periods.value if isinstance(self.periods, c_int) else self.periods
prev_time_indices = self.time
#sys.stdout.write("len(grid.time) = {}\n".format(len(prev_time_indices)))
if self.update_status == 'not_updated':
if self.ti >= 0:
if (time - periods*(self.time_full[-1]-self.time_full[0]) < self.time[0] or time - periods*(self.time_full[-1]-self.time_full[0]) > self.time[2]):
self.ti = -1 # reset
elif signdt >= 0 and (time - periods*(self.time_full[-1]-self.time_full[0]) < self.time_full[0] or time - periods*(self.time_full[-1]-self.time_full[0]) >= self.time_full[-1]):
self.ti = -1 # reset
elif signdt < 0 and (time - periods*(self.time_full[-1]-self.time_full[0]) <= self.time_full[0] or time - periods*(self.time_full[-1]-self.time_full[0]) > self.time_full[-1]):
self.ti = -1 # reset
elif signdt >= 0 and time - periods*(self.time_full[-1]-self.time_full[0]) >= self.time[1] and self.ti < len(self.time_full)-3:
self.ti += 1
self.time = self.time_full[self.ti:self.ti+3]
self.update_status = 'updated'
elif signdt == -1 and time - periods*(self.time_full[-1]-self.time_full[0]) < self.time[1] and self.ti > 0:
self.ti -= 1
self.time = self.time_full[self.ti:self.ti+3]
self.update_status = 'updated'
if self.ti == -1:
self.time = self.time_full
self.ti, ti_periods = f.time_index(time)
#self.ti, _ = f.time_index(time)
periods = self.periods.value if isinstance(self.periods, c_int) else self.periods
#sys.stdout.write("ti periods({}) vs. c-periods({})\n".format(ti_periods,periods))
# ==== experimentally verified: python-computed period and c-computed period are equivalent ==== #
if signdt == -1 and self.ti == 0 and (time - periods*(self.time_full[-1]-self.time_full[0])) == self.time[0] and f.time_periodic:
self.ti = len(self.time)-2
periods -= 1
if signdt == -1 and self.ti > 0:
self.ti -= 1
if self.ti >= len(self.time_full) - 2:
self.ti = len(self.time_full) - 3
self.time = self.time_full[self.ti:self.ti+3]
self.tdim = 3
# ==== this is so to avoid a 'first_updated' re-initialization for each time extrapolation step (which causes the memory to blow) ==== #
#self.update_status = 'first_updated'
        if prev_time_indices is None or len(prev_time_indices) != 3 or len(prev_time_indices) != len(self.time):
# sys.stdout.write("len(grid.prev_time)={} vs len(grid.time)={}\n".format(len(prev_time_indices), len(self.time)))
self.update_status = 'first_updated'
        elif functools.reduce(lambda i, j: i and j, map(lambda m, k: m == k, self.time, prev_time_indices), True) and len(prev_time_indices) == len(self.time):
# sys.stdout.write("grid.prev_time={} (n={}) vs. grid.time={} (n={})\n".format(prev_time_indices, len(prev_time_indices), self.time, len(prev_time_indices)))
self.update_status = 'not_updated'
        elif functools.reduce(lambda i, j: i and j, map(lambda m, k: m == k, self.time[:2], prev_time_indices[:2]), True) and len(prev_time_indices) == len(self.time):
# sys.stdout.write("grid.prev_time={} (n={}) vs. grid.time={} (n={})\n".format(prev_time_indices, len(prev_time_indices), self.time, len(prev_time_indices)))
self.update_status = 'updated'
else:
# sys.stdout.write("ti periods({}) vs. c-periods({})\n".format(ti_periods,periods))
# sys.stdout.write("grid.prev_time={} (n={}) vs. grid.time={} (n={}), period={}\n".format(prev_time_indices, len(prev_time_indices), self.time, len(prev_time_indices), periods))
self.update_status = 'first_updated'
#sys.stdout.write("grid.type={} update_status={}\n".format(type(self), self.update_status))
if signdt >= 0 and (self.ti < len(self.time_full)-3 or not f.allow_time_extrapolation):
nextTime_loc = self.time[2] + periods*(self.time_full[-1]-self.time_full[0])
elif signdt == -1 and (self.ti > 0 or not f.allow_time_extrapolation):
nextTime_loc = self.time[0] + periods*(self.time_full[-1]-self.time_full[0])
return nextTime_loc
class RectilinearGrid(Grid):
"""Rectilinear Grid
Private base class for RectilinearZGrid and RectilinearSGrid
"""
def __init__(self, lon, lat, time, time_origin, mesh):
assert(isinstance(lon, np.ndarray) and len(lon.shape) == 1), 'lon is not a numpy vector'
assert(isinstance(lat, np.ndarray) and len(lat.shape) == 1), 'lat is not a numpy vector'
assert (isinstance(time, np.ndarray) or not time), 'time is not a numpy array'
if isinstance(time, np.ndarray):
assert(len(time.shape) == 1), 'time is not a vector'
super(RectilinearGrid, self).__init__(lon, lat, time, time_origin, mesh)
self.xdim = self.lon.size
self.ydim = self.lat.size
self.tdim = self.time.size
if self.lat[-1] < self.lat[0]:
self.lat = np.flip(self.lat, axis=0)
self.lat_flipped = True
logger.warning_once("Flipping lat data from North-South to South-North")
def add_periodic_halo(self, zonal, meridional, halosize=5):
"""Add a 'halo' to the Grid, through extending the Grid (and lon/lat)
similarly to the halo created for the Fields
:param zonal: Create a halo in zonal direction (boolean)
:param meridional: Create a halo in meridional direction (boolean)
:param halosize: size of the halo (in grid points). Default is 5 grid points
"""
if zonal:
lonshift = (self.lon[-1] - 2 * self.lon[0] + self.lon[1])
if not np.allclose(self.lon[1]-self.lon[0], self.lon[-1]-self.lon[-2]):
logger.warning_once("The zonal halo is located at the east and west of current grid, with a dx = lon[1]-lon[0] between the last nodes of the original grid and the first ones of the halo. In your grid, lon[1]-lon[0] != lon[-1]-lon[-2]. Is the halo computed as you expect?")
self.lon = np.concatenate((self.lon[-halosize:] - lonshift,
self.lon, self.lon[0:halosize] + lonshift))
self.xdim = self.lon.size
self.zonal_periodic = True
self.zonal_halo = halosize
if meridional:
if not np.allclose(self.lat[1]-self.lat[0], self.lat[-1]-self.lat[-2]):
logger.warning_once("The meridional halo is located at the north and south of current grid, with a dy = lat[1]-lat[0] between the last nodes of the original grid and the first ones of the halo. In your grid, lat[1]-lat[0] != lat[-1]-lat[-2]. Is the halo computed as you expect?")
latshift = (self.lat[-1] - 2 * self.lat[0] + self.lat[1])
self.lat = np.concatenate((self.lat[-halosize:] - latshift,
self.lat, self.lat[0:halosize] + latshift))
self.ydim = self.lat.size
self.meridional_halo = halosize
self.lonlat_minmax = np.array([np.nanmin(self.lon), np.nanmax(self.lon), np.nanmin(self.lat), np.nanmax(self.lat)], dtype=np.float32)
if isinstance(self, RectilinearSGrid):
self.add_Sdepth_periodic_halo(zonal, meridional, halosize)
class RectilinearZGrid(RectilinearGrid):
"""Rectilinear Z Grid
:param lon: Vector containing the longitude coordinates of the grid
:param lat: Vector containing the latitude coordinates of the grid
:param depth: Vector containing the vertical coordinates of the grid, which are z-coordinates.
The depth of the different layers is thus constant.
:param time: Vector containing the time coordinates of the grid
:param time_origin: Time origin (TimeConverter object) of the time axis
:param mesh: String indicating the type of mesh coordinates and
units used during velocity interpolation:
1. spherical (default): Lat and lon in degree, with a
correction for zonal velocity U near the poles.
2. flat: No conversion, lat/lon are assumed to be in m.
"""
def __init__(self, lon, lat, depth=None, time=None, time_origin=None, mesh='flat'):
super(RectilinearZGrid, self).__init__(lon, lat, time, time_origin, mesh)
if isinstance(depth, np.ndarray):
assert(len(depth.shape) == 1), 'depth is not a vector'
self.gtype = GridCode.RectilinearZGrid
self.depth = np.zeros(1, dtype=np.float32) if depth is None else depth
self.zdim = self.depth.size
self.z4d = -1 # only used in RectilinearSGrid
if not self.depth.dtype == np.float32:
logger.warning_once("Casting depth data to np.float32")
self.depth = self.depth.astype(np.float32)
class RectilinearSGrid(RectilinearGrid):
"""Rectilinear S Grid. Same horizontal discretisation as a rectilinear z grid,
but with s vertical coordinates
:param lon: Vector containing the longitude coordinates of the grid
:param lat: Vector containing the latitude coordinates of the grid
:param depth: 4D (time-evolving) or 3D (time-independent) array containing the vertical coordinates of the grid,
which are s-coordinates.
s-coordinates can be terrain-following (sigma) or iso-density (rho) layers,
or any generalised vertical discretisation.
           The depth of each node then depends on the horizontal position (lon, lat),
           the layer index and, if depth is a 4D array, the time.
           The depth array is either a 4D array [tdim][zdim][ydim][xdim] or a 3D array [zdim][ydim][xdim].
:param time: Vector containing the time coordinates of the grid
:param time_origin: Time origin (TimeConverter object) of the time axis
:param mesh: String indicating the type of mesh coordinates and
units used during velocity interpolation:
1. spherical (default): Lat and lon in degree, with a
correction for zonal velocity U near the poles.
2. flat: No conversion, lat/lon are assumed to be in m.
"""
def __init__(self, lon, lat, depth, time=None, time_origin=None, mesh='flat'):
super(RectilinearSGrid, self).__init__(lon, lat, time, time_origin, mesh)
assert(isinstance(depth, np.ndarray) and len(depth.shape) in [3, 4]), 'depth is not a 3D or 4D numpy array'
self.gtype = GridCode.RectilinearSGrid
self.depth = depth
self.zdim = self.depth.shape[-3]
self.z4d = len(self.depth.shape) == 4
if self.z4d:
assert self.tdim == self.depth.shape[0], 'depth dimension has the wrong format. It should be [tdim, zdim, ydim, xdim]'
assert self.xdim == self.depth.shape[-1], 'depth dimension has the wrong format. It should be [tdim, zdim, ydim, xdim]'
assert self.ydim == self.depth.shape[-2], 'depth dimension has the wrong format. It should be [tdim, zdim, ydim, xdim]'
else:
assert self.xdim == self.depth.shape[-1], 'depth dimension has the wrong format. It should be [zdim, ydim, xdim]'
assert self.ydim == self.depth.shape[-2], 'depth dimension has the wrong format. It should be [zdim, ydim, xdim]'
if not self.depth.dtype == np.float32:
logger.warning_once("Casting depth data to np.float32")
self.depth = self.depth.astype(np.float32)
if self.lat_flipped:
self.depth = np.flip(self.depth, axis=-2)
class CurvilinearGrid(Grid):
def __init__(self, lon, lat, time=None, time_origin=None, mesh='flat'):
assert(isinstance(lon, np.ndarray) and len(lon.squeeze().shape) == 2), 'lon is not a 2D numpy array'
assert(isinstance(lat, np.ndarray) and len(lat.squeeze().shape) == 2), 'lat is not a 2D numpy array'
assert (isinstance(time, np.ndarray) or not time), 'time is not a numpy array'
if isinstance(time, np.ndarray):
assert(len(time.shape) == 1), 'time is not a vector'
lon = lon.squeeze()
lat = lat.squeeze()
super(CurvilinearGrid, self).__init__(lon, lat, time, time_origin, mesh)
self.xdim = self.lon.shape[1]
self.ydim = self.lon.shape[0]
self.tdim = self.time.size
def add_periodic_halo(self, zonal, meridional, halosize=5):
"""Add a 'halo' to the Grid, through extending the Grid (and lon/lat)
similarly to the halo created for the Fields
:param zonal: Create a halo in zonal direction (boolean)
:param meridional: Create a halo in meridional direction (boolean)
:param halosize: size of the halo (in grid points). Default is 5 grid points
"""
if zonal:
lonshift = self.lon[:, -1] - 2 * self.lon[:, 0] + self.lon[:, 1]
if not np.allclose(self.lon[:, 1]-self.lon[:, 0], self.lon[:, -1]-self.lon[:, -2]):
logger.warning_once("The zonal halo is located at the east and west of current grid, with a dx = lon[:,1]-lon[:,0] between the last nodes of the original grid and the first ones of the halo. In your grid, lon[:,1]-lon[:,0] != lon[:,-1]-lon[:,-2]. Is the halo computed as you expect?")
self.lon = np.concatenate((self.lon[:, -halosize:] - lonshift[:, np.newaxis],
self.lon, self.lon[:, 0:halosize] + lonshift[:, np.newaxis]),
axis=len(self.lon.shape)-1)
self.lat = np.concatenate((self.lat[:, -halosize:],
self.lat, self.lat[:, 0:halosize]),
axis=len(self.lat.shape)-1)
self.xdim = self.lon.shape[1]
self.ydim = self.lat.shape[0]
self.zonal_periodic = True
self.zonal_halo = halosize
if meridional:
if not np.allclose(self.lat[1, :]-self.lat[0, :], self.lat[-1, :]-self.lat[-2, :]):
logger.warning_once("The meridional halo is located at the north and south of current grid, with a dy = lat[1,:]-lat[0,:] between the last nodes of the original grid and the first ones of the halo. In your grid, lat[1,:]-lat[0,:] != lat[-1,:]-lat[-2,:]. Is the halo computed as you expect?")
latshift = self.lat[-1, :] - 2 * self.lat[0, :] + self.lat[1, :]
self.lat = np.concatenate((self.lat[-halosize:, :] - latshift[np.newaxis, :],
self.lat, self.lat[0:halosize, :] + latshift[np.newaxis, :]),
axis=len(self.lat.shape)-2)
self.lon = np.concatenate((self.lon[-halosize:, :],
self.lon, self.lon[0:halosize, :]),
axis=len(self.lon.shape)-2)
self.xdim = self.lon.shape[1]
self.ydim = self.lat.shape[0]
self.meridional_halo = halosize
if isinstance(self, CurvilinearSGrid):
self.add_Sdepth_periodic_halo(zonal, meridional, halosize)
class CurvilinearZGrid(CurvilinearGrid):
"""Curvilinear Z Grid.
:param lon: 2D array containing the longitude coordinates of the grid
:param lat: 2D array containing the latitude coordinates of the grid
:param depth: Vector containing the vertical coordinates of the grid, which are z-coordinates.
The depth of the different layers is thus constant.
:param time: Vector containing the time coordinates of the grid
:param time_origin: Time origin (TimeConverter object) of the time axis
:param mesh: String indicating the type of mesh coordinates and
units used during velocity interpolation:
1. spherical (default): Lat and lon in degree, with a
correction for zonal velocity U near the poles.
2. flat: No conversion, lat/lon are assumed to be in m.
"""
def __init__(self, lon, lat, depth=None, time=None, time_origin=None, mesh='flat'):
super(CurvilinearZGrid, self).__init__(lon, lat, time, time_origin, mesh)
if isinstance(depth, np.ndarray):
assert(len(depth.shape) == 1), 'depth is not a vector'
self.gtype = GridCode.CurvilinearZGrid
self.depth = np.zeros(1, dtype=np.float32) if depth is None else depth
self.zdim = self.depth.size
self.z4d = -1 # only for SGrid
if not self.depth.dtype == np.float32:
logger.warning_once("Casting depth data to np.float32")
self.depth = self.depth.astype(np.float32)
class CurvilinearSGrid(CurvilinearGrid):
"""Curvilinear S Grid.
:param lon: 2D array containing the longitude coordinates of the grid
:param lat: 2D array containing the latitude coordinates of the grid
:param depth: 4D (time-evolving) or 3D (time-independent) array containing the vertical coordinates of the grid,
which are s-coordinates.
s-coordinates can be terrain-following (sigma) or iso-density (rho) layers,
or any generalised vertical discretisation.
           The depth of each node then depends on the horizontal position (lon, lat),
           the layer index and, if depth is a 4D array, the time.
           The depth array is either a 4D array [tdim][zdim][ydim][xdim] or a 3D array [zdim][ydim][xdim].
:param time: Vector containing the time coordinates of the grid
:param time_origin: Time origin (TimeConverter object) of the time axis
:param mesh: String indicating the type of mesh coordinates and
units used during velocity interpolation:
1. spherical (default): Lat and lon in degree, with a
correction for zonal velocity U near the poles.
2. flat: No conversion, lat/lon are assumed to be in m.
"""
def __init__(self, lon, lat, depth, time=None, time_origin=None, mesh='flat'):
super(CurvilinearSGrid, self).__init__(lon, lat, time, time_origin, mesh)
        assert(isinstance(depth, np.ndarray) and len(depth.shape) in [3, 4]), 'depth is not a 3D or 4D numpy array'
self.gtype = GridCode.CurvilinearSGrid
self.depth = depth
self.zdim = self.depth.shape[-3]
self.z4d = len(self.depth.shape) == 4
if self.z4d:
assert self.tdim == self.depth.shape[0], 'depth dimension has the wrong format. It should be [tdim, zdim, ydim, xdim]'
assert self.xdim == self.depth.shape[-1], 'depth dimension has the wrong format. It should be [tdim, zdim, ydim, xdim]'
assert self.ydim == self.depth.shape[-2], 'depth dimension has the wrong format. It should be [tdim, zdim, ydim, xdim]'
else:
assert self.xdim == self.depth.shape[-1], 'depth dimension has the wrong format. It should be [zdim, ydim, xdim]'
assert self.ydim == self.depth.shape[-2], 'depth dimension has the wrong format. It should be [zdim, ydim, xdim]'
if not self.depth.dtype == np.float32:
logger.warning_once("Casting depth data to np.float32")
self.depth = self.depth.astype(np.float32)
| 56.964
| 307
| 0.606699
|
44b0be72012ad69af3148f6dd108e863d031a465
| 1,209
|
py
|
Python
|
portia_server/portia_server/models.py
|
hackrush01/portia
|
c7414034361fecada76e1693666674c274b0421a
|
[
"BSD-3-Clause"
] | 223
|
2018-01-22T07:43:58.000Z
|
2022-03-25T12:52:30.000Z
|
portia_server/portia_server/models.py
|
hackrush01/portia
|
c7414034361fecada76e1693666674c274b0421a
|
[
"BSD-3-Clause"
] | 9
|
2018-03-17T15:16:26.000Z
|
2019-11-11T09:35:04.000Z
|
portia_server/portia_server/models.py
|
hackrush01/portia
|
c7414034361fecada76e1693666674c274b0421a
|
[
"BSD-3-Clause"
] | 76
|
2018-01-22T04:20:00.000Z
|
2021-11-13T09:53:26.000Z
|
from __future__ import unicode_literals
import getpass
import socket
from django.db.models.fields import CharField
from django.contrib.auth.models import AnonymousUser
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class LocalUser(AnonymousUser):
is_active = True
default_username = getpass.getuser()
# add this so that the methods in django.contrib.auth that expect a user
# model with a pk field work correctly
class _meta:
pk = CharField()
pk.set_attributes_from_name('pk')
def __init__(self, **kwargs):
super(LocalUser, self).__init__()
self.username = kwargs.get('username', self.default_username)
if not hasattr(self, 'id'):
self.id = None
@property
def pk(self):
return self.id
def __str__(self):
return 'LocalUser({})'.format(self.username)
def __eq__(self, other):
return isinstance(other, self.__class__) and other.id == self.id
def __hash__(self):
        return hash(self.id)
def is_anonymous(self):
return False
def is_authenticated(self):
return True
def save(self, *args, **kwargs):
pass
| 24.673469
| 76
| 0.674938
|
af77c1ced92284bcf3a87356613dbdf0f30ab59d
| 160
|
py
|
Python
|
backend/auth0login/apps.py
|
webclinic017/WallStreetBots
|
f4f73539b080ce7667b52ee8195919ec4e7b79b8
|
[
"MIT"
] | 4
|
2021-11-12T02:04:30.000Z
|
2022-01-03T22:56:41.000Z
|
backend/auth0login/apps.py
|
webclinic017/WallStreetBots
|
f4f73539b080ce7667b52ee8195919ec4e7b79b8
|
[
"MIT"
] | 5
|
2021-11-11T20:48:34.000Z
|
2022-03-12T18:08:40.000Z
|
backend/auth0login/apps.py
|
webclinic017/WallStreetBots
|
f4f73539b080ce7667b52ee8195919ec4e7b79b8
|
[
"MIT"
] | 2
|
2021-11-15T14:23:36.000Z
|
2021-11-27T19:44:38.000Z
|
from django.apps import AppConfig
class Auth0LoginConfig(AppConfig):
default_auto_field = 'django.db.models.BigAutoField'
name = 'backend.auth0login'
| 22.857143
| 56
| 0.775
|
2cace4a8f9bcb7c4f5c5716dd16b609fb8396337
| 5,375
|
py
|
Python
|
simsapa/layouts/memo_dialog.py
|
ilius/simsapa
|
b6c46cb6cba39557f665c0aaf63b84659dd983b8
|
[
"MIT"
] | null | null | null |
simsapa/layouts/memo_dialog.py
|
ilius/simsapa
|
b6c46cb6cba39557f665c0aaf63b84659dd983b8
|
[
"MIT"
] | null | null | null |
simsapa/layouts/memo_dialog.py
|
ilius/simsapa
|
b6c46cb6cba39557f665c0aaf63b84659dd983b8
|
[
"MIT"
] | null | null | null |
import logging as _logging
import json
from PyQt5.QtCore import pyqtSignal, QItemSelectionModel
from PyQt5.QtWidgets import (QHBoxLayout, QDialog, QPushButton, QPlainTextEdit, QFormLayout)
from sqlalchemy.sql import func
# from ..app.db import appdata_models as Am
from ..app.db import userdata_models as Um
logger = _logging.getLogger(__name__)
class MemoDialog(QDialog):
accepted = pyqtSignal(dict)
def __init__(self, text=''):
super().__init__()
self.setWindowTitle("Create Memo")
self.front = QPlainTextEdit()
self.front.setFixedSize(300, 50)
self.front.textChanged.connect(self.unlock)
self.back = QPlainTextEdit(text)
self.back.setFixedSize(300, 50)
self.back.textChanged.connect(self.unlock)
self.add_btn = QPushButton('Add')
self.add_btn.setDisabled(True)
self.add_btn.clicked.connect(self.add_pressed)
self.close_btn = QPushButton('Close')
self.close_btn.clicked.connect(self.close_pressed)
form = QFormLayout(self)
form.addRow('Front', self.front)
form.addRow('Back', self.back)
self.buttons_layout = QHBoxLayout()
self.buttons_layout.addWidget(self.add_btn)
self.buttons_layout.addWidget(self.close_btn)
form.addRow(self.buttons_layout)
def not_blanks(self) -> bool:
front = self.front.toPlainText()
back = self.back.toPlainText()
return front.strip() != '' and back.strip() != ''
def unlock(self):
if self.not_blanks():
self.add_btn.setEnabled(True)
else:
self.add_btn.setDisabled(True)
def add_pressed(self):
values = {
'Front': self.front.toPlainText(),
'Back': self.back.toPlainText(),
}
self.accepted.emit(values)
self.accept()
def close_pressed(self):
self.close()
class HasMemoDialog:
def init_memo_dialog(self):
self.memo_dialog_fields = {}
def set_memo_dialog_fields(self, values):
self.memo_dialog_fields = values
def handle_create_memo_for_sutta(self):
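        # Prefill the memo from the current selection; on accept, persist it and
        # link it to the currently open sutta via a MemoAssociation row.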
text = self.content_html.selectedText()
deck = self._app_data.db_session.query(Um.Deck).first()
self.memo_dialog_fields = {
'Front': '',
'Back': '',
}
d = MemoDialog(text)
d.accepted.connect(self.set_memo_dialog_fields)
d.exec_()
if self.memo_dialog_fields['Front'] == '' or self.memo_dialog_fields['Back'] == '':
return
memo = Um.Memo(
deck_id=deck.id,
fields_json=json.dumps(self.memo_dialog_fields),
created_at=func.now(),
)
try:
self._app_data.db_session.add(memo)
self._app_data.db_session.commit()
            if self._current_sutta is not None:
                schema = self._current_sutta.metadata.schema
memo_assoc = Um.MemoAssociation(
memo_id=memo.id,
associated_table=f'{schema}.suttas',
associated_id=self._current_sutta.id,
)
self._app_data.db_session.add(memo_assoc)
self._app_data.db_session.commit()
except Exception as e:
logger.error(e)
if 'memos_sidebar' in self.features:
# Add to model
if self.model.memos:
self.model.memos.append(memo)
else:
self.model.memos = [memo]
index = self.model.index(len(self.model.memos) - 1)
self.memos_list.selectionModel().select(index, QItemSelectionModel.Select)
self.update_memos_list()
def handle_create_memo_for_dict_word(self):
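        # Same flow as above, but the memo is linked to the current dictionary word.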
text = self.content_html.selectedText()
deck = self._app_data.db_session.query(Um.Deck).first()
self.memo_dialog_fields = {
'Front': '',
'Back': '',
}
d = MemoDialog(text)
d.accepted.connect(self.set_memo_dialog_fields)
d.exec_()
if self.memo_dialog_fields['Front'] == '' or self.memo_dialog_fields['Back'] == '':
return
memo = Um.Memo(
deck_id=deck.id,
fields_json=json.dumps(self.memo_dialog_fields),
created_at=func.now(),
)
try:
self._app_data.db_session.add(memo)
self._app_data.db_session.commit()
            if self._current_word is not None:
                schema = self._current_word.metadata.schema
memo_assoc = Um.MemoAssociation(
memo_id=memo.id,
associated_table=f'{schema}.dict_words',
associated_id=self._current_word.id,
)
self._app_data.db_session.add(memo_assoc)
self._app_data.db_session.commit()
except Exception as e:
logger.error(e)
if 'memos_sidebar' in self.features:
# Add to model
if self.model.memos:
self.model.memos.append(memo)
else:
self.model.memos = [memo]
index = self.model.index(len(self.model.memos) - 1)
self.memos_list.selectionModel().select(index, QItemSelectionModel.Select)
self.update_memos_list()
| 28.590426
| 92
| 0.589395
|