Dataset schema (one record per source file; ⌀ marks nullable columns):
- hexsha: string (length 40)
- size: int64 (3 – 1.03M)
- ext: string (10 classes)
- lang: string (1 class)
- max_stars_repo_path, max_issues_repo_path, max_forks_repo_path: string (length 3 – 972)
- max_stars_repo_name, max_issues_repo_name, max_forks_repo_name: string (length 6 – 130)
- max_stars_repo_head_hexsha, max_issues_repo_head_hexsha, max_forks_repo_head_hexsha: string (length 40 – 78)
- max_stars_repo_licenses, max_issues_repo_licenses, max_forks_repo_licenses: list (length 1 – 10)
- max_stars_count: int64 (1 – 191k) ⌀
- max_issues_count: int64 (1 – 116k) ⌀
- max_forks_count: int64 (1 – 105k) ⌀
- max_stars_repo_stars_event_min_datetime, max_stars_repo_stars_event_max_datetime: string (length 24) ⌀
- max_issues_repo_issues_event_min_datetime, max_issues_repo_issues_event_max_datetime: string (length 24) ⌀
- max_forks_repo_forks_event_min_datetime, max_forks_repo_forks_event_max_datetime: string (length 24) ⌀
- content: string (length 3 – 1.03M)
- avg_line_length: float64 (1.13 – 941k)
- max_line_length: int64 (2 – 941k)
- alphanum_fraction: float64 (0 – 1)
---
hexsha: f53792904c91e5d7cd3980d8c43aef9503813fb4 | size: 1,761 | ext: py | lang: Python
path: check_sap_shortdumps.py | repo: kmodric/check_sap_shortdumps.py | head: 4e6a46b6e6f9022abf4f5bdc14906b0ae3d371e4 | licenses: ["MIT"]
stars: null | issues: null | forks: null
content:
#!/usr/bin/python
# Nagios-style check for SAP short dumps via the /SDF/GET_DUMP_LOG RFC.
import os
os.chdir('/tmp')
# sapnwrfc - a Python interface to SAP NetWeaver R/3 systems using the RFC protocol,
# built on the SAP NW RFC SDK. http://www.piersharding.com/blog/
# https://github.com/piersharding/python-sapnwrfc
import sapnwrfc
import sys
if len(sys.argv) != 4:
    print "Usage: " + sys.argv[0] + " <SID> <warning shortdumps> <critical shortdumps>"
    sys.exit(3)
from datetime import date, timedelta
yesterday = date.today() - timedelta(1)
config_file = "/etc/sapmon/" + sys.argv[1] + ".yml"
if os.path.exists(config_file):
    sapnwrfc.base.config_location = config_file
else:
    print "File not found: " + config_file
    sys.exit(3)
sapnwrfc.base.load_config()
try:
    conn = sapnwrfc.base.rfc_connect()
    fd = conn.discover("/SDF/GET_DUMP_LOG")
    f = fd.create_function_call()
    f.DATE_FROM(yesterday.strftime('%Y%m%d'))
    f.invoke()
    d = f.ET_E2E_LOG.value
    # Close the connection before exiting; the original close sat after the
    # sys.exit() calls and was never reached.
    conn.close()
    st22 = str(len(d))
    if len(d) >= int(sys.argv[3]):
        print "CRITICAL: ShortDumps in last two days -w " + sys.argv[2] + " -c " + sys.argv[3] + ": " + st22 + " | ShortDump=" + st22
        sys.exit(2)
    elif len(d) >= int(sys.argv[2]):
        print "WARNING: ShortDumps in last two days -w " + sys.argv[2] + " -c " + sys.argv[3] + ": " + st22 + " | ShortDump=" + st22
        sys.exit(1)
    else:
        print "OK: ShortDumps in last two days -w " + sys.argv[2] + " -c " + sys.argv[3] + ": " + st22 + " | ShortDump=" + st22
        sys.exit(0)
except sapnwrfc.RFCCommunicationError as e:
    if 'NO_DATA_FOUND' in e[0]:
        # An empty dump log surfaces as a communication error, so report OK.
        print "OK: ShortDumps in last two days -w " + sys.argv[2] + " -c " + sys.argv[3] + ": 0 | ShortDump=0"
        sys.exit(0)
    print "UNKNOWN: " + e[0]
    sys.exit(3)
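A minimal sketch of the connection settings this check expects in /etc/sapmon/<SID>.yml; the field names below follow the python-sapnwrfc documentation and are assumptions, not taken from the script itself:
```
# Hypothetical /etc/sapmon/PRD.yml for a SID of "PRD" (verify the exact keys
# against your python-sapnwrfc version):
#
#   ashost: sap-prd.example.com   # application server host
#   sysnr: "00"                   # system number
#   client: "100"
#   user: MONITOR                 # RFC-authorized monitoring user
#   passwd: secret
#   lang: EN
```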
stats: avg_line_length 30.894737 | max_line_length 114 | alphanum_fraction 0.642249
---
hexsha: e8bac25e6b9cb3cec37dd4d2a44cd17a5e5fd140 | size: 283 | ext: py | lang: Python
path: sandbox/rocky/tf/spaces/box.py | repo: RussellM2020/maml_gps | head: 631560dfd4e23dc2da9bfbbd2e3c5252aa9775c5 | licenses: ["MIT"]
stars: 26 (2019-04-02T16:19:10.000Z – 2019-11-13T13:18:06.000Z) | issues: 1 (2019-04-24T14:01:14.000Z – 2019-04-25T14:16:25.000Z) | forks: 8 (2019-12-17T09:11:58.000Z – 2021-07-02T06:55:31.000Z)
content:
from rllab.spaces.box import Box as TheanoBox
import tensorflow as tf
class Box(TheanoBox):
def new_tensor_variable(self, name, extra_dims, add_to_flat_dim=0):
return tf.placeholder(tf.float32, shape=[None] * extra_dims + [self.flat_dim+add_to_flat_dim], name=name)
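A hypothetical usage sketch, assuming rllab's `TheanoBox` constructor signature `Box(low, high, shape)` and TF1 graph mode (where `tf.placeholder` exists):
```
# Hypothetical example; Box(low, high, shape) is inherited from rllab's TheanoBox.
box = Box(low=-1.0, high=1.0, shape=(3,))
obs_var = box.new_tensor_variable("obs", extra_dims=1)
# obs_var is a float32 placeholder of shape [None, 3] named "obs"
```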
stats: avg_line_length 25.727273 | max_line_length 113 | alphanum_fraction 0.75265
---
hexsha: 359b43c10e56b673021ef42ad4fc6c65875f08b6 | size: 401 | ext: py | lang: Python
path: urlshortener/wsgi.py | repo: vikaschandak/urlshortener | head: de0fad8dbfe031f553efcf95c3b6a985a97b97e5 | licenses: ["MIT"]
stars: 2 (2021-04-02T15:46:52.000Z – 2021-04-02T20:05:56.000Z) | issues: null | forks: 2 (2020-09-30T15:53:35.000Z – 2020-09-30T16:38:47.000Z)
content:
"""
WSGI config for urlshortener project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'urlshortener.settings')
application = get_wsgi_application()
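The `application` callable above is what a WSGI server imports; a typical (assumed) deployment command:
```
# Hypothetical deployment, using gunicorn's module:callable syntax:
#   gunicorn urlshortener.wsgi:application
```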
stats: avg_line_length 23.588235 | max_line_length 78 | alphanum_fraction 0.790524
---
hexsha: 1673d68325bf8de993ca041d671c343255ad019d | size: 8,489 | ext: py | lang: Python
path: symphony/cli/pyinventory/api/property_type.py | repo: idoshveki/magma | head: 8022267bd8b8d94913fbb9a0836880361d785446 | licenses: ["BSD-3-Clause"]
stars: null | issues: null | forks: 1 (2021-05-18T06:54:58.000Z – 2021-05-18T06:54:58.000Z)
content:
#!/usr/bin/env python3
# Copyright (c) 2004-present Facebook All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
from typing import List, Sequence
from pysymphony import SymphonyClient
from .._utils import format_property_definitions, get_property_type_input
from ..common.cache import EQUIPMENT_TYPES, LOCATION_TYPES, PORT_TYPES, SERVICE_TYPES
from ..common.data_class import PropertyDefinition
from ..common.data_enum import Entity
from ..exceptions import EntityNotFoundError
from ..graphql.fragment.property_type import PropertyTypeFragment
from ..graphql.input.property_type import PropertyTypeInput
def get_property_types(
client: SymphonyClient, entity_type: Entity, entity_name: str
) -> Sequence[PropertyTypeFragment]:
"""Get property types on specific entity. `entity_type` - ["LocationType", "EquipmentType", "ServiceType", "EquipmentPortType"]
Args:
entity_type ( `pyinventory.common.data_enum.Entity` ): existing entity type
entity_name (str): existing entity name
Returns:
Sequence[ `pyinventory.graphql.fragment.property_type.PropertyTypeFragment` ]
Raises:
        `pyinventory.exceptions.EntityNotFoundError`: if the entity type is not found or has no property types
Example:
```
property_type = client.get_property_types(
entity_type=Entity.EquipmentType,
entity_name="Card",
)
```
"""
existing_entity_types = {
Entity.LocationType: LOCATION_TYPES,
Entity.EquipmentType: EQUIPMENT_TYPES,
Entity.ServiceType: SERVICE_TYPES,
Entity.EquipmentPortType: PORT_TYPES,
}.get(entity_type, None)
if existing_entity_types is None:
raise EntityNotFoundError(entity=entity_type)
existing_entity_type = None
# pyre-fixme[16]: `None` has no attribute `__getitem__`.
if entity_name in existing_entity_types:
existing_entity_type = existing_entity_types[entity_name]
if existing_entity_type is None:
raise EntityNotFoundError(entity=entity_type, entity_name=entity_name)
return existing_entity_type.property_types
def get_property_type(
client: SymphonyClient, entity_type: Entity, entity_name: str, property_type_id: str
) -> PropertyTypeFragment:
"""Get property type on specific entity. `entity_type` - ["LocationType", "EquipmentType", "ServiceType", "EquipmentPortType"]
Args:
entity_type ( `pyinventory.common.data_enum.Entity` ): existing entity type
entity_name (str): existing entity name
property_type_id (str): property type ID
Returns:
`pyinventory.graphql.fragment.property_type.PropertyTypeFragment` object
Raises:
        `pyinventory.exceptions.EntityNotFoundError`: if property type with id=`property_type_id` is not found
Example:
```
property_type = client.get_property_type(
entity_type=Entity.EquipmentType,
entity_name="Card",
property_type_id="12345",
)
```
"""
property_types = get_property_types(
client=client, entity_type=entity_type, entity_name=entity_name
)
for property_type in property_types:
if property_type.id == property_type_id:
return property_type
raise EntityNotFoundError(entity=Entity.PropertyType, entity_id=property_type_id)
def get_property_type_id(
client: SymphonyClient,
entity_type: Entity,
entity_name: str,
property_type_name: str,
) -> str:
"""Get property type ID on specific entity. `entity_type` - ["LocationType", "EquipmentType", "ServiceType", "EquipmentPortType"]
Args:
entity_type ( `pyinventory.common.data_enum.Entity` ): existing entity type
entity_name (str): existing entity name
        property_type_name (str): property type name
Returns:
property type ID (str): property type ID
Raises:
        `pyinventory.exceptions.EntityNotFoundError`: if property type with name=`property_type_name` is not found
Example:
```
property_type = client.get_property_type_id(
entity_type=Entity.EquipmentType,
entity_name="Card",
property_type_name="IP",
)
```
"""
property_types = get_property_types(
client=client, entity_type=entity_type, entity_name=entity_name
)
for property_type in property_types:
if property_type.name == property_type_name:
return property_type.id
raise EntityNotFoundError(
entity=Entity.PropertyType, entity_name=property_type_name
)
def get_property_type_by_external_id(
client: SymphonyClient,
entity_type: Entity,
entity_name: str,
property_type_external_id: str,
) -> PropertyTypeFragment:
"""Get property type by external ID on specific entity. `entity_type` - ["LocationType", "EquipmentType", "ServiceType", "EquipmentPortType"]
Args:
entity_type ( `pyinventory.common.data_enum.Entity` ): existing entity type
entity_name (str): existing entity name
property_type_external_id (str): property type external ID
Returns:
`pyinventory.graphql.fragment.property_type.PropertyTypeFragment` object
Raises:
`pyinventory.exceptions.EntityNotFoundError`: property type with external_id=`property_type_external_id` is not found
Example:
```
property_type = client.get_property_type_by_external_id(
entity_type=Entity.EquipmentType,
entity_name="Card",
property_type_external_id="12345",
)
```
"""
property_types = get_property_types(
client=client, entity_type=entity_type, entity_name=entity_name
)
for property_type in property_types:
if property_type.externalId == property_type_external_id:
return property_type
raise EntityNotFoundError(
entity=Entity.PropertyType, msg=f"<external_id: {property_type_external_id}>"
)
def edit_property_type(
client: SymphonyClient,
entity_type: Entity,
entity_name: str,
property_type_id: str,
new_property_definition: PropertyDefinition,
) -> List[PropertyTypeInput]:
"""Edit specific property type on specific entity. `entity_type` - ["LocationType", "EquipmentType", "ServiceType", "EquipmentPortType"]
Args:
entity_type ( `pyinventory.common.data_enum.Entity` ): existing entity type
entity_name (str): existing entity name
property_type_id (str): existing property type ID
new_property_definition ( `pyinventory.common.data_class.PropertyDefinition` ): new property definition
Returns:
List[ `pyinventory.graphql.input.property_type.PropertyTypeInput` ]
Raises:
        `pyinventory.exceptions.EntityNotFoundError`: if the entity is not found or has no property types
Example:
```
property_types = client.edit_property_type(
entity_type=Entity.EquipmentType,
entity_name="Card",
property_type_id="12345",
            new_property_definition=PropertyDefinition(
property_name="new_name",
property_kind=PropertyKind.string,
default_value=None,
is_fixed=False,
external_id="ex_12345",
),
)
```
"""
property_types = get_property_types(
client=client, entity_type=entity_type, entity_name=entity_name
)
edited_property_types = []
for property_type in property_types:
property_type_input = get_property_type_input(property_type, is_new=False)
if property_type_input.id == property_type_id:
            formatted_property_definitions = format_property_definitions(
                [new_property_definition]
            )
            formatted_property_definitions[0].id = property_type_input.id
            property_type_input = formatted_property_definitions[0]
edited_property_types.append(property_type_input)
return edited_property_types
stats: avg_line_length 36.433476 | max_line_length 145 | alphanum_fraction 0.671457
---
hexsha: 560b145db1bbd2da69d9e678b2822124d1995b5b | size: 1,583 | ext: py | lang: Python
path: binary_search.py | repo: gsuryalss/searching | head: 25f4ed7df411a0af657b5763e55bf42c1168d6d7 | licenses: ["MIT"]
stars: null | issues: null | forks: null
content:
"""
Binary search is used on a collection of sorted items. It relies on the technique of repeatedly dividing the
input into half until it finds the search value.
* Divide the array element into half & find the median value (//2)
* check the median element, if it is equal return it
* if the median element is greater than key, reduce the upper bound to median-1
* if the median value is lesser than key, increase the lower bound to median+1
As we dispose off one part of the search case during every step of binary search, and perform the search
operation on the other half, this results in a worst case time complexity of O(log2N).
"""
from bubble_sort import bubble_sort
def binary_search(arr_param, item):
first = 0
last = len(arr_param) - 1
found = False
while first <= last and not found:
mid_pos = (first + last)//2
if arr_param[mid_pos] < item:
first = mid_pos + 1
elif arr_param[mid_pos] > item:
last = mid_pos - 1
else:
print("Position", mid_pos)
found = True
return found
arr = []
print("Binary Search\n")
# array size
m = int(input("Enter the array size:>>"))
# array input
print("Enter the array elements(new line):\n")
for _ in range(m):
    arr.append(int(input()))
# input search element
find = int(input("Enter the search value:>>"))
# sort the input array
sorted_arr = bubble_sort(arr)
print("Sorted Array: ", sorted_arr)
# search the element in input array
print("Value Found" if binary_search(sorted_arr, find) else "Value Not Found")
stats: avg_line_length 29.314815 | max_line_length 108 | alphanum_fraction 0.679722
---
hexsha: db42fa58dda8b65448ed7c100210974cc1d21937 | size: 3,551 | ext: py | lang: Python
path: cogs/game/minigames/game_of_life/functions.py | repo: FellowHashbrown/omega-psi-py | head: 4ea33cdbef15ffaa537f2c9e382de508c58093fc | licenses: ["MIT"]
stars: 4 (2018-12-23T08:49:40.000Z – 2021-03-25T16:51:43.000Z) | issues: 23 (2020-11-03T17:40:40.000Z – 2022-02-01T17:12:59.000Z) | forks: 1 (2019-07-11T23:40:13.000Z – 2019-07-11T23:40:13.000Z)
content:
from random import choice
def choose_house(player, *, buy = True, house_one = None, house_two = None):
"""Has the AI intelligently choose a house depending on the purchase prices
and the sell prices
:param player: The player object that must choose between two houses
    :param buy: True when the player is deciding which of two houses to buy,
        False when choosing one of their own houses to sell
:param house_one: The first house card the player must decide on
Note: this is only used when deciding on buying a house
:param house_two: The second house card the player must decide on
Note: this is only used when deciding on buying a house
"""
# Check if the player is choosing between 2 houses
if buy:
# Check if the purchase prices are the same
if house_one.purchase == house_two.purchase:
# Check if the low sell prices are the same
if house_one.spin_black == house_two.spin_black:
# Check if the high sell prices are the same
# if so, choose a random house
if house_one.spin_red == house_two.spin_red:
return choice([house_one, house_two])
                # Choose the higher high sell price
                # (low sells were equal above, so compare the high sells, spin_red)
                return max([house_one, house_two], key = lambda house: house.spin_red)
            # Choose the higher low sell price (spin_black)
            return max([house_one, house_two], key = lambda house: house.spin_black)
# If the player has enough money for the higher one, choose it
# if not, choose the lower one as long as the player does
# not need to take out any loans
max_house = max([house_one, house_two], key = lambda house: house.purchase)
min_house = min([house_one, house_two], key = lambda house: house.purchase)
if player.cash >= max_house.purchase:
return max_house
if player.cash >= min_house.purchase:
return min_house
return None
# The player is selling a house
else:
# Look through the player's houses for the highest low sell price
highest_low = [player.house_cards[0]]
highest_sell = highest_low[0].spin_red
for house in player.house_cards:
# Check if the house has a higher low sell than the previous
if house.spin_red > highest_sell:
highest_low = [house]
highest_sell = house.spin_red
# Check if the house has an equivalent low sell with previous houses
elif house.spin_red == highest_sell:
highest_low.append(house)
# Look through the highest low sells for the highest high sell
# # First, check if there is only 1 house
if len(highest_low) == 1:
return highest_low[0]
# # There is more than 1 house, find the highest high sell
else:
highest_high = [highest_low[0]]
highest_sell = highest_low[0].spin_black
for house in highest_low:
# Check if the house has a higher high sell than the previous
if house.spin_black > highest_sell:
highest_high = [house]
highest_sell = house.spin_black
# Check if the house has an equivalent high sell with previous houses
elif house.spin_black == highest_sell:
highest_high.append(house)
# Choose a house at random from the list
return choice(highest_high)
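A minimal sanity check of the buy branch, using hypothetical stand-ins for the player and house-card objects (only the attributes `choose_house` reads are stubbed):
```
# Hypothetical stubs; real objects come from the game engine.
from collections import namedtuple

House = namedtuple("House", ["purchase", "spin_black", "spin_red"])
Player = namedtuple("Player", ["cash", "house_cards"])

cheap = House(purchase=100_000, spin_black=50_000, spin_red=150_000)
fancy = House(purchase=200_000, spin_black=120_000, spin_red=300_000)
buyer = Player(cash=250_000, house_cards=[])
# The buyer can afford the pricier house, so it is chosen.
assert choose_house(buyer, buy=True, house_one=cheap, house_two=fancy) is fancy
```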
stats: avg_line_length 42.27381 | max_line_length 88 | alphanum_fraction 0.604618
---
hexsha: ddcf189d16247230172843d2da786dfc39eebba5 | size: 2,339 | ext: py | lang: Python
path: deletetweets/__main__.py | repo: theandrebass/delete-tweets | head: 308053df8221f0a8ddd56456e12ada233f3b1525 | licenses: ["0BSD"]
stars: null | issues: null | forks: null
content:
#!/usr/bin/env python
import argparse
import os
import sys
from deletetweets import deletetweets
__author__ = "Koen Rouwhorst"
__version__ = "1.0.6"
def main():
parser = argparse.ArgumentParser(description="Delete old tweets.")
parser.add_argument("--since", dest="since_date", help="Delete tweets since this date")
parser.add_argument("--until", dest="until_date", help="Delete tweets until this date")
parser.add_argument("--filter", action="append", dest="filters", choices=["replies", "retweets"],
help="Filter replies or retweets", default=[])
parser.add_argument("file", help="Path to the tweet.js file",
type=str)
parser.add_argument("--spare-ids", dest="spare_ids", help="A list of tweet ids to spare",
type=str, nargs="+", default=[])
parser.add_argument("--spare-min-likes", dest="min_likes",
help="Spare tweets with more than the provided likes", type=int, default=0)
parser.add_argument("--spare-min-retweets", dest="min_retweets",
help="Spare tweets with more than the provided retweets", type=int, default=0)
parser.add_argument("--dry-run", dest="dry_run", action="store_true", default=False)
parser.add_argument('--version', action='version', version='%(prog)s ' + __version__)
# legacy options
parser.add_argument("-d", dest="until_date", help=argparse.SUPPRESS)
parser.add_argument("-r", dest="restrict", choices=["reply", "retweet"], help=argparse.SUPPRESS)
args = parser.parse_args()
if not ("TWITTER_CONSUMER_KEY" in os.environ and
"TWITTER_CONSUMER_SECRET" in os.environ and
"TWITTER_ACCESS_TOKEN" in os.environ and
"TWITTER_ACCESS_TOKEN_SECRET" in os.environ):
sys.stderr.write("Twitter API credentials not set.\n")
        sys.exit(1)
filters = []
if args.restrict == "reply":
filters.append("replies")
elif args.restrict == "retweet":
filters.append("retweets")
for f in args.filters:
if f not in filters:
filters.append(f)
deletetweets.delete(args.file, args.since_date, args.until_date, filters, args.spare_ids,
args.min_likes, args.min_retweets, args.dry_run)
if __name__ == "__main__":
main()
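An example invocation under the environment checked in `main()` (paths and dates are illustrative):
```
# export TWITTER_CONSUMER_KEY=... TWITTER_CONSUMER_SECRET=...
# export TWITTER_ACCESS_TOKEN=... TWITTER_ACCESS_TOKEN_SECRET=...
# python -m deletetweets --until 2020-01-01 --filter retweets tweet.js
```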
stats: avg_line_length 39.644068 | max_line_length 102 | alphanum_fraction 0.645575
---
hexsha: 5ba5c8395dcd246376056a1c767d60bb8579c3c9 | size: 42,075 | ext: py | lang: Python
path: zhaquirks/xiaomi/aqara/opple_remote.py | repo: guirem/zha-device-handlers | head: 9dc8a68d43e064b8c4ba7dfd29a4b4382e030a05 | licenses: ["Apache-2.0"]
stars: null | issues: null | forks: null
content:
"""Xiaomi aqara opple remote devices."""
import logging
from zigpy.profiles import zha
import zigpy.types as types
from zigpy.zcl.clusters.general import (
Basic,
Identify,
LevelControl,
MultistateInput,
OnOff,
)
from zigpy.zcl.clusters.lighting import Color
from zigpy.zdo.types import NodeDescriptor
from zhaquirks import CustomCluster
from .. import LUMI, BasicCluster, XiaomiCustomDevice
from ... import PowerConfigurationCluster
from ...const import (
ALT_DOUBLE_PRESS,
ALT_LONG_PRESS,
ALT_SHORT_PRESS,
ARGS,
ATTR_ID,
BUTTON,
BUTTON_1,
BUTTON_2,
BUTTON_3,
BUTTON_4,
BUTTON_5,
BUTTON_6,
COMMAND,
COMMAND_MOVE,
COMMAND_MOVE_COLOR_TEMP,
COMMAND_OFF,
COMMAND_ON,
COMMAND_STEP,
COMMAND_STEP_COLOR_TEMP,
DEVICE_TYPE,
DOUBLE_PRESS,
ENDPOINT_ID,
ENDPOINTS,
INPUT_CLUSTERS,
LONG_PRESS,
LONG_RELEASE,
MODELS_INFO,
NODE_DESCRIPTOR,
OUTPUT_CLUSTERS,
PRESS_TYPE,
PROFILE_ID,
SHORT_PRESS,
TRIPLE_PRESS,
VALUE,
ZHA_SEND_EVENT,
)
PRESS_TYPES = {0: "long press", 1: "single", 2: "double", 3: "triple", 255: "release"}
STATUS_TYPE_ATTR = 0x0055 # decimal = 85
COMMAND_1_SINGLE = "1_single"
COMMAND_1_DOUBLE = "1_double"
COMMAND_1_TRIPLE = "1_triple"
COMMAND_1_HOLD = "1_hold"
COMMAND_1_RELEASE = "1_release"
COMMAND_2_SINGLE = "2_single"
COMMAND_2_DOUBLE = "2_double"
COMMAND_2_TRIPLE = "2_triple"
COMMAND_2_HOLD = "2_hold"
COMMAND_2_RELEASE = "2_release"
COMMAND_3_SINGLE = "3_single"
COMMAND_3_DOUBLE = "3_double"
COMMAND_3_TRIPLE = "3_triple"
COMMAND_3_HOLD = "3_hold"
COMMAND_3_RELEASE = "3_release"
COMMAND_4_SINGLE = "4_single"
COMMAND_4_DOUBLE = "4_double"
COMMAND_4_TRIPLE = "4_triple"
COMMAND_4_HOLD = "4_hold"
COMMAND_4_RELEASE = "4_release"
COMMAND_5_SINGLE = "5_single"
COMMAND_5_DOUBLE = "5_double"
COMMAND_5_TRIPLE = "5_triple"
COMMAND_5_HOLD = "5_hold"
COMMAND_5_RELEASE = "5_release"
COMMAND_6_SINGLE = "6_single"
COMMAND_6_DOUBLE = "6_double"
COMMAND_6_TRIPLE = "6_triple"
COMMAND_6_HOLD = "6_hold"
COMMAND_6_RELEASE = "6_release"
OPPLE_CLUSTER_ID = 0xFCC0
OPPLE_MFG_CODE = 0x115F
_LOGGER = logging.getLogger(__name__)
class MultistateInputCluster(CustomCluster, MultistateInput):
"""Multistate input cluster."""
cluster_id = MultistateInput.cluster_id
def __init__(self, *args, **kwargs):
"""Init."""
self._current_state = None
super().__init__(*args, **kwargs)
async def configure_reporting(
self,
attribute,
min_interval,
max_interval,
reportable_change,
manufacturer=None,
):
"""Configure reporting."""
pass
def _update_attribute(self, attrid, value):
super()._update_attribute(attrid, value)
if attrid == STATUS_TYPE_ATTR:
self._current_state = PRESS_TYPES.get(value)
event_args = {
BUTTON: self.endpoint.endpoint_id,
PRESS_TYPE: self._current_state,
ATTR_ID: attrid,
VALUE: value,
}
action = "{}_{}".format(self.endpoint.endpoint_id, self._current_state)
self.listener_event(ZHA_SEND_EVENT, action, event_args)
# show something in the sensor in HA
super()._update_attribute(0, action)
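# For example: a single press of the button behind endpoint 1 writes value 1 to
# STATUS_TYPE_ATTR (0x0055), which PRESS_TYPES maps to "single", so the cluster
# fires a ZHA event with action "1_single" and mirrors it into attribute 0.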
class OppleCluster(CustomCluster):
"""Opple cluster."""
ep_attribute = "opple_cluster"
cluster_id = OPPLE_CLUSTER_ID
manufacturer_attributes = {0x0009: ("mode", types.uint8_t)}
attr_config = {0x0009: 0x01}
def __init__(self, *args, **kwargs):
"""Init."""
self._current_state = None
super().__init__(*args, **kwargs)
async def bind(self):
"""Bind cluster."""
result = await super().bind()
await self.write_attributes(self.attr_config, manufacturer=OPPLE_MFG_CODE)
return result
class RemoteB286OPCN01(XiaomiCustomDevice):
"""Aqara Opple 2 button remote device."""
signature = {
# <SimpleDescriptor endpoint=1 profile=260 device_type=261
# device_version=1
# input_clusters=[0, 3, 1]
# output_clusters=[3, 6, 8, 768]>
MODELS_INFO: [(LUMI, "lumi.remote.b286opcn01")],
ENDPOINTS: {
1: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.COLOR_DIMMER_SWITCH,
INPUT_CLUSTERS: [
Basic.cluster_id,
Identify.cluster_id,
PowerConfigurationCluster.cluster_id,
],
OUTPUT_CLUSTERS: [
Identify.cluster_id,
OnOff.cluster_id,
LevelControl.cluster_id,
Color.cluster_id,
],
},
# <SimpleDescriptor endpoint=2 profile=260 device_type=259
# device_version=1
# input_clusters=[3]
# output_clusters=[6, 3]>
2: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [Identify.cluster_id],
OUTPUT_CLUSTERS: [Identify.cluster_id, OnOff.cluster_id],
},
3: {},
4: {},
5: {},
6: {},
},
}
replacement = {
NODE_DESCRIPTOR: NodeDescriptor(
0x02, 0x40, 0x80, 0x115F, 0x7F, 0x0064, 0x2C00, 0x0064, 0x00
),
ENDPOINTS: {
1: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.COLOR_DIMMER_SWITCH,
INPUT_CLUSTERS: [
BasicCluster,
Identify.cluster_id,
PowerConfigurationCluster,
OppleCluster,
MultistateInputCluster,
],
OUTPUT_CLUSTERS: [
Identify.cluster_id,
OnOff.cluster_id,
LevelControl.cluster_id,
Color.cluster_id,
],
},
2: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [Identify.cluster_id, MultistateInputCluster],
OUTPUT_CLUSTERS: [Identify.cluster_id, OnOff.cluster_id],
},
3: {},
4: {},
5: {},
6: {},
},
}
device_automation_triggers = {
(DOUBLE_PRESS, BUTTON_1): {
COMMAND: COMMAND_STEP,
ENDPOINT_ID: 1,
ARGS: [1, 85, 7],
},
(SHORT_PRESS, BUTTON_1): {COMMAND: COMMAND_OFF, ENDPOINT_ID: 1},
(LONG_PRESS, BUTTON_1): {
COMMAND: COMMAND_STEP_COLOR_TEMP,
ENDPOINT_ID: 1,
ARGS: [1, 69, 7, 0, 0],
},
(DOUBLE_PRESS, BUTTON_2): {
COMMAND: COMMAND_STEP,
ENDPOINT_ID: 1,
ARGS: [0, 85, 7],
},
(SHORT_PRESS, BUTTON_2): {COMMAND: COMMAND_ON, ENDPOINT_ID: 1},
(LONG_PRESS, BUTTON_2): {
COMMAND: COMMAND_STEP_COLOR_TEMP,
ENDPOINT_ID: 1,
ARGS: [3, 69, 7, 0, 0],
},
(ALT_SHORT_PRESS, BUTTON_1): {COMMAND: COMMAND_1_SINGLE},
(ALT_DOUBLE_PRESS, BUTTON_1): {COMMAND: COMMAND_1_DOUBLE},
(TRIPLE_PRESS, BUTTON_1): {COMMAND: COMMAND_1_TRIPLE},
(ALT_LONG_PRESS, BUTTON_1): {COMMAND: COMMAND_1_HOLD},
(LONG_RELEASE, BUTTON_1): {COMMAND: COMMAND_1_RELEASE},
(ALT_SHORT_PRESS, BUTTON_2): {COMMAND: COMMAND_2_SINGLE},
(ALT_DOUBLE_PRESS, BUTTON_2): {COMMAND: COMMAND_2_DOUBLE},
(TRIPLE_PRESS, BUTTON_2): {COMMAND: COMMAND_2_TRIPLE},
(ALT_LONG_PRESS, BUTTON_2): {COMMAND: COMMAND_2_HOLD},
(LONG_RELEASE, BUTTON_2): {COMMAND: COMMAND_2_RELEASE},
}
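    # The (SHORT_PRESS, ...)/(LONG_PRESS, ...) triggers above match standard ZCL
    # commands sent from endpoint 1, while the ALT_*/TRIPLE_PRESS/LONG_RELEASE
    # triggers match the "<endpoint>_<press type>" actions emitted by
    # MultistateInputCluster.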
class RemoteB286OPCN01Alt(XiaomiCustomDevice):
"""Aqara Opple 2 button remote device (after alternate mode is enabled)."""
signature = {
# <SimpleDescriptor endpoint=1 profile=260 device_type=261
# device_version=1
# input_clusters=[0, 3, 1]
# output_clusters=[3, 6, 8, 768]>
MODELS_INFO: [(LUMI, "lumi.remote.b286opcn01")],
ENDPOINTS: {
1: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.COLOR_DIMMER_SWITCH,
INPUT_CLUSTERS: [
Basic.cluster_id,
Identify.cluster_id,
PowerConfigurationCluster.cluster_id,
],
OUTPUT_CLUSTERS: [
Identify.cluster_id,
OnOff.cluster_id,
LevelControl.cluster_id,
Color.cluster_id,
],
},
2: {},
3: {},
4: {},
5: {},
6: {},
},
}
replacement = {
NODE_DESCRIPTOR: NodeDescriptor(
0x02, 0x40, 0x80, 0x115F, 0x7F, 0x0064, 0x2C00, 0x0064, 0x00
),
ENDPOINTS: {
1: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.COLOR_DIMMER_SWITCH,
INPUT_CLUSTERS: [
BasicCluster,
Identify.cluster_id,
PowerConfigurationCluster,
OppleCluster,
MultistateInputCluster,
],
OUTPUT_CLUSTERS: [
Identify.cluster_id,
OnOff.cluster_id,
LevelControl.cluster_id,
Color.cluster_id,
],
},
2: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [Identify.cluster_id, MultistateInputCluster],
OUTPUT_CLUSTERS: [Identify.cluster_id, OnOff.cluster_id],
},
3: {},
4: {},
5: {},
6: {},
},
}
device_automation_triggers = RemoteB286OPCN01.device_automation_triggers
class RemoteB486OPCN01(XiaomiCustomDevice):
"""Aqara Opple 4 button remote device."""
signature = {
# <SimpleDescriptor endpoint=1 profile=260 device_type=261
# device_version=1
# input_clusters=[0, 3, 1]
# output_clusters=[3, 6, 8, 768]>
MODELS_INFO: [(LUMI, "lumi.remote.b486opcn01")],
ENDPOINTS: {
1: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.COLOR_DIMMER_SWITCH,
INPUT_CLUSTERS: [
Basic.cluster_id,
Identify.cluster_id,
PowerConfigurationCluster.cluster_id,
],
OUTPUT_CLUSTERS: [
Identify.cluster_id,
OnOff.cluster_id,
LevelControl.cluster_id,
Color.cluster_id,
],
},
# <SimpleDescriptor endpoint=2 profile=260 device_type=259
# device_version=1
# input_clusters=[3]
# output_clusters=[6, 3]>
2: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [Identify.cluster_id],
OUTPUT_CLUSTERS: [Identify.cluster_id, OnOff.cluster_id],
},
3: {},
4: {},
5: {},
6: {},
},
}
replacement = {
NODE_DESCRIPTOR: NodeDescriptor(
0x02, 0x40, 0x80, 0x115F, 0x7F, 0x0064, 0x2C00, 0x0064, 0x00
),
ENDPOINTS: {
1: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.COLOR_DIMMER_SWITCH,
INPUT_CLUSTERS: [
BasicCluster,
Identify.cluster_id,
PowerConfigurationCluster,
OppleCluster,
MultistateInputCluster,
],
OUTPUT_CLUSTERS: [
Identify.cluster_id,
OnOff.cluster_id,
LevelControl.cluster_id,
Color.cluster_id,
],
},
2: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [Identify.cluster_id, MultistateInputCluster],
OUTPUT_CLUSTERS: [Identify.cluster_id, OnOff.cluster_id],
},
3: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [MultistateInputCluster],
OUTPUT_CLUSTERS: [],
},
4: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [MultistateInputCluster],
OUTPUT_CLUSTERS: [],
},
5: {},
6: {},
},
}
device_automation_triggers = {
(SHORT_PRESS, BUTTON_1): {COMMAND: COMMAND_OFF, ENDPOINT_ID: 1},
(SHORT_PRESS, BUTTON_2): {COMMAND: COMMAND_ON, ENDPOINT_ID: 1},
(SHORT_PRESS, BUTTON_3): {
COMMAND: COMMAND_STEP,
ENDPOINT_ID: 1,
ARGS: [1, 85, 7],
},
(DOUBLE_PRESS, BUTTON_3): {
COMMAND: COMMAND_STEP_COLOR_TEMP,
ENDPOINT_ID: 1,
ARGS: [1, 69, 7, 0, 0],
},
(SHORT_PRESS, BUTTON_4): {
COMMAND: COMMAND_STEP,
ENDPOINT_ID: 1,
ARGS: [0, 85, 7],
},
(DOUBLE_PRESS, BUTTON_4): {
COMMAND: COMMAND_STEP_COLOR_TEMP,
ENDPOINT_ID: 1,
ARGS: [3, 69, 7, 0, 0],
},
(ALT_SHORT_PRESS, BUTTON_1): {COMMAND: COMMAND_1_SINGLE},
(ALT_DOUBLE_PRESS, BUTTON_1): {COMMAND: COMMAND_1_DOUBLE},
(TRIPLE_PRESS, BUTTON_1): {COMMAND: COMMAND_1_TRIPLE},
(ALT_LONG_PRESS, BUTTON_1): {COMMAND: COMMAND_1_HOLD},
(LONG_RELEASE, BUTTON_1): {COMMAND: COMMAND_1_RELEASE},
(ALT_SHORT_PRESS, BUTTON_2): {COMMAND: COMMAND_2_SINGLE},
(ALT_DOUBLE_PRESS, BUTTON_2): {COMMAND: COMMAND_2_DOUBLE},
(TRIPLE_PRESS, BUTTON_2): {COMMAND: COMMAND_2_TRIPLE},
(ALT_LONG_PRESS, BUTTON_2): {COMMAND: COMMAND_2_HOLD},
(LONG_RELEASE, BUTTON_2): {COMMAND: COMMAND_2_RELEASE},
(ALT_SHORT_PRESS, BUTTON_3): {COMMAND: COMMAND_3_SINGLE},
(ALT_DOUBLE_PRESS, BUTTON_3): {COMMAND: COMMAND_3_DOUBLE},
(TRIPLE_PRESS, BUTTON_3): {COMMAND: COMMAND_3_TRIPLE},
(ALT_LONG_PRESS, BUTTON_3): {COMMAND: COMMAND_3_HOLD},
(LONG_RELEASE, BUTTON_3): {COMMAND: COMMAND_3_RELEASE},
(ALT_SHORT_PRESS, BUTTON_4): {COMMAND: COMMAND_4_SINGLE},
(ALT_DOUBLE_PRESS, BUTTON_4): {COMMAND: COMMAND_4_DOUBLE},
(TRIPLE_PRESS, BUTTON_4): {COMMAND: COMMAND_4_TRIPLE},
(ALT_LONG_PRESS, BUTTON_4): {COMMAND: COMMAND_4_HOLD},
(LONG_RELEASE, BUTTON_4): {COMMAND: COMMAND_4_RELEASE},
}
class RemoteB686OPCN01(XiaomiCustomDevice):
"""Aqara Opple 6 button remote device."""
signature = {
# <SimpleDescriptor endpoint=1 profile=260 device_type=261
# device_version=1
# input_clusters=[0, 3, 1]
# output_clusters=[3, 6, 8, 768]>
MODELS_INFO: [(LUMI, "lumi.remote.b686opcn01")],
ENDPOINTS: {
1: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.COLOR_DIMMER_SWITCH,
INPUT_CLUSTERS: [
Basic.cluster_id,
Identify.cluster_id,
PowerConfigurationCluster.cluster_id,
],
OUTPUT_CLUSTERS: [
Identify.cluster_id,
OnOff.cluster_id,
LevelControl.cluster_id,
Color.cluster_id,
],
},
# <SimpleDescriptor endpoint=2 profile=260 device_type=259
# device_version=1
# input_clusters=[3]
# output_clusters=[6, 3]>
2: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [Identify.cluster_id],
OUTPUT_CLUSTERS: [Identify.cluster_id, OnOff.cluster_id],
},
3: {},
4: {},
5: {},
6: {},
},
}
replacement = {
NODE_DESCRIPTOR: NodeDescriptor(
0x02, 0x40, 0x80, 0x115F, 0x7F, 0x0064, 0x2C00, 0x0064, 0x00
),
ENDPOINTS: {
1: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.COLOR_DIMMER_SWITCH,
INPUT_CLUSTERS: [
BasicCluster,
Identify.cluster_id,
PowerConfigurationCluster,
OppleCluster,
MultistateInputCluster,
],
OUTPUT_CLUSTERS: [
Identify.cluster_id,
OnOff.cluster_id,
LevelControl.cluster_id,
Color.cluster_id,
],
},
2: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [Identify.cluster_id, MultistateInputCluster],
OUTPUT_CLUSTERS: [Identify.cluster_id, OnOff.cluster_id],
},
3: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [MultistateInputCluster],
OUTPUT_CLUSTERS: [],
},
4: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [MultistateInputCluster],
OUTPUT_CLUSTERS: [],
},
5: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [MultistateInputCluster],
OUTPUT_CLUSTERS: [],
},
6: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [MultistateInputCluster],
OUTPUT_CLUSTERS: [],
},
},
}
device_automation_triggers = {
(SHORT_PRESS, BUTTON_1): {COMMAND: COMMAND_OFF, ENDPOINT_ID: 1},
(SHORT_PRESS, BUTTON_2): {COMMAND: COMMAND_ON, ENDPOINT_ID: 1},
(SHORT_PRESS, BUTTON_3): {
COMMAND: COMMAND_STEP,
ENDPOINT_ID: 1,
ARGS: [1, 85, 7],
},
(LONG_PRESS, BUTTON_3): {COMMAND: COMMAND_MOVE, ENDPOINT_ID: 1, ARGS: [1, 15]},
(SHORT_PRESS, BUTTON_4): {
COMMAND: COMMAND_STEP,
ENDPOINT_ID: 1,
ARGS: [0, 85, 7],
},
(LONG_PRESS, BUTTON_4): {COMMAND: COMMAND_MOVE, ENDPOINT_ID: 1, ARGS: [0, 15]},
(SHORT_PRESS, BUTTON_5): {
COMMAND: COMMAND_STEP_COLOR_TEMP,
ENDPOINT_ID: 1,
ARGS: [1, 69, 7, 0, 0],
},
(LONG_PRESS, BUTTON_5): {
COMMAND: COMMAND_MOVE_COLOR_TEMP,
ENDPOINT_ID: 1,
ARGS: [1, 15, 0, 0],
},
(SHORT_PRESS, BUTTON_6): {
COMMAND: COMMAND_STEP_COLOR_TEMP,
ENDPOINT_ID: 1,
ARGS: [3, 69, 7, 0, 0],
},
(LONG_PRESS, BUTTON_6): {
COMMAND: COMMAND_MOVE_COLOR_TEMP,
ENDPOINT_ID: 1,
ARGS: [3, 15, 0, 0],
},
(ALT_SHORT_PRESS, BUTTON_1): {COMMAND: COMMAND_1_SINGLE},
(ALT_DOUBLE_PRESS, BUTTON_1): {COMMAND: COMMAND_1_DOUBLE},
(TRIPLE_PRESS, BUTTON_1): {COMMAND: COMMAND_1_TRIPLE},
(ALT_LONG_PRESS, BUTTON_1): {COMMAND: COMMAND_1_HOLD},
(LONG_RELEASE, BUTTON_1): {COMMAND: COMMAND_1_RELEASE},
(ALT_SHORT_PRESS, BUTTON_2): {COMMAND: COMMAND_2_SINGLE},
(ALT_DOUBLE_PRESS, BUTTON_2): {COMMAND: COMMAND_2_DOUBLE},
(TRIPLE_PRESS, BUTTON_2): {COMMAND: COMMAND_2_TRIPLE},
(ALT_LONG_PRESS, BUTTON_2): {COMMAND: COMMAND_2_HOLD},
(LONG_RELEASE, BUTTON_2): {COMMAND: COMMAND_2_RELEASE},
(ALT_SHORT_PRESS, BUTTON_3): {COMMAND: COMMAND_3_SINGLE},
(ALT_DOUBLE_PRESS, BUTTON_3): {COMMAND: COMMAND_3_DOUBLE},
(TRIPLE_PRESS, BUTTON_3): {COMMAND: COMMAND_3_TRIPLE},
(ALT_LONG_PRESS, BUTTON_3): {COMMAND: COMMAND_3_HOLD},
(LONG_RELEASE, BUTTON_3): {COMMAND: COMMAND_3_RELEASE},
(ALT_SHORT_PRESS, BUTTON_4): {COMMAND: COMMAND_4_SINGLE},
(ALT_DOUBLE_PRESS, BUTTON_4): {COMMAND: COMMAND_4_DOUBLE},
(TRIPLE_PRESS, BUTTON_4): {COMMAND: COMMAND_4_TRIPLE},
(ALT_LONG_PRESS, BUTTON_4): {COMMAND: COMMAND_4_HOLD},
(LONG_RELEASE, BUTTON_4): {COMMAND: COMMAND_4_RELEASE},
(ALT_SHORT_PRESS, BUTTON_5): {COMMAND: COMMAND_5_SINGLE},
(ALT_DOUBLE_PRESS, BUTTON_5): {COMMAND: COMMAND_5_DOUBLE},
(TRIPLE_PRESS, BUTTON_5): {COMMAND: COMMAND_5_TRIPLE},
(ALT_LONG_PRESS, BUTTON_5): {COMMAND: COMMAND_5_HOLD},
(LONG_RELEASE, BUTTON_5): {COMMAND: COMMAND_5_RELEASE},
(ALT_SHORT_PRESS, BUTTON_6): {COMMAND: COMMAND_6_SINGLE},
(ALT_DOUBLE_PRESS, BUTTON_6): {COMMAND: COMMAND_6_DOUBLE},
(TRIPLE_PRESS, BUTTON_6): {COMMAND: COMMAND_6_TRIPLE},
(ALT_LONG_PRESS, BUTTON_6): {COMMAND: COMMAND_6_HOLD},
(LONG_RELEASE, BUTTON_6): {COMMAND: COMMAND_6_RELEASE},
}
class RemoteB286OPCN01V2(XiaomiCustomDevice):
"""Aqara Opple 2 button remote device."""
signature = {
# <SimpleDescriptor endpoint=1 profile=260 device_type=261
# device_version=1
# input_clusters=[0, 3, 1]
# output_clusters=[3, 6, 8, 768]>
MODELS_INFO: [(LUMI, "lumi.remote.b286opcn01")],
ENDPOINTS: {
1: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.COLOR_DIMMER_SWITCH,
INPUT_CLUSTERS: [
Basic.cluster_id,
Identify.cluster_id,
PowerConfigurationCluster.cluster_id,
],
OUTPUT_CLUSTERS: [
Identify.cluster_id,
OnOff.cluster_id,
LevelControl.cluster_id,
Color.cluster_id,
],
},
# <SimpleDescriptor endpoint=2 profile=260 device_type=259
# device_version=1
# input_clusters=[3]
# output_clusters=[6, 3]>
2: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [Identify.cluster_id],
OUTPUT_CLUSTERS: [Identify.cluster_id, OnOff.cluster_id],
},
},
}
replacement = {
NODE_DESCRIPTOR: NodeDescriptor(
0x02, 0x40, 0x80, 0x115F, 0x7F, 0x0064, 0x2C00, 0x0064, 0x00
),
ENDPOINTS: {
1: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.COLOR_DIMMER_SWITCH,
INPUT_CLUSTERS: [
BasicCluster,
Identify.cluster_id,
PowerConfigurationCluster,
OppleCluster,
MultistateInputCluster,
],
OUTPUT_CLUSTERS: [
Identify.cluster_id,
OnOff.cluster_id,
LevelControl.cluster_id,
Color.cluster_id,
],
}
},
}
device_automation_triggers = RemoteB286OPCN01.device_automation_triggers
class RemoteB486OPCN01V2(XiaomiCustomDevice):
"""Aqara Opple 4 button remote device."""
signature = {
# <SimpleDescriptor endpoint=1 profile=260 device_type=261
# device_version=1
# input_clusters=[0, 3, 1]
# output_clusters=[3, 6, 8, 768]>
MODELS_INFO: [(LUMI, "lumi.remote.b486opcn01")],
ENDPOINTS: {
1: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.COLOR_DIMMER_SWITCH,
INPUT_CLUSTERS: [
Basic.cluster_id,
Identify.cluster_id,
PowerConfigurationCluster.cluster_id,
],
OUTPUT_CLUSTERS: [
Identify.cluster_id,
OnOff.cluster_id,
LevelControl.cluster_id,
Color.cluster_id,
],
}
},
}
replacement = {
NODE_DESCRIPTOR: NodeDescriptor(
0x02, 0x40, 0x80, 0x115F, 0x7F, 0x0064, 0x2C00, 0x0064, 0x00
),
ENDPOINTS: {
1: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.COLOR_DIMMER_SWITCH,
INPUT_CLUSTERS: [
BasicCluster,
Identify.cluster_id,
PowerConfigurationCluster,
OppleCluster,
MultistateInputCluster,
],
OUTPUT_CLUSTERS: [
Identify.cluster_id,
OnOff.cluster_id,
LevelControl.cluster_id,
Color.cluster_id,
],
},
2: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [Identify.cluster_id, MultistateInputCluster],
OUTPUT_CLUSTERS: [Identify.cluster_id, OnOff.cluster_id],
},
3: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [MultistateInputCluster],
OUTPUT_CLUSTERS: [],
},
4: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [MultistateInputCluster],
OUTPUT_CLUSTERS: [],
},
},
}
device_automation_triggers = RemoteB486OPCN01.device_automation_triggers
class RemoteB686OPCN01V2(XiaomiCustomDevice):
"""Aqara Opple 6 button remote device."""
signature = {
# <SimpleDescriptor endpoint=1 profile=260 device_type=261
# device_version=1
# input_clusters=[0, 3, 1]
# output_clusters=[3, 6, 8, 768]>
MODELS_INFO: [(LUMI, "lumi.remote.b686opcn01")],
ENDPOINTS: {
1: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.COLOR_DIMMER_SWITCH,
INPUT_CLUSTERS: [
Basic.cluster_id,
Identify.cluster_id,
PowerConfigurationCluster.cluster_id,
],
OUTPUT_CLUSTERS: [
Identify.cluster_id,
OnOff.cluster_id,
LevelControl.cluster_id,
Color.cluster_id,
],
}
},
}
replacement = {
NODE_DESCRIPTOR: NodeDescriptor(
0x02, 0x40, 0x80, 0x115F, 0x7F, 0x0064, 0x2C00, 0x0064, 0x00
),
ENDPOINTS: {
1: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.COLOR_DIMMER_SWITCH,
INPUT_CLUSTERS: [
BasicCluster,
Identify.cluster_id,
PowerConfigurationCluster,
OppleCluster,
MultistateInputCluster,
],
OUTPUT_CLUSTERS: [
Identify.cluster_id,
OnOff.cluster_id,
LevelControl.cluster_id,
Color.cluster_id,
],
},
2: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [Identify.cluster_id, MultistateInputCluster],
OUTPUT_CLUSTERS: [Identify.cluster_id, OnOff.cluster_id],
},
3: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [MultistateInputCluster],
OUTPUT_CLUSTERS: [],
},
4: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [MultistateInputCluster],
OUTPUT_CLUSTERS: [],
},
5: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [MultistateInputCluster],
OUTPUT_CLUSTERS: [],
},
6: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [MultistateInputCluster],
OUTPUT_CLUSTERS: [],
},
},
}
device_automation_triggers = RemoteB686OPCN01.device_automation_triggers
class RemoteB686OPCN01V3(XiaomiCustomDevice):
"""Aqara Opple 6 button remote device."""
signature = {
# <SimpleDescriptor endpoint=1 profile=260 device_type=261
# device_version=1
# input_clusters=[0, 3, 1]
# output_clusters=[3, 6, 8, 768]>
MODELS_INFO: [(LUMI, "lumi.remote.b686opcn01")],
ENDPOINTS: {
1: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.COLOR_DIMMER_SWITCH,
INPUT_CLUSTERS: [
Basic.cluster_id,
Identify.cluster_id,
PowerConfigurationCluster.cluster_id,
],
OUTPUT_CLUSTERS: [
Identify.cluster_id,
OnOff.cluster_id,
LevelControl.cluster_id,
Color.cluster_id,
],
},
2: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [Identify.cluster_id],
OUTPUT_CLUSTERS: [OnOff.cluster_id, Identify.cluster_id],
},
3: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [
MultistateInputCluster.cluster_id,
Identify.cluster_id,
],
OUTPUT_CLUSTERS: [OnOff.cluster_id],
},
4: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [
MultistateInputCluster.cluster_id,
Identify.cluster_id,
],
OUTPUT_CLUSTERS: [OnOff.cluster_id],
},
5: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [
MultistateInputCluster.cluster_id,
Identify.cluster_id,
],
OUTPUT_CLUSTERS: [OnOff.cluster_id],
},
6: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [
MultistateInputCluster.cluster_id,
Identify.cluster_id,
],
OUTPUT_CLUSTERS: [OnOff.cluster_id],
},
},
}
replacement = {
NODE_DESCRIPTOR: NodeDescriptor(
0x02, 0x40, 0x80, 0x115F, 0x7F, 0x0064, 0x2C00, 0x0064, 0x00
),
ENDPOINTS: {
1: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.COLOR_DIMMER_SWITCH,
INPUT_CLUSTERS: [
BasicCluster,
Identify.cluster_id,
PowerConfigurationCluster,
OppleCluster,
MultistateInputCluster,
],
OUTPUT_CLUSTERS: [
Identify.cluster_id,
OnOff.cluster_id,
LevelControl.cluster_id,
Color.cluster_id,
],
},
2: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [Identify.cluster_id],
OUTPUT_CLUSTERS: [OnOff.cluster_id, Identify.cluster_id],
},
3: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [MultistateInputCluster, Identify.cluster_id],
OUTPUT_CLUSTERS: [OnOff.cluster_id],
},
4: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [MultistateInputCluster, Identify.cluster_id],
OUTPUT_CLUSTERS: [OnOff.cluster_id],
},
5: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [MultistateInputCluster, Identify.cluster_id],
OUTPUT_CLUSTERS: [OnOff.cluster_id],
},
6: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [MultistateInputCluster, Identify.cluster_id],
OUTPUT_CLUSTERS: [OnOff.cluster_id],
},
},
}
device_automation_triggers = RemoteB686OPCN01.device_automation_triggers
class RemoteB686OPCN01V4(XiaomiCustomDevice):
"""Aqara Opple 6 button remote device."""
signature = {
# <SimpleDescriptor endpoint=1 profile=260 device_type=261
# device_version=1
# input_clusters=[0, 3, 1]
# output_clusters=[3, 6, 8, 768]>
MODELS_INFO: [(LUMI, "lumi.remote.b686opcn01")],
ENDPOINTS: {
1: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.COLOR_DIMMER_SWITCH,
INPUT_CLUSTERS: [
Basic.cluster_id,
Identify.cluster_id,
PowerConfigurationCluster.cluster_id,
],
OUTPUT_CLUSTERS: [
Identify.cluster_id,
OnOff.cluster_id,
LevelControl.cluster_id,
Color.cluster_id,
],
},
2: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [Identify.cluster_id],
OUTPUT_CLUSTERS: [OnOff.cluster_id, Identify.cluster_id],
},
3: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [
MultistateInputCluster.cluster_id,
Identify.cluster_id,
],
OUTPUT_CLUSTERS: [OnOff.cluster_id],
},
4: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [
MultistateInputCluster.cluster_id,
Identify.cluster_id,
],
OUTPUT_CLUSTERS: [OnOff.cluster_id],
},
5: {},
6: {},
},
}
replacement = {
NODE_DESCRIPTOR: NodeDescriptor(
0x02, 0x40, 0x80, 0x115F, 0x7F, 0x0064, 0x2C00, 0x0064, 0x00
),
ENDPOINTS: {
1: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.COLOR_DIMMER_SWITCH,
INPUT_CLUSTERS: [
BasicCluster,
Identify.cluster_id,
PowerConfigurationCluster,
OppleCluster,
MultistateInputCluster,
],
OUTPUT_CLUSTERS: [
Identify.cluster_id,
OnOff.cluster_id,
LevelControl.cluster_id,
Color.cluster_id,
],
},
2: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [Identify.cluster_id],
OUTPUT_CLUSTERS: [OnOff.cluster_id, Identify.cluster_id],
},
3: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [MultistateInputCluster, Identify.cluster_id],
OUTPUT_CLUSTERS: [OnOff.cluster_id],
},
4: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [MultistateInputCluster, Identify.cluster_id],
OUTPUT_CLUSTERS: [OnOff.cluster_id],
},
5: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [MultistateInputCluster, Identify.cluster_id],
OUTPUT_CLUSTERS: [OnOff.cluster_id],
},
6: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [MultistateInputCluster, Identify.cluster_id],
OUTPUT_CLUSTERS: [OnOff.cluster_id],
},
},
}
device_automation_triggers = RemoteB686OPCN01.device_automation_triggers
class RemoteB686OPCN01V5(XiaomiCustomDevice):
"""Aqara Opple 6 button remote device."""
signature = {
# <SimpleDescriptor endpoint=1 profile=260 device_type=261
# device_version=1
# input_clusters=[0, 3, 1]
# output_clusters=[3, 6, 8, 768]>
MODELS_INFO: [(LUMI, "lumi.remote.b686opcn01")],
ENDPOINTS: {
1: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.COLOR_DIMMER_SWITCH,
INPUT_CLUSTERS: [
Basic.cluster_id,
Identify.cluster_id,
PowerConfigurationCluster.cluster_id,
],
OUTPUT_CLUSTERS: [
Identify.cluster_id,
OnOff.cluster_id,
LevelControl.cluster_id,
Color.cluster_id,
],
},
2: {},
3: {},
4: {},
5: {},
6: {},
},
}
replacement = {
NODE_DESCRIPTOR: NodeDescriptor(
0x02, 0x40, 0x80, 0x115F, 0x7F, 0x0064, 0x2C00, 0x0064, 0x00
),
ENDPOINTS: {
1: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.COLOR_DIMMER_SWITCH,
INPUT_CLUSTERS: [
BasicCluster,
Identify.cluster_id,
PowerConfigurationCluster,
OppleCluster,
MultistateInputCluster,
],
OUTPUT_CLUSTERS: [
Identify.cluster_id,
OnOff.cluster_id,
LevelControl.cluster_id,
Color.cluster_id,
],
},
2: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [Identify.cluster_id],
OUTPUT_CLUSTERS: [OnOff.cluster_id, Identify.cluster_id],
},
3: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [MultistateInputCluster, Identify.cluster_id],
OUTPUT_CLUSTERS: [OnOff.cluster_id],
},
4: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [MultistateInputCluster, Identify.cluster_id],
OUTPUT_CLUSTERS: [OnOff.cluster_id],
},
5: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [MultistateInputCluster, Identify.cluster_id],
OUTPUT_CLUSTERS: [OnOff.cluster_id],
},
6: {
PROFILE_ID: zha.PROFILE_ID,
DEVICE_TYPE: zha.DeviceType.ON_OFF_LIGHT_SWITCH,
INPUT_CLUSTERS: [MultistateInputCluster, Identify.cluster_id],
OUTPUT_CLUSTERS: [OnOff.cluster_id],
},
},
}
device_automation_triggers = RemoteB686OPCN01.device_automation_triggers
stats: avg_line_length 35.150376 | max_line_length 87 | alphanum_fraction 0.528913
---
hexsha: ad052808a0ac0e54d6820b3d6e38f64f1483fe4a | size: 44,565 | ext: py | lang: Python
path: python/ccxt/async_support/digifinex.py | repo: orikalinski/ccxt_new | head: 318caa4f8db7ffb719edab2c060a0989d2a9cd28 | licenses: ["MIT"]
stars: 1 (2019-09-26T09:16:37.000Z – 2019-09-26T09:16:37.000Z) | issues: 1 (2020-09-03T10:11:29.000Z – 2020-09-03T10:11:29.000Z) | forks: 3 (2019-09-26T09:17:26.000Z – 2021-02-01T11:51:49.000Z)
content:
# -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.async_support.base.exchange import Exchange
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import PermissionDenied
from ccxt.base.errors import AccountSuspended
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import BadRequest
from ccxt.base.errors import BadResponse
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import NetworkError
from ccxt.base.errors import DDoSProtection
from ccxt.base.errors import InvalidNonce
class digifinex(Exchange):
def describe(self):
return self.deep_extend(super(digifinex, self).describe(), {
'id': 'digifinex',
'name': 'DigiFinex',
'countries': ['SG'],
'version': 'v3',
'rateLimit': 900, # 300 for posts
'has': {
'cancelOrder': True,
'cancelOrders': True,
'createOrder': True,
'fetchBalance': True,
'fetchLedger': True,
'fetchMarkets': True,
'fetchMyTrades': True,
'fetchOHLCV': True,
'fetchOpenOrders': True,
'fetchOrder': True,
'fetchOrderBook': True,
'fetchOrders': True,
'fetchStatus': True,
'fetchTicker': True,
'fetchTickers': True,
'fetchTime': True,
'fetchTrades': True,
},
'timeframes': {
'1m': '1',
'5m': '5',
'15m': '15',
'30m': '30',
'1h': '60',
'4h': '240',
'12h': '720',
'1d': '1D',
'1w': '1W',
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/51840849/87443315-01283a00-c5fe-11ea-8628-c2a0feaf07ac.jpg',
'api': 'https://openapi.digifinex.com',
'www': 'https://www.digifinex.com',
'doc': [
'https://docs.digifinex.com',
],
'fees': 'https://digifinex.zendesk.com/hc/en-us/articles/360000328422-Fee-Structure-on-DigiFinex',
'referral': 'https://www.digifinex.com/en-ww/from/DhOzBg/3798****5114',
},
'api': {
'v2': {
'get': [
'ticker',
],
},
'public': {
'get': [
'{market}/symbols',
'kline',
'margin/currencies',
'margin/symbols',
'markets', # undocumented
'order_book',
'ping',
'spot/symbols',
'time',
'trades',
'trades/symbols',
],
},
'private': {
'get': [
'{market}/financelog',
'{market}/mytrades',
'{market}/order',
'{market}/order/current',
'{market}/order/history',
'margin/assets',
'margin/financelog',
'margin/mytrades',
'margin/order',
'margin/order/current',
'margin/order/history',
'margin/positions',
'otc/financelog',
'spot/assets',
'spot/financelog',
'spot/mytrades',
'spot/order',
'spot/order/current',
'spot/order/history',
],
'post': [
'{market}/order/cancel',
'{market}/order/new',
'margin/order/cancel',
'margin/order/new',
'margin/position/close',
'spot/order/cancel',
'spot/order/new',
'transfer',
],
},
},
'fees': {
'trading': {
'tierBased': False,
'percentage': True,
'maker': 0.002,
'taker': 0.002,
},
},
'exceptions': {
'exact': {
                    '10001': [BadRequest, "Wrong request method, please check it's a GET or POST request"],
'10002': [AuthenticationError, 'Invalid ApiKey'],
'10003': [AuthenticationError, "Sign doesn't match"],
'10004': [BadRequest, 'Illegal request parameters'],
'10005': [DDoSProtection, 'Request frequency exceeds the limit'],
'10006': [PermissionDenied, 'Unauthorized to execute self request'],
'10007': [PermissionDenied, 'IP address Unauthorized'],
'10008': [InvalidNonce, 'Timestamp for self request is invalid, timestamp must within 1 minute'],
'10009': [NetworkError, 'Unexist endpoint, please check endpoint URL'],
'10011': [AccountSuspended, 'ApiKey expired. Please go to client side to re-create an ApiKey'],
'20001': [PermissionDenied, 'Trade is not open for self trading pair'],
'20002': [PermissionDenied, 'Trade of self trading pair is suspended'],
'20003': [InvalidOrder, 'Invalid price or amount'],
'20007': [InvalidOrder, 'Price precision error'],
'20008': [InvalidOrder, 'Amount precision error'],
'20009': [InvalidOrder, 'Amount is less than the minimum requirement'],
'20010': [InvalidOrder, 'Cash Amount is less than the minimum requirement'],
'20011': [InsufficientFunds, 'Insufficient balance'],
'20012': [BadRequest, 'Invalid trade type, valid value: buy/sell)'],
'20013': [InvalidOrder, 'No order info found'],
'20014': [BadRequest, 'Invalid date, Valid format: 2018-07-25)'],
'20015': [BadRequest, 'Date exceeds the limit'],
'20018': [PermissionDenied, 'Your trading rights have been banned by the system'],
'20019': [BadRequest, 'Wrong trading pair symbol. Correct format:"usdt_btc". Quote asset is in the front'],
'20020': [DDoSProtection, "You have violated the API operation trading rules and temporarily forbid trading. At present, we have certain restrictions on the user's transaction rate and withdrawal rate."],
'50000': [ExchangeError, 'Exception error'],
},
'broad': {
},
},
'options': {
'defaultType': 'spot',
'types': ['spot', 'margin', 'otc'],
},
'commonCurrencies': {
'BHT': 'Black House Test',
},
})
async def fetch_markets(self, params={}):
options = self.safe_value(self.options, 'fetchMarkets', {})
method = self.safe_string(options, 'method', 'fetch_markets_v2')
return await getattr(self, method)(params)
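    # For example, self.options['fetchMarkets'] = {'method': 'fetch_markets_v1'}
    # routes market discovery through the legacy v1 endpoint implemented below.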
async def fetch_markets_v2(self, params={}):
response = await self.publicGetTradesSymbols(params)
#
# {
# "symbol_list":[
# {
# "order_types":["LIMIT","MARKET"],
# "quote_asset":"USDT",
# "minimum_value":2,
# "amount_precision":4,
# "status":"TRADING",
# "minimum_amount":0.0001,
# "symbol":"BTC_USDT",
# "is_allow":1,
# "zone":"MAIN",
# "base_asset":"BTC",
# "price_precision":2
# }
# ],
# "code":0
# }
#
markets = self.safe_value(response, 'symbol_list', [])
result = []
for i in range(0, len(markets)):
market = markets[i]
id = self.safe_string(market, 'symbol')
baseId = self.safe_string(market, 'base_asset')
quoteId = self.safe_string(market, 'quote_asset')
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
symbol = base + '/' + quote
precision = {
'amount': self.safe_integer(market, 'amount_precision'),
'price': self.safe_integer(market, 'price_precision'),
}
limits = {
'amount': {
'min': self.safe_float(market, 'minimum_amount'),
'max': None,
},
'price': {
'min': None,
'max': None,
},
'cost': {
'min': self.safe_float(market, 'minimum_value'),
'max': None,
},
}
#
# The status is documented in the exchange API docs as follows:
# TRADING, HALT(delisted), BREAK(trading paused)
# https://docs.digifinex.vip/en-ww/v3/#/public/spot/symbols
# However, all spot markets actually have status == 'HALT'
# despite that they appear to be active on the exchange website.
# Apparently, we can't trust self status.
# status = self.safe_string(market, 'status')
# active = (status == 'TRADING')
#
isAllowed = self.safe_value(market, 'is_allow', 1)
            active = bool(isAllowed)
type = 'spot'
spot = (type == 'spot')
margin = (type == 'margin')
result.append({
'id': id,
'symbol': symbol,
'base': base,
'quote': quote,
'baseId': baseId,
'quoteId': quoteId,
'active': active,
'type': type,
'spot': spot,
'margin': margin,
'precision': precision,
'limits': limits,
'info': market,
})
return result
async def fetch_markets_v1(self, params={}):
response = await self.publicGetMarkets(params)
#
# {
# "data": [
# {
# "volume_precision":4,
# "price_precision":2,
# "market":"btc_usdt",
# "min_amount":2,
# "min_volume":0.0001
# },
# ],
# "date":1564507456,
# "code":0
# }
#
markets = self.safe_value(response, 'data', [])
result = []
for i in range(0, len(markets)):
market = markets[i]
id = self.safe_string(market, 'market')
baseId, quoteId = id.split('_')
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
symbol = base + '/' + quote
precision = {
'amount': self.safe_integer(market, 'volume_precision'),
'price': self.safe_integer(market, 'price_precision'),
}
limits = {
'amount': {
'min': self.safe_float(market, 'min_volume'),
'max': None,
},
'price': {
'min': None,
'max': None,
},
'cost': {
'min': self.safe_float(market, 'min_amount'),
'max': None,
},
}
active = None
result.append({
'id': id,
'symbol': symbol,
'base': base,
'quote': quote,
'baseId': baseId,
'quoteId': quoteId,
'active': active,
'precision': precision,
'limits': limits,
'info': market,
})
return result
async def fetch_balance(self, params={}):
defaultType = self.safe_string(self.options, 'defaultType', 'spot')
type = self.safe_string(params, 'type', defaultType)
params = self.omit(params, 'type')
method = 'privateGet' + self.capitalize(type) + 'Assets'
response = await getattr(self, method)(params)
#
# {
# "code": 0,
# "list": [
# {
# "currency": "BTC",
# "free": 4723846.89208129,
# "total": 0
# }
# ]
# }
balances = self.safe_value(response, 'list', [])
result = {'info': response}
for i in range(0, len(balances)):
balance = balances[i]
currencyId = self.safe_string(balance, 'currency')
code = self.safe_currency_code(currencyId)
account = self.account()
account['used'] = self.safe_float(balance, 'frozen')
account['free'] = self.safe_float(balance, 'free')
account['total'] = self.safe_float(balance, 'total')
result[code] = account
return self.parse_balance(result)
async def fetch_order_book(self, symbol, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
if limit is not None:
request['limit'] = limit # default 10, max 150
response = await self.publicGetOrderBook(self.extend(request, params))
#
# {
# "bids": [
# [9605.77,0.0016],
# [9605.46,0.0003],
# [9602.04,0.0127],
# ],
# "asks": [
# [9627.22,0.025803],
# [9627.12,0.168543],
# [9626.52,0.0011529],
# ],
# "date":1564509499,
# "code":0
# }
#
timestamp = self.safe_timestamp(response, 'date')
return self.parse_order_book(response, timestamp)
async def fetch_tickers(self, symbols=None, params={}):
apiKey = self.safe_value(params, 'apiKey', self.apiKey)
if not apiKey:
            raise ArgumentsRequired(self.id + ' fetchTickers is a private v2 endpoint that requires an `exchange.apiKey` credential or an `apiKey` extra parameter')
await self.load_markets()
request = {
'apiKey': apiKey,
}
response = await self.v2GetTicker(self.extend(request, params))
#
# {
# "ticker":{
# "btc_eth":{
# "last":0.021957,
# "base_vol":2249.3521732227,
# "change":-0.6,
# "vol":102443.5111,
# "sell":0.021978,
# "low":0.021791,
# "buy":0.021946,
# "high":0.022266
# }
# },
# "date":1564518452,
# "code":0
# }
#
result = {}
tickers = self.safe_value(response, 'ticker', {})
date = self.safe_integer(response, 'date')
reversedMarketIds = list(tickers.keys())
for i in range(0, len(reversedMarketIds)):
reversedMarketId = reversedMarketIds[i]
ticker = self.extend({
'date': date,
}, tickers[reversedMarketId])
quoteId, baseId = reversedMarketId.split('_')
marketId = baseId + '_' + quoteId
market = None
symbol = None
if marketId in self.markets_by_id:
market = self.markets_by_id[marketId]
symbol = market['symbol']
else:
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
symbol = base + '/' + quote
result[symbol] = self.parse_ticker(ticker, market)
return result
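    # Note (added for clarity): the v2 ticker endpoint keys its results by a
    # reversed market id with the quote asset first, e.g. 'usdt_btc' for the
    # BTC/USDT market, which is why fetchTickers and fetchTicker flip the id
    # back before looking the market up.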
async def fetch_ticker(self, symbol, params={}):
apiKey = self.safe_value(params, 'apiKey', self.apiKey)
if not apiKey:
raise ArgumentsRequired(self.id + ' fetchTicker is a private v2 endpoint that requires an `exchange.apiKey` credential or an `apiKey` extra parameter')
await self.load_markets()
market = self.market(symbol)
# reversed base/quote in v2
marketId = market['quoteId'] + '_' + market['baseId']
request = {
'symbol': marketId,
'apiKey': apiKey,
}
response = await self.v2GetTicker(self.extend(request, params))
#
# {
# "ticker":{
# "btc_eth":{
# "last":0.021957,
# "base_vol":2249.3521732227,
# "change":-0.6,
# "vol":102443.5111,
# "sell":0.021978,
# "low":0.021791,
# "buy":0.021946,
# "high":0.022266
# }
# },
# "date":1564518452,
# "code":0
# }
#
date = self.safe_integer(response, 'date')
ticker = self.safe_value(response, 'ticker', {})
result = self.safe_value(ticker, marketId, {})
result = self.extend({'date': date}, result)
return self.parse_ticker(result, market)
def parse_ticker(self, ticker, market=None):
#
# fetchTicker, fetchTickers
#
# {
# "last":0.021957,
# "base_vol":2249.3521732227,
# "change":-0.6,
# "vol":102443.5111,
# "sell":0.021978,
# "low":0.021791,
# "buy":0.021946,
# "high":0.022266,
        #     "date":1564518452, # injected from fetchTicker/fetchTickers
# }
#
symbol = None
if market is not None:
symbol = market['symbol']
timestamp = self.safe_timestamp(ticker, 'date')
last = self.safe_float(ticker, 'last')
percentage = self.safe_float(ticker, 'change')
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': self.safe_float(ticker, 'high'),
'low': self.safe_float(ticker, 'low'),
'bid': self.safe_float(ticker, 'buy'),
'bidVolume': None,
'ask': self.safe_float(ticker, 'sell'),
'askVolume': None,
'vwap': None,
'open': None,
'close': last,
'last': last,
'previousClose': None,
'change': None,
'percentage': percentage,
'average': None,
'baseVolume': self.safe_float(ticker, 'vol'),
'quoteVolume': self.safe_float(ticker, 'base_vol'),
'info': ticker,
}
def parse_trade(self, trade, market=None):
#
# fetchTrades(public)
#
# {
# "date":1564520003,
# "id":1596149203,
# "amount":0.7073,
# "type":"buy",
# "price":0.02193,
# }
#
# fetchMyTrades(private)
#
# {
# "symbol": "BTC_USDT",
# "order_id": "6707cbdcda0edfaa7f4ab509e4cbf966",
# "id": 28457,
# "price": 0.1,
# "amount": 0,
# "fee": 0.096,
# "fee_currency": "USDT",
# "timestamp": 1499865549,
# "side": "buy",
# "is_maker": True
# }
#
id = self.safe_string(trade, 'id')
orderId = self.safe_string(trade, 'order_id')
timestamp = self.safe_timestamp_2(trade, 'date', 'timestamp')
side = self.safe_string_2(trade, 'type', 'side')
price = self.safe_float(trade, 'price')
amount = self.safe_float(trade, 'amount')
cost = None
if price is not None:
if amount is not None:
cost = price * amount
symbol = None
marketId = self.safe_string(trade, 'symbol')
if marketId is not None:
if marketId in self.markets_by_id:
market = self.markets_by_id[marketId]
symbol = market['symbol']
else:
baseId, quoteId = marketId.split('_')
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
symbol = base + '/' + quote
if symbol is None:
if market is not None:
symbol = market['symbol']
takerOrMaker = self.safe_value(trade, 'is_maker')
feeCost = self.safe_float(trade, 'fee')
fee = None
if feeCost is not None:
feeCurrencyId = self.safe_string(trade, 'fee_currency')
feeCurrencyCode = self.safe_currency_code(feeCurrencyId)
fee = {
'cost': feeCost,
'currency': feeCurrencyCode,
}
return {
'id': id,
'info': trade,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'type': None,
'order': orderId,
'side': side,
'price': price,
'amount': amount,
'cost': cost,
'takerOrMaker': takerOrMaker,
'fee': fee,
}
async def fetch_time(self, params={}):
response = await self.publicGetTime(params)
#
# {
# "server_time": 1589873762,
# "code": 0
# }
#
return self.safe_timestamp(response, 'server_time')
async def fetch_status(self, params={}):
await self.publicGetPing(params)
#
# {
# "msg": "pong",
# "code": 0
# }
#
self.status = self.extend(self.status, {
'status': 'ok',
'updated': self.milliseconds(),
})
return self.status
async def fetch_trades(self, symbol, since=None, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
if limit is not None:
request['limit'] = limit # default 100, max 500
response = await self.publicGetTrades(self.extend(request, params))
#
# {
# "data":[
# {
# "date":1564520003,
# "id":1596149203,
# "amount":0.7073,
# "type":"buy",
# "price":0.02193,
# },
# {
# "date":1564520002,
# "id":1596149165,
# "amount":0.3232,
# "type":"sell",
# "price":0.021927,
# },
# ],
# "code": 0,
# "date": 1564520003,
# }
#
data = self.safe_value(response, 'data', [])
return self.parse_trades(data, market, since, limit)
def parse_ohlcv(self, ohlcv, market=None):
#
# [
# 1556712900,
# 2205.899,
# 0.029967,
# 0.02997,
# 0.029871,
# 0.029927
# ]
#
return [
self.safe_timestamp(ohlcv, 0),
self.safe_float(ohlcv, 5), # open
self.safe_float(ohlcv, 3), # high
self.safe_float(ohlcv, 4), # low
self.safe_float(ohlcv, 2), # close
self.safe_float(ohlcv, 1), # volume
]
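    # Note (added for clarity): the exchange returns candles in the order
    # [timestamp, volume, close, high, low, open], so parse_ohlcv reindexes
    # them into the conventional [timestamp, open, high, low, close, volume];
    # e.g. [1556712900, 2205.899, 0.029967, 0.02997, 0.029871, 0.029927]
    # becomes [1556712900000, 0.029927, 0.02997, 0.029871, 0.029967, 2205.899].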
async def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
'period': self.timeframes[timeframe],
# 'start_time': 1564520003, # starting timestamp, 200 candles before end_time by default
# 'end_time': 1564520003, # ending timestamp, current timestamp by default
}
if since is not None:
startTime = int(since / 1000)
request['start_time'] = startTime
if limit is not None:
duration = self.parse_timeframe(timeframe)
request['end_time'] = self.sum(startTime, limit * duration)
elif limit is not None:
endTime = self.seconds()
duration = self.parse_timeframe(timeframe)
            request['start_time'] = self.sum(endTime, -limit * duration)
response = await self.publicGetKline(self.extend(request, params))
#
# {
# "code":0,
# "data":[
# [1556712900,2205.899,0.029967,0.02997,0.029871,0.029927],
# [1556713800,1912.9174,0.029992,0.030014,0.029955,0.02996],
# [1556714700,1556.4795,0.029974,0.030019,0.029969,0.02999],
# ]
# }
#
data = self.safe_value(response, 'data', [])
return self.parse_ohlcvs(data, market, timeframe, since, limit)
async def create_order(self, symbol, type, side, amount, price=None, params={}):
await self.load_markets()
market = self.market(symbol)
defaultType = self.safe_string(self.options, 'defaultType', 'spot')
orderType = self.safe_string(params, 'type', defaultType)
params = self.omit(params, 'type')
request = {
'market': orderType,
'symbol': market['id'],
'amount': self.amount_to_precision(symbol, amount),
# 'post_only': 0, # 0 by default, if set to 1 the order will be canceled if it can be executed immediately, making sure there will be no market taking
}
suffix = ''
if type == 'market':
suffix = '_market'
else:
request['price'] = self.price_to_precision(symbol, price)
request['type'] = side + suffix
response = await self.privatePostMarketOrderNew(self.extend(request, params))
#
# {
# "code": 0,
# "order_id": "198361cecdc65f9c8c9bb2fa68faec40"
# }
#
result = self.parse_order(response, market)
return self.extend(result, {
'symbol': symbol,
'side': side,
'type': type,
'amount': amount,
'price': price,
})
async def cancel_order(self, id, symbol=None, params={}):
await self.load_markets()
defaultType = self.safe_string(self.options, 'defaultType', 'spot')
orderType = self.safe_string(params, 'type', defaultType)
params = self.omit(params, 'type')
request = {
'market': orderType,
'order_id': id,
}
response = await self.privatePostMarketOrderCancel(self.extend(request, params))
#
# {
# "code": 0,
# "success": [
# "198361cecdc65f9c8c9bb2fa68faec40",
# "3fb0d98e51c18954f10d439a9cf57de0"
# ],
# "error": [
# "78a7104e3c65cc0c5a212a53e76d0205"
# ]
# }
#
canceledOrders = self.safe_value(response, 'success', [])
numCanceledOrders = len(canceledOrders)
if numCanceledOrders != 1:
raise OrderNotFound(self.id + ' cancelOrder ' + id + ' not found')
return response
async def cancel_orders(self, ids, symbol=None, params={}):
await self.load_markets()
defaultType = self.safe_string(self.options, 'defaultType', 'spot')
orderType = self.safe_string(params, 'type', defaultType)
params = self.omit(params, 'type')
request = {
'market': orderType,
'order_id': ','.join(ids),
}
response = await self.privatePostCancelOrder(self.extend(request, params))
#
# {
# "code": 0,
# "success": [
# "198361cecdc65f9c8c9bb2fa68faec40",
# "3fb0d98e51c18954f10d439a9cf57de0"
# ],
# "error": [
# "78a7104e3c65cc0c5a212a53e76d0205"
# ]
# }
#
canceledOrders = self.safe_value(response, 'success', [])
numCanceledOrders = len(canceledOrders)
if numCanceledOrders < 1:
raise OrderNotFound(self.id + ' cancelOrders error')
return response
def parse_order_status(self, status):
statuses = {
'0': 'open',
'1': 'open', # partially filled
'2': 'closed',
'3': 'canceled',
'4': 'canceled', # partially filled and canceled
}
return self.safe_string(statuses, status, status)
def parse_order(self, order, market=None):
#
# createOrder
#
# {
# "code": 0,
# "order_id": "198361cecdc65f9c8c9bb2fa68faec40"
# }
#
# fetchOrder, fetchOpenOrders, fetchOrders
#
# {
# "symbol": "BTC_USDT",
# "order_id": "dd3164b333a4afa9d5730bb87f6db8b3",
# "created_date": 1562303547,
# "finished_date": 0,
# "price": 0.1,
# "amount": 1,
# "cash_amount": 1,
# "executed_amount": 0,
# "avg_price": 0,
# "status": 1,
# "type": "buy",
# "kind": "margin"
# }
#
id = self.safe_string(order, 'order_id')
timestamp = self.safe_timestamp(order, 'created_date')
lastTradeTimestamp = self.safe_timestamp(order, 'finished_date')
side = self.safe_string(order, 'type')
type = None
if side is not None:
parts = side.split('_')
numParts = len(parts)
if numParts > 1:
side = parts[0]
type = parts[1]
else:
type = 'limit'
status = self.parse_order_status(self.safe_string(order, 'status'))
if market is None:
            uppercaseMarketId = order['symbol'].upper()
            if uppercaseMarketId in self.markets_by_id:
                market = self.markets_by_id[uppercaseMarketId]
symbol = None
marketId = self.safe_string(order, 'symbol')
if marketId is not None:
if marketId in self.markets_by_id:
market = self.markets_by_id[marketId]
symbol = market['symbol']
else:
baseId, quoteId = marketId.split('_')
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
symbol = base + '/' + quote
amount = self.safe_float(order, 'amount')
filled = self.safe_float(order, 'executed_amount')
price = self.safe_float(order, 'price')
average = self.safe_float(order, 'avg_price')
remaining = None
cost = None
if filled is not None:
if average is not None:
cost = filled * average
if amount is not None:
remaining = max(0, amount - filled)
return {
'info': order,
'id': id,
'clientOrderId': None,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'lastTradeTimestamp': lastTradeTimestamp,
'symbol': symbol,
'type': type,
'side': side,
'price': price,
'amount': amount,
'filled': filled,
'remaining': remaining,
'cost': cost,
'average': average,
'status': status,
'fee': None,
'trades': None,
}
async def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
defaultType = self.safe_string(self.options, 'defaultType', 'spot')
orderType = self.safe_string(params, 'type', defaultType)
params = self.omit(params, 'type')
await self.load_markets()
market = None
request = {
'market': orderType,
}
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
response = await self.privateGetMarketOrderCurrent(self.extend(request, params))
#
# {
# "code": 0,
# "data": [
# {
# "symbol": "BTC_USDT",
# "order_id": "dd3164b333a4afa9d5730bb87f6db8b3",
# "created_date": 1562303547,
# "finished_date": 0,
# "price": 0.1,
# "amount": 1,
# "cash_amount": 1,
# "executed_amount": 0,
# "avg_price": 0,
# "status": 1,
# "type": "buy",
# "kind": "margin"
# }
# ]
# }
#
data = self.safe_value(response, 'data', [])
return self.parse_orders(data, market, since, limit)
async def fetch_orders(self, symbol=None, since=None, limit=None, params={}):
defaultType = self.safe_string(self.options, 'defaultType', 'spot')
orderType = self.safe_string(params, 'type', defaultType)
params = self.omit(params, 'type')
await self.load_markets()
market = None
request = {
'market': orderType,
}
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
if since is not None:
request['start_time'] = int(since / 1000) # default 3 days from now, max 30 days
if limit is not None:
request['limit'] = limit # default 10, max 100
response = await self.privateGetMarketOrderHistory(self.extend(request, params))
#
# {
# "code": 0,
# "data": [
# {
# "symbol": "BTC_USDT",
# "order_id": "dd3164b333a4afa9d5730bb87f6db8b3",
# "created_date": 1562303547,
# "finished_date": 0,
# "price": 0.1,
# "amount": 1,
# "cash_amount": 1,
# "executed_amount": 0,
# "avg_price": 0,
# "status": 1,
# "type": "buy",
# "kind": "margin"
# }
# ]
# }
#
data = self.safe_value(response, 'data', [])
return self.parse_orders(data, market, since, limit)
async def fetch_order(self, id, symbol=None, params={}):
defaultType = self.safe_string(self.options, 'defaultType', 'spot')
orderType = self.safe_string(params, 'type', defaultType)
params = self.omit(params, 'type')
await self.load_markets()
market = None
if symbol is not None:
market = self.market(symbol)
request = {
'market': orderType,
'order_id': id,
}
response = await self.privateGetMarketOrder(self.extend(request, params))
#
# {
# "code": 0,
# "data": [
# {
# "symbol": "BTC_USDT",
# "order_id": "dd3164b333a4afa9d5730bb87f6db8b3",
# "created_date": 1562303547,
# "finished_date": 0,
# "price": 0.1,
# "amount": 1,
# "cash_amount": 1,
# "executed_amount": 0,
# "avg_price": 0,
# "status": 1,
# "type": "buy",
# "kind": "margin"
# }
# ]
# }
#
data = self.safe_value(response, 'data', {})
return self.parse_order(data, market)
async def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
defaultType = self.safe_string(self.options, 'defaultType', 'spot')
orderType = self.safe_string(params, 'type', defaultType)
params = self.omit(params, 'type')
await self.load_markets()
market = None
request = {
'market': orderType,
}
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
if since is not None:
request['start_time'] = int(since / 1000) # default 3 days from now, max 30 days
if limit is not None:
request['limit'] = limit # default 10, max 100
response = await self.privateGetMarketMytrades(self.extend(request, params))
#
# {
# "code": 0,
# "list": [
# {
# "symbol": "BTC_USDT",
# "order_id": "6707cbdcda0edfaa7f4ab509e4cbf966",
# "id": 28457,
# "price": 0.1,
# "amount": 0,
# "fee": 0.096,
# "fee_currency": "USDT",
# "timestamp": 1499865549,
# "side": "buy",
# "is_maker": True
# }
# ]
# }
#
data = self.safe_value(response, 'list', [])
return self.parse_trades(data, market, since, limit)
def parse_ledger_entry_type(self, type):
types = {}
return self.safe_string(types, type, type)
def parse_ledger_entry(self, item, currency=None):
#
# {
# "currency_mark": "BTC",
# "type": 100234,
# "num": 28457,
# "balance": 0.1,
# "time": 1546272000
# }
#
id = self.safe_string(item, 'num')
account = None
type = self.parse_ledger_entry_type(self.safe_string(item, 'type'))
code = self.safe_currency_code(self.safe_string(item, 'currency_mark'), currency)
timestamp = self.safe_timestamp(item, 'time')
before = None
after = self.safe_float(item, 'balance')
status = 'ok'
return {
'info': item,
'id': id,
'direction': None,
'account': account,
'referenceId': None,
'referenceAccount': None,
'type': type,
'currency': code,
'amount': None,
'before': before,
'after': after,
'status': status,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'fee': None,
}
async def fetch_ledger(self, code=None, since=None, limit=None, params={}):
defaultType = self.safe_string(self.options, 'defaultType', 'spot')
orderType = self.safe_string(params, 'type', defaultType)
params = self.omit(params, 'type')
await self.load_markets()
request = {
'market': orderType,
}
currency = None
if code is not None:
currency = self.currency(code)
request['currency_mark'] = currency['id']
if since is not None:
request['start_time'] = int(since / 1000)
if limit is not None:
request['limit'] = limit # default 100, max 1000
response = await self.privateGetMarketFinancelog(self.extend(request, params))
#
# {
# "code": 0,
# "data": {
# "total": 521,
# "finance": [
# {
# "currency_mark": "BTC",
# "type": 100234,
# "num": 28457,
# "balance": 0.1,
# "time": 1546272000
# }
# ]
# }
# }
#
data = self.safe_value(response, 'data', {})
items = self.safe_value(data, 'finance', [])
return self.parse_ledger(items, currency, since, limit)
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
version = api if (api == 'v2') else self.version
url = self.urls['api'] + '/' + version + '/' + self.implode_params(path, params)
query = self.omit(params, self.extract_params(path))
urlencoded = self.urlencode(self.keysort(query))
if api == 'private':
nonce = str(self.nonce())
auth = urlencoded
# the signature is not time-limited :\
signature = self.hmac(self.encode(auth), self.encode(self.secret))
if method == 'GET':
if urlencoded:
url += '?' + urlencoded
elif method == 'POST':
headers = {
'Content-Type': 'application/x-www-form-urlencoded',
}
if urlencoded:
body = urlencoded
headers = {
'ACCESS-KEY': self.apiKey,
'ACCESS-SIGN': signature,
'ACCESS-TIMESTAMP': nonce,
}
else:
if urlencoded:
url += '?' + urlencoded
return {'url': url, 'method': method, 'body': body, 'headers': headers}
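    # Illustrative sketch (added for clarity, not part of the original source):
    # private requests are signed with a plain HMAC-SHA256 of the sorted,
    # urlencoded query, hex-encoded, e.g.
    #
    #     import hashlib, hmac
    #     sig = hmac.new(b'my-secret', b'amount=1&symbol=btc_usdt',
    #                    hashlib.sha256).hexdigest()
    #
    # which is what the self.hmac(...) call above computes with its default digest.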
def date_utc8(self, timestampMS):
timedelta = self.safe_value(self.options, 'timedelta', 8 * 60 * 60 * 1000) # eight hours
return self.ymd(timestampMS + timedelta)
def handle_errors(self, statusCode, statusText, url, method, responseHeaders, responseBody, response, requestHeaders, requestBody):
if not response:
return # fall back to default error handler
code = self.safe_string(response, 'code')
if code == '0':
return # no error
feedback = self.id + ' ' + responseBody
if code is None:
raise BadResponse(feedback)
unknownError = [ExchangeError, feedback]
ExceptionClass, message = self.safe_value(self.exceptions['exact'], code, unknownError)
raise ExceptionClass(message)
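    # Illustrative flow (added for clarity): a response such as
    # {"code": "10002", ...} is looked up in self.exceptions['exact'] above and
    # surfaces as AuthenticationError('Invalid ApiKey'); any unlisted non-zero
    # code falls back to a generic ExchangeError carrying the raw body.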
| 38.319003
| 224
| 0.459598
|
9009e68767d4ad906e1933878fa8648230201d72
| 2,443
|
py
|
Python
|
soft/lib.nntest/customize.py
|
G4V/ck-nntest
|
0c1a52cfe2c6634f0edae3f14911fa43838a17f6
|
[
"BSD-3-Clause"
] | 9
|
2018-08-08T03:17:06.000Z
|
2021-01-03T13:24:40.000Z
|
soft/lib.nntest/customize.py
|
G4V/ck-nntest
|
0c1a52cfe2c6634f0edae3f14911fa43838a17f6
|
[
"BSD-3-Clause"
] | 9
|
2018-07-12T09:32:14.000Z
|
2020-06-30T13:35:01.000Z
|
soft/lib.nntest/customize.py
|
G4V/ck-nntest
|
0c1a52cfe2c6634f0edae3f14911fa43838a17f6
|
[
"BSD-3-Clause"
] | 4
|
2018-07-31T13:12:31.000Z
|
2020-08-09T19:25:37.000Z
|
#
# Copyright (c) 2017 cTuning foundation.
# See CK COPYRIGHT.txt for copyright details.
#
# SPDX-License-Identifier: BSD-3-Clause.
# See CK LICENSE.txt for licensing details.
#
# Collective Knowledge (individual environment - setup)
#
# Developer: Grigori Fursin, Grigori.Fursin@cTuning.org, http://fursin.net
#
##############################################################################
# environment setup
def setup(i):
"""
Input: {
cfg - meta of this soft entry
self_cfg - meta of module soft
ck_kernel - import CK kernel module (to reuse functions)
host_os_uoa - host OS UOA
host_os_uid - host OS UID
host_os_dict - host OS meta
target_os_uoa - target OS UOA
target_os_uid - target OS UID
target_os_dict - target OS meta
target_device_id - target device ID (if via ADB)
tags - list of tags used to search this entry
env - updated environment vars from meta
customize - updated customize vars from meta
deps - resolved dependencies for this soft
interactive - if 'yes', can ask questions, otherwise quiet
}
Output: {
return - return code = 0, if successful
> 0, if error
(error) - error text if return > 0
bat - prepared string for bat file
}
"""
import os
# Get variables
ck=i['ck_kernel']
iv=i.get('interactive','')
env=i.get('env',{})
cfg=i.get('cfg',{})
deps=i.get('deps',{})
tags=i.get('tags',[])
cus=i.get('customize',{})
target_d=i.get('target_os_dict',{})
win=target_d.get('windows_base','')
remote=target_d.get('remote','')
mingw=target_d.get('mingw','')
tbits=target_d.get('bits','')
envp=cus.get('env_prefix','')
pi=cus.get('path_install','')
host_d=i.get('host_os_dict',{})
sdirs=host_d.get('dir_sep','')
fp=cus.get('full_path','')
if fp!='':
p1=os.path.dirname(fp)
pi=os.path.dirname(p1)
cus['path_include']=pi+sdirs+'include'
ep=cus.get('env_prefix','')
if pi!='' and ep!='':
env[ep]=pi
return {'return':0, 'bat': ''}
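# Illustrative call (added for clarity; paths and values are placeholders, not
# from the original source): CK invokes setup() with a dict, so a minimal
# standalone smoke test could look like:
#
#     import ck.kernel as ck
#     r = setup({'ck_kernel': ck, 'env': {}, 'customize':
#                {'env_prefix': 'CK_ENV_LIB_NNTEST',
#                 'full_path': '/usr/local/lib/libnntest.so'},
#                'host_os_dict': {'dir_sep': '/'}, 'target_os_dict': {}})
#     assert r['return'] == 0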
| 27.144444
| 78
| 0.513713
|
0098ec7de6358061e4a726139dfdcca6b7921da7
| 9,271
|
py
|
Python
|
master/python_games_sweigart/python_games_sweigart/reversegam.py
|
AlexRogalskiy/DevArtifacts
|
931aabb8cbf27656151c54856eb2ea7d1153203a
|
[
"MIT"
] | 4
|
2018-09-07T15:35:24.000Z
|
2019-03-27T09:48:12.000Z
|
master/python_games_sweigart/python_games_sweigart/reversegam.py
|
AlexRogalskiy/DevArtifacts
|
931aabb8cbf27656151c54856eb2ea7d1153203a
|
[
"MIT"
] | 371
|
2020-03-04T21:51:56.000Z
|
2022-03-31T20:59:11.000Z
|
master/python_games_sweigart/python_games_sweigart/reversegam.py
|
AlexRogalskiy/DevArtifacts
|
931aabb8cbf27656151c54856eb2ea7d1153203a
|
[
"MIT"
] | 3
|
2019-06-18T19:57:17.000Z
|
2020-11-06T03:55:08.000Z
|
# "Реверси": клон "Отелло".
import random
import sys
WIDTH = 8 # Игровое поле содержит 8 клеток по ширине.
HEIGHT = 8 # Игровое поле содержит 8 клеток по высоте.
def drawBoard(board):
# Вывести игровое поле, переданное этой функции. Ничего не возвращать.
print(' 12345678')
print(' +--------+')
for y in range(HEIGHT):
print('%s|' % (y+1), end='')
for x in range(WIDTH):
print(board[x][y], end='')
print('|%s' % (y+1))
print(' +--------+')
print(' 12345678')
def getNewBoard():
# Создать структуру данных нового чистого игрового поля.
board = []
for i in range(WIDTH):
board.append([' ', ' ', ' ', ' ', ' ', ' ', ' ', ' '])
return board
def isValidMove(board, tile, xstart, ystart):
# Вернуть False, если ход игрока в клетку с координатами xstart, ystart – недопустимый.
# Если это допустимый ход, вернуть список клеток, которые "присвоил" бы игрок, если бы сделал туда ход.
if board[xstart][ystart] != ' ' or not isOnBoard(xstart, ystart):
return False
if tile == 'Х':
otherTile = 'О'
else:
otherTile = 'Х'
tilesToFlip = []
for xdirection, ydirection in [[0, 1], [1, 1], [1, 0], [1, -1], [0, -1], [-1, -1], [-1, 0], [-1, 1]]:
x, y = xstart, ystart
x += xdirection # Первый шаг в направлении x
y += ydirection # Первый шаг в направлении y
while isOnBoard(x, y) and board[x][y] == otherTile:
# Продолжать двигаться в этом направлении x и y.
x += xdirection
y += ydirection
if isOnBoard(x, y) and board[x][y] == tile:
# Есть фишки, которые можно перевернуть. Двигаться в обратном направлении до достижения исходной клетки, отмечая все фишки на этом пути.
while True:
x -= xdirection
y -= ydirection
if x == xstart and y == ystart:
break
tilesToFlip.append([x, y])
if len(tilesToFlip) == 0: # Если ни одна из фишек не перевернулась, это недопустимый ход.
return False
return tilesToFlip
def isOnBoard(x, y):
    # Return True if the coordinates are located on the board.
    return x >= 0 and x <= WIDTH - 1 and y >= 0 and y <= HEIGHT - 1
def getBoardWithValidMoves(board, tile):
    # Return a new board with periods marking the valid moves the given player can make.
    boardCopy = getBoardCopy(board)
    for x, y in getValidMoves(boardCopy, tile):
        boardCopy[x][y] = '.'
    return boardCopy
def getValidMoves(board, tile):
    # Return a list of [x, y] lists of valid moves for the given player on the given board.
    validMoves = []
    for x in range(WIDTH):
        for y in range(HEIGHT):
            if isValidMove(board, tile, x, y) != False:
                validMoves.append([x, y])
    return validMoves
def getScoreOfBoard(board):
    # Determine the score by counting the tiles. Return a dictionary with keys 'X' and 'O'.
    xscore = 0
    oscore = 0
    for x in range(WIDTH):
        for y in range(HEIGHT):
            if board[x][y] == 'X':
                xscore += 1
            if board[x][y] == 'O':
                oscore += 1
    return {'X':xscore, 'O':oscore}
def enterPlayerTile():
    # Let the player enter which tile they want to be.
    # Return a list with the player's tile as the first item and the computer's tile as the second.
    tile = ''
    while not (tile == 'X' or tile == 'O'):
        print('Do you want to be X or O?')
        tile = input().upper()
    # The first element in the list is the player's tile, and the second is the computer's tile.
    if tile == 'X':
        return ['X', 'O']
    else:
        return ['O', 'X']
def whoGoesFirst():
    # Randomly choose who goes first.
    if random.randint(0, 1) == 0:
        return 'computer'
    else:
        return 'player'
def makeMove(board, tile, xstart, ystart):
    # Place the tile on the board at xstart, ystart and flip any of the opponent's pieces.
    # Return False if this is an invalid move; return True if it is valid.
    tilesToFlip = isValidMove(board, tile, xstart, ystart)
    if tilesToFlip == False:
        return False
    board[xstart][ystart] = tile
    for x, y in tilesToFlip:
        board[x][y] = tile
    return True
def getBoardCopy(board):
    # Make a copy of the board list and return it.
    boardCopy = getNewBoard()
    for x in range(WIDTH):
        for y in range(HEIGHT):
            boardCopy[x][y] = board[x][y]
    return boardCopy
def isOnCorner(x, y):
    # Return True if the position is in one of the four corners.
    return (x == 0 or x == WIDTH - 1) and (y == 0 or y == HEIGHT - 1)
def getPlayerMove(board, playerTile):
    # Let the player enter their move.
    # Return the move as [x, y] (or return the strings 'hints' or 'quit').
    DIGITS1TO8 = '1 2 3 4 5 6 7 8'.split()
    while True:
        print('Enter your move, "quit" to end the game, or "hints" to toggle hints.')
        move = input().lower()
        if move == 'quit' or move == 'hints':
            return move
        if len(move) == 2 and move[0] in DIGITS1TO8 and move[1] in DIGITS1TO8:
            x = int(move[0]) - 1
            y = int(move[1]) - 1
            if isValidMove(board, playerTile, x, y) == False:
                continue
            else:
                break
        else:
            print('That is not a valid move. Enter the column (1-8) and then the row (1-8).')
            print('For example, 81 will move on the top-right corner.')
    return [x, y]
def getComputerMove(board, computerTile):
    # Given a board and the computer's tile, determine where to
    # move and return that move as an [x, y] list.
    possibleMoves = getValidMoves(board, computerTile)
    random.shuffle(possibleMoves) # Randomize the order of the moves.
    # Always go for a corner if available.
    for x, y in possibleMoves:
        if isOnCorner(x, y):
            return [x, y]
    # Find the highest-scoring move possible.
    bestScore = -1
    for x, y in possibleMoves:
        boardCopy = getBoardCopy(board)
        makeMove(boardCopy, computerTile, x, y)
        score = getScoreOfBoard(boardCopy)[computerTile]
        if score > bestScore:
            bestMove = [x, y]
            bestScore = score
    return bestMove
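# Note (added for clarity, not in the original): getComputerMove() is a
# one-ply greedy strategy. Corners are taken first because a corner tile can
# never be flipped back; otherwise every legal move is simulated on a board
# copy and the one with the highest immediate score wins, e.g. a move that
# flips 5 tiles beats one that flips 3.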
def printScore(board, playerTile, computerTile):
    scores = getScoreOfBoard(board)
    print('You: %s points. Computer: %s points.' % (scores[playerTile], scores[computerTile]))
def playGame(playerTile, computerTile):
    showHints = False
    turn = whoGoesFirst()
    print('The ' + turn + ' will go first.')
    # Clear the board and place starting pieces.
    board = getNewBoard()
    board[3][3] = 'X'
    board[3][4] = 'O'
    board[4][3] = 'O'
    board[4][4] = 'X'
    while True:
        playerValidMoves = getValidMoves(board, playerTile)
        computerValidMoves = getValidMoves(board, computerTile)
        if playerValidMoves == [] and computerValidMoves == []:
            return board # No one can move, so end the game.
        elif turn == 'player': # Player's turn
            if playerValidMoves != []:
                if showHints:
                    validMovesBoard = getBoardWithValidMoves(board, playerTile)
                    drawBoard(validMovesBoard)
                else:
                    drawBoard(board)
                printScore(board, playerTile, computerTile)
                move = getPlayerMove(board, playerTile)
                if move == 'quit':
                    print('Thanks for playing!')
                    sys.exit() # Terminate the program.
                elif move == 'hints':
                    showHints = not showHints
                    continue
                else:
                    makeMove(board, playerTile, move[0], move[1])
            turn = 'computer'
        elif turn == 'computer': # Computer's turn
            if computerValidMoves != []:
                drawBoard(board)
                printScore(board, playerTile, computerTile)
                input("Press Enter to see the computer's move.")
                move = getComputerMove(board, computerTile)
                makeMove(board, computerTile, move[0], move[1])
            turn = 'player'
print('Welcome to Reversegam!')
playerTile, computerTile = enterPlayerTile()
while True:
    finalBoard = playGame(playerTile, computerTile)
    # Display the final score.
    drawBoard(finalBoard)
    scores = getScoreOfBoard(finalBoard)
    print('X scored %s points. O scored %s points.' % (scores['X'], scores['O']))
    if scores[playerTile] > scores[computerTile]:
        print('You beat the computer by %s points! Congratulations!' % (scores[playerTile] - scores[computerTile]))
    elif scores[playerTile] < scores[computerTile]:
        print('You lost. The computer beat you by %s points.' % (scores[computerTile] - scores[playerTile]))
    else:
        print('The game was a tie!')
    print('Do you want to play again? (yes or no)')
    if not input().lower().startswith('y'):
        break
| 35.657692
| 152
| 0.594758
|
d0b5eeb2fddf6054e343b702c4a2c92cf97abc76
| 7,070
|
py
|
Python
|
built-in/PyTorch/Official/cv/image_classification/Gluon_ResNet50_v1d_for_PyTorch/timm/models/layers/eca.py
|
Ascend/modelzoo
|
f018cfed33dbb1cc2110b9ea2e233333f71cc509
|
[
"Apache-2.0"
] | 12
|
2020-12-13T08:34:24.000Z
|
2022-03-20T15:17:17.000Z
|
built-in/PyTorch/Official/cv/image_classification/Gluon_ResNet50_v1d_for_PyTorch/timm/models/layers/eca.py
|
Ascend/modelzoo
|
f018cfed33dbb1cc2110b9ea2e233333f71cc509
|
[
"Apache-2.0"
] | 1
|
2022-01-20T03:11:05.000Z
|
2022-01-20T06:53:39.000Z
|
built-in/PyTorch/Official/cv/image_classification/Gluon_ResNet50_v1d_for_PyTorch/timm/models/layers/eca.py
|
Ascend/modelzoo
|
f018cfed33dbb1cc2110b9ea2e233333f71cc509
|
[
"Apache-2.0"
] | 2
|
2021-07-10T12:40:46.000Z
|
2021-12-17T07:55:15.000Z
|
# Copyright [yyyy] [name of copyright owner]
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
ECA module from ECAnet
paper: ECA-Net: Efficient Channel Attention for Deep Convolutional Neural Networks
https://arxiv.org/abs/1910.03151
Original ECA model borrowed from https://github.com/BangguWu/ECANet
Modified circular ECA implementation and adaption for use in timm package
by Chris Ha https://github.com/VRandme
Original License:
MIT License
Copyright (c) 2019 BangguWu, Qilong Wang
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import math
from torch import nn
import torch.nn.functional as F
from .create_act import create_act_layer
from .helpers import make_divisible
class EcaModule(nn.Module):
"""Constructs an ECA module.
Args:
        channels: Number of channels of the input feature map. When given, the
            kernel size is derived adaptively from the channel count, per the
            original paper https://arxiv.org/pdf/1910.03151.pdf
            (default=None; if no channel count is given, kernel_size is used as-is).
        kernel_size: Kernel size of the 1d conv (default=3); only used when
            channels is not given.
        gamma: used in the adaptive kernel_size calculation, see above
        beta: used in the adaptive kernel_size calculation, see above
        act_layer: optional non-linearity after conv, enables conv bias; this is an experiment
        gate_layer: gating non-linearity to use
"""
def __init__(
self, channels=None, kernel_size=3, gamma=2, beta=1, act_layer=None, gate_layer='sigmoid',
rd_ratio=1/8, rd_channels=None, rd_divisor=8, use_mlp=False):
super(EcaModule, self).__init__()
if channels is not None:
t = int(abs(math.log(channels, 2) + beta) / gamma)
kernel_size = max(t if t % 2 else t + 1, 3)
assert kernel_size % 2 == 1
padding = (kernel_size - 1) // 2
if use_mlp:
# NOTE 'mlp' mode is a timm experiment, not in paper
assert channels is not None
if rd_channels is None:
rd_channels = make_divisible(channels * rd_ratio, divisor=rd_divisor)
act_layer = act_layer or nn.ReLU
self.conv = nn.Conv1d(1, rd_channels, kernel_size=1, padding=0, bias=True)
self.act = create_act_layer(act_layer)
self.conv2 = nn.Conv1d(rd_channels, 1, kernel_size=kernel_size, padding=padding, bias=True)
else:
self.conv = nn.Conv1d(1, 1, kernel_size=kernel_size, padding=padding, bias=False)
self.act = None
self.conv2 = None
self.gate = create_act_layer(gate_layer)
def forward(self, x):
y = x.mean((2, 3)).view(x.shape[0], 1, -1) # view for 1d conv
y = self.conv(y)
if self.conv2 is not None:
y = self.act(y)
y = self.conv2(y)
y = self.gate(y).view(x.shape[0], -1, 1, 1)
return x * y.expand_as(x)
EfficientChannelAttn = EcaModule # alias
class CecaModule(nn.Module):
"""Constructs a circular ECA module.
ECA module where the conv uses circular padding rather than zero padding.
    Unlike the spatial dimensions, the channels have no inherent ordering or
    locality, so there is no reason to stop the channels at either "edge" of
    the 1d conv from adapting to each other. Circular padding restores that
    connectivity and may improve quality metrics (accuracy, robustness)
    without significantly impacting resource metrics (parameter size,
    throughput, latency, etc.).
    Args:
        channels: Number of channels of the input feature map. When given, the
            kernel size is derived adaptively from the channel count, per the
            original paper https://arxiv.org/pdf/1910.03151.pdf
            (default=None; if no channel count is given, kernel_size is used as-is).
        kernel_size: Kernel size of the 1d conv (default=3); only used when
            channels is not given.
        gamma: used in the adaptive kernel_size calculation, see above
        beta: used in the adaptive kernel_size calculation, see above
        act_layer: optional non-linearity after conv, enables conv bias; this is an experiment
        gate_layer: gating non-linearity to use
"""
def __init__(self, channels=None, kernel_size=3, gamma=2, beta=1, act_layer=None, gate_layer='sigmoid'):
super(CecaModule, self).__init__()
if channels is not None:
t = int(abs(math.log(channels, 2) + beta) / gamma)
kernel_size = max(t if t % 2 else t + 1, 3)
has_act = act_layer is not None
assert kernel_size % 2 == 1
# PyTorch circular padding mode is buggy as of pytorch 1.4
# see https://github.com/pytorch/pytorch/pull/17240
# implement manual circular padding
self.padding = (kernel_size - 1) // 2
self.conv = nn.Conv1d(1, 1, kernel_size=kernel_size, padding=0, bias=has_act)
self.gate = create_act_layer(gate_layer)
def forward(self, x):
y = x.mean((2, 3)).view(x.shape[0], 1, -1)
        # Manually implement circular padding; F.pad with mode='circular' does not seem to be affected by the bug above.
y = F.pad(y, (self.padding, self.padding), mode='circular')
y = self.conv(y)
y = self.gate(y).view(x.shape[0], -1, 1, 1)
return x * y.expand_as(x)
CircularEfficientChannelAttn = CecaModule
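# Illustrative usage (added for clarity, not part of the original source):
#
#     import torch
#     eca = EcaModule(channels=64)  # adaptive rule: t = int(|log2(64) + 1| / 2) = 3 -> kernel_size 3
#     y = eca(torch.randn(2, 64, 32, 32))  # same shape, channels reweighted
#
# With channels=512 the same rule gives t = int(|9 + 1| / 2) = 5, i.e. kernel_size=5.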
| 43.641975
| 108
| 0.68727
|
2375f135e6b8ec09a00b6715e99018c53e729d65
| 2,491
|
py
|
Python
|
tests/test_newsitem.py
|
erwinelling/wagtailnews
|
90b2a1b9b9fd9e9e365165ff757d10cbfd1b24fe
|
[
"BSD-2-Clause"
] | 35
|
2019-02-24T15:04:54.000Z
|
2022-02-28T09:34:35.000Z
|
tests/test_newsitem.py
|
erwinelling/wagtailnews
|
90b2a1b9b9fd9e9e365165ff757d10cbfd1b24fe
|
[
"BSD-2-Clause"
] | 10
|
2019-04-25T08:43:57.000Z
|
2021-09-12T23:57:24.000Z
|
tests/test_newsitem.py
|
erwinelling/wagtailnews
|
90b2a1b9b9fd9e9e365165ff757d10cbfd1b24fe
|
[
"BSD-2-Clause"
] | 14
|
2019-02-25T10:56:33.000Z
|
2021-07-22T06:33:11.000Z
|
# -*- coding: utf8 -*-
from __future__ import absolute_import, unicode_literals
import datetime
from django.test import TestCase
from django.utils import timezone
from django.utils.http import urlquote
from wagtail.core.models import Site
from wagtail.tests.utils import WagtailTestUtils
from tests.app.models import NewsIndex, NewsItem
class TestNewsItem(TestCase, WagtailTestUtils):
def setUp(self):
super(TestNewsItem, self).setUp()
site = Site.objects.get(is_default_site=True)
root_page = site.root_page
self.index = NewsIndex(
title='News', slug='news')
root_page.add_child(instance=self.index)
ni_date = timezone.make_aware(datetime.datetime(2017, 4, 13, 12, 0, 0))
self.newsitem = NewsItem.objects.create(
newsindex=self.index,
title='A post',
date=ni_date)
def test_view(self):
response = self.client.get(self.newsitem.url())
        # Check the right NewsIndex was used, and that it is its most specific type
self.assertIsInstance(response.context['self'], NewsIndex)
self.assertEqual(response.context['self'], self.index)
self.assertEqual(response.context['page'], self.index)
# Check the right NewsItem was used
self.assertEqual(response.context['newsitem'], self.newsitem)
# Check the NewsIndex context is used as a base
self.assertEqual(response.context['extra'], 'foo')
# Check the context can be overridden using NewsItem.get_context()
self.assertEqual(response.context['foo'], 'bar')
def test_bad_url_redirect(self):
response = self.client.get(
'/news/1234/2/3/{}-bad-title/'.format(self.newsitem.pk),
follow=True)
self.assertEqual(
self.newsitem.url(),
urlquote('/news/2017/4/13/{}-a-post/'.format(self.newsitem.pk)))
self.assertEqual(
response.redirect_chain,
[(self.newsitem.url(), 301)])
def test_bad_url_redirect_unicode(self):
self.newsitem.title = '你好,世界!'
self.newsitem.save()
response = self.client.get(
'/news/1234/2/3/{}-bad-title/'.format(self.newsitem.pk),
follow=True)
self.assertEqual(
self.newsitem.url(),
urlquote('/news/2017/4/13/{}-你好世界/'.format(self.newsitem.pk)))
self.assertEqual(
response.redirect_chain,
[(self.newsitem.url(), 301)])
| 34.123288
| 79
| 0.638298
|
ed9167acca9cad61b695e4b526bdf372c25e155c
| 5,769
|
py
|
Python
|
tensorflow_probability/python/bijectors/cholesky_to_inv_cholesky_test.py
|
brianwa84/probability
|
6f8e78d859ac41170be5147c8c7bde54cc5aa83e
|
[
"Apache-2.0"
] | 2
|
2020-12-17T20:43:24.000Z
|
2021-06-11T22:09:16.000Z
|
tensorflow_probability/python/bijectors/cholesky_to_inv_cholesky_test.py
|
brianwa84/probability
|
6f8e78d859ac41170be5147c8c7bde54cc5aa83e
|
[
"Apache-2.0"
] | 2
|
2021-08-25T16:14:51.000Z
|
2022-02-10T04:47:11.000Z
|
tensorflow_probability/python/bijectors/cholesky_to_inv_cholesky_test.py
|
brianwa84/probability
|
6f8e78d859ac41170be5147c8c7bde54cc5aa83e
|
[
"Apache-2.0"
] | 1
|
2020-12-19T13:05:15.000Z
|
2020-12-19T13:05:15.000Z
|
# Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for CholeskyToInvCholesky bijector."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
import numpy as np
import tensorflow.compat.v2 as tf
from tensorflow_probability.python import bijectors as tfb
from tensorflow_probability.python.internal import test_util
@test_util.test_all_tf_execution_regimes
class CholeskyToInvCholeskyTest(test_util.TestCase):
def testBijector(self):
bijector = tfb.CholeskyToInvCholesky()
self.assertStartsWith(bijector.name, "cholesky_to_inv_cholesky")
x = np.array([[3., 0.], [2., 7.]], dtype=np.float32)
m = x.dot(x.T)
m_inv = np.linalg.inv(m)
y = np.linalg.cholesky(m_inv)
x_fwd = bijector.forward(x)
y_inv = bijector.inverse(x_fwd)
x_fwd_, y_inv_ = self.evaluate([x_fwd, y_inv])
self.assertAllClose(y, x_fwd_, atol=1.e-5, rtol=1.e-5)
self.assertAllClose(x, y_inv_, atol=1.e-5, rtol=1.e-5)
def testBijectorWithTensors(self):
bijector = tfb.CholeskyToInvCholesky()
x = np.array([
[[3., 0.], [1., 4.]],
[[2., 0.], [7., 1.]]], dtype=np.float32)
y = bijector.forward(x)
y0 = bijector.forward(x[0, :])
y1 = bijector.forward(x[1, :])
y_inv = bijector.inverse(y)
y_inv0 = bijector.inverse(y[0, :])
y_inv1 = bijector.inverse(y[1, :])
y_, y0_, y1_, y_inv_, y_inv0_, y_inv1_ = self.evaluate(
[y, y0, y1, y_inv, y_inv0, y_inv1])
self.assertAllClose(y_[0, :], y0_, atol=1.e-5, rtol=1.e-5)
self.assertAllClose(y_[1, :], y1_, atol=1.e-5, rtol=1.e-5)
self.assertAllClose(y_inv_[0, :], y_inv0_, atol=1.e-5, rtol=1.e-5)
self.assertAllClose(y_inv_[1, :], y_inv1_, atol=1.e-5, rtol=1.e-5)
self.assertAllClose(y_inv_, x, atol=1.e-5, rtol=1.e-5)
def _get_fldj_numerical(self, bijector, x, event_ndims,
eps=1.e-6,
input_to_vector=tfb.Identity,
output_to_vector=tfb.Identity):
"""Numerically approximate the forward log det Jacobian of a bijector.
Args:
bijector: the bijector whose Jacobian we wish to approximate
x: the value for which we want to approximate the Jacobian
event_ndims: number of dimensions in an event
eps: epsilon to add when forming (f(x+eps)-f(x)) / eps
input_to_vector: a bijector that maps the input value to a vector
output_to_vector: a bijector that maps the output value to a vector
Returns:
A numerical approximation to the log det Jacobian of bijector.forward
evaluated at x.
"""
x_vector = input_to_vector.forward(x) # [B, n]
n = tf.shape(x_vector)[-1]
x_plus_eps_vector = (
x_vector[..., tf.newaxis, :] +
eps * tf.eye(n, dtype=x_vector.dtype)) # [B, n, n]
x_plus_eps = input_to_vector.inverse(x_plus_eps_vector) # [B, n, d, d]
f_x_plus_eps = bijector.forward(x_plus_eps) # [B, n, d, d]
f_x_plus_eps_vector = output_to_vector.forward(f_x_plus_eps) # [B, n, n]
f_x = bijector.forward(x) # [B, d, d]
f_x_vector = output_to_vector.forward(f_x) # [B, n]
jacobian_numerical = (f_x_plus_eps_vector -
f_x_vector[..., tf.newaxis, :]) / eps
return (
tf.math.log(tf.abs(tf.linalg.det(jacobian_numerical))) +
input_to_vector.forward_log_det_jacobian(x, event_ndims=event_ndims) -
output_to_vector.forward_log_det_jacobian(f_x, event_ndims=event_ndims)
)
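  # Sketch of the idea (added for clarity): with v = input_to_vector(x) in
  # R^n, each coordinate is perturbed in turn and the Jacobian is estimated
  # row by row as J[j, :] ~= (f(v + eps * e_j) - f(v)) / eps; since
  # |det J| = |det J^T|, log|det J| of this estimate recovers the fldj once
  # the two vectorizing bijectors' own log-det terms are added and subtracted.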
def testJacobian(self):
cholesky_to_vector = tfb.Invert(
tfb.FillScaleTriL(diag_bijector=tfb.Exp(), diag_shift=None))
bijector = tfb.CholeskyToInvCholesky()
for x in [np.array([[2.]],
dtype=np.float64),
np.array([[2., 0.],
[3., 4.]],
dtype=np.float64),
np.array([[2., 0., 0.],
[3., 4., 0.],
[5., 6., 7.]],
dtype=np.float64)]:
fldj = bijector.forward_log_det_jacobian(x, event_ndims=2)
fldj_numerical = self._get_fldj_numerical(
bijector, x, event_ndims=2,
input_to_vector=cholesky_to_vector,
output_to_vector=cholesky_to_vector)
fldj_, fldj_numerical_ = self.evaluate([fldj, fldj_numerical])
self.assertAllClose(fldj_, fldj_numerical_, rtol=1e-2)
def testJacobianWithTensors(self):
bijector = tfb.CholeskyToInvCholesky()
x = np.array([
[[3., 0.],
[1., 4.]],
[[2., 0.],
[7., 1.]]], dtype=np.float32)
fldj = bijector.forward_log_det_jacobian(x, event_ndims=2)
fldj0 = bijector.forward_log_det_jacobian(x[0], event_ndims=2)
fldj1 = bijector.forward_log_det_jacobian(x[1], event_ndims=2)
fldj_, fldj0_, fldj1_ = self.evaluate([fldj, fldj0, fldj1])
self.assertAllClose(fldj_[0], fldj0_, rtol=1e-5)
self.assertAllClose(fldj_[1], fldj1_, rtol=1e-5)
if __name__ == "__main__":
tf.test.main()
| 41.503597
| 80
| 0.627318
|
a0e92b03b6599d22dbd5579d261e8240f11a17ae
| 6,292
|
py
|
Python
|
google/cloud/forseti/scanner/scanners/audit_logging_scanner.py
|
mcunha/forseti-security
|
cbf25f6173c1a25d4e43a9738eca73f927361cb8
|
[
"Apache-2.0"
] | 1
|
2018-10-06T23:16:59.000Z
|
2018-10-06T23:16:59.000Z
|
google/cloud/forseti/scanner/scanners/audit_logging_scanner.py
|
mcunha/forseti-security
|
cbf25f6173c1a25d4e43a9738eca73f927361cb8
|
[
"Apache-2.0"
] | null | null | null |
google/cloud/forseti/scanner/scanners/audit_logging_scanner.py
|
mcunha/forseti-security
|
cbf25f6173c1a25d4e43a9738eca73f927361cb8
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2018 The Forseti Security Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Scanner for Audit Logging."""
import json
from google.cloud.forseti.common.gcp_type import iam_policy
from google.cloud.forseti.common.gcp_type.project import Project
from google.cloud.forseti.common.util import logger
from google.cloud.forseti.scanner.audit import audit_logging_rules_engine
from google.cloud.forseti.scanner.scanners import base_scanner
from google.cloud.forseti.services import utils
LOGGER = logger.get_logger(__name__)
class AuditLoggingScanner(base_scanner.BaseScanner):
"""Scanner for Audit Logging."""
def __init__(self, global_configs, scanner_configs, service_config,
model_name, snapshot_timestamp, rules):
"""Initialization.
Args:
global_configs (dict): Global configurations.
scanner_configs (dict): Scanner configurations.
service_config (ServiceConfig): Forseti 2.0 service configs
model_name (str): name of the data model
snapshot_timestamp (str): Timestamp, formatted as YYYYMMDDTHHMMSSZ.
rules (str): Fully-qualified path and filename of the rules file.
"""
super(AuditLoggingScanner, self).__init__(
global_configs,
scanner_configs,
service_config,
model_name,
snapshot_timestamp,
rules)
self.rules_engine = audit_logging_rules_engine.AuditLoggingRulesEngine(
rules_file_path=self.rules,
snapshot_timestamp=self.snapshot_timestamp)
self.rules_engine.build_rule_book(self.global_configs)
@staticmethod
def _flatten_violations(violations):
"""Flatten RuleViolations into a dict for each violation.
Args:
violations (list): The RuleViolations to flatten.
Yields:
dict: Iterator of RuleViolations as a dict per member.
"""
for violation in violations:
violation_data = {
'full_name': violation.full_name,
'service': violation.service,
'log_type': violation.log_type,
}
if violation.unexpected_exemptions:
violation_data['unexpected_exemptions'] = list(
violation.unexpected_exemptions)
yield {
'resource_id': violation.resource_id,
'resource_type': violation.resource_type,
'full_name': violation.full_name,
'rule_index': violation.rule_index,
'rule_name': violation.rule_name,
'violation_type': violation.violation_type,
'violation_data': violation_data,
'resource_data': violation.resource_data
}
def _output_results(self, all_violations):
"""Output results.
Args:
all_violations (list): A list of violations
"""
all_violations = list(self._flatten_violations(all_violations))
self._output_results_to_db(all_violations)
def _find_violations(self, audit_logging_data):
"""Find violations in the audit log configs.
Args:
audit_logging_data (list): audit log data to find violations in.
Returns:
list: A list of all violations
"""
all_violations = []
LOGGER.info('Finding audit logging violations...')
for project, audit_config in audit_logging_data:
violations = self.rules_engine.find_violations(
project, audit_config)
LOGGER.debug(violations)
all_violations.extend(violations)
return all_violations
def _retrieve(self):
"""Retrieves the data for scanner.
Returns:
list: List of projects' audit logging config data.
"""
model_manager = self.service_config.model_manager
scoped_session, data_access = model_manager.get(self.model_name)
with scoped_session as session:
project_configs = []
ancestor_configs = {}
# Types that can contain AuditConfigs in the IamPolicy.
audit_policy_types = frozenset([
'organization', 'folder', 'project'])
for policy in data_access.scanner_iter(session, 'iam_policy'):
if policy.parent.type not in audit_policy_types:
continue
audit_config = iam_policy.IamAuditConfig.create_from(
json.loads(policy.data).get('auditConfigs', []))
if policy.parent.type == 'project':
project_configs.append(
(Project(policy.parent.name,
policy.parent.full_name,
policy.data),
audit_config))
elif audit_config.service_configs:
ancestor_configs[policy.parent.type, policy.parent.name] = (
audit_config)
# Merge ancestor configs into project level configs.
for project, audit_config in project_configs:
ancestors = utils.get_resources_from_full_name(project.full_name)
                next(ancestors)  # Skip the project itself and walk up its ancestors.
for res_type, res_name in ancestors:
ancestor_config = ancestor_configs.get((res_type, res_name))
if ancestor_config:
audit_config.merge_configs(ancestor_config)
return project_configs
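    # Illustrative merge (added for clarity; service names are hypothetical):
    # if an organization enables ADMIN_READ for allServices and a child
    # project enables DATA_READ for storage.googleapis.com, the project's
    # merged IamAuditConfig requires both, since each ancestor config is
    # folded into the project-level config above.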
def run(self):
"""Runs the data collection."""
audit_logging_data = self._retrieve()
all_violations = self._find_violations(audit_logging_data)
self._output_results(all_violations)
| 38.839506
| 80
| 0.63398
|
324aae1cf0b2eeaf8b973beda2c374bdf2660784
| 1,241
|
py
|
Python
|
urls.py
|
Lornatang/opencv201907
|
777b11780cdef4cd5656f8bb9bdef8b461dd55ab
|
[
"Apache-2.0"
] | 4
|
2019-08-11T04:43:23.000Z
|
2020-04-10T00:31:56.000Z
|
urls.py
|
Lornatang/opencv201907
|
777b11780cdef4cd5656f8bb9bdef8b461dd55ab
|
[
"Apache-2.0"
] | null | null | null |
urls.py
|
Lornatang/opencv201907
|
777b11780cdef4cd5656f8bb9bdef8b461dd55ab
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 DayHR Authors. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""
====================================WARNING====================================
Do not delete this file unless you know how to refactor it!
====================================WARNING====================================
"""
from django.conf.urls import url
from dlcv.api import ImageToSketch
from dlcv.api import VideoCover
from dlcv.api import OCR
# noinspection PyInterpreter
urlpatterns = [
url(r"^api/image_to_sketch/$", ImageToSketch.as_view()),
url(r"^api/video_cover/$", VideoCover.as_view()),
url(r"^api/ocr/$", OCR.as_view())
]
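# Illustrative request (added for clarity; host and form-field name are
# placeholders, not from the original source):
#
#     import requests
#     resp = requests.post('http://localhost:8000/api/ocr/',
#                          files={'image': open('sample.png', 'rb')})
#
# which the last pattern above routes to OCR.as_view().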
| 37.606061
| 80
| 0.609186
|
7f35e518be03e89a29f95dba06e3016816872ea7
| 518
|
py
|
Python
|
bnlp/bengali_fasttext.py
|
aaloman/bnlp
|
de7d58b7ce7e4e92386e0edd40f5845cae0bd5e5
|
[
"MIT"
] | null | null | null |
bnlp/bengali_fasttext.py
|
aaloman/bnlp
|
de7d58b7ce7e4e92386e0edd40f5845cae0bd5e5
|
[
"MIT"
] | null | null | null |
bnlp/bengali_fasttext.py
|
aaloman/bnlp
|
de7d58b7ce7e4e92386e0edd40f5845cae0bd5e5
|
[
"MIT"
] | null | null | null |
import fasttext
class Bengali_Fasttext(object):
def __init__(self, is_train=False):
self.is_train = is_train
def train_fasttext(self, data, model_name, epoch):
if self.is_train:
model = fasttext.train_unsupervised(data, model='skipgram', minCount=1, epoch=epoch)
model.save_model(model_name)
def generate_word_vector(self, model_path, word):
model = fasttext.load_model(model_path)
word_vector = model[word]
return word_vector
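# A minimal usage sketch (the file names are illustrative, not part of the
# library): train a skipgram model on a plain-text corpus, then query it.
#   bft = Bengali_Fasttext(is_train=True)
#   bft.train_fasttext("bengali_corpus.txt", "bengali_fasttext.bin", epoch=5)
#   vector = bft.generate_word_vector("bengali_fasttext.bin", "word")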
| 25.9
| 96
| 0.667954
|
29e3e067b14856d2b102ff489ac6c3cab4939c00
| 668
|
py
|
Python
|
ocr/celery.py
|
radtomas/simple-web-ocr
|
4b2dcca918ebf6117d6fa025ba8b32d1fde0b288
|
[
"MIT"
] | null | null | null |
ocr/celery.py
|
radtomas/simple-web-ocr
|
4b2dcca918ebf6117d6fa025ba8b32d1fde0b288
|
[
"MIT"
] | 3
|
2021-06-09T18:05:45.000Z
|
2022-02-10T09:30:41.000Z
|
ocr/celery.py
|
radtomas/simple-web-ocr-backend
|
4b2dcca918ebf6117d6fa025ba8b32d1fde0b288
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import, unicode_literals
import os
from celery import Celery
# Set the default Django settings module for the 'celery' program before
# settings are accessed (django.conf.settings is lazy, so importing it
# afterwards is safe).
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'ocr.settings')
from django.conf import settings
app = Celery('ocr')
app.conf.update(
result_backend=settings.CELERY_BACKEND,
broker_url=settings.CELERY_BROKER,
task_routes={
'ocr.tasks.*': {'queue': 'ocr'},
},
default_queue='ocr',
task_serializer='json',
accept_content=['json'],
result_serializer='json',
enable_utc=True,
)
# Load task modules from all registered Django app configs.
app.autodiscover_tasks()
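# A worker consuming this app's queue can be started with the standard
# Celery CLI (the queue name matches the routing configured above):
#   celery -A ocr worker -Q ocr -l info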
| 24.740741
| 66
| 0.732036
|
7efa52bc670826a2e9479e7700fe9ce22fe29290
| 4,328
|
py
|
Python
|
contrib/seeds/generate-seeds.py
|
iexcoin/Iex
|
3a80d2f241228c3ff7687301f6a59e1d34babd31
|
[
"MIT"
] | null | null | null |
contrib/seeds/generate-seeds.py
|
iexcoin/Iex
|
3a80d2f241228c3ff7687301f6a59e1d34babd31
|
[
"MIT"
] | null | null | null |
contrib/seeds/generate-seeds.py
|
iexcoin/Iex
|
3a80d2f241228c3ff7687301f6a59e1d34babd31
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) 2014-2017 Wladimir J. van der Laan
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Script to generate list of seed nodes for chainparams.cpp.
This script expects two text files in the directory that is passed as an
argument:
nodes_main.txt
nodes_test.txt
These files must consist of lines in the format
<ip>
<ip>:<port>
[<ipv6>]
[<ipv6>]:<port>
<onion>.onion
0xDDBBCCAA (IPv4 little-endian old pnSeeds format)
The output will be two data structures with the peers in binary format:
static SeedSpec6 pnSeed6_main[]={
...
}
static SeedSpec6 pnSeed6_test[]={
...
}
These should be pasted into `src/chainparamsseeds.h`.
'''
from base64 import b32decode
from binascii import a2b_hex
import sys, os
import re
# ipv4 in ipv6 prefix
pchIPv4 = bytearray([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff])
# tor-specific ipv6 prefix
pchOnionCat = bytearray([0xFD,0x87,0xD8,0x7E,0xEB,0x43])
def name_to_ipv6(addr):
if len(addr)>6 and addr.endswith('.onion'):
vchAddr = b32decode(addr[0:-6], True)
if len(vchAddr) != 16-len(pchOnionCat):
            raise ValueError('Invalid onion %s' % addr)
return pchOnionCat + vchAddr
elif '.' in addr: # IPv4
return pchIPv4 + bytearray((int(x) for x in addr.split('.')))
elif ':' in addr: # IPv6
sub = [[], []] # prefix, suffix
x = 0
addr = addr.split(':')
for i,comp in enumerate(addr):
if comp == '':
if i == 0 or i == (len(addr)-1): # skip empty component at beginning or end
continue
x += 1 # :: skips to suffix
assert(x < 2)
else: # two bytes per component
val = int(comp, 16)
sub[x].append(val >> 8)
sub[x].append(val & 0xff)
nullbytes = 16 - len(sub[0]) - len(sub[1])
assert((x == 0 and nullbytes == 0) or (x == 1 and nullbytes > 0))
return bytearray(sub[0] + ([0] * nullbytes) + sub[1])
elif addr.startswith('0x'): # IPv4-in-little-endian
return pchIPv4 + bytearray(reversed(a2b_hex(addr[2:])))
else:
raise ValueError('Could not parse address %s' % addr)
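# Worked examples of the conversions above (byte values shown for orientation):
#   name_to_ipv6('1.2.3.4')    -> pchIPv4 + bytearray([1, 2, 3, 4])
#   name_to_ipv6('::1')        -> fifteen zero bytes followed by 0x01
#   name_to_ipv6('0x0100007f') -> pchIPv4 + bytearray([0x7f, 0x00, 0x00, 0x01]),
#                                 i.e. 127.0.0.1 from the little-endian form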
def parse_spec(s, defaultport):
    match = re.match(r'\[([0-9a-fA-F:]+)\](?::([0-9]+))?$', s)
if match: # ipv6
host = match.group(1)
port = match.group(2)
elif s.count(':') > 1: # ipv6, no port
host = s
port = ''
else:
(host,_,port) = s.partition(':')
if not port:
port = defaultport
else:
port = int(port)
host = name_to_ipv6(host)
return (host,port)
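# e.g. parse_spec('[2001:db8::1]:8333', 14873) -> (name_to_ipv6('2001:db8::1'), 8333)
# and  parse_spec('1.2.3.4', 14873)            -> (name_to_ipv6('1.2.3.4'), 14873)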
def process_nodes(g, f, structname, defaultport):
g.write('static SeedSpec6 %s[] = {\n' % structname)
first = True
for line in f:
comment = line.find('#')
if comment != -1:
line = line[0:comment]
line = line.strip()
if not line:
continue
if not first:
g.write(',\n')
first = False
(host,port) = parse_spec(line, defaultport)
hoststr = ','.join(('0x%02x' % b) for b in host)
g.write(' {{%s}, %i}' % (hoststr, port))
g.write('\n};\n')
def main():
if len(sys.argv)<2:
print(('Usage: %s <path_to_nodes_txt>' % sys.argv[0]), file=sys.stderr)
exit(1)
g = sys.stdout
indir = sys.argv[1]
g.write('#ifndef BITCOIN_CHAINPARAMSSEEDS_H\n')
g.write('#define BITCOIN_CHAINPARAMSSEEDS_H\n')
g.write('/**\n')
g.write(' * List of fixed seed nodes for the bitcoin network\n')
g.write(' * AUTOGENERATED by contrib/seeds/generate-seeds.py\n')
g.write(' *\n')
g.write(' * Each line contains a 16-byte IPv6 address and a port.\n')
g.write(' * IPv4 as well as onion addresses are wrapped inside a IPv6 address accordingly.\n')
g.write(' */\n')
with open(os.path.join(indir,'nodes_main.txt'),'r') as f:
process_nodes(g, f, 'pnSeed6_main', 14873)
g.write('\n')
with open(os.path.join(indir,'nodes_test.txt'),'r') as f:
process_nodes(g, f, 'pnSeed6_test', 14321)
g.write('#endif // BITCOIN_CHAINPARAMSSEEDS_H\n')
if __name__ == '__main__':
main()
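# Typical invocation (paths are illustrative): redirect stdout into the header
# file the docstring mentions, e.g.
#   python3 generate-seeds.py ../seeds > ../../src/chainparamsseeds.h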
| 31.362319
| 98
| 0.581562
|
719b417ab6a812e3afff9da3106a372cd7ac6682
| 30,246
|
py
|
Python
|
astropy/units/tests/test_equivalencies.py
|
gpdf/astropy
|
c487542611276a3361a38d6c4b3954dcd637f847
|
[
"BSD-3-Clause"
] | null | null | null |
astropy/units/tests/test_equivalencies.py
|
gpdf/astropy
|
c487542611276a3361a38d6c4b3954dcd637f847
|
[
"BSD-3-Clause"
] | null | null | null |
astropy/units/tests/test_equivalencies.py
|
gpdf/astropy
|
c487542611276a3361a38d6c4b3954dcd637f847
|
[
"BSD-3-Clause"
] | null | null | null |
# coding: utf-8
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""Separate tests specifically for equivalencies."""
# THIRD-PARTY
import pytest
import numpy as np
from numpy.testing import assert_allclose
# LOCAL
from astropy import units as u
from astropy.units.equivalencies import Equivalency
from astropy import constants, cosmology
from astropy.tests.helper import assert_quantity_allclose, catch_warnings
from astropy.utils.exceptions import AstropyDeprecationWarning
def test_dimensionless_angles():
# test that the angles_dimensionless option allows one to change
# by any order in radian in the unit (#1161)
rad1 = u.dimensionless_angles()
assert u.radian.to(1, equivalencies=rad1) == 1.
assert u.deg.to(1, equivalencies=rad1) == u.deg.to(u.rad)
assert u.steradian.to(1, equivalencies=rad1) == 1.
assert u.dimensionless_unscaled.to(u.steradian, equivalencies=rad1) == 1.
# now quantities
assert (1.*u.radian).to_value(1, equivalencies=rad1) == 1.
assert (1.*u.deg).to_value(1, equivalencies=rad1) == u.deg.to(u.rad)
assert (1.*u.steradian).to_value(1, equivalencies=rad1) == 1.
# more complicated example
I = 1.e45 * u.g * u.cm**2 # noqa
Omega = u.cycle / (1.*u.s)
Erot = 0.5 * I * Omega**2
# check that equivalency makes this work
Erot_in_erg1 = Erot.to(u.erg, equivalencies=rad1)
# and check that value is correct
assert_allclose(Erot_in_erg1.value, (Erot/u.radian**2).to_value(u.erg))
    # test built-in equivalency in subclass
class MyRad1(u.Quantity):
_equivalencies = rad1
phase = MyRad1(1., u.cycle)
assert phase.to_value(1) == u.cycle.to(u.radian)
@pytest.mark.parametrize('log_unit', (u.mag, u.dex, u.dB))
def test_logarithmic(log_unit):
# check conversion of mag, dB, and dex to dimensionless and vice versa
with pytest.raises(u.UnitsError):
log_unit.to(1, 0.)
with pytest.raises(u.UnitsError):
u.dimensionless_unscaled.to(log_unit)
assert log_unit.to(1, 0., equivalencies=u.logarithmic()) == 1.
assert u.dimensionless_unscaled.to(log_unit,
equivalencies=u.logarithmic()) == 0.
# also try with quantities
q_dex = np.array([0., -1., 1., 2.]) * u.dex
q_expected = 10.**q_dex.value * u.dimensionless_unscaled
q_log_unit = q_dex.to(log_unit)
assert np.all(q_log_unit.to(1, equivalencies=u.logarithmic()) ==
q_expected)
assert np.all(q_expected.to(log_unit, equivalencies=u.logarithmic()) ==
q_log_unit)
with u.set_enabled_equivalencies(u.logarithmic()):
assert np.all(np.abs(q_log_unit - q_expected.to(log_unit)) <
1.e-10*log_unit)
doppler_functions = [u.doppler_optical, u.doppler_radio, u.doppler_relativistic]
@pytest.mark.parametrize(('function'), doppler_functions)
def test_doppler_frequency_0(function):
rest = 105.01 * u.GHz
velo0 = rest.to(u.km/u.s, equivalencies=function(rest))
assert velo0.value == 0
@pytest.mark.parametrize(('function'), doppler_functions)
def test_doppler_wavelength_0(function):
rest = 105.01 * u.GHz
q1 = 0.00285489437196 * u.m
velo0 = q1.to(u.km/u.s, equivalencies=function(rest))
np.testing.assert_almost_equal(velo0.value, 0, decimal=6)
@pytest.mark.parametrize(('function'), doppler_functions)
def test_doppler_energy_0(function):
rest = 105.01 * u.GHz
q1 = 0.0004342864648539744 * u.eV
velo0 = q1.to(u.km/u.s, equivalencies=function(rest))
np.testing.assert_almost_equal(velo0.value, 0, decimal=6)
@pytest.mark.parametrize(('function'), doppler_functions)
def test_doppler_frequency_circle(function):
rest = 105.01 * u.GHz
shifted = 105.03 * u.GHz
velo = shifted.to(u.km/u.s, equivalencies=function(rest))
freq = velo.to(u.GHz, equivalencies=function(rest))
np.testing.assert_almost_equal(freq.value, shifted.value, decimal=7)
@pytest.mark.parametrize(('function'), doppler_functions)
def test_doppler_wavelength_circle(function):
rest = 105.01 * u.nm
shifted = 105.03 * u.nm
velo = shifted.to(u.km / u.s, equivalencies=function(rest))
wav = velo.to(u.nm, equivalencies=function(rest))
np.testing.assert_almost_equal(wav.value, shifted.value, decimal=7)
@pytest.mark.parametrize(('function'), doppler_functions)
def test_doppler_energy_circle(function):
rest = 1.0501 * u.eV
shifted = 1.0503 * u.eV
velo = shifted.to(u.km / u.s, equivalencies=function(rest))
en = velo.to(u.eV, equivalencies=function(rest))
np.testing.assert_almost_equal(en.value, shifted.value, decimal=7)
values_ghz = (999.899940784289, 999.8999307714406, 999.8999357778647)
@pytest.mark.parametrize(('function', 'value'),
list(zip(doppler_functions, values_ghz)))
def test_30kms(function, value):
rest = 1000 * u.GHz
velo = 30 * u.km/u.s
shifted = velo.to(u.GHz, equivalencies=function(rest))
np.testing.assert_almost_equal(shifted.value, value, decimal=7)
bad_values = (5, 5*u.Jy, None)
@pytest.mark.parametrize(('function', 'value'),
list(zip(doppler_functions, bad_values)))
def test_bad_restfreqs(function, value):
with pytest.raises(u.UnitsError):
function(value)
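# For reference, the Doppler conventions exercised above are:
#   optical:       v = c * (lambda - lambda0) / lambda0
#   radio:         v = c * (nu0 - nu) / nu0
#   relativistic:  v = c * (lambda**2 - lambda0**2) / (lambda**2 + lambda0**2)
# The expected GHz values in test_30kms follow from these definitions.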
def test_massenergy():
# The relative tolerance of these tests is set by the uncertainties
# in the charge of the electron, which is known to about
# 3e-9 (relative tolerance). Therefore, we limit the
# precision of the tests to 1e-7 to be safe. The masses are
# (loosely) known to ~ 5e-8 rel tolerance, so we couldn't test to
# 1e-7 if we used the values from astropy.constants; that is,
# they might change by more than 1e-7 in some future update, so instead
# they are hardwired here.
# Electron, proton, neutron, muon, 1g
mass_eV = u.Quantity([510.998928e3, 938.272046e6, 939.565378e6,
105.6583715e6, 5.60958884539e32], u.eV)
mass_g = u.Quantity([9.10938291e-28, 1.672621777e-24, 1.674927351e-24,
1.88353147e-25, 1], u.g)
# Test both ways
assert np.allclose(mass_eV.to_value(u.g, equivalencies=u.mass_energy()),
mass_g.value, rtol=1e-7)
assert np.allclose(mass_g.to_value(u.eV, equivalencies=u.mass_energy()),
mass_eV.value, rtol=1e-7)
# Basic tests of 'derived' equivalencies
# Surface density
sdens_eV = u.Quantity(5.60958884539e32, u.eV / u.m**2)
sdens_g = u.Quantity(1e-4, u.g / u.cm**2)
assert np.allclose(sdens_eV.to_value(u.g / u.cm**2,
equivalencies=u.mass_energy()),
sdens_g.value, rtol=1e-7)
assert np.allclose(sdens_g.to_value(u.eV / u.m**2,
equivalencies=u.mass_energy()),
sdens_eV.value, rtol=1e-7)
# Density
dens_eV = u.Quantity(5.60958884539e32, u.eV / u.m**3)
dens_g = u.Quantity(1e-6, u.g / u.cm**3)
assert np.allclose(dens_eV.to_value(u.g / u.cm**3,
equivalencies=u.mass_energy()),
dens_g.value, rtol=1e-7)
assert np.allclose(dens_g.to_value(u.eV / u.m**3,
equivalencies=u.mass_energy()),
dens_eV.value, rtol=1e-7)
# Power
pow_eV = u.Quantity(5.60958884539e32, u.eV / u.s)
pow_g = u.Quantity(1, u.g / u.s)
assert np.allclose(pow_eV.to_value(u.g / u.s,
equivalencies=u.mass_energy()),
pow_g.value, rtol=1e-7)
assert np.allclose(pow_g.to_value(u.eV / u.s,
equivalencies=u.mass_energy()),
pow_eV.value, rtol=1e-7)
def test_is_equivalent():
assert u.m.is_equivalent(u.pc)
assert u.cycle.is_equivalent(u.mas)
assert not u.cycle.is_equivalent(u.dimensionless_unscaled)
assert u.cycle.is_equivalent(u.dimensionless_unscaled,
u.dimensionless_angles())
assert not (u.Hz.is_equivalent(u.J))
assert u.Hz.is_equivalent(u.J, u.spectral())
assert u.J.is_equivalent(u.Hz, u.spectral())
assert u.pc.is_equivalent(u.arcsecond, u.parallax())
assert u.arcminute.is_equivalent(u.au, u.parallax())
# Pass a tuple for multiple possibilities
assert u.cm.is_equivalent((u.m, u.s, u.kg))
assert u.ms.is_equivalent((u.m, u.s, u.kg))
assert u.g.is_equivalent((u.m, u.s, u.kg))
assert not u.L.is_equivalent((u.m, u.s, u.kg))
assert not (u.km / u.s).is_equivalent((u.m, u.s, u.kg))
def test_parallax():
a = u.arcsecond.to(u.pc, 10, u.parallax())
assert_allclose(a, 0.10)
b = u.pc.to(u.arcsecond, a, u.parallax())
assert_allclose(b, 10)
a = u.arcminute.to(u.au, 1, u.parallax())
assert_allclose(a, 3437.7467916)
b = u.au.to(u.arcminute, a, u.parallax())
assert_allclose(b, 1)
val = (-1 * u.mas).to(u.pc, u.parallax())
assert np.isnan(val.value)
val = (-1 * u.mas).to_value(u.pc, u.parallax())
assert np.isnan(val)
def test_parallax2():
a = u.arcsecond.to(u.pc, [0.1, 2.5], u.parallax())
assert_allclose(a, [10, 0.4])
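# The parallax equivalency encodes d[pc] = 1 / p[arcsec], so 0.1 arcsec
# corresponds to 10 pc and 2.5 arcsec to 0.4 pc, as asserted above.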
def test_spectral():
a = u.AA.to(u.Hz, 1, u.spectral())
assert_allclose(a, 2.9979245799999995e+18)
b = u.Hz.to(u.AA, a, u.spectral())
assert_allclose(b, 1)
a = u.AA.to(u.MHz, 1, u.spectral())
assert_allclose(a, 2.9979245799999995e+12)
b = u.MHz.to(u.AA, a, u.spectral())
assert_allclose(b, 1)
a = u.m.to(u.Hz, 1, u.spectral())
assert_allclose(a, 2.9979245799999995e+8)
b = u.Hz.to(u.m, a, u.spectral())
assert_allclose(b, 1)
def test_spectral2():
a = u.nm.to(u.J, 500, u.spectral())
assert_allclose(a, 3.972891366538605e-19)
b = u.J.to(u.nm, a, u.spectral())
assert_allclose(b, 500)
a = u.AA.to(u.Hz, 1, u.spectral())
b = u.Hz.to(u.J, a, u.spectral())
c = u.AA.to(u.J, 1, u.spectral())
assert_allclose(b, c)
c = u.J.to(u.Hz, b, u.spectral())
assert_allclose(a, c)
def test_spectral3():
a = u.nm.to(u.Hz, [1000, 2000], u.spectral())
assert_allclose(a, [2.99792458e+14, 1.49896229e+14])
@pytest.mark.parametrize(
('in_val', 'in_unit'),
[([0.1, 5000.0, 10000.0], u.AA),
([1e+5, 2.0, 1.0], u.micron ** -1),
([2.99792458e+19, 5.99584916e+14, 2.99792458e+14], u.Hz),
([1.98644568e-14, 3.97289137e-19, 1.98644568e-19], u.J)])
def test_spectral4(in_val, in_unit):
"""Wave number conversion w.r.t. wavelength, freq, and energy."""
# Spectroscopic and angular
out_units = [u.micron ** -1, u.radian / u.micron]
answers = [[1e+5, 2.0, 1.0], [6.28318531e+05, 12.5663706, 6.28318531]]
for out_unit, ans in zip(out_units, answers):
# Forward
a = in_unit.to(out_unit, in_val, u.spectral())
assert_allclose(a, ans)
# Backward
b = out_unit.to(in_unit, ans, u.spectral())
assert_allclose(b, in_val)
def test_spectraldensity2():
# flux density
flambda = u.erg / u.angstrom / u.cm ** 2 / u.s
fnu = u.erg / u.Hz / u.cm ** 2 / u.s
a = flambda.to(fnu, 1, u.spectral_density(u.Quantity(3500, u.AA)))
assert_allclose(a, 4.086160166177361e-12)
# luminosity density
llambda = u.erg / u.angstrom / u.s
lnu = u.erg / u.Hz / u.s
a = llambda.to(lnu, 1, u.spectral_density(u.Quantity(3500, u.AA)))
assert_allclose(a, 4.086160166177361e-12)
a = lnu.to(llambda, 1, u.spectral_density(u.Quantity(3500, u.AA)))
assert_allclose(a, 2.44728537142857e11)
def test_spectraldensity3():
# Define F_nu in Jy
f_nu = u.Jy
# Define F_lambda in ergs / cm^2 / s / micron
f_lambda = u.erg / u.cm ** 2 / u.s / u.micron
# 1 GHz
one_ghz = u.Quantity(1, u.GHz)
# Convert to ergs / cm^2 / s / Hz
assert_allclose(f_nu.to(u.erg / u.cm ** 2 / u.s / u.Hz, 1.), 1.e-23, 10)
# Convert to ergs / cm^2 / s at 10 Ghz
assert_allclose(f_nu.to(u.erg / u.cm ** 2 / u.s, 1.,
equivalencies=u.spectral_density(one_ghz * 10)),
1.e-13, 10)
# Convert to F_lambda at 1 Ghz
assert_allclose(f_nu.to(f_lambda, 1.,
equivalencies=u.spectral_density(one_ghz)),
3.335640951981521e-20, 10)
# Convert to Jy at 1 Ghz
assert_allclose(f_lambda.to(u.Jy, 1.,
equivalencies=u.spectral_density(one_ghz)),
1. / 3.335640951981521e-20, 10)
# Convert to ergs / cm^2 / s at 10 microns
assert_allclose(f_lambda.to(u.erg / u.cm ** 2 / u.s, 1.,
equivalencies=u.spectral_density(u.Quantity(10, u.micron))),
10., 10)
def test_spectraldensity4():
"""PHOTLAM and PHOTNU conversions."""
flam = u.erg / (u.cm ** 2 * u.s * u.AA)
fnu = u.erg / (u.cm ** 2 * u.s * u.Hz)
photlam = u.photon / (u.cm ** 2 * u.s * u.AA)
photnu = u.photon / (u.cm ** 2 * u.s * u.Hz)
wave = u.Quantity([4956.8, 4959.55, 4962.3], u.AA)
flux_photlam = [9.7654e-3, 1.003896e-2, 9.78473e-3]
flux_photnu = [8.00335589e-14, 8.23668949e-14, 8.03700310e-14]
flux_flam = [3.9135e-14, 4.0209e-14, 3.9169e-14]
flux_fnu = [3.20735792e-25, 3.29903646e-25, 3.21727226e-25]
flux_jy = [3.20735792e-2, 3.29903646e-2, 3.21727226e-2]
flux_stmag = [12.41858665, 12.38919182, 12.41764379]
flux_abmag = [12.63463143, 12.60403221, 12.63128047]
# PHOTLAM <--> FLAM
assert_allclose(photlam.to(
flam, flux_photlam, u.spectral_density(wave)), flux_flam, rtol=1e-6)
assert_allclose(flam.to(
photlam, flux_flam, u.spectral_density(wave)), flux_photlam, rtol=1e-6)
# PHOTLAM <--> FNU
assert_allclose(photlam.to(
fnu, flux_photlam, u.spectral_density(wave)), flux_fnu, rtol=1e-6)
assert_allclose(fnu.to(
photlam, flux_fnu, u.spectral_density(wave)), flux_photlam, rtol=1e-6)
# PHOTLAM <--> Jy
assert_allclose(photlam.to(
u.Jy, flux_photlam, u.spectral_density(wave)), flux_jy, rtol=1e-6)
assert_allclose(u.Jy.to(
photlam, flux_jy, u.spectral_density(wave)), flux_photlam, rtol=1e-6)
# PHOTLAM <--> PHOTNU
assert_allclose(photlam.to(
photnu, flux_photlam, u.spectral_density(wave)), flux_photnu, rtol=1e-6)
assert_allclose(photnu.to(
photlam, flux_photnu, u.spectral_density(wave)), flux_photlam, rtol=1e-6)
# PHOTNU <--> FNU
assert_allclose(photnu.to(
fnu, flux_photnu, u.spectral_density(wave)), flux_fnu, rtol=1e-6)
assert_allclose(fnu.to(
photnu, flux_fnu, u.spectral_density(wave)), flux_photnu, rtol=1e-6)
# PHOTNU <--> FLAM
assert_allclose(photnu.to(
flam, flux_photnu, u.spectral_density(wave)), flux_flam, rtol=1e-6)
assert_allclose(flam.to(
photnu, flux_flam, u.spectral_density(wave)), flux_photnu, rtol=1e-6)
# PHOTLAM <--> STMAG
assert_allclose(photlam.to(
u.STmag, flux_photlam, u.spectral_density(wave)), flux_stmag, rtol=1e-6)
assert_allclose(u.STmag.to(
photlam, flux_stmag, u.spectral_density(wave)), flux_photlam, rtol=1e-6)
# PHOTLAM <--> ABMAG
assert_allclose(photlam.to(
u.ABmag, flux_photlam, u.spectral_density(wave)), flux_abmag, rtol=1e-6)
assert_allclose(u.ABmag.to(
photlam, flux_abmag, u.spectral_density(wave)), flux_photlam, rtol=1e-6)
def test_spectraldensity5():
""" Test photon luminosity density conversions. """
L_la = u.erg / (u.s * u.AA)
L_nu = u.erg / (u.s * u.Hz)
phot_L_la = u.photon / (u.s * u.AA)
phot_L_nu = u.photon / (u.s * u.Hz)
wave = u.Quantity([4956.8, 4959.55, 4962.3], u.AA)
flux_phot_L_la = [9.7654e-3, 1.003896e-2, 9.78473e-3]
flux_phot_L_nu = [8.00335589e-14, 8.23668949e-14, 8.03700310e-14]
flux_L_la = [3.9135e-14, 4.0209e-14, 3.9169e-14]
flux_L_nu = [3.20735792e-25, 3.29903646e-25, 3.21727226e-25]
# PHOTLAM <--> FLAM
assert_allclose(phot_L_la.to(
L_la, flux_phot_L_la, u.spectral_density(wave)), flux_L_la, rtol=1e-6)
assert_allclose(L_la.to(
phot_L_la, flux_L_la, u.spectral_density(wave)), flux_phot_L_la, rtol=1e-6)
# PHOTLAM <--> FNU
assert_allclose(phot_L_la.to(
L_nu, flux_phot_L_la, u.spectral_density(wave)), flux_L_nu, rtol=1e-6)
assert_allclose(L_nu.to(
phot_L_la, flux_L_nu, u.spectral_density(wave)), flux_phot_L_la, rtol=1e-6)
# PHOTLAM <--> PHOTNU
assert_allclose(phot_L_la.to(
phot_L_nu, flux_phot_L_la, u.spectral_density(wave)), flux_phot_L_nu, rtol=1e-6)
assert_allclose(phot_L_nu.to(
phot_L_la, flux_phot_L_nu, u.spectral_density(wave)), flux_phot_L_la, rtol=1e-6)
# PHOTNU <--> FNU
assert_allclose(phot_L_nu.to(
L_nu, flux_phot_L_nu, u.spectral_density(wave)), flux_L_nu, rtol=1e-6)
assert_allclose(L_nu.to(
phot_L_nu, flux_L_nu, u.spectral_density(wave)), flux_phot_L_nu, rtol=1e-6)
# PHOTNU <--> FLAM
assert_allclose(phot_L_nu.to(
L_la, flux_phot_L_nu, u.spectral_density(wave)), flux_L_la, rtol=1e-6)
assert_allclose(L_la.to(
phot_L_nu, flux_L_la, u.spectral_density(wave)), flux_phot_L_nu, rtol=1e-6)
def test_spectraldensity6():
""" Test surface brightness conversions. """
slam = u.erg / (u.cm ** 2 * u.s * u.AA * u.sr)
snu = u.erg / (u.cm ** 2 * u.s * u.Hz * u.sr)
wave = u.Quantity([4956.8, 4959.55, 4962.3], u.AA)
sb_flam = [3.9135e-14, 4.0209e-14, 3.9169e-14]
sb_fnu = [3.20735792e-25, 3.29903646e-25, 3.21727226e-25]
# S(nu) <--> S(lambda)
assert_allclose(snu.to(
slam, sb_fnu, u.spectral_density(wave)), sb_flam, rtol=1e-6)
assert_allclose(slam.to(
snu, sb_flam, u.spectral_density(wave)), sb_fnu, rtol=1e-6)
def test_equivalent_units():
from astropy.units import imperial
with u.add_enabled_units(imperial):
units = u.g.find_equivalent_units()
units_set = set(units)
match = set(
[u.M_e, u.M_p, u.g, u.kg, u.solMass, u.t, u.u, u.M_earth,
u.M_jup, imperial.oz, imperial.lb, imperial.st, imperial.ton,
imperial.slug])
assert units_set == match
r = repr(units)
assert r.count('\n') == len(units) + 2
def test_equivalent_units2():
units = set(u.Hz.find_equivalent_units(u.spectral()))
match = set(
[u.AU, u.Angstrom, u.Hz, u.J, u.Ry, u.cm, u.eV, u.erg, u.lyr,
u.m, u.micron, u.pc, u.solRad, u.Bq, u.Ci, u.k, u.earthRad,
u.jupiterRad])
assert units == match
from astropy.units import imperial
with u.add_enabled_units(imperial):
units = set(u.Hz.find_equivalent_units(u.spectral()))
match = set(
[u.AU, u.Angstrom, imperial.BTU, u.Hz, u.J, u.Ry,
imperial.cal, u.cm, u.eV, u.erg, imperial.ft, imperial.fur,
imperial.inch, imperial.kcal, u.lyr, u.m, imperial.mi,
imperial.mil, u.micron, u.pc, u.solRad, imperial.yd, u.Bq, u.Ci,
imperial.nmi, u.k, u.earthRad, u.jupiterRad])
assert units == match
units = set(u.Hz.find_equivalent_units(u.spectral()))
match = set(
[u.AU, u.Angstrom, u.Hz, u.J, u.Ry, u.cm, u.eV, u.erg, u.lyr,
u.m, u.micron, u.pc, u.solRad, u.Bq, u.Ci, u.k, u.earthRad,
u.jupiterRad])
assert units == match
def test_trivial_equivalency():
assert u.m.to(u.kg, equivalencies=[(u.m, u.kg)]) == 1.0
def test_invalid_equivalency():
with pytest.raises(ValueError):
u.m.to(u.kg, equivalencies=[(u.m,)])
with pytest.raises(ValueError):
u.m.to(u.kg, equivalencies=[(u.m, 5.0)])
def test_irrelevant_equivalency():
with pytest.raises(u.UnitsError):
u.m.to(u.kg, equivalencies=[(u.m, u.l)])
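# An equivalency entry may also spell out the conversion explicitly as a
# 4-tuple of (from_unit, to_unit, forward, backward); a sketch:
#   u.m.to(u.kg, 2.0, equivalencies=[(u.m, u.kg, lambda x: 2 * x, lambda x: x / 2)])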
def test_brightness_temperature():
omega_B = np.pi * (50 * u.arcsec) ** 2
nu = u.GHz * 5
tb = 7.052587837212582 * u.K
np.testing.assert_almost_equal(
tb.value, (1 * u.Jy).to_value(
u.K, equivalencies=u.brightness_temperature(nu, beam_area=omega_B)))
np.testing.assert_almost_equal(
1.0, tb.to_value(
u.Jy, equivalencies=u.brightness_temperature(nu, beam_area=omega_B)))
def test_swapped_args_brightness_temperature():
"""
#5173 changes the order of arguments but accepts the old (deprecated) args
"""
omega_B = np.pi * (50 * u.arcsec) ** 2
nu = u.GHz * 5
tb = 7.052587837212582 * u.K
with catch_warnings(AstropyDeprecationWarning) as w:
result = (1*u.Jy).to(
u.K, equivalencies=u.brightness_temperature(omega_B, nu))
roundtrip = result.to(
u.Jy, equivalencies=u.brightness_temperature(omega_B, nu))
assert len(w) == 2
np.testing.assert_almost_equal(tb.value, result.value)
np.testing.assert_almost_equal(roundtrip.value, 1)
def test_surfacebrightness():
sb = 50*u.MJy/u.sr
k = sb.to(u.K, u.brightness_temperature(50*u.GHz))
np.testing.assert_almost_equal(k.value, 0.650965, 5)
assert k.unit.is_equivalent(u.K)
def test_beam():
    # pick a beam area: 2 pi r^2 = area of a Gaussian with sigma = 50 arcsec
omega_B = 2 * np.pi * (50 * u.arcsec) ** 2
new_beam = (5*u.beam).to(u.sr, u.equivalencies.beam_angular_area(omega_B))
np.testing.assert_almost_equal(omega_B.to(u.sr).value * 5, new_beam.value)
assert new_beam.unit.is_equivalent(u.sr)
# make sure that it's still consistent with 5 beams
nbeams = new_beam.to(u.beam, u.equivalencies.beam_angular_area(omega_B))
np.testing.assert_almost_equal(nbeams.value, 5)
# test inverse beam equivalency
# (this is just a sanity check that the equivalency is defined;
# it's not for testing numerical consistency)
(5/u.beam).to(1/u.sr, u.equivalencies.beam_angular_area(omega_B))
# test practical case
# (this is by far the most important one)
flux_density = (5*u.Jy/u.beam).to(u.MJy/u.sr, u.equivalencies.beam_angular_area(omega_B))
np.testing.assert_almost_equal(flux_density.value, 13.5425483146382)
def test_thermodynamic_temperature():
nu = 143 * u.GHz
tb = 0.0026320501262630277 * u.K
eq = u.thermodynamic_temperature(nu, T_cmb=2.7255 * u.K)
np.testing.assert_almost_equal(
tb.value, (1 * (u.MJy / u.sr)).to_value(u.K, equivalencies=eq))
np.testing.assert_almost_equal(
1.0, tb.to_value(u.MJy / u.sr, equivalencies=eq))
def test_equivalency_context():
with u.set_enabled_equivalencies(u.dimensionless_angles()):
phase = u.Quantity(1., u.cycle)
assert_allclose(np.exp(1j*phase), 1.)
Omega = u.cycle / (1.*u.minute)
assert_allclose(np.exp(1j*Omega*60.*u.second), 1.)
# ensure we can turn off equivalencies even within the scope
with pytest.raises(u.UnitsError):
phase.to(1, equivalencies=None)
# test the manager also works in the Quantity constructor.
q1 = u.Quantity(phase, u.dimensionless_unscaled)
assert_allclose(q1.value, u.cycle.to(u.radian))
# and also if we use a class that happens to have a unit attribute.
class MyQuantityLookalike(np.ndarray):
pass
mylookalike = np.array(1.).view(MyQuantityLookalike)
mylookalike.unit = 'cycle'
        # the lookalike's unit attribute should be picked up there as well.
q2 = u.Quantity(mylookalike, u.dimensionless_unscaled)
assert_allclose(q2.value, u.cycle.to(u.radian))
with u.set_enabled_equivalencies(u.spectral()):
u.GHz.to(u.cm)
eq_on = u.GHz.find_equivalent_units()
with pytest.raises(u.UnitsError):
u.GHz.to(u.cm, equivalencies=None)
# without equivalencies, we should find a smaller (sub)set
eq_off = u.GHz.find_equivalent_units()
assert all(eq in set(eq_on) for eq in eq_off)
assert set(eq_off) < set(eq_on)
# Check the equivalency manager also works in ufunc evaluations,
# not just using (wrong) scaling. [#2496]
l2v = u.doppler_optical(6000 * u.angstrom)
l1 = 6010 * u.angstrom
assert l1.to(u.km/u.s, equivalencies=l2v) > 100. * u.km / u.s
with u.set_enabled_equivalencies(l2v):
assert l1 > 100. * u.km / u.s
assert abs((l1 - 500. * u.km / u.s).to(u.angstrom)) < 1. * u.km/u.s
def test_equivalency_context_manager():
base_registry = u.get_current_unit_registry()
def just_to_from_units(equivalencies):
return [(equiv[0], equiv[1]) for equiv in equivalencies]
tf_dimensionless_angles = just_to_from_units(u.dimensionless_angles())
tf_spectral = just_to_from_units(u.spectral())
assert base_registry.equivalencies == []
with u.set_enabled_equivalencies(u.dimensionless_angles()):
new_registry = u.get_current_unit_registry()
assert (set(just_to_from_units(new_registry.equivalencies)) ==
set(tf_dimensionless_angles))
assert set(new_registry.all_units) == set(base_registry.all_units)
with u.set_enabled_equivalencies(u.spectral()):
newer_registry = u.get_current_unit_registry()
assert (set(just_to_from_units(newer_registry.equivalencies)) ==
set(tf_spectral))
assert (set(newer_registry.all_units) ==
set(base_registry.all_units))
assert (set(just_to_from_units(new_registry.equivalencies)) ==
set(tf_dimensionless_angles))
assert set(new_registry.all_units) == set(base_registry.all_units)
with u.add_enabled_equivalencies(u.spectral()):
newer_registry = u.get_current_unit_registry()
assert (set(just_to_from_units(newer_registry.equivalencies)) ==
set(tf_dimensionless_angles) | set(tf_spectral))
assert (set(newer_registry.all_units) ==
set(base_registry.all_units))
assert base_registry is u.get_current_unit_registry()
def test_temperature():
from astropy.units.imperial import deg_F
t_k = 0 * u.K
assert_allclose(t_k.to_value(u.deg_C, u.temperature()), -273.15)
assert_allclose(t_k.to_value(deg_F, u.temperature()), -459.67)
def test_temperature_energy():
x = 1000 * u.K
y = (x * constants.k_B).to(u.keV)
assert_allclose(x.to_value(u.keV, u.temperature_energy()), y.value)
assert_allclose(y.to_value(u.K, u.temperature_energy()), x.value)
def test_molar_mass_amu():
x = 1 * (u.g/u.mol)
y = 1 * u.u
assert_allclose(x.to_value(u.u, u.molar_mass_amu()), y.value)
assert_allclose(y.to_value(u.g/u.mol, u.molar_mass_amu()), x.value)
with pytest.raises(u.UnitsError):
x.to(u.u)
def test_compose_equivalencies():
x = u.Unit("arcsec").compose(units=(u.pc,), equivalencies=u.parallax())
assert x[0] == u.pc
x = u.Unit("2 arcsec").compose(units=(u.pc,), equivalencies=u.parallax())
assert x[0] == u.Unit(0.5 * u.pc)
x = u.degree.compose(equivalencies=u.dimensionless_angles())
assert u.Unit(u.degree.to(u.radian)) in x
x = (u.nm).compose(units=(u.m, u.s), equivalencies=u.doppler_optical(0.55*u.micron))
for y in x:
if y.bases == [u.m, u.s]:
assert y.powers == [1, -1]
assert_allclose(
y.scale,
u.nm.to(u.m / u.s, equivalencies=u.doppler_optical(0.55 * u.micron)))
break
else:
assert False, "Didn't find speed in compose results"
def test_pixel_scale():
pix = 75*u.pix
asec = 30*u.arcsec
pixscale = 0.4*u.arcsec/u.pix
pixscale2 = 2.5*u.pix/u.arcsec
assert_quantity_allclose(pix.to(u.arcsec, u.pixel_scale(pixscale)), asec)
assert_quantity_allclose(pix.to(u.arcmin, u.pixel_scale(pixscale)), asec)
assert_quantity_allclose(pix.to(u.arcsec, u.pixel_scale(pixscale2)), asec)
assert_quantity_allclose(pix.to(u.arcmin, u.pixel_scale(pixscale2)), asec)
assert_quantity_allclose(asec.to(u.pix, u.pixel_scale(pixscale)), pix)
assert_quantity_allclose(asec.to(u.pix, u.pixel_scale(pixscale2)), pix)
def test_plate_scale():
mm = 1.5*u.mm
asec = 30*u.arcsec
platescale = 20*u.arcsec/u.mm
platescale2 = 0.05*u.mm/u.arcsec
assert_quantity_allclose(mm.to(u.arcsec, u.plate_scale(platescale)), asec)
assert_quantity_allclose(mm.to(u.arcmin, u.plate_scale(platescale)), asec)
assert_quantity_allclose(mm.to(u.arcsec, u.plate_scale(platescale2)), asec)
assert_quantity_allclose(mm.to(u.arcmin, u.plate_scale(platescale2)), asec)
assert_quantity_allclose(asec.to(u.mm, u.plate_scale(platescale)), mm)
assert_quantity_allclose(asec.to(u.mm, u.plate_scale(platescale2)), mm)
def test_littleh():
H0_70 = 70*u.km/u.s/u.Mpc
h70dist = 70 * u.Mpc/u.littleh
assert_quantity_allclose(h70dist.to(u.Mpc, u.with_H0(H0_70)), 100*u.Mpc)
# make sure using the default cosmology works
cosmodist = cosmology.default_cosmology.get().H0.value * u.Mpc/u.littleh
assert_quantity_allclose(cosmodist.to(u.Mpc, u.with_H0()), 100*u.Mpc)
# Now try a luminosity scaling
h1lum = .49 * u.Lsun * u.littleh**-2
assert_quantity_allclose(h1lum.to(u.Lsun, u.with_H0(H0_70)), 1*u.Lsun)
# And the trickiest one: magnitudes. Using H0=10 here for the round numbers
H0_10 = 10*u.km/u.s/u.Mpc
# assume the "true" magnitude M = 12.
# Then M - 5*log_10(h) = M + 5 = 17
withlittlehmag = 17 * (u.mag - u.MagUnit(u.littleh**2))
assert_quantity_allclose(withlittlehmag.to(u.mag, u.with_H0(H0_10)), 12*u.mag)
def test_equivelency():
ps = u.pixel_scale(10*u.arcsec/u.pix)
assert isinstance(ps, Equivalency)
assert isinstance(ps.name, list)
assert len(ps.name) == 1
assert ps.name[0] == "pixel_scale"
assert isinstance(ps.kwargs, list)
assert len(ps.kwargs) == 1
assert ps.kwargs[0] == dict({'pixscale': 10*u.arcsec/u.pix})
def test_add_equivelencies():
e1 = u.pixel_scale(10*u.arcsec/u.pixel) + u.temperature_energy()
assert isinstance(e1, Equivalency)
assert e1.name == ["pixel_scale", "temperature_energy"]
assert isinstance(e1.kwargs, list)
assert e1.kwargs == [dict({'pixscale': 10*u.arcsec/u.pix}), dict()]
e2 = u.pixel_scale(10*u.arcsec/u.pixel) + [1, 2, 3]
assert isinstance(e2, list)
| 37.386897
| 93
| 0.645308
|
a62529091a2c8d24d9d2c256440e6ab7deb2c173
| 800
|
py
|
Python
|
Python/get_attitude.py
|
BonaDrone/Bonadrone-demos
|
beaf3187bc8bca5b9cfc4acdf278ffbe2c46176a
|
[
"MIT"
] | null | null | null |
Python/get_attitude.py
|
BonaDrone/Bonadrone-demos
|
beaf3187bc8bca5b9cfc4acdf278ffbe2c46176a
|
[
"MIT"
] | null | null | null |
Python/get_attitude.py
|
BonaDrone/Bonadrone-demos
|
beaf3187bc8bca5b9cfc4acdf278ffbe2c46176a
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Author: Juan Gallostra, jgallostra<at>bonadrone.com
# Date: 12-20-2018
import sys
# Add parent folder to path so that the API can be imported.
# This shouldn't be required if the package has been installed via pip
sys.path.insert(0, '../')
sys.path.insert(0, '../mosquito/')
import mapi
import time
import math
def main():
"""
Function that gets executed when the script is directly
called from the command line.
What it does is request the attitude of the Mosquito
and print it to the command line.
"""
Mosquito = mapi.Mosquito()
Mosquito.connect()
while True:
attitude = Mosquito.get_attitude()
if isinstance(attitude, tuple):
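            # The attitude tuple arrives in radians; convert to degrees.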
print(tuple(i*180/math.pi for i in attitude))
time.sleep(0.1)
if __name__ == "__main__":
main()
| 22.857143
| 70
| 0.71125
|
fe69d50839bfb2fefc2deb8ef235b52a877d9b15
| 438
|
py
|
Python
|
starsbridge.py/setup.py
|
IMSS-PhotonFactory/STARS-Core
|
208b4c7530b391febc1fb41224ad6b7ff396873b
|
[
"MIT"
] | null | null | null |
starsbridge.py/setup.py
|
IMSS-PhotonFactory/STARS-Core
|
208b4c7530b391febc1fb41224ad6b7ff396873b
|
[
"MIT"
] | null | null | null |
starsbridge.py/setup.py
|
IMSS-PhotonFactory/STARS-Core
|
208b4c7530b391febc1fb41224ad6b7ff396873b
|
[
"MIT"
] | null | null | null |
import sys
#from distutils.core import setup
from cx_Freeze import setup, Executable
from starsbridge import __version__
base = None
# Uncomment the next line to build a Win32 GUI app (no console window);
# leave it commented for a console application.
#if sys.platform == 'win32' : base = 'Win32GUI'
# Define the Python script to freeze into an executable
exe = Executable(script='starsbridge.py', base=base)
# Setup
setup(
name = 'starsbridge',
version = __version__,
description = 'stars bridge',
executables = [exe],
)
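# Build the frozen executable with the standard cx_Freeze command:
#   python setup.py build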
| 20.857143
| 52
| 0.726027
|
2dba992875d880f9800c40d3906bd81db560d48b
| 6,589
|
py
|
Python
|
shop/models.py
|
abhishekmandloi/django-product-management-system
|
e89138e77c53b2ed51599e43e5d74bdf8671ae9e
|
[
"MIT"
] | null | null | null |
shop/models.py
|
abhishekmandloi/django-product-management-system
|
e89138e77c53b2ed51599e43e5d74bdf8671ae9e
|
[
"MIT"
] | null | null | null |
shop/models.py
|
abhishekmandloi/django-product-management-system
|
e89138e77c53b2ed51599e43e5d74bdf8671ae9e
|
[
"MIT"
] | 1
|
2021-07-02T04:18:17.000Z
|
2021-07-02T04:18:17.000Z
|
from django.db import models
# Create your models here.
class Ingredient(models.Model):
ingredient = models.CharField(max_length=300)
def __str__(self):
return self.ingredient
class HSNCode(models.Model):
hsn_code = models.CharField(max_length=50)
rate = models.FloatField(default=18.0)
def __str__(self):
return "{}_ ({} %)".format(self.hsn_code,self.rate)
class Product(models.Model):
company = models.CharField(max_length=100, default='None')
product = models.CharField(max_length=100)
# ingredients = models.CharField(max_length=300)
ingredient = models.ForeignKey(Ingredient, on_delete=models.CASCADE, related_name='ingredients',default='None')
hsn_code = models.ForeignKey(HSNCode, on_delete=models.CASCADE, related_name='ingredients',default='None')
def __str__(self):
return self.product
class ProductDetail(models.Model):
product = models.ForeignKey(Product, on_delete=models.CASCADE)
batch_no = models.CharField(max_length=50)
mfgdate = models.DateTimeField('manufacture')
expirydate = models.DateTimeField('expiry date')
packing = models.CharField(max_length=50)
quantity = models.IntegerField(default=0)
price = models.FloatField(default=0)
def __str__(self):
return "{}_{}_{}".format(self.product.product, self.batch_no, self.packing)
class ProductBatch(models.Model):
product = models.ForeignKey(Product, on_delete=models.CASCADE)
batch_no = models.CharField(max_length=50, default='')
mfgdate = models.DateField('manufacture')
expirydate = models.DateField('expiry date')
# packing = models.CharField(max_length=50)
# quantity = models.IntegerField(default=0)
# price = models.FloatField(default=0)
def __str__(self):
return "{}_{}".format(self.product.product, self.batch_no)
class ProductDetailBatch(models.Model):
# product = models.ForeignKey(Product, on_delete=models.CASCADE)
batch_no = models.ForeignKey(ProductBatch, on_delete=models.CASCADE) #models.CharField(max_length=50)
packing = models.CharField(max_length=50, default='')
quantity = models.IntegerField(default=0)
price = models.FloatField(default=0)
def __str__(self):
return "{}_{}_{}".format(self.batch_no.product.product,self.batch_no.batch_no, self.packing)
class CustomerDetail(models.Model):
name = models.CharField(max_length=100)
joining_date = models.DateField('joining date', auto_now_add=True)
address = models.CharField(max_length=150,blank=True)
mobile_no = models.BigIntegerField(blank=True,null=True)
def __str__(self):
if self.mobile_no:
return "{} {}".format(self.name, self.mobile_no)
else:
return "{}".format(self.name)
class Bill(models.Model):
purchaseno = models.IntegerField(default=1)
customer = models.ForeignKey(CustomerDetail, on_delete=models.CASCADE)
purchase_date = models.DateField('purchase date')
def __str__(self):
return "{}_{}".format(self.customer.name,self.purchaseno)
class BillItems(models.Model):
purchaseno = models.ForeignKey(Bill, related_name="bills", on_delete=models.CASCADE)
# productName = self.product.batch_no.product.product
# productBatch = self.product.batch_no.batch_no
# productPacking = self.product.packing
# productQuantity = models.IntegerField()
# productPrice = self.product.price
# productTotalPrice = self.productQuantity * self.productPrice
productName = models.CharField(max_length=100,null=True)
productBatch = models.CharField(max_length=50,null=True)
productPacking = models.CharField(max_length=50,null=True)
productQuantity = models.IntegerField(default=1)
productPrice = models.FloatField(default=0)
productTotalPrice = models.FloatField(default=0) #self.productQuantity * self.productPrice
def __str__(self):
return "{}".format(self.purchaseno.purchaseno)
    def getProductName(self):
        return "Product Name: {}".format(self.productName)
class BillItemsTest(models.Model):
purchaseno = models.ForeignKey(Bill, related_name="bills2", on_delete=models.CASCADE)
# productName = self.product.batch_no.product.product
# productBatch = self.product.batch_no.batch_no
# productPacking = self.product.packing
# productQuantity = models.IntegerField()
# productPrice = self.product.price
# productTotalPrice = self.productQuantity * self.productPrice
productName = models.ForeignKey(ProductDetailBatch, related_name="bills2", on_delete=models.CASCADE)
productBatch = models.CharField(max_length=50,null=True)
productPacking = models.CharField(max_length=50,null=True)
productQuantity = models.IntegerField(default=1)
productPrice = models.FloatField(default=0)
productTotalPrice = models.FloatField(default=0) #self.productQuantity * self.productPrice
def __str__(self):
return "{}".format(self.purchaseno.purchaseno)
    def getProductName(self):
        return "Product Name: {}".format(self.productName)
class BillTest2(models.Model):
purchaseno = models.AutoField(primary_key=True)
customer = models.ForeignKey(CustomerDetail, on_delete=models.CASCADE)
purchase_date = models.DateField('purchase date')
def __str__(self):
return "{}_{}".format(self.customer.name,self.purchaseno)
class BillItemsTest2(models.Model):
purchaseno = models.ForeignKey(BillTest2, related_name="bills22", on_delete=models.CASCADE)
# productName = self.product.batch_no.product.product
# productBatch = self.product.batch_no.batch_no
# productPacking = self.product.packing
# productQuantity = models.IntegerField()
# productPrice = self.product.price
# productTotalPrice = self.productQuantity * self.productPrice
productName = models.ForeignKey(ProductDetailBatch, related_name="bills22", on_delete=models.CASCADE)
productBatch = models.CharField(max_length=50,null=True)
productPacking = models.CharField(max_length=50,null=True)
productQuantity = models.IntegerField(default=1)
productPrice = models.FloatField(default=0)
productTotalPrice = models.FloatField(default=0) #self.productQuantity * self.productPrice
def __str__(self):
return "{}".format(self.purchaseno.purchaseno)
    def getProductName(self):
        return "Product Name: {}".format(self.productName)
| 43.635762
| 116
| 0.714372
|
8e0b1a6e6c86e7e6357f9e7a1e0771b5cb32bf79
| 221
|
py
|
Python
|
As_util.py
|
a2gs/AsWallet
|
dcfd660f8b66ea93ae7589c9bd633db917f985dd
|
[
"MIT"
] | 1
|
2019-12-11T12:55:51.000Z
|
2019-12-11T12:55:51.000Z
|
As_util.py
|
a2gs/AsWallet
|
dcfd660f8b66ea93ae7589c9bd633db917f985dd
|
[
"MIT"
] | 3
|
2021-06-02T00:47:45.000Z
|
2022-03-12T00:08:07.000Z
|
As_util.py
|
a2gs/AsWallet
|
dcfd660f8b66ea93ae7589c9bd633db917f985dd
|
[
"MIT"
] | 1
|
2019-12-11T12:55:55.000Z
|
2019-12-11T12:55:55.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Andre Augusto Giannotti Scota (https://sites.google.com/view/a2gs/)
VERSION: float = 0.1
BTCLIB_DB_PATH: str = ''
HOME_DIR: str = ''
SCREENBAR: str = ''
MSGBAR: str = ''
| 20.090909
| 69
| 0.642534
|
00867c92675c0b70b825781a0b0de3dc8fe16f39
| 267
|
py
|
Python
|
tests/artificial/transf_BoxCox/trend_MovingAverage/cycle_12/ar_/test_artificial_1024_BoxCox_MovingAverage_12__20.py
|
shaido987/pyaf
|
b9afd089557bed6b90b246d3712c481ae26a1957
|
[
"BSD-3-Clause"
] | 377
|
2016-10-13T20:52:44.000Z
|
2022-03-29T18:04:14.000Z
|
tests/artificial/transf_BoxCox/trend_MovingAverage/cycle_12/ar_/test_artificial_1024_BoxCox_MovingAverage_12__20.py
|
ysdede/pyaf
|
b5541b8249d5a1cfdc01f27fdfd99b6580ed680b
|
[
"BSD-3-Clause"
] | 160
|
2016-10-13T16:11:53.000Z
|
2022-03-28T04:21:34.000Z
|
tests/artificial/transf_BoxCox/trend_MovingAverage/cycle_12/ar_/test_artificial_1024_BoxCox_MovingAverage_12__20.py
|
ysdede/pyaf
|
b5541b8249d5a1cfdc01f27fdfd99b6580ed680b
|
[
"BSD-3-Clause"
] | 63
|
2017-03-09T14:51:18.000Z
|
2022-03-27T20:52:57.000Z
|
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 1024 , FREQ = 'D', seed = 0, trendtype = "MovingAverage", cycle_length = 12, transform = "BoxCox", sigma = 0.0, exog_count = 20, ar_order = 0);
| 38.142857
| 167
| 0.734082
|
9a24fe65b435f9854fd6233e5aa2e1e2067b8891
| 95
|
py
|
Python
|
glance/mathematics/defaults.py
|
filonik/glance
|
053e5e9567c76774b56597dfa854fd29efaf48fd
|
[
"MIT"
] | null | null | null |
glance/mathematics/defaults.py
|
filonik/glance
|
053e5e9567c76774b56597dfa854fd29efaf48fd
|
[
"MIT"
] | null | null | null |
glance/mathematics/defaults.py
|
filonik/glance
|
053e5e9567c76774b56597dfa854fd29efaf48fd
|
[
"MIT"
] | null | null | null |
import numpy as np
CHUNK_SIZE = 4
DEFAULT_N = 3
DEFAULT_M = 4
DEFAULT_DTYPE = np.float32
| 7.916667
| 26
| 0.715789
|
65febf5627a5660c27eee148843daedd69501ab0
| 2,630
|
py
|
Python
|
tests/schemas/test_calc.py
|
arosen93/HT-ASE
|
a76542e7a2bc5bf6e7382d8f1387374eb2abc713
|
[
"BSD-3-Clause-LBNL"
] | 9
|
2022-02-08T08:31:30.000Z
|
2022-03-30T21:37:35.000Z
|
tests/schemas/test_calc.py
|
arosen93/HT-ASE
|
a76542e7a2bc5bf6e7382d8f1387374eb2abc713
|
[
"BSD-3-Clause-LBNL"
] | 5
|
2022-02-02T21:47:59.000Z
|
2022-03-18T21:28:52.000Z
|
tests/schemas/test_calc.py
|
arosen93/HT-ASE
|
a76542e7a2bc5bf6e7382d8f1387374eb2abc713
|
[
"BSD-3-Clause-LBNL"
] | 3
|
2022-02-23T12:00:57.000Z
|
2022-03-24T23:54:22.000Z
|
import os
from pathlib import Path
import pytest
from ase.build import bulk
from ase.io import read
from monty.json import MontyDecoder, jsanitize
from quacc.schemas.calc import summarize_run
FILE_DIR = Path(__file__).resolve().parent
run1 = os.path.join(FILE_DIR, "vasp_run1")
def test_summarize_run():
# Make sure metadata is made
atoms = read(os.path.join(run1, "OUTCAR.gz"))
results = summarize_run(atoms)
assert results["nsites"] == len(atoms)
assert results["atoms"] == atoms
# Make sure initial atoms object is stored if specified
atoms = read(os.path.join(run1, "OUTCAR.gz"))
results = summarize_run(atoms, atoms)
assert results["nsites"] == len(atoms)
assert results["atoms"] == atoms
assert results["input_structure"]["atoms"] == atoms
# Make sure info tags are handled appropriately
atoms = read(os.path.join(run1, "OUTCAR.gz"))
atoms.info["test_dict"] = {"hi": "there", "foo": "bar"}
results = summarize_run(atoms)
assert atoms.info.get("test_dict", None) == {"hi": "there", "foo": "bar"}
assert results.get("atoms_info", {}) != {}
assert results["atoms_info"].get("test_dict", None) == {"hi": "there", "foo": "bar"}
assert results["atoms"].info.get("test_dict", None) == {"hi": "there", "foo": "bar"}
# Make sure magnetic moments are handled appropriately
atoms = read(os.path.join(run1, "OUTCAR.gz"))
atoms.set_initial_magnetic_moments([3.14] * len(atoms))
atoms.calc.results["magmoms"] = [2.0] * len(atoms)
results = summarize_run(atoms)
assert atoms.calc is not None
assert atoms.get_initial_magnetic_moments().tolist() == [3.14] * len(atoms)
assert results["atoms"].get_initial_magnetic_moments().tolist() == [2.0] * len(
atoms
)
assert results["atoms"].calc is None
# Make sure Atoms magmoms were not moved if specified
atoms = read(os.path.join(run1, "OUTCAR.gz"))
atoms.set_initial_magnetic_moments([3.14] * len(atoms))
results = summarize_run(atoms, prep_next_run=False)
assert atoms.get_initial_magnetic_moments().tolist() == [3.14] * len(atoms)
assert results["atoms"].get_initial_magnetic_moments().tolist() == [3.14] * len(
atoms
)
# test document can be jsanitized and decoded
d = jsanitize(results, strict=True, enum_values=True)
MontyDecoder().process_decoded(d)
def test_errors():
atoms = bulk("Cu")
with pytest.raises(ValueError):
summarize_run(atoms)
atoms = read(os.path.join(run1, "OUTCAR.gz"))
atoms.calc.results = {}
with pytest.raises(ValueError):
summarize_run(atoms)
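# As the two cases above illustrate, summarize_run expects an Atoms object
# carrying a completed calculator: no calculator at all, or one with empty
# results, is rejected with a ValueError.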
| 34.155844
| 88
| 0.670342
|
2c0f370ec5fb0a169239ced59b33d86866b7d278
| 936
|
py
|
Python
|
creating_classes.py
|
flavian-anselmo/Object_Oriented_Programming
|
3f4f06bdaa0bfcc1f6bdf55ea63be12a1f539923
|
[
"MIT"
] | 1
|
2021-02-04T11:29:33.000Z
|
2021-02-04T11:29:33.000Z
|
creating_classes.py
|
flavian-anselmo/Object_Oriented_Programming
|
3f4f06bdaa0bfcc1f6bdf55ea63be12a1f539923
|
[
"MIT"
] | null | null | null |
creating_classes.py
|
flavian-anselmo/Object_Oriented_Programming
|
3f4f06bdaa0bfcc1f6bdf55ea63be12a1f539923
|
[
"MIT"
] | null | null | null |
#why use classes
class employee:
#instance variables
"""contains data that is unique
to each instance """
emp_one=employee()
emp_two=employee()
print(emp_one)
print(emp_two)
emp_one.first='anselmo'
emp_one.last='flavian'
emp_one.email='anselmo@gmail.com'
emp_two.first='leon'
emp_two.last='otieno'
emp_two.email='leo@gmail.com'
"""
intead of doing the above ,we can create a constarctor or init
method that allows as to create ht
"""
#to show that the instances are unique print the email of each employee
print(emp_one.email)
print(emp_two.email)
'''
In object-oriented programming, an instance is a concrete occurrence
of an object. In OOP, objects are created from classes by subroutines
called constructors and destroyed by destructors.
An object is an instance of a class and may be called a class instance:
for example, an object that belongs to a class Circle is an instance
of the class Circle.
'''
| 23.4
| 71
| 0.757479
|
49f0d4e4308c7afc66cf90bd34f170546c805eb3
| 4,215
|
py
|
Python
|
lte/gateway/python/magma/kernsnoopd/snooper.py
|
Aitend/magma
|
74fe7b437335728219dfbe54733955bdd652eb60
|
[
"BSD-3-Clause"
] | 1
|
2021-06-02T13:01:54.000Z
|
2021-06-02T13:01:54.000Z
|
lte/gateway/python/magma/kernsnoopd/snooper.py
|
Aitend/magma
|
74fe7b437335728219dfbe54733955bdd652eb60
|
[
"BSD-3-Clause"
] | 55
|
2021-08-12T14:02:48.000Z
|
2022-03-29T11:27:35.000Z
|
lte/gateway/python/magma/kernsnoopd/snooper.py
|
Aitend/magma
|
74fe7b437335728219dfbe54733955bdd652eb60
|
[
"BSD-3-Clause"
] | 2
|
2020-12-25T08:14:12.000Z
|
2022-03-14T13:58:56.000Z
|
"""
Copyright 2020 The Magma Authors.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree.
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging
import os
from bcc import BPF # pylint:disable=import-error
from jinja2 import Template
from magma.common.job import Job
from magma.kernsnoopd.handlers import ebpf_handlers
EBPF_SRC_DIR = "/var/opt/magma/ebpf/kernsnoopd/"
if not os.path.isdir(EBPF_SRC_DIR):
EBPF_SRC_DIR = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
'ebpf',
)
EBPF_COMMON_FILE = 'common.bpf.h'
def _get_ebpf_source(filename, context) -> str:
"""
_get_ebpf_source reads template source from file and renders it with
context parameters
Args:
filename: absolute path of file from which to read template source
context: dict containing parameter values
Returns:
Rendered source contents
"""
with open(filename, 'r', encoding="utf-8") as src_f:
src = src_f.read()
template = Template(src)
return template.render(context)
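# e.g. with context = {'PROXY_PORT': 8443}, a template line such as
#   "#define PROXY_PORT {{ PROXY_PORT }}"
# renders to "#define PROXY_PORT 8443" (the line itself is illustrative).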
class NoSourcesFoundError(Exception):
"""
NoSourcesFoundError is thrown when Snooper does not find any eBPF programs
or source files to load into the kernel
"""
class Snooper(Job):
"""
    Snooper is a Job that compiles and loads eBPF programs, registers relevant
    front-end programs as handlers, and periodically calls their handle methods.
"""
def __init__(
self, programs: list, collect_interval: int,
service_registry, service_loop,
):
super().__init__(interval=collect_interval, loop=service_loop)
self._bpf = None
self._handlers = []
self._loop = service_loop
self._ebpf_programs = programs
self._service_registry = service_registry
self._context = {
'PROXY_PORT': service_registry.get_proxy_config().get(
'local_port',
),
}
try:
self._load_ebpf_programs()
self.start()
except NoSourcesFoundError:
logging.error('Fatal: no eBPF sources loaded')
def _load_ebpf_programs(self) -> None:
"""
_load_ebpf_programs reads eBPF templates from _ebpf_programs, renders
them with context, compiles and loads them into kernel, and registers
corresponding front-end handlers
Raises:
NoSourcesFoundError: self._ebpf_programs was empty or no source in
self._ebpf_programs could be loaded
"""
if not self._ebpf_programs:
raise NoSourcesFoundError()
sources = []
for basename in self._ebpf_programs:
filename = os.path.join(EBPF_SRC_DIR, f'{basename}.bpf.c')
try:
sources.append(_get_ebpf_source(filename, self._context))
handler = ebpf_handlers[basename](self._service_registry)
self._handlers.append(handler)
except FileNotFoundError:
logging.error('Could not open eBPF source file %s', filename)
except KeyError:
logging.error('Fatal: did not find handler for %s', basename)
# found eBPF sources to load into kernel
if sources:
# find and prepend header
header = os.path.join(EBPF_SRC_DIR, EBPF_COMMON_FILE)
try:
sources.insert(0, _get_ebpf_source(header, self._context))
self._bpf = BPF(text='\n'.join(sources))
logging.info('Loaded sources into kernel')
except FileNotFoundError:
logging.error('Fatal: Could not open header file %s', header)
else:
raise NoSourcesFoundError()
async def _run(self) -> None:
if self._bpf is not None:
for handler in self._handlers:
handler.handle(self._bpf)
| 33.72
| 79
| 0.651957
|
17f923dc81b92795c6803bc5e2e21aa37923fa07
| 6,183
|
py
|
Python
|
mffpy/mffdir.py
|
BEL-Public/mffpy
|
8515824d89a77cf10f7c36bb405f61d338b6f5fe
|
[
"Apache-2.0"
] | 7
|
2019-09-24T07:05:56.000Z
|
2021-12-12T12:10:08.000Z
|
mffpy/mffdir.py
|
BEL-Public/mffpy
|
8515824d89a77cf10f7c36bb405f61d338b6f5fe
|
[
"Apache-2.0"
] | 44
|
2019-09-24T21:30:55.000Z
|
2022-02-10T17:47:14.000Z
|
mffpy/mffdir.py
|
BEL-Public/mffpy
|
8515824d89a77cf10f7c36bb405f61d338b6f5fe
|
[
"Apache-2.0"
] | 4
|
2019-09-24T07:06:04.000Z
|
2021-11-08T20:25:18.000Z
|
"""
Copyright 2019 Brain Electrophysiology Laboratory Company LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this module except in compliance with the License.
You may obtain a copy of the License at:
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied.
"""
import re
from os import listdir
from os.path import join, exists, splitext, basename, isdir
from collections import defaultdict, namedtuple
from typing import Dict, List, Tuple, IO
from . import zipfile
SignalAndInfo: Tuple[IO[bytes], str] = namedtuple(
'SignalAndInfo', 'signal info')
class MFFDirBase:
""".mff directory path
An `MFFDirBase` is able to access and spawn all file in an mff directory
container.
"""
_extensions: Tuple[str, ...] = ('.mff', '.mfz')
_ext_err = 'Unknown file type ["%s"]'
_re_nu = re.compile(r'\d+')
def __init__(self, filename: str):
"""initialize new .mff directory instance
**Parameters:**
`filename` (str) - the full path to the .mff directory.
"""
self._mffname = filename
self._find_files_by_type()
self._check()
def listdir(self) -> List[str]:
raise NotImplementedError
def __contains__(self, filename: str) -> bool:
raise NotImplementedError
def filepointer(self, basename: str) -> IO[bytes]:
raise NotImplementedError
def filename(self, basename: str) -> str:
raise NotImplementedError
def _find_files_by_type(self) -> None:
"""Reads the .mff directory and sorts filenames by extensions
The sorted names are saved in property `files_by_type`.
"""
self.files_by_type: Dict[str, List[str]] = defaultdict(list)
for fbase, ext in (splitext(it) for it in self.listdir()):
self.files_by_type[ext].append(fbase)
def info(self, i: int = None) -> IO[bytes]:
"""return file or data info
If `i is None`, it returns `<self.filename>/file.xml` else
`<self.filename>/file<i>.xml`
"""
return self.filepointer('info'+(str(i) if i else ''))
def signals_with_info(self) -> List[SignalAndInfo]:
ans = []
for signalfile in self.files_by_type['.bin']:
matches = self._re_nu.search(basename(signalfile))
assert matches is not None, f"Something went wrong in {signalfile}"
bin_num = int(matches.group())
ans.append(SignalAndInfo(
signal=self.filepointer(signalfile),
info='info%s' % bin_num
))
return ans
def _check(self) -> None:
"""Checks the .mff directory for completeness
"""
# MFF directory should have the right extension
        assert splitext(self._mffname)[1] in self._extensions, \
            self._ext_err % super().__str__()
# For each `signal%i.bin`, there should be an `info%i.xml`
for signalfile in self.files_by_type['.bin']:
assert 'signal' in signalfile, 'Unknown file "%s"' % signalfile
matches = self._re_nu.search(signalfile)
assert matches is not None, f"""
signal file {signalfile} has invalid file name"""
bin_num = int(matches.group())
assert self.filename('info%s' % bin_num) in self, f"""
No info found [{self.info(bin_num)}]"""
def __str__(self) -> str:
ans = "---\n"
ans += '# .mff directory "%s/"\n' % self._mffname
ans += "---\n"
ans += '## List of files\n'
for ext, files in self.files_by_type.items():
ans += "\n### Files of type %s\n\n" % ext
for filename in files:
ans += " * %s\n" % (filename+ext)
ans += "---"
return ans
class MFFDirectory(MFFDirBase):
"""system-level .mff directory"""
def listdir(self) -> List[str]:
return listdir(self._mffname)
def filepointer(self, basename: str) -> IO[bytes]:
return open(self.filename(basename), 'rb')
def filename(self, basename: str) -> str:
for ext, files in self.files_by_type.items():
if basename in files:
return join(self._mffname, basename) + ext
else:
raise FileNotFoundError(
f"No file with basename {basename} "
f"in directory {super().__str__()}.")
def __contains__(self, filename: str) -> bool:
return exists(filename)
class ZippedMFFDirectory(MFFDirBase):
"""zipped .mff directory
Note: Compression on the zip file has to be 0, i.e. `ZIP_STORE`.
Create the zip file like
```bash
$ zip -Z store -r -j zipped_example.mff ./example.mff
```
"""
def __init__(self, filename: str):
self.root = zipfile.ZipFile(filename)
super().__init__(filename)
def __del__(self):
self.root.close()
def listdir(self) -> List[str]:
return self.root.namelist()
def filepointer(self, basename: str) -> IO[bytes]:
# type `FilePart` implements all methods necessary for `IO[bytes]`
return self.root.open(self.filename(basename)) # type: ignore
def filename(self, basename: str) -> str:
for ext, files in self.files_by_type.items():
if basename in files:
return basename + ext
else:
raise ValueError(f"No file with basename {basename} \
in directory {super().__str__()}.")
def __contains__(self, filename: str) -> bool:
return filename in self.listdir()
def get_directory(filename: str) -> MFFDirBase:
"""return either a system-level or a zipped .mff directory"""
assert exists(filename), f"'{filename}' does not exist"
if isdir(filename):
return MFFDirectory(filename)
elif zipfile.is_zipfile(filename):
return ZippedMFFDirectory(filename)
else:
raise ValueError(f"'{filename}' is likely a corrupted zip file")
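# Usage sketch (illustrative only; assumes an example recording exists at the
# given path -- `get_directory` picks the plain or zipped backend itself):
#
#   mff = get_directory('./example.mff')
#   print(mff)                                # file listing of the directory
#   for signal, info in mff.signals_with_info():
#       raw = signal.read(4)                  # first bytes of signal<i>.bin
#       meta = mff.filepointer(info)          # matching info<i>.xml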
| 33.241935
| 79
| 0.614427
|
63efc6273bc06bb3c5c2bea8ef708ea5104e20d1
| 15,041
|
py
|
Python
|
python/ccxt/__init__.py
|
utila/ccxt
|
502b89aa3f54b3363ad628f1ab5e30ea80d9b3b0
|
[
"MIT"
] | null | null | null |
python/ccxt/__init__.py
|
utila/ccxt
|
502b89aa3f54b3363ad628f1ab5e30ea80d9b3b0
|
[
"MIT"
] | null | null | null |
python/ccxt/__init__.py
|
utila/ccxt
|
502b89aa3f54b3363ad628f1ab5e30ea80d9b3b0
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""CCXT: CryptoCurrency eXchange Trading Library"""
# MIT License
# Copyright (c) 2017 Igor Kroitor
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ----------------------------------------------------------------------------
__version__ = '1.21.83'
# ----------------------------------------------------------------------------
from ccxt.base.exchange import Exchange # noqa: F401
from ccxt.base.decimal_to_precision import decimal_to_precision # noqa: F401
from ccxt.base.decimal_to_precision import TRUNCATE # noqa: F401
from ccxt.base.decimal_to_precision import ROUND # noqa: F401
from ccxt.base.decimal_to_precision import DECIMAL_PLACES # noqa: F401
from ccxt.base.decimal_to_precision import SIGNIFICANT_DIGITS # noqa: F401
from ccxt.base.decimal_to_precision import TICK_SIZE # noqa: F401
from ccxt.base.decimal_to_precision import NO_PADDING # noqa: F401
from ccxt.base.decimal_to_precision import PAD_WITH_ZERO # noqa: F401
from ccxt.base import errors # noqa: F401
from ccxt.base.errors import BaseError # noqa: F401
from ccxt.base.errors import ExchangeError # noqa: F401
from ccxt.base.errors import NotSupported # noqa: F401
from ccxt.base.errors import AuthenticationError # noqa: F401
from ccxt.base.errors import PermissionDenied # noqa: F401
from ccxt.base.errors import AccountSuspended # noqa: F401
from ccxt.base.errors import InvalidNonce # noqa: F401
from ccxt.base.errors import InsufficientFunds # noqa: F401
from ccxt.base.errors import InvalidOrder # noqa: F401
from ccxt.base.errors import OrderNotFound # noqa: F401
from ccxt.base.errors import OrderNotCached # noqa: F401
from ccxt.base.errors import DuplicateOrderId # noqa: F401
from ccxt.base.errors import CancelPending # noqa: F401
from ccxt.base.errors import NetworkError # noqa: F401
from ccxt.base.errors import DDoSProtection # noqa: F401
from ccxt.base.errors import RateLimitExceeded # noqa: F401
from ccxt.base.errors import RequestTimeout # noqa: F401
from ccxt.base.errors import ExchangeNotAvailable # noqa: F401
from ccxt.base.errors import OnMaintenance # noqa: F401
from ccxt.base.errors import InvalidAddress # noqa: F401
from ccxt.base.errors import AddressPending # noqa: F401
from ccxt.base.errors import ArgumentsRequired # noqa: F401
from ccxt.base.errors import BadRequest # noqa: F401
from ccxt.base.errors import BadResponse # noqa: F401
from ccxt.base.errors import NullResponse # noqa: F401
from ccxt.base.errors import OrderImmediatelyFillable # noqa: F401
from ccxt.base.errors import OrderNotFillable # noqa: F401
from ccxt._1btcxe import _1btcxe # noqa: F401
from ccxt.acx import acx # noqa: F401
from ccxt.adara import adara # noqa: F401
from ccxt.allcoin import allcoin # noqa: F401
from ccxt.anxpro import anxpro # noqa: F401
from ccxt.bcex import bcex # noqa: F401
from ccxt.bequant import bequant # noqa: F401
from ccxt.bibox import bibox # noqa: F401
from ccxt.bigone import bigone # noqa: F401
from ccxt.binance import binance # noqa: F401
from ccxt.binanceje import binanceje # noqa: F401
from ccxt.binanceus import binanceus # noqa: F401
from ccxt.bit2c import bit2c # noqa: F401
from ccxt.bitbank import bitbank # noqa: F401
from ccxt.bitbay import bitbay # noqa: F401
from ccxt.bitfinex import bitfinex # noqa: F401
from ccxt.bitfinex2 import bitfinex2 # noqa: F401
from ccxt.bitflyer import bitflyer # noqa: F401
from ccxt.bitforex import bitforex # noqa: F401
from ccxt.bithumb import bithumb # noqa: F401
from ccxt.bitkk import bitkk # noqa: F401
from ccxt.bitlish import bitlish # noqa: F401
from ccxt.bitmart import bitmart # noqa: F401
from ccxt.bitmax import bitmax # noqa: F401
from ccxt.bitmex import bitmex # noqa: F401
from ccxt.bitso import bitso # noqa: F401
from ccxt.bitstamp import bitstamp # noqa: F401
from ccxt.bitstamp1 import bitstamp1 # noqa: F401
from ccxt.bittrex import bittrex # noqa: F401
from ccxt.bitz import bitz # noqa: F401
from ccxt.bl3p import bl3p # noqa: F401
from ccxt.bleutrade import bleutrade # noqa: F401
from ccxt.braziliex import braziliex # noqa: F401
from ccxt.btcalpha import btcalpha # noqa: F401
from ccxt.btcbox import btcbox # noqa: F401
from ccxt.btcchina import btcchina # noqa: F401
from ccxt.btcmarkets import btcmarkets # noqa: F401
from ccxt.btctradeim import btctradeim # noqa: F401
from ccxt.btctradeua import btctradeua # noqa: F401
from ccxt.btcturk import btcturk # noqa: F401
from ccxt.buda import buda # noqa: F401
from ccxt.bw import bw # noqa: F401
from ccxt.bytetrade import bytetrade # noqa: F401
from ccxt.cex import cex # noqa: F401
from ccxt.chilebit import chilebit # noqa: F401
from ccxt.cobinhood import cobinhood # noqa: F401
from ccxt.coinbase import coinbase # noqa: F401
from ccxt.coinbaseprime import coinbaseprime # noqa: F401
from ccxt.coinbasepro import coinbasepro # noqa: F401
from ccxt.coincheck import coincheck # noqa: F401
from ccxt.coinegg import coinegg # noqa: F401
from ccxt.coinex import coinex # noqa: F401
from ccxt.coinfalcon import coinfalcon # noqa: F401
from ccxt.coinfloor import coinfloor # noqa: F401
from ccxt.coingi import coingi # noqa: F401
from ccxt.coinmarketcap import coinmarketcap # noqa: F401
from ccxt.coinmate import coinmate # noqa: F401
from ccxt.coinone import coinone # noqa: F401
from ccxt.coinspot import coinspot # noqa: F401
from ccxt.coolcoin import coolcoin # noqa: F401
from ccxt.coss import coss # noqa: F401
from ccxt.crex24 import crex24 # noqa: F401
from ccxt.deribit import deribit # noqa: F401
from ccxt.deribit2 import deribit2 # noqa: F401
from ccxt.digifinex import digifinex # noqa: F401
from ccxt.dsx import dsx # noqa: F401
from ccxt.exmo import exmo # noqa: F401
from ccxt.exx import exx # noqa: F401
from ccxt.fcoin import fcoin # noqa: F401
from ccxt.fcoinjp import fcoinjp # noqa: F401
from ccxt.flowbtc import flowbtc # noqa: F401
from ccxt.foxbit import foxbit # noqa: F401
from ccxt.ftx import ftx # noqa: F401
from ccxt.fybse import fybse # noqa: F401
from ccxt.gateio import gateio # noqa: F401
from ccxt.gemini import gemini # noqa: F401
from ccxt.hitbtc import hitbtc # noqa: F401
from ccxt.hitbtc2 import hitbtc2 # noqa: F401
from ccxt.huobipro import huobipro # noqa: F401
from ccxt.huobiru import huobiru # noqa: F401
from ccxt.ice3x import ice3x # noqa: F401
from ccxt.idex import idex # noqa: F401
from ccxt.independentreserve import independentreserve # noqa: F401
from ccxt.indodax import indodax # noqa: F401
from ccxt.itbit import itbit # noqa: F401
from ccxt.kkex import kkex # noqa: F401
from ccxt.kraken import kraken # noqa: F401
from ccxt.kucoin import kucoin # noqa: F401
from ccxt.kuna import kuna # noqa: F401
from ccxt.lakebtc import lakebtc # noqa: F401
from ccxt.latoken import latoken # noqa: F401
from ccxt.lbank import lbank # noqa: F401
from ccxt.liquid import liquid # noqa: F401
from ccxt.livecoin import livecoin # noqa: F401
from ccxt.luno import luno # noqa: F401
from ccxt.lykke import lykke # noqa: F401
from ccxt.mercado import mercado # noqa: F401
from ccxt.mixcoins import mixcoins # noqa: F401
from ccxt.oceanex import oceanex # noqa: F401
from ccxt.okcoincny import okcoincny # noqa: F401
from ccxt.okcoinusd import okcoinusd # noqa: F401
from ccxt.okex import okex # noqa: F401
from ccxt.okex3 import okex3 # noqa: F401
from ccxt.paymium import paymium # noqa: F401
from ccxt.poloniex import poloniex # noqa: F401
from ccxt.rightbtc import rightbtc # noqa: F401
from ccxt.southxchange import southxchange # noqa: F401
from ccxt.stex import stex # noqa: F401
from ccxt.stronghold import stronghold # noqa: F401
from ccxt.surbitcoin import surbitcoin # noqa: F401
from ccxt.theocean import theocean # noqa: F401
from ccxt.therock import therock # noqa: F401
from ccxt.tidebit import tidebit # noqa: F401
from ccxt.tidex import tidex # noqa: F401
from ccxt.timex import timex # noqa: F401
from ccxt.upbit import upbit # noqa: F401
from ccxt.vaultoro import vaultoro # noqa: F401
from ccxt.vbtc import vbtc # noqa: F401
from ccxt.whitebit import whitebit # noqa: F401
from ccxt.xbtce import xbtce # noqa: F401
from ccxt.yobit import yobit # noqa: F401
from ccxt.zaif import zaif # noqa: F401
from ccxt.zb import zb # noqa: F401
exchanges = [
'_1btcxe',
'acx',
'adara',
'allcoin',
'anxpro',
'bcex',
'bequant',
'bibox',
'bigone',
'binance',
'binanceje',
'binanceus',
'bit2c',
'bitbank',
'bitbay',
'bitfinex',
'bitfinex2',
'bitflyer',
'bitforex',
'bithumb',
'bitkk',
'bitlish',
'bitmart',
'bitmax',
'bitmex',
'bitso',
'bitstamp',
'bitstamp1',
'bittrex',
'bitz',
'bl3p',
'bleutrade',
'braziliex',
'btcalpha',
'btcbox',
'btcchina',
'btcmarkets',
'btctradeim',
'btctradeua',
'btcturk',
'buda',
'bw',
'bytetrade',
'cex',
'chilebit',
'cobinhood',
'coinbase',
'coinbaseprime',
'coinbasepro',
'coincheck',
'coinegg',
'coinex',
'coinfalcon',
'coinfloor',
'coingi',
'coinmarketcap',
'coinmate',
'coinone',
'coinspot',
'coolcoin',
'coss',
'crex24',
'deribit',
'deribit2',
'digifinex',
'dsx',
'exmo',
'exx',
'fcoin',
'fcoinjp',
'flowbtc',
'foxbit',
'ftx',
'fybse',
'gateio',
'gemini',
'hitbtc',
'hitbtc2',
'huobipro',
'huobiru',
'ice3x',
'idex',
'independentreserve',
'indodax',
'itbit',
'kkex',
'kraken',
'kucoin',
'kuna',
'lakebtc',
'latoken',
'lbank',
'liquid',
'livecoin',
'luno',
'lykke',
'mercado',
'mixcoins',
'oceanex',
'okcoincny',
'okcoinusd',
'okex',
'okex3',
'paymium',
'poloniex',
'rightbtc',
'southxchange',
'stex',
'stronghold',
'surbitcoin',
'theocean',
'therock',
'tidebit',
'tidex',
'timex',
'upbit',
'vaultoro',
'vbtc',
'whitebit',
'xbtce',
'yobit',
'zaif',
'zb',
]
base = [
'Exchange',
'exchanges',
'decimal_to_precision',
]
__all__ = base + errors.__all__ + exchanges
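# Usage sketch (illustrative, not part of the library; assumes network access,
# and API keys only for private endpoints):
#
#   import ccxt
#   exchange = ccxt.binance({'enableRateLimit': True})
#   print(exchange.fetch_ticker('BTC/USDT')['last'])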
| 45.996942
| 80
| 0.53281
|
34466876f8514d012422e065c962742323b5842f
| 2,022
|
py
|
Python
|
src/quickviz.py
|
azeey/quickviz
|
b3b78d8937fb68adbb7cf24d8b001774d1185637
|
[
"MIT"
] | 2
|
2015-10-31T14:55:51.000Z
|
2021-06-09T16:29:11.000Z
|
src/quickviz.py
|
azeey/quickviz
|
b3b78d8937fb68adbb7cf24d8b001774d1185637
|
[
"MIT"
] | null | null | null |
src/quickviz.py
|
azeey/quickviz
|
b3b78d8937fb68adbb7cf24d8b001774d1185637
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# encoding: utf-8
import serial
from pyqtgraph.Qt import QtGui, QtCore
import numpy as np
import pyqtgraph as pg
from collections import deque
app = QtGui.QApplication([])
port, baud = '/dev/ttyACM0', 1000000
ser = serial.Serial(port, baud)
win = pg.GraphicsWindow(title="Basic plotting examples")
win.resize(1000,600)
win.setWindowTitle('pyqtgraph example: Plotting')
#BUF_SIZE = 500
BUF_SIZE = 5000
px = win.addPlot(title="Axis X")
xcurve = px.plot(pen='y')
win.nextRow()
py = win.addPlot(title="Axis Y")
ycurve = py.plot(pen='y')
win.nextRow()
pz = win.addPlot(title="Axis Z")
zcurve = pz.plot(pen='y')
xdata = deque(np.zeros(BUF_SIZE), maxlen=BUF_SIZE)
ydata = deque(np.zeros(BUF_SIZE), maxlen=BUF_SIZE)
zdata = deque(np.zeros(BUF_SIZE), maxlen=BUF_SIZE)
tstamp = deque(np.arange(BUF_SIZE), maxlen=BUF_SIZE)
px.enableAutoRange('y', True)
py.enableAutoRange('y', True)
pz.enableAutoRange('y', True)
#px.setYRange(-28,28)
#py.setYRange(-28,28)
#pz.setYRange(-28,28)
px.setMouseEnabled(x=False, y=True)
py.setMouseEnabled(x=False, y=True)
pz.setMouseEnabled(x=False, y=True)
def read_data():
raw = ser.readline().strip()
try:
        #gx,gy,gz,ax,ay,az = map(lambda x: int(x.strip()), raw.split())
        # Python 2: map() returns a list here, so it can be indexed below.
        new_data = map(lambda x: int(x.strip()), raw.split())
        # NB: index 3 is `ax` in the layout commented above, not gz.
        gx,gy,gz = new_data[0], new_data[1], new_data[3]
xdata.append(gx)
ydata.append(gy)
zdata.append(gz)
tstamp.append(tstamp[-1]+1)
except KeyboardInterrupt:
raise
except StandardError as e:
print e
print raw
def update():
global curve, data
xcurve.setData(tstamp, xdata)
ycurve.setData(tstamp, ydata)
zcurve.setData(tstamp, zdata)
timer = QtCore.QTimer()
timer.timeout.connect(update)
timer.start(50)
timer2 = QtCore.QTimer()
timer2.timeout.connect(read_data)
timer2.start(1)
## Start Qt event loop unless running in interactive mode or using pyside.
if __name__ == '__main__':
QtGui.QApplication.instance().exec_()
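# Expected serial input (an assumption inferred from read_data above): one line
# per sample of whitespace-separated integers, e.g. "12 -3 45 0 1 9" for
# gx gy gz ax ay az, arriving at 1 Mbaud on /dev/ttyACM0.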
| 21.978261
| 74
| 0.687933
|
bcf00f65bcb6019d1dceda066d01fbaba0cfcfe6
| 17,583
|
py
|
Python
|
slicematrixIO/manifolds.py
|
tynano/slicematrixIO-python
|
d53070e7d55503625f278c99c35475cf42bfcd0f
|
[
"MIT"
] | 7
|
2017-02-28T00:47:14.000Z
|
2022-03-11T15:59:21.000Z
|
slicematrixIO/manifolds.py
|
tynano/slicematrixIO-python
|
d53070e7d55503625f278c99c35475cf42bfcd0f
|
[
"MIT"
] | null | null | null |
slicematrixIO/manifolds.py
|
tynano/slicematrixIO-python
|
d53070e7d55503625f278c99c35475cf42bfcd0f
|
[
"MIT"
] | 4
|
2018-02-01T18:52:38.000Z
|
2020-12-07T11:09:07.000Z
|
from core import BasePipeline
from utils import rando_name
from uuid import uuid4
import pandas as pd
#################################################################################################################################################################
class KernelPCAPipeline(BasePipeline):
def __init__(self, name, D = 2, kernel = "linear", alpha = 1.0, invert = False, kernel_params = {}, client = None):
params = {"D": D,
"kernel": kernel,
"alpha": alpha,
"invert": invert,
"kernel_params": kernel_params}
BasePipeline.__init__(self, name, "raw_kpca", client, params)
def run(self, dataset, model):
return BasePipeline.run(self, dataset = dataset, model = model)
class KernelPCA():
def __init__(self, dataset = None, name = None, pipeline = None, D = 2, kernel = "linear", alpha = 1.0, invert = False, kernel_params = {}, client = None):
self.client = client
self.type = "raw_kpca"
if dataset is not None:
self.__full_init__(dataset, name, pipeline, D, kernel, alpha, invert, kernel_params, client)
else:
self.__lazy_init__(name)
# full construction, i.e. start from zero and create it all...
def __full_init__(self, dataset, name = None, pipeline = None, D = 2, kernel = "linear", alpha = 1.0, invert = False, kernel_params = {}, client = None):
if name == None:
name = rando_name()
self.name = name
self.dataset = dataset
self.pipeline = pipeline
self.D = D
self.kernel = kernel
self.alpha = alpha
self.invert = invert
self.kernel_params = kernel_params
if self.pipeline == None:
pipeline_name = rando_name()
self.pipeline = KernelPCAPipeline(pipeline_name, D, kernel, alpha, invert, kernel_params, client)
self.response = self.pipeline.run(self.dataset, self.name)
try:
# model will be key if success
model = self.response['model']
self.name = model.split("/")[-1]
except:
# something went wrong creating the model
raise StandardError(self.response)
# lazy loading for already persisted models
def __lazy_init__(self, model_name):
self.name = model_name
def inverse_embedding(self, nodes = True):
nodes = self.nodes()
response = self.client.call_model(model = self.name,
type = self.type,
method = "inverse_embedding",
extra_params = {})
try:
return pd.DataFrame(response['inverse_embedding'], index = nodes)
except:
raise StandardError(response)
def embedding(self, nodes = True):
nodes = self.nodes()
response = self.client.call_model(model = self.name,
type = self.type,
method = "embedding",
extra_params = {})
try:
return pd.DataFrame(response['embedding'], index = nodes)
except:
raise StandardError(response)
def nodes(self):
response = self.client.call_model(model = self.name,
type = self.type,
method = "nodes",
extra_params = {})
try:
return response['nodes']
except:
raise StandardError(response)
def meta(self):
response = self.client.call_model(model = self.name,
type = self.type,
method = "meta",
extra_params = {})
try:
return response['meta']
except:
raise StandardError(response)
def feature_names(self):
response = self.client.call_model(model = self.name,
type = self.type,
method = "feature_names",
extra_params = {})
try:
return response['feature_names']
except:
raise StandardError(response)
#################################################################################################################################################################
class LocalLinearEmbedderPipeline(BasePipeline):
def __init__(self, name, D = 2, K = 3, method = "standard", client = None):
params = {"D": D,
"k": K,
"method": method}
BasePipeline.__init__(self, name, "raw_lle", client, params)
def run(self, dataset, model):
return BasePipeline.run(self, dataset = dataset, model = model)
class LocalLinearEmbedder():
def __init__(self, dataset = None, name = None, pipeline = None, D = 2, K = 3, method = "standard", client = None):
self.client = client
self.type = "raw_lle"
if dataset is not None:
self.__full_init__(dataset, name, pipeline, D, K, method, client)
else:
self.__lazy_init__(name)
def __full_init__(self, dataset, name = None, pipeline = None, D = 2, k = 3, method = "standard", client = None):
if name == None:
name = rando_name()
self.name = name
self.dataset = dataset
self.pipeline = pipeline
self.D = D
self.k = k
self.method = method
if self.pipeline == None:
pipeline_name = rando_name()
self.pipeline = LocalLinearEmbedderPipeline(pipeline_name, D, k, method, client)
self.response = self.pipeline.run(self.dataset, self.name)
try:
# model will be key if success
model = self.response['model']
self.name = model.split("/")[-1]
except:
# something went wrong creating the model
raise StandardError(self.response)
# lazy loading for already persisted models
def __lazy_init__(self, model_name):
self.name = model_name
def embedding(self, nodes = True):
nodes = self.nodes()
response = self.client.call_model(model = self.name,
type = self.type,
method = "embedding",
extra_params = {})
try:
return pd.DataFrame(response['embedding'], index = nodes)
except:
raise StandardError(response)
def nodes(self):
response = self.client.call_model(model = self.name,
type = self.type,
method = "nodes",
extra_params = {})
try:
return response['nodes']
except:
raise StandardError(response)
def recon_error(self):
response = self.client.call_model(model = self.name,
type = self.type,
method = "recon_error",
extra_params = {})
try:
return response['recon_err']
except:
raise StandardError(response)
def meta(self):
response = self.client.call_model(model = self.name,
type = self.type,
method = "meta",
extra_params = {})
try:
return response['meta']
except:
raise StandardError(response)
def feature_names(self):
response = self.client.call_model(model = self.name,
type = self.type,
method = "feature_names",
extra_params = {})
try:
return response['feature_names']
except:
raise StandardError(response)
#################################################################################################################################################################
class LaplacianEigenmapperPipeline(BasePipeline):
def __init__(self, name, D = 2, affinity = "knn", K = 5, gamma = 1.0, client = None):
params = {"D": D,
"K": K,
"affinity": affinity,
"gamma": gamma}
BasePipeline.__init__(self, name, "raw_laplacian_eigenmap", client, params)
def run(self, dataset, model):
return BasePipeline.run(self, dataset = dataset, model = model)
class LaplacianEigenmapper():
def __init__(self, dataset = None, name = None, pipeline = None, D = 2, affinity = "knn", K = 5, gamma = 1.0, client = None):
self.client = client
self.type = "raw_laplacian_eigenmap"
if dataset is not None:
self.__full_init__(dataset, name, pipeline, D, affinity, K, gamma, client)
else:
self.__lazy_init__(name)
def __full_init__(self, dataset, name = None, pipeline = None, D = 2, affinity = "knn", K = 5, gamma = 1.0, client = None):
if name == None:
name = rando_name()
self.name = name
self.dataset = dataset
self.pipeline = pipeline
self.D = D
self.K = K
self.affinity = affinity
if self.pipeline == None:
pipeline_name = rando_name()
self.pipeline = LaplacianEigenmapperPipeline(pipeline_name, D, affinity, K, gamma, client)
self.response = self.pipeline.run(self.dataset, self.name)
try:
# model will be key if success
model = self.response['model']
self.name = model.split("/")[-1]
except:
# something went wrong creating the model
raise StandardError(self.response)
# lazy loading for already persisted models
def __lazy_init__(self, model_name):
self.name = model_name
def embedding(self, nodes = True):
nodes = self.nodes()
response = self.client.call_model(model = self.name,
type = self.type,
method = "embedding",
extra_params = {})
try:
return pd.DataFrame(response['embedding'], index = nodes)
except:
raise StandardError(response)
def nodes(self):
response = self.client.call_model(model = self.name,
type = self.type,
method = "nodes",
extra_params = {})
try:
return response['nodes']
except:
raise StandardError(response)
def meta(self):
response = self.client.call_model(model = self.name,
type = self.type,
method = "meta",
extra_params = {})
try:
return response['meta']
except:
raise StandardError(response)
def feature_names(self):
response = self.client.call_model(model = self.name,
type = self.type,
method = "feature_names",
extra_params = {})
try:
return response['feature_names']
except:
raise StandardError(response)
def affinity_matrix(self):
response = self.client.call_model(model = self.name,
type = self.type,
method = "affinity_matrix",
extra_params = {})
try:
return response['affinity_matrix']
except:
raise StandardError(response)
#################################################################################################################################################################
class IsomapPipeline(BasePipeline):
def __init__(self, name, D = 2, K = 3, client = None):
params = {"D": D,
"K": K}
BasePipeline.__init__(self, name, "raw_isomap", client, params)
def run(self, dataset, model):
return BasePipeline.run(self, dataset = dataset, model = model)
class Isomap():
def __init__(self, dataset, name = None, pipeline = None, D = 2, K = 3, client = None):
self.client = client
self.type = "raw_isomap"
if dataset is not None:
self.__full_init__(dataset.T, name, pipeline, D, K, client)
else:
self.__lazy_init__(name)
def __full_init__(self, dataset, name = None, pipeline = None, D = 2, K = 3, client = None):
if name == None:
name = rando_name()
self.name = name
self.dataset = dataset
self.pipeline = pipeline
self.D = D
self.K = K
self.type = "raw_isomap"
if self.pipeline == None:
pipeline_name = rando_name()
self.pipeline = IsomapPipeline(pipeline_name, D, K, client)
self.response = self.pipeline.run(self.dataset, self.name)
try:
# model will be key if success
model = self.response['model']
self.name = model.split("/")[-1]
except:
# something went wrong creating the model
raise StandardError(self.response)
# lazy loading for already persisted models
def __lazy_init__(self, model_name):
self.name = model_name
def embedding(self, nodes = True):
nodes = self.nodes()
response = self.client.call_model(model = self.name,
type = self.type,
method = "embedding",
extra_params = {})
try:
return pd.DataFrame(response['embedding'], index = nodes)
except:
raise StandardError(response)
def nodes(self):
response = self.client.call_model(model = self.name,
type = self.type,
method = "nodes",
extra_params = {})
try:
return response['nodes']
except:
raise StandardError(response)
def recon_error(self):
response = self.client.call_model(model = self.name,
type = self.type,
method = "recon_error",
extra_params = {})
try:
return response['recon_error']
except:
raise StandardError(response)
def rankLinks(self):
response = self.client.call_model(model = self.name,
type = self.type,
method = "rankLinks",
extra_params = {})
try:
return response['rankLinks']
except:
raise StandardError(response)
def edges(self):
response = self.client.call_model(model = self.name,
type = self.type,
method = "edges",
extra_params = {})
try:
return response['edges']
except:
raise StandardError(response)
def rankNodes(self, statistic = "closeness_centrality"):
extra_params = {"statistic": statistic}
response = self.client.call_model(model = self.name,
type = self.type,
method = "rankNodes",
extra_params = extra_params)
try:
return pd.DataFrame(response['rankNodes'], index = [statistic]).T.sort(columns = statistic)
except:
raise StandardError(response)
def neighborhood(self, node):
extra_params = {"node": node}
response = self.client.call_model(model = self.name,
type = self.type,
method = "neighborhood",
extra_params = extra_params)
try:
return response['neighborhood']
except:
raise StandardError(response)
def search(self, point):
extra_params = {"point": point}
response = self.client.call_model(model = self.name,
type = self.type,
method = "search",
extra_params = extra_params)
try:
return response['search']
except:
raise StandardError(response)
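# Usage sketch (illustrative; assumes a connected SliceMatrix-IO client object
# and a pandas DataFrame `df` of raw features -- both names are placeholders):
#
#   kpca = KernelPCA(dataset=df, D=2, kernel="rbf", client=client)
#   embedding = kpca.embedding()   # DataFrame of coordinates, indexed by node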
| 40.42069
| 161
| 0.469772
|
e0465243aab0fbb9f381bb1378894d2c2c58b184
| 5,644
|
py
|
Python
|
drugpy/fetch_similar/blast.py
|
labimm/pymol-labimm
|
7df8e82fbcd68718ed14627427531b4b87cec07d
|
[
"MIT"
] | 2
|
2021-09-16T13:46:30.000Z
|
2021-10-05T18:21:05.000Z
|
drugpy/fetch_similar/blast.py
|
labimm/pymol-labimm
|
7df8e82fbcd68718ed14627427531b4b87cec07d
|
[
"MIT"
] | null | null | null |
drugpy/fetch_similar/blast.py
|
labimm/pymol-labimm
|
7df8e82fbcd68718ed14627427531b4b87cec07d
|
[
"MIT"
] | null | null | null |
import webbrowser
from ftplib import FTP
from functools import lru_cache
import requests
from pymol import cmd as pm
from ..commons import rscript, PLUGIN_DATA_DIR
@pm.extend
def fetch_similar_blast_update():
"""
The cluster database needs to be updated before the first use of the
fetch_similar_blast feature. The database ftp://resources.rcsb.org/sequence/clusters/
    is updated weekly, so it is prudent to re-run this command weekly to pick up new data.
"""
rscb_server = FTP("resources.rcsb.org")
rscb_server.login()
rscb_server.cwd("/sequence/clusters/")
for cluster_fname in [
"bc-100.out",
"bc-95.out",
"bc-90.out",
"bc-70.out",
"bc-50.out",
"bc-40.out",
"bc-30.out",
]:
with open(PLUGIN_DATA_DIR + "/" + cluster_fname, "wb") as cluster_file:
rscb_server.retrbinary(
"RETR " + cluster_fname, cluster_file.write, blocksize=262144
)
def find_similar_chain_ids(chain_id, threshold):
"""Fetch structure similar chain ids from RCSB PDB.
chain_id - reference chain id
threshold - similarity threshold
"""
cluster_fname = f"bc-{threshold}.out"
with open(PLUGIN_DATA_DIR + "/" + cluster_fname) as cluster_file:
for cluster in cluster_file:
if chain_id.upper() in cluster.upper():
break
else:
return []
sim_chain_ids = []
for chain_id in cluster.split():
pdb, chain = chain_id.split("_")
sim_chain_ids.append((pdb.upper(), chain.upper()))
return sim_chain_ids
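# Illustrative example (assumes the cluster files were downloaded once via
# fetch_similar_blast_update; the second tuple is a made-up placeholder):
#
#   find_similar_chain_ids("2XY9_A", 95)
#   # -> [("2XY9", "A"), ("1ABC", "B"), ...]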
@lru_cache()
def get_resolution(pdb_id):
"""
Get the resolution for a PDB id, or None case it doesn't have.
"""
ret = requests.post(
"https://data.rcsb.org/graphql",
json={
"query": f"""
{{
entry(entry_id: "{pdb_id}") {{
pdbx_vrpt_summary {{
PDB_resolution
}}
}}
}}
"""
},
)
data = ret.json()
resol = data["data"]["entry"]["pdbx_vrpt_summary"]["PDB_resolution"]
return resol
def plot_hierarquical_cluster(chain_ids):
chain_ids = ",".join(map(repr, chain_ids))
out, success = rscript(
f"""
library(bio3d)
ids <- c({chain_ids})
files <- get.pdb(ids, path="pdbs", split=TRUE)
pdbs <- pdbaln(files)
cores <- core.find(pdbs)
xyz <- pdbfit(pdbs, inds=cores)
pc <- pca(xyz, rm.gaps=TRUE)
d <- dist(pc$z[, 1:2])
hc <- hclust(d)
hclustplot(hc, k=1, labels = ids)
"""
)
webbrowser.open("Rplots.pdf")
print(out)
@pm.extend
def fetch_similar_blast(
chain_id,
similarity=95,
ligand=None,
dist=5,
compounds="organic or inorganic",
prosthetic_groups="HEM FAD NAP NDP ADP FMN",
max_resolution=None,
max_structures=50,
):
"""
    Fetch sequence-similar structures from RCSB PDB and optionally keep only
    apo structures. Apo status is evaluated with respect to a chosen ligand on
    the reference chain.
    On first use, update the database with the command
    `fetch_similar_blast_update`. Update the database weekly.
OPTIONS
chain_id Reference structure chain id.
similarity Sequence similarity threshold (one of the available
from RCSB PDB).
ligand Reference ligand PDB id.
dist Distance cut-off around reference ligand for apo
evaluation.
compounds Selection that should be considered ligands upon apo
computation. Only used when ligand is given.
        prosthetic_groups    List of ligands to be ignored when evaluating apo.
max_resolution Fetch only X-ray structures with up to such
resolution.
max_structures Fetch at most n structures. 0 for all structures.
EXAMPLES
fetch_similar_blast 2XY9_A, 100
fetch_similar_blast 2XY9_A, 95, 3ES, 3, organic
fetch_similar_blast 6Y2F_A, max_structures=0
SEE ALSO
        fetch_similar_blast_update
fetch_similar_shape3d
"""
chain_id = chain_id.upper()
max_structures = int(max_structures)
pm.fetch(chain_id, chain_id)
sims = []
similars = find_similar_chain_ids(chain_id, similarity)
cont = 0
for sim_pdb, sim_chain in similars:
if max_structures != 0 and cont >= max_structures:
break
sim_obj = f"{sim_pdb}_{sim_chain}"
if sim_obj.upper() == chain_id.upper():
continue
pm.fetch(sim_obj, **{"async": 0})
pm.align(sim_obj, chain_id)
# Check the resolution
resol = None
if max_resolution:
resol = get_resolution(sim_pdb)
if not resol or resol > max_resolution:
pm.delete(sim_obj)
continue
# Check nearby ligands
if ligand:
model = pm.get_model(
f"({sim_obj} and ({compounds}))"
f" within {dist} of"
f"({chain_id} and (resn {ligand}))"
)
resns = set(a.resn for a in model.atom)
is_apo = True
for resn in resns:
if resn not in prosthetic_groups.split():
is_apo = False
break
if not is_apo:
pm.delete(sim_obj)
continue
cont += 1
sims.append((sim_obj, sim_chain, sim_pdb, resol))
plot_hierarquical_cluster([chain_id] + [s[0] for s in sims])
return sims
| 29.243523
| 89
| 0.585046
|
99507b799c6b0c9bdbd9faa162e34c5b1c490ad5
| 117
|
py
|
Python
|
examples/keyboard/drivers/drivers.py
|
Breq16/vibrance_ctrl
|
93247f686b4e5e42d5f36b9d592fe9cd8322069e
|
[
"MIT"
] | null | null | null |
examples/keyboard/drivers/drivers.py
|
Breq16/vibrance_ctrl
|
93247f686b4e5e42d5f36b9d592fe9cd8322069e
|
[
"MIT"
] | null | null | null |
examples/keyboard/drivers/drivers.py
|
Breq16/vibrance_ctrl
|
93247f686b4e5e42d5f36b9d592fe9cd8322069e
|
[
"MIT"
] | null | null | null |
import vibrance.driver.pygame_if
drivers = []
drivers.append(vibrance.driver.pygame_if.PyGameDriver("PyGame Demo"))
| 23.4
| 69
| 0.803419
|
1b4889e85d60a37929140c933388e504274338b5
| 3,542
|
py
|
Python
|
bindings/python/ensmallen/datasets/string/rhizobiumleguminosarumviciae248.py
|
AnacletoLAB/ensmallen_graph
|
b2c1b18fb1e5801712852bcc239f239e03076f09
|
[
"MIT"
] | 5
|
2021-02-17T00:44:45.000Z
|
2021-08-09T16:41:47.000Z
|
bindings/python/ensmallen/datasets/string/rhizobiumleguminosarumviciae248.py
|
AnacletoLAB/ensmallen_graph
|
b2c1b18fb1e5801712852bcc239f239e03076f09
|
[
"MIT"
] | 18
|
2021-01-07T16:47:39.000Z
|
2021-08-12T21:51:32.000Z
|
bindings/python/ensmallen/datasets/string/rhizobiumleguminosarumviciae248.py
|
AnacletoLAB/ensmallen
|
b2c1b18fb1e5801712852bcc239f239e03076f09
|
[
"MIT"
] | 3
|
2021-01-14T02:20:59.000Z
|
2021-08-04T19:09:52.000Z
|
"""
This file offers the methods to automatically retrieve the graph Rhizobium leguminosarum viciae 248.
The graph is automatically retrieved from the STRING repository.
References
---------------------
Please cite the following if you use the data:
```bib
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
```
"""
from typing import Dict
from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen import Graph # pylint: disable=import-error
def RhizobiumLeguminosarumViciae248(
directed: bool = False,
preprocess: bool = True,
load_nodes: bool = True,
verbose: int = 2,
cache: bool = True,
cache_path: str = "graphs/string",
version: str = "links.v11.5",
**additional_graph_kwargs: Dict
) -> Graph:
"""Return new instance of the Rhizobium leguminosarum viciae 248 graph.
The graph is automatically retrieved from the STRING repository.
Parameters
-------------------
directed: bool = False
        Whether to load the graph as directed or undirected.
By default false.
preprocess: bool = True
Whether to preprocess the graph to be loaded in
optimal time and memory.
load_nodes: bool = True,
Whether to load the nodes vocabulary or treat the nodes
simply as a numeric range.
verbose: int = 2,
        Whether to show loading bars during the retrieval and building
of the graph.
cache: bool = True
Whether to use cache, i.e. download files only once
and preprocess them only once.
cache_path: str = "graphs"
Where to store the downloaded graphs.
version: str = "links.v11.5"
The version of the graph to retrieve.
The available versions are:
- homology.v11.5
- physical.links.v11.5
- links.v11.5
additional_graph_kwargs: Dict
Additional graph kwargs.
Returns
-----------------------
    Instance of the Rhizobium leguminosarum viciae 248 graph.
References
---------------------
Please cite the following if you use the data:
```bib
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
```
"""
return AutomaticallyRetrievedGraph(
graph_name="RhizobiumLeguminosarumViciae248",
repository="string",
version=version,
directed=directed,
preprocess=preprocess,
load_nodes=load_nodes,
verbose=verbose,
cache=cache,
cache_path=cache_path,
additional_graph_kwargs=additional_graph_kwargs
)()
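# Usage sketch (illustrative; the first call needs network access to the STRING
# repository, later calls read the local cache under graphs/string):
#
#   graph = RhizobiumLeguminosarumViciae248(version="links.v11.5")
#   print(graph)   # textual report of the retrieved graph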
| 33.733333
| 223
| 0.682383
|
eb493963ab8d503cbb0bc6355c7ee8ed8768cb0d
| 1,384
|
py
|
Python
|
sample_code/Python_NLP/nlp-5-2-pos_concordance.py
|
ice-melt/python-lib
|
345e34fff7386d91acbb03a01fd4127c5dfed037
|
[
"MIT"
] | null | null | null |
sample_code/Python_NLP/nlp-5-2-pos_concordance.py
|
ice-melt/python-lib
|
345e34fff7386d91acbb03a01fd4127c5dfed037
|
[
"MIT"
] | null | null | null |
sample_code/Python_NLP/nlp-5-2-pos_concordance.py
|
ice-melt/python-lib
|
345e34fff7386d91acbb03a01fd4127c5dfed037
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
"""
@Author : ice-melt
@File : nlp-5-2-pos_concordance.py
@Time : 2019/4/19 15:03
@Version : 1.0
@Desc : None
"""
import nltk
from nltk.corpus import brown
# To study how a word (e.g. "often") is used, look at the words that follow it
brown_learned_text = brown.words(categories='learned')
print(sorted(set(b for (a, b) in nltk.bigrams(brown_learned_text) if a == 'often')))
# Inspect the POS tags of the words that follow "often"
brown_lran_tagged = brown.tagged_words(categories='learned', tagset='universal')
tags = [b[1] for (a, b) in nltk.bigrams(brown_lran_tagged) if a[0] == 'often']
fd = nltk.FreqDist(tags)
fd.tabulate()
"""
often 后面最高频率的词性是动词,没有名词(该语料库中)
VERB ADV ADP ADJ . PRT
37 8 7 6 4 2
"""
# Use POS tags to find three-word phrases (verb + "to" + verb)
def process(sentence):
for (w1, t1), (w2, t2), (w3, t3) in nltk.trigrams(sentence):
if t1.startswith('V') and t2 == 'TO' and t3.startswith('V'):
print(w1, w2, w3)
for tagged_sent in brown.tagged_sents():
process(tagged_sent)
# Look at words whose POS tags are highly ambiguous
# The context of each such word helps clarify the relation between its tags
brown_news_tagged = brown.tagged_words(categories='news', tagset='universal')
data = nltk.ConditionalFreqDist((word.lower(), tag) for (word, tag) in brown_news_tagged)
for word in data.conditions():
if len(data[word]) > 3:
tags = data[word].keys()
print(word, ' '.join(tags))
# Open the POS concordance tool
nltk.app.concordance()
| 27.137255
| 89
| 0.658237
|
ce62546ec97ac652bdd2db0761c12aafb4a3711a
| 722
|
py
|
Python
|
data_loader/modules/Text_Image_Augmentation_python/__init__.py
|
WenmuZhou/crnn.pytorch
|
bf7a7c62376eee93943ca7c68e88e3d563c09aa8
|
[
"Apache-2.0"
] | 46
|
2018-05-29T08:01:10.000Z
|
2022-02-14T21:47:40.000Z
|
data_loader/modules/Text_Image_Augmentation_python/__init__.py
|
WenmuZhou/crnn.pytorch
|
bf7a7c62376eee93943ca7c68e88e3d563c09aa8
|
[
"Apache-2.0"
] | null | null | null |
data_loader/modules/Text_Image_Augmentation_python/__init__.py
|
WenmuZhou/crnn.pytorch
|
bf7a7c62376eee93943ca7c68e88e3d563c09aa8
|
[
"Apache-2.0"
] | 17
|
2018-11-14T09:17:00.000Z
|
2021-08-06T04:05:07.000Z
|
# -*- coding: utf-8 -*-
# @Time : 2020/4/27 11:28
# @Author : zhoujun
import numpy as np
from .augment import distort, stretch, perspective
__all__ = ['RandomAug']
class RandomAug:
def __init__(self):
pass
def __call__(self, img):
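        # Each branch draws a fresh standard-normal sample (np.random.randn):
        # P(z > 0.3) ~ 0.38 selects distort; otherwise a second, independent
        # draw with P(z > 0.6) ~ 0.27 selects stretch; else perspective.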
if np.random.randn() > 0.3:
img = distort(img, 3)
elif np.random.randn() > 0.6:
img = stretch(img, 3)
else:
img = perspective(img)
return img
if __name__ == '__main__':
from matplotlib import pyplot as plt
import cv2
r = RandomAug()
im = cv2.imread(r'D:\code\crnn.pytorch\0.jpg')
plt.imshow(im)
resize_img = r(im)
plt.figure()
plt.imshow(resize_img)
plt.show()
| 21.235294
| 50
| 0.574792
|
f08860df1685a6cd0b97bae686788bf640e4352f
| 2,696
|
py
|
Python
|
AppPkg/Applications/Python/Python-2.7.2/Lib/distutils/tests/test_version.py
|
CEOALT1/RefindPlusUDK
|
116b957ad735f96fbb6d80a0ba582046960ba164
|
[
"BSD-2-Clause"
] | 2,757
|
2018-04-28T21:41:36.000Z
|
2022-03-29T06:33:36.000Z
|
AppPkg/Applications/Python/Python-2.7.2/Lib/distutils/tests/test_version.py
|
CEOALT1/RefindPlusUDK
|
116b957ad735f96fbb6d80a0ba582046960ba164
|
[
"BSD-2-Clause"
] | 20
|
2019-07-23T15:29:32.000Z
|
2022-01-21T12:53:04.000Z
|
AppPkg/Applications/Python/Python-2.7.2/Lib/distutils/tests/test_version.py
|
CEOALT1/RefindPlusUDK
|
116b957ad735f96fbb6d80a0ba582046960ba164
|
[
"BSD-2-Clause"
] | 449
|
2018-05-09T05:54:05.000Z
|
2022-03-30T14:54:18.000Z
|
"""Tests for distutils.version."""
import unittest
from distutils.version import LooseVersion
from distutils.version import StrictVersion
from test.test_support import run_unittest
class VersionTestCase(unittest.TestCase):
def test_prerelease(self):
version = StrictVersion('1.2.3a1')
self.assertEqual(version.version, (1, 2, 3))
self.assertEqual(version.prerelease, ('a', 1))
self.assertEqual(str(version), '1.2.3a1')
version = StrictVersion('1.2.0')
self.assertEqual(str(version), '1.2')
def test_cmp_strict(self):
versions = (('1.5.1', '1.5.2b2', -1),
('161', '3.10a', ValueError),
('8.02', '8.02', 0),
('3.4j', '1996.07.12', ValueError),
('3.2.pl0', '3.1.1.6', ValueError),
('2g6', '11g', ValueError),
('0.9', '2.2', -1),
('1.2.1', '1.2', 1),
('1.1', '1.2.2', -1),
('1.2', '1.1', 1),
('1.2.1', '1.2.2', -1),
('1.2.2', '1.2', 1),
('1.2', '1.2.2', -1),
('0.4.0', '0.4', 0),
('1.13++', '5.5.kw', ValueError))
for v1, v2, wanted in versions:
try:
res = StrictVersion(v1).__cmp__(StrictVersion(v2))
except ValueError:
if wanted is ValueError:
continue
else:
raise AssertionError(("cmp(%s, %s) "
"shouldn't raise ValueError")
% (v1, v2))
self.assertEqual(res, wanted,
'cmp(%s, %s) should be %s, got %s' %
(v1, v2, wanted, res))
def test_cmp(self):
versions = (('1.5.1', '1.5.2b2', -1),
('161', '3.10a', 1),
('8.02', '8.02', 0),
('3.4j', '1996.07.12', -1),
('3.2.pl0', '3.1.1.6', 1),
('2g6', '11g', -1),
('0.960923', '2.2beta29', -1),
('1.13++', '5.5.kw', -1))
for v1, v2, wanted in versions:
res = LooseVersion(v1).__cmp__(LooseVersion(v2))
self.assertEqual(res, wanted,
'cmp(%s, %s) should be %s, got %s' %
(v1, v2, wanted, res))
def test_suite():
return unittest.makeSuite(VersionTestCase)
if __name__ == "__main__":
run_unittest(test_suite())
| 37.444444
| 72
| 0.405045
|
8403713dbdf781af8acd9ea5d692485e1e819fed
| 5,005
|
py
|
Python
|
Src/Tests/test_privateBinding.py
|
Enerccio/ironpython26-fixed
|
e302db14f05396a378adb438565a829e66acbf94
|
[
"MS-PL"
] | 1
|
2020-02-11T06:02:40.000Z
|
2020-02-11T06:02:40.000Z
|
Src/Languages/IronPython/Tests/test_privateBinding.py
|
rudimk/dlr-dotnet
|
71d11769f99d6ff1516ddbaed091a359eb46c670
|
[
"MS-PL"
] | null | null | null |
Src/Languages/IronPython/Tests/test_privateBinding.py
|
rudimk/dlr-dotnet
|
71d11769f99d6ff1516ddbaed091a359eb46c670
|
[
"MS-PL"
] | 1
|
2018-11-21T04:10:23.000Z
|
2018-11-21T04:10:23.000Z
|
#####################################################################################
#
# Copyright (c) Microsoft Corporation. All rights reserved.
#
# This source code is subject to terms and conditions of the Microsoft Public License. A
# copy of the license can be found in the License.html file at the root of this distribution. If
# you cannot locate the Microsoft Public License, please send an email to
# ironpy@microsoft.com. By using this source code in any fashion, you are agreeing to be bound
# by the terms of the Microsoft Public License.
#
# You must not remove this notice, or any other, from this software.
#
#
#####################################################################################
##
## Test "-X:PrivateBinding"
##
from iptest.assert_util import *
skiptest("win32")
import System
if not is_silverlight:
privateBinding = "-X:PrivateBinding" in System.Environment.GetCommandLineArgs()
else:
privateBinding = False
load_iron_python_test()
import IronPythonTest
from IronPythonTest import *
clsPart = ClsPart()
def Negate(i): return -i
def test_Common():
AreEqual("InternalClsPart" in dir(IronPythonTest), privateBinding)
AreEqual("InternalClsPart" in globals(), privateBinding)
AreEqual("_ClsPart__privateField" in dir(ClsPart), privateBinding)
AreEqual("_ClsPart__privateProperty" in dir(ClsPart), privateBinding)
AreEqual("_ClsPart__privateEvent" in dir(ClsPart), privateBinding)
AreEqual("_ClsPart__privateMethod" in dir(ClsPart), privateBinding)
if not privateBinding:
def test_NormalBinding():
try:
from IronPythonTest.BinderTest import PrivateClass
except ImportError:
pass
# mixed namespace
import IronPython.Runtime
AssertError(AttributeError, lambda: IronPython.Runtime.SetHelpers)
else:
def test_PrivateBinding():
# entirely internal namespace
from IronPythonTest.BinderTest import PrivateClass
# mixed namespace
import Microsoft.Scripting
x = Microsoft.Scripting.Actions.TopNamespaceTracker
clsPart._ClsPart__privateField = 1
AreEqual(clsPart._ClsPart__privateField, 1)
clsPart._ClsPart__privateProperty = 1
AreEqual(clsPart._ClsPart__privateProperty, 1)
def bad_assign():
clsPart._ClsPart__privateEvent = Negate
AssertError(AttributeError, bad_assign)
clsPart._ClsPart__privateEvent += Negate
clsPart._ClsPart__privateEvent -= Negate
AreEqual(clsPart._ClsPart__privateMethod(1), -1)
# !!! internalClsPart = InternalClsPart()
internalClsPart = IronPythonTest.InternalClsPart()
internalClsPart._InternalClsPart__Field = 1
AreEqual(internalClsPart._InternalClsPart__Field, 1)
internalClsPart._InternalClsPart__Property = 1
AreEqual(internalClsPart._InternalClsPart__Property, 1)
def bad_assign():
internalClsPart._InternalClsPart__Event = Negate
AssertError(AttributeError, bad_assign)
internalClsPart._InternalClsPart__Event += Negate
internalClsPart._InternalClsPart__Event -= Negate
AreEqual(internalClsPart._InternalClsPart__Method(1), -1)
def test_PrivateStaticMethod():
AreEqual(ClsPart._ClsPart__privateStaticMethod(), 100)
AreEqual("_InternalClsPart__Field" in dir(IronPythonTest.InternalClsPart), True)
AreEqual("_InternalClsPart__Property" in dir(InternalClsPart), True)
AreEqual("_InternalClsPart__Method" in dir(InternalClsPart), True)
@skip("silverlight") # no winforms
def test_override_createparams():
"""verify we can override the CreateParams property and get the expected value from the base class"""
clr.AddReference("System.Windows.Forms")
from System.Windows.Forms import Label, Control
for val in [20, 0xffff]:
global called
called = False
class TransLabel(Label):
def get_CreateParams(self):
global called
cp = super(TransLabel, self).CreateParams
cp.ExStyle = cp.ExStyle | val
called = True
return cp
CreateParams = property(fget=get_CreateParams)
a = TransLabel()
AreEqual(called, True)
def test_misc_coverage():
import clr
clr.AddReference("IronPython")
from IronPython.Runtime.Types import SlotFieldAttribute as SFA
temp = SFA()
AreEqual(temp.GetType().Name, "SlotFieldAttribute")
# use this when running standalone
#run_test(__name__)
run_test(__name__, noOutputPlease=True)
if not privateBinding and not is_silverlight:
from iptest.process_util import launch_ironpython_changing_extensions
AreEqual(launch_ironpython_changing_extensions(__file__, add=["-X:PrivateBinding"]), 0)
| 37.074074
| 109
| 0.668531
|
eeb5fa5f27e0b647a79e43416ecef3b674be20e1
| 518
|
py
|
Python
|
.history/ClassFiles/Control Flow/ForLoopRangeFunc_20210101224215.py
|
minefarmer/Comprehensive-Python
|
f97b9b83ec328fc4e4815607e6a65de90bb8de66
|
[
"Unlicense"
] | null | null | null |
.history/ClassFiles/Control Flow/ForLoopRangeFunc_20210101224215.py
|
minefarmer/Comprehensive-Python
|
f97b9b83ec328fc4e4815607e6a65de90bb8de66
|
[
"Unlicense"
] | null | null | null |
.history/ClassFiles/Control Flow/ForLoopRangeFunc_20210101224215.py
|
minefarmer/Comprehensive-Python
|
f97b9b83ec328fc4e4815607e6a65de90bb8de66
|
[
"Unlicense"
] | null | null | null |
''' Range Function
The range() function is a built-in function of Python.
The range() function generates the integers from the given start integer up to (but not including) the stop integer, and is generally used to iterate over with a for loop.
The range() function returns a sequence of numbers starting from zero by default; the start can be changed.
'''
for x in range(8):
print(x) # 0
# 1
# 2
# 3
# 4
# 5
# 6
# 7
# range() with a custom start (completed here; the original line was left
# unfinished mid-edit):
for x in range(2, 8):
    print(x)  # prints 2 through 7
| 25.9
| 157
| 0.57529
|
12c8fafd9c96e3ebd4413aed430dfc5f6cea679f
| 2,868
|
py
|
Python
|
parrot/parser.py
|
au5ton/parrot
|
9e233f5e405f29b14e8fca242f2040dda50b2eeb
|
[
"MIT"
] | null | null | null |
parrot/parser.py
|
au5ton/parrot
|
9e233f5e405f29b14e8fca242f2040dda50b2eeb
|
[
"MIT"
] | null | null | null |
parrot/parser.py
|
au5ton/parrot
|
9e233f5e405f29b14e8fca242f2040dda50b2eeb
|
[
"MIT"
] | null | null | null |
import re
import json
import string
from parrot.driver import node
'''
There are 0 of a max 20 players online:
There are 1 of a max 20 players online: auston
There are 2 of a max 20 players online: auston, valleytoro
'''
def listPlayers(raw: str):
x = raw.split(' ')
x.reverse()
for i in range(0, len(x)):
if x[i].find(':') != -1:
break
# retrieve elements before the one containing the ':' colon
players = x[:i]
# remove punctuation
players = [re.sub(r'[^\w\s]','',p) for p in players]
return players
'''
There are 0 of a max 20 players online:
There are 1 of a max 20 players online: auston
There are 2 of a max 20 players online: auston, valleytoro
'''
def listSlots(raw: str):
return {
"online": int(raw.split(' ')[2]),
"capacity": int(raw.split(' ')[6])
}
'''
auston has the following entity data: {Brain: {memories: {}}, HurtByTimestamp: 0, SleepTimer: 0s, Attributes: [{Base: 0.10000000149011612d, Name: "minecraft:generic.movement_speed"}], Invulnerable: 0b, FallFlying: 0b, PortalCooldown: 0, AbsorptionAmount: 0.0f, abilities: {invulnerable: 1b, mayfly: 1b, instabuild: 0b, walkSpeed: 0.1f, mayBuild: 0b, flying: 1b, flySpeed: 0.05f}, FallDistance: 0.0f, recipeBook: {recipes: [], isBlastingFurnaceFilteringCraftable: 0b, isSmokerGuiOpen: 0b, isFilteringCraftable: 0b, toBeDisplayed: [], isFurnaceGuiOpen: 0b, isGuiOpen: 0b, isFurnaceFilteringCraftable: 0b, isBlastingFurnaceGuiOpen: 0b, isSmokerFilteringCraftable: 0b}, DeathTime: 0s, XpSeed: -1762884102, XpTotal: 0, UUID: [I; 1577784142, -539867416, -1716572465, -495806708], playerGameType: 3, seenCredits: 0b, Motion: [0.0d, 0.0d, 0.0d], Health: 20.0f, foodSaturationLevel: 5.0f, Air: 300s, OnGround: 0b, Dimension: \"minecraft:overworld\", Rotation: [317.23154f, 17.453646f], XpLevel: 0, Score: 0, Pos: [191.21348004379465d, 75.42368787190134d, -16.258547900128978d], previousPlayerGameType: 2, Fire: 0s, XpP: 0.0f, EnderItems: [], DataVersion: 2567, foodLevel: 20, foodExhaustionLevel: 0.478f, HurtTime: 0s, SelectedItemSlot: 0, Inventory: [], foodTickTimer: 0}
'''
def dataGetEntity(raw: str):
selector = "has the following entity data: "
if raw.find(selector) == -1:
return "Error"
else:
# extract Mojang-son
mson = raw[(raw.find(selector) + len(selector)):]
# fix doubles (123.456d)
temp = re.sub(r'([0-9])d', r'\1', mson)
# fix floats (123.456f)
temp = re.sub(r'([0-9])f', r'\1', temp)
# fix bytes (0b, 1b)
temp = re.sub(r'0b', 'false', temp)
temp = re.sub(r'1b', 'true', temp)
# fix seconds (123s)
temp = re.sub(r'([0-9])s', r'\1', temp)
# fix UUID ([I; 1577784142, -539867416, -1716572465, -495806708])
temp = re.sub(r'I;', '', temp)
# fix quotes (literally just use JavaScript to get back JSON)
data = node.eval(f'''
console.log(JSON.stringify({temp}))
''')
return data
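# Illustrative examples (pure string parsing, safe to run without a server):
#
#   listPlayers("There are 2 of a max 20 players online: auston, valleytoro")
#   # -> ['valleytoro', 'auston']   (reverse order of the raw listing)
#   listSlots("There are 2 of a max 20 players online: auston, valleytoro")
#   # -> {'online': 2, 'capacity': 20}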
| 47.016393
| 1,263
| 0.673291
|
3b52b677c8044930f2d71547e88909dc05b079b5
| 145
|
py
|
Python
|
extract_results.py
|
goodman1204/birank
|
87db456a304882ca4e4f1ac4759740989ef49af7
|
[
"MIT"
] | null | null | null |
extract_results.py
|
goodman1204/birank
|
87db456a304882ca4e4f1ac4759740989ef49af7
|
[
"MIT"
] | null | null | null |
extract_results.py
|
goodman1204/birank
|
87db456a304882ca4e4f1ac4759740989ef49af7
|
[
"MIT"
] | null | null | null |
dataset = ["gossipcop_fake", "gossipcop_real", "politifact_fake", "politifact_real", "Aminer"]
methods = ["proposed", "HITS", "CoHITS", "BGRM", "BiRank"]
| 24.166667
| 88
| 0.731034
|
55e71d0f89f8963aa2d14e1cc82dfa5e519e9b57
| 1,835
|
py
|
Python
|
flumewater_exporter/collector.py
|
huagangxie/flumewater_exporter
|
bc24f8b243202ad16bdfe0161317f80699fddfbf
|
[
"MIT"
] | 1
|
2021-10-05T22:52:01.000Z
|
2021-10-05T22:52:01.000Z
|
flumewater_exporter/collector.py
|
huagangxie/flumewater_exporter
|
bc24f8b243202ad16bdfe0161317f80699fddfbf
|
[
"MIT"
] | null | null | null |
flumewater_exporter/collector.py
|
huagangxie/flumewater_exporter
|
bc24f8b243202ad16bdfe0161317f80699fddfbf
|
[
"MIT"
] | null | null | null |
import logging
import prometheus_client.core
class FlumewaterCollector(object):
def __init__(self, api):
self.api = api
self._prefix = "flumewater_"
self.api.credentials()
self.api.userid()
self._devices = self.api.device_list()
def make_metric(self, _is_counter, _name, _documentation, _value,
**_labels):
if _is_counter:
cls = prometheus_client.core.CounterMetricFamily
else:
cls = prometheus_client.core.GaugeMetricFamily
label_names = list(_labels.keys())
metric = cls(
_name, _documentation or "No Documentation", labels=label_names)
metric.add_metric([str(_labels[k]) for k in label_names], _value)
return metric
def collect(self):
metrics = []
        # Get credentials
# getUserID
# self.api.userid()
# getDevices
for device in self._devices:
# query last mins and current month's usage, maybe more in the future
qdata = self.api.device_query(device, all=False)
            if qdata is None:
self.api.credentials()
self.api.userid()
self._devices = self.api.device_list()
                logging.debug("qdata is None; re-registered the device")
            else:
cur_month = self.make_metric(
True, self._prefix + "month",
"current month water usage ",
qdata[1], device_id=device)
metrics.append(cur_month)
last_min = self.make_metric(
False, self._prefix + "usage",
"last one min water usage",
qdata[0], device_id=device)
metrics.append(last_min)
return metrics
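# Usage sketch (illustrative; assumes an `api` object implementing the calls
# used above: credentials(), userid(), device_list() and device_query()):
#
#   import prometheus_client
#   prometheus_client.core.REGISTRY.register(FlumewaterCollector(api))
#   prometheus_client.start_http_server(9434)  # port number is arbitrary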
| 33.363636
| 82
| 0.549864
|
90525da23d94307618b966859b6e24bed753dbf3
| 2,934
|
py
|
Python
|
src/rnn_class/lstm.py
|
JouniVatanen/NLP-and-Deep-Learning
|
2fddcc2c39787713d33d17e80565de4ed073ca60
|
[
"MIT"
] | 1
|
2020-05-24T06:55:31.000Z
|
2020-05-24T06:55:31.000Z
|
Machine Learning/rnn_class/lstm.py
|
Ashleshk/Machine-Learning-Data-Science-Deep-Learning
|
03357ab98155bf73b8f1d2fd53255cc16bea2333
|
[
"MIT"
] | null | null | null |
Machine Learning/rnn_class/lstm.py
|
Ashleshk/Machine-Learning-Data-Science-Deep-Learning
|
03357ab98155bf73b8f1d2fd53255cc16bea2333
|
[
"MIT"
] | 1
|
2020-03-16T13:11:14.000Z
|
2020-03-16T13:11:14.000Z
|
# https://deeplearningcourses.com/c/deep-learning-recurrent-neural-networks-in-python
# https://udemy.com/deep-learning-recurrent-neural-networks-in-python
from __future__ import print_function, division
from builtins import range
# Note: you may need to update your version of future
# sudo pip install -U future
import numpy as np
import theano
import theano.tensor as T
from util import init_weight
class LSTM:
def __init__(self, Mi, Mo, activation):
self.Mi = Mi
self.Mo = Mo
self.f = activation
# numpy init
Wxi = init_weight(Mi, Mo)
Whi = init_weight(Mo, Mo)
Wci = init_weight(Mo, Mo)
bi = np.zeros(Mo)
Wxf = init_weight(Mi, Mo)
Whf = init_weight(Mo, Mo)
Wcf = init_weight(Mo, Mo)
bf = np.zeros(Mo)
Wxc = init_weight(Mi, Mo)
Whc = init_weight(Mo, Mo)
bc = np.zeros(Mo)
Wxo = init_weight(Mi, Mo)
Who = init_weight(Mo, Mo)
Wco = init_weight(Mo, Mo)
bo = np.zeros(Mo)
c0 = np.zeros(Mo)
h0 = np.zeros(Mo)
# theano vars
self.Wxi = theano.shared(Wxi)
self.Whi = theano.shared(Whi)
self.Wci = theano.shared(Wci)
self.bi = theano.shared(bi)
self.Wxf = theano.shared(Wxf)
self.Whf = theano.shared(Whf)
self.Wcf = theano.shared(Wcf)
self.bf = theano.shared(bf)
self.Wxc = theano.shared(Wxc)
self.Whc = theano.shared(Whc)
self.bc = theano.shared(bc)
self.Wxo = theano.shared(Wxo)
self.Who = theano.shared(Who)
self.Wco = theano.shared(Wco)
self.bo = theano.shared(bo)
self.c0 = theano.shared(c0)
self.h0 = theano.shared(h0)
self.params = [
self.Wxi,
self.Whi,
self.Wci,
self.bi,
self.Wxf,
self.Whf,
self.Wcf,
self.bf,
self.Wxc,
self.Whc,
self.bc,
self.Wxo,
self.Who,
self.Wco,
self.bo,
self.c0,
self.h0,
]
def recurrence(self, x_t, h_t1, c_t1):
i_t = T.nnet.sigmoid(x_t.dot(self.Wxi) + h_t1.dot(self.Whi) + c_t1.dot(self.Wci) + self.bi)
f_t = T.nnet.sigmoid(x_t.dot(self.Wxf) + h_t1.dot(self.Whf) + c_t1.dot(self.Wcf) + self.bf)
c_t = f_t * c_t1 + i_t * T.tanh(x_t.dot(self.Wxc) + h_t1.dot(self.Whc) + self.bc)
o_t = T.nnet.sigmoid(x_t.dot(self.Wxo) + h_t1.dot(self.Who) + c_t.dot(self.Wco) + self.bo)
h_t = o_t * T.tanh(c_t)
return h_t, c_t
def output(self, x):
# input X should be a matrix (2-D)
# rows index time
[h, c], _ = theano.scan(
fn=self.recurrence,
sequences=x,
outputs_info=[self.h0, self.c0],
n_steps=x.shape[0],
)
return h
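# Usage sketch (illustrative; assumes util.init_weight is importable and Theano
# is installed; note that recurrence() hard-codes tanh, so the `activation`
# argument is stored but never used):
#
#   X = T.matrix('X')                          # a (T, Mi) input sequence
#   lstm = LSTM(Mi=10, Mo=20, activation=T.tanh)
#   h = lstm.output(X)                         # (T, Mo) hidden states
#   f = theano.function(inputs=[X], outputs=h)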
| 30.5625
| 116
| 0.541241
|
b2fcea547a816596e5ece1e1ef169d8be84e9097
| 191
|
py
|
Python
|
Basics/ecommerce/app.py
|
caseysalvador/Python
|
19bc762a123e98ebbac427c69ce58925e507b045
|
[
"MIT"
] | null | null | null |
Basics/ecommerce/app.py
|
caseysalvador/Python
|
19bc762a123e98ebbac427c69ce58925e507b045
|
[
"MIT"
] | null | null | null |
Basics/ecommerce/app.py
|
caseysalvador/Python
|
19bc762a123e98ebbac427c69ce58925e507b045
|
[
"MIT"
] | null | null | null |
#from ecommerce.shipping import calc_shipping # first example of import
from ecommerce import shipping
#ecommerce.shipping.calc_shipping() # first example of import
shipping.calc_shipping()
| 31.833333
| 72
| 0.827225
|
da1c0b06726cbb8d758ab4c2b040b8326cbbcf45
| 4,763
|
py
|
Python
|
kubernetes/test/test_io_xk8s_cluster_bootstrap_v1alpha3_kubeadm_config_spec_join_configuration.py
|
mariusgheorghies/python
|
68ac7e168963d8b5a81dc493b1973d29e903a15b
|
[
"Apache-2.0"
] | null | null | null |
kubernetes/test/test_io_xk8s_cluster_bootstrap_v1alpha3_kubeadm_config_spec_join_configuration.py
|
mariusgheorghies/python
|
68ac7e168963d8b5a81dc493b1973d29e903a15b
|
[
"Apache-2.0"
] | null | null | null |
kubernetes/test/test_io_xk8s_cluster_bootstrap_v1alpha3_kubeadm_config_spec_join_configuration.py
|
mariusgheorghies/python
|
68ac7e168963d8b5a81dc493b1973d29e903a15b
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: v1.20.7
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime
import kubernetes.client
from kubernetes.client.models.io_xk8s_cluster_bootstrap_v1alpha3_kubeadm_config_spec_join_configuration import IoXK8sClusterBootstrapV1alpha3KubeadmConfigSpecJoinConfiguration # noqa: E501
from kubernetes.client.rest import ApiException
class TestIoXK8sClusterBootstrapV1alpha3KubeadmConfigSpecJoinConfiguration(unittest.TestCase):
"""IoXK8sClusterBootstrapV1alpha3KubeadmConfigSpecJoinConfiguration unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def make_instance(self, include_optional):
"""Test IoXK8sClusterBootstrapV1alpha3KubeadmConfigSpecJoinConfiguration
        include_optional is a boolean: when False only required
params are included, when True both required and
optional params are included """
# model = kubernetes.client.models.io_xk8s_cluster_bootstrap_v1alpha3_kubeadm_config_spec_join_configuration.IoXK8sClusterBootstrapV1alpha3KubeadmConfigSpecJoinConfiguration() # noqa: E501
if include_optional :
return IoXK8sClusterBootstrapV1alpha3KubeadmConfigSpecJoinConfiguration(
api_version = '0',
ca_cert_path = '0',
control_plane = kubernetes.client.models.io_x_k8s_cluster_bootstrap_v1alpha3_kubeadm_config_spec_join_configuration_control_plane.io_x_k8s_cluster_bootstrap_v1alpha3_KubeadmConfig_spec_joinConfiguration_controlPlane(
local_api_endpoint = kubernetes.client.models.io_x_k8s_cluster_bootstrap_v1alpha3_kubeadm_config_spec_join_configuration_control_plane_local_api_endpoint.io_x_k8s_cluster_bootstrap_v1alpha3_KubeadmConfig_spec_joinConfiguration_controlPlane_localAPIEndpoint(
advertise_address = '0',
bind_port = 56, ), ),
discovery = kubernetes.client.models.io_x_k8s_cluster_bootstrap_v1alpha3_kubeadm_config_spec_join_configuration_discovery.io_x_k8s_cluster_bootstrap_v1alpha3_KubeadmConfig_spec_joinConfiguration_discovery(
bootstrap_token = kubernetes.client.models.io_x_k8s_cluster_bootstrap_v1alpha3_kubeadm_config_spec_join_configuration_discovery_bootstrap_token.io_x_k8s_cluster_bootstrap_v1alpha3_KubeadmConfig_spec_joinConfiguration_discovery_bootstrapToken(
api_server_endpoint = '0',
ca_cert_hashes = [
'0'
],
token = '0',
unsafe_skip_ca_verification = True, ),
file = kubernetes.client.models.io_x_k8s_cluster_bootstrap_v1alpha3_kubeadm_config_spec_join_configuration_discovery_file.io_x_k8s_cluster_bootstrap_v1alpha3_KubeadmConfig_spec_joinConfiguration_discovery_file(
kube_config_path = '0', ),
timeout = '0',
tls_bootstrap_token = '0', ),
kind = '0',
node_registration = kubernetes.client.models.io_x_k8s_cluster_bootstrap_v1alpha3_kubeadm_config_spec_init_configuration_node_registration.io_x_k8s_cluster_bootstrap_v1alpha3_KubeadmConfig_spec_initConfiguration_nodeRegistration(
cri_socket = '0',
kubelet_extra_args = {
'key' : '0'
},
name = '0',
taints = [
kubernetes.client.models.io_x_k8s_cluster_bootstrap_v1alpha3_kubeadm_config_spec_init_configuration_node_registration_taints.io_x_k8s_cluster_bootstrap_v1alpha3_KubeadmConfig_spec_initConfiguration_nodeRegistration_taints(
effect = '0',
key = '0',
time_added = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
value = '0', )
], )
)
else :
return IoXK8sClusterBootstrapV1alpha3KubeadmConfigSpecJoinConfiguration(
)
def testIoXK8sClusterBootstrapV1alpha3KubeadmConfigSpecJoinConfiguration(self):
"""Test IoXK8sClusterBootstrapV1alpha3KubeadmConfigSpecJoinConfiguration"""
inst_req_only = self.make_instance(include_optional=False)
inst_req_and_optional = self.make_instance(include_optional=True)
if __name__ == '__main__':
unittest.main()
| 56.702381
| 277
| 0.702288
|
42c7cb39216e64cbab35c7a0df901423074624e9
| 952
|
py
|
Python
|
actions/ship.py
|
Mymineplays/ExtendedKawaii-DiscordBot
|
4f9304e45414a9cb99492f7aafa31c9bd6730486
|
[
"MIT"
] | 1
|
2020-04-28T16:17:04.000Z
|
2020-04-28T16:17:04.000Z
|
actions/ship.py
|
Mymineplays/ExtendedKawaii-DiscordBot
|
4f9304e45414a9cb99492f7aafa31c9bd6730486
|
[
"MIT"
] | 2
|
2020-08-06T05:10:48.000Z
|
2020-11-29T00:16:18.000Z
|
actions/ship.py
|
Mymineplays/ExtendedKawaii-DiscordBot
|
4f9304e45414a9cb99492f7aafa31c9bd6730486
|
[
"MIT"
] | 3
|
2020-05-20T20:47:58.000Z
|
2022-03-03T20:27:22.000Z
|
import discord
from helpers import get_gif
import random
commands = ["schiff", "couple"]
requires_mention = True
accepts_mention = True
description = "Menschen ~~verschiffen~~ shippen"
async def execute(message: discord.Message):
if len(message.mentions) != 2:
await message.channel.send("Wen denn? o.O\n(Bitte gib zwei gültige Nutzer an)")
return
name_a = message.guild.get_member(message.mentions[0].id).display_name
name_b = message.guild.get_member(message.mentions[1].id).display_name
ship_name = name_a[:int(len(name_a) / 2)] + name_b[int(len(name_b) / 2):]
random.seed(message.mentions[0].id+message.mentions[1].id)
love_calc = random.randint(0, 100)
e = discord.Embed()
e.title = ':heart: Lovely Shipping! :heart:'
e.description = f"Shipping Name: **{ship_name}**\n" \
f"Liebe zwischen {name_a} & {name_b}: **{love_calc}%**"
await message.channel.send(embed=e)
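
# Note on determinism: seeding `random` with the sum of both user IDs makes
# the love percentage stable for any given pair, independent of mention order.
# Illustration with hypothetical IDs:
#   random.seed(111 + 222); random.randint(0, 100)  # same value on every call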
| 32.827586
| 87
| 0.677521
|
ec5e9a1ec071dd51e43014d72dbaee1b867f8097
| 2,247
|
py
|
Python
|
migrations/versions/d9668f6a80fe_deployment.py
|
washucode/pitches
|
001c185822a7b80549a81723218c82d7d0003d76
|
[
"MIT"
] | null | null | null |
migrations/versions/d9668f6a80fe_deployment.py
|
washucode/pitches
|
001c185822a7b80549a81723218c82d7d0003d76
|
[
"MIT"
] | null | null | null |
migrations/versions/d9668f6a80fe_deployment.py
|
washucode/pitches
|
001c185822a7b80549a81723218c82d7d0003d76
|
[
"MIT"
] | 1
|
2019-09-23T20:11:30.000Z
|
2019-09-23T20:11:30.000Z
|
"""deployment
Revision ID: d9668f6a80fe
Revises: 86c4a8a7356a
Create Date: 2019-09-18 16:05:13.827708
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'd9668f6a80fe'
down_revision = '86c4a8a7356a'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('users',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('username', sa.String(length=255), nullable=False),
sa.Column('email', sa.String(length=255), nullable=False),
sa.Column('bio', sa.String(length=255), nullable=True),
sa.Column('profile_img', sa.String(length=255), nullable=True),
sa.Column('password_u', sa.String(length=255), nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('email'),
sa.UniqueConstraint('username')
)
op.create_table('pitches',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('title', sa.String(length=40), nullable=True),
sa.Column('content', sa.String(), nullable=True),
sa.Column('category', sa.String(length=40), nullable=True),
sa.Column('author', sa.String(length=40), nullable=True),
sa.Column('upvote', sa.Integer(), nullable=True),
sa.Column('downvote', sa.Integer(), nullable=True),
sa.Column('date_posted', sa.DateTime(), nullable=True),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('comments',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('content', sa.String(), nullable=True),
sa.Column('date_posted', sa.DateTime(), nullable=True),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('pitch_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['pitch_id'], ['pitches.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
sa.PrimaryKeyConstraint('id')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('comments')
op.drop_table('pitches')
op.drop_table('users')
# ### end Alembic commands ###
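
# Applied like any other Alembic revision (commands assume a standard
# Alembic or Flask-Migrate setup):
#   alembic upgrade d9668f6a80fe      # runs upgrade() above
#   alembic downgrade 86c4a8a7356a    # runs downgrade() above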
| 35.109375
| 67
| 0.668002
|
8719cd73e9e525b96514db8ea3846382ddfaeb98
| 10,790
|
py
|
Python
|
facebook/preprocessing.py
|
alexcrist/russian-propaganda
|
eaf12231013f7afbea368e04a43eca958da9248f
|
[
"MIT"
] | null | null | null |
facebook/preprocessing.py
|
alexcrist/russian-propaganda
|
eaf12231013f7afbea368e04a43eca958da9248f
|
[
"MIT"
] | null | null | null |
facebook/preprocessing.py
|
alexcrist/russian-propaganda
|
eaf12231013f7afbea368e04a43eca958da9248f
|
[
"MIT"
] | null | null | null |
import numpy as np
import pandas as pd
import os
import re
import us
from collections import OrderedDict
from tqdm import tqdm, tqdm_notebook
state_names = [state.name for state in us.states.STATES_AND_TERRITORIES]
state_names.remove('Orleans')
def read_all_files(in_path):
"""Read all the files and put the data into a DataFrame."""
# For errors from the pdf parsing utility
error_to_col = {'Ad I D': 'Ad ID'}
# Set up the dataframe
cols = ['Ad ID', 'Ad Text', 'Ad Landing Page', 'Ad Targeting Location',
'Excluded Connections', 'Age', 'Language', 'Placements',
'People Who Match', 'Ad Impressions', 'Ad Clicks',
'Ad Spend', 'Ad Creation Date', 'Ad End Date', 'time_period']
df = pd.DataFrame(columns=cols)
# Go through all folders/files and extract the data
folders = os.listdir(in_path)
for folder in tqdm_notebook(folders):
tqdm.write(f"Processing Folder: {folder}")
curr_path = f'{in_path}{folder}/'
files = os.listdir(curr_path)
for file in files:
# Set up row for this data
new_row = pd.Series(index=cols)
new_row['time_period'] = folder
name = file.split('.')[0]
# Read through the file
with open(f'{curr_path}{file}', 'r', encoding='utf-8') as f:
for line in f:
if len(line.strip()) == 0:
break # We are through all the lines
for c in cols:
if c in line:
                            break  # A successful match
else: # Finally, if no match - check for errors
c = None
# Double check that it wasn't parsed poorly
for k, v in error_to_col.items():
if k in line:
c = v
break
# If no match, append to old line
if c == None:
clean_line = line.strip()
old_line = new_row[last_c]
if last_c != 'Ad ID': # Sometimes we miss Ad Text
new_row[last_c] = ' '.join([old_line, clean_line])
else:
new_row['Ad Text'] = clean_line
else:
clean_line = line.replace(c, '')[1:].strip()
new_row[c] = clean_line
last_c = c
df.loc[name, :] = new_row
return df
def clean_ad_id(ad_id):
"""Clean the ad_id."""
if len(ad_id) > 5:
ad_id = ad_id[6:]
return int(ad_id)
class AdTgtLocCleaner:
def __init__(self):
# Different forms of data which can be stored in the ad location field
self.headers = OrderedDict({
'countries': 'UnusedValue:',
'states': 'UnusedValue:',
'locs': '- Living In:',
'exc_states': 'Exclude Location:',
'interests': 'Interests:',
'connections': 'Connections:',
'friend_connections': 'Friends of connections:',
'behaviors': 'Behaviors:',
'generation': 'Generation:',
'politics': 'Politics:'})
def clean(self, ad_loc, return_type='as_array'):
"""Clean the ad targeting location data field.
A warning to all those who find themselves here:
The following code is an obscure mess of parsing some
data that is itself in an obscure mess of a format.
        I tried to write this code replete with comments, lest
        some unfortunate soul wander in. If you find yourself here,
        I give you this one last warning: turn back.
"""
# Preliminary corner-case cleaning
ad_loc = ad_loc.replace('- Recently In', '- Living In')
# Set up output, check if there is no info
output = OrderedDict({k: [] for k in self.headers.keys()})
if ad_loc == 'None':
if return_type == 'as_dict':
return output
elif return_type =='as_array':
return [v for v in output.values()]
# First parse the locations of the headers
header_locs = {}
for name, string in self.headers.items():
if string in ad_loc:
i_loc = ad_loc.index(string)
header_locs[name] = (i_loc, i_loc + len(string))
# Now reverse into an ordered dict
idx_dict = OrderedDict({v: k for k,v in header_locs.items()})
        # Now get the intervening text indexes
text_idx_dict = {}
last_header = None
is_first = True
for idx_pair, header in idx_dict.items():
            # Locations are always first if we don't have a header
if is_first:
is_first = False
if idx_pair[0] != 0:
text_idx_dict['locs'] = (0, idx_pair[0])
else: # If we are at least 2nd, get post-header text
text_idx_dict[last_header] = (start, idx_pair[0]-1)
last_header = header
start = idx_pair[1]
else: # Finally get indexes for the last header
if last_header == None:
# If there were no headers, its just a location
text_idx_dict['locs'] = (0, len(ad_loc))
else:
text_idx_dict[last_header] = (start, len(ad_loc))
# Slice out the text into a dict
text_dict = {}
for name, idxs in text_idx_dict.items():
text_dict[name] = ad_loc[idxs[0]:idxs[1]]
# Parse text into the output
# Clean locations
countries, states, locs = self._clean_location_data('locs', text_dict)
output['countries'] = countries
output['states'] = states
output['locs'] = locs
# Clean the excluded locations
_, exc_locs, _ = self._clean_location_data('exc_states', text_dict)
output['exc_states'] = exc_locs
# Clean all the data that was in list format
keys = ['interests', 'behaviors', 'connections', 'friend_connections',
'generation', 'politics']
for key in keys:
output[key] = self._clean_list_data(key, text_dict)
if return_type == 'as_dict':
return output
elif return_type =='as_array':
return [v for v in output.values()]
def _clean_location_data(self, key, text_dict):
"""Clean location data from ad_tgt_loc."""
countries = []
states = []
locs = []
if key in text_dict.keys():
loc_text = text_dict[key]
loc_text = loc_text.replace('United States ', 'United States: ')
if ':' in loc_text:
loc_list = [*loc_text.split(':')]
countries = [loc_list[0].strip()]
locs = ', '.join(l.strip() for l in loc_list[1:])
states = []
for s in state_names:
if s in locs:
states.append(s)
if s != 'New York':
locs = locs.replace(s, '')
locs = locs.replace(' (', '(').replace('(', ' (')
locs = [*map(lambda s: s.strip(), re.split('[,;]', locs))]
locs = [l for l in locs if l != '']
else: # We just have countries
countries = [*map(lambda s: s.strip(), loc_text.split(','))]
return countries, states, locs
def _clean_list_data(self, key, text_dict):
"""Clean data that was in a list format."""
if key in text_dict.keys():
list_text = text_dict[key]
if ' or ' in list_text:
items = [*list_text.split(' or ')]
i0 = ' or '.join(items[:-1])
i1 = [items[-1].strip()]
parsed_list = re.split('[,.;]', i0) + i1
else:
parsed_list = list_text.split(', ')
parsed_list = [*map(lambda s: s.strip(), parsed_list)]
parsed_list = [p for p in parsed_list if p not in ['', 'Jr']]
else:
parsed_list = []
return parsed_list
class PeopleWhoMatchCleaner(AdTgtLocCleaner):
def __init__(self):
self.headers = OrderedDict({
'interests': 'Interests:',
'friend_connections': 'Friends of connections:',
'behaviors': 'Behaviors:',
'page_likes': 'People who like',
'politics': 'Politics:'})
def _clean(self, ad_loc, return_type='as_array'):
"""Small wrapper to first clean the strings."""
# TODO: abstract out the common cleaning, and make the local .clean
# functions parse out the appropriate keys
ad_loc = ad_loc.replace('And Must Also Match:', '')
return self.clean(ad_loc, return_type)
def parse_ad_targeting_location(df):
"""Parse the ad targeting location column, and add it to the DataFrame."""
# First parse all the data
cleaner = AdTgtLocCleaner()
parsed_arr = np.array([cleaner.clean(ad_text) for ad_text
in df['ad_targeting_location']])
# Now add it to new columns in the DataFrame
new_cols = ['countries', 'states', 'locs', 'exc_states', 'interests',
'connections', 'friend_connections', 'behaviors', 'generation',
'politics']
return df.join(pd.DataFrame(parsed_arr, df.index, new_cols))
def parse_people_who_match(df):
"""Parse the ad targeting location column, and add it to the DataFrame."""
# First parse all the data
cleaner = PeopleWhoMatchCleaner()
parsed_arr = np.array([cleaner._clean(ad_text, 'as_dict') for ad_text
in df['people_who_match']])
# Now add it to new columns in the DataFrame
new_cols = ['interests2', 'friend_connections2', 'behaviors2',
'page_likes']
return parsed_arr
return df.join(pd.DataFrame(parsed_arr, df.index, new_cols))
# Utility function for easy viewing
def sample_loc(s, df, n=10, shuffle=False):
"""Sample locs that contain s."""
msk = df['ad_targeting_location'].apply(lambda v: s in v)
r_val = df['ad_targeting_location'][msk]
if not shuffle:
return r_val[:n].values
else:
return r_val.sample(n).values
# Utility function for easy viewing
def sample_like(s, df, n=10, shuffle=False):
"""Sample locs that contain s."""
msk = df['people_who_match'].apply(lambda v: s in v)
r_val = df['people_who_match'][msk]
if not shuffle:
return r_val[:n].values
else:
return r_val.sample(n).values
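
# Minimal end-to-end sketch; the directory path and the column renaming are
# assumptions, not part of the original pipeline:
#
# df = read_all_files('data/facebook-ads/')
# df.columns = [c.lower().replace(' ', '_') for c in df.columns]
# df['ad_id'] = df['ad_id'].astype(str).map(clean_ad_id)
# df = parse_ad_targeting_location(df)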
| 39.094203
| 79
| 0.546895
|
ada82bb7b8fdb0ca7ebdecfbf7155e2a9e067a88
| 470
|
py
|
Python
|
data/scripts/templates/object/draft_schematic/droid/component/shared_personality_module_stupid.py
|
obi-two/GameServer
|
7d37024e2291a97d49522610cd8f1dbe5666afc2
|
[
"MIT"
] | 20
|
2015-02-23T15:11:56.000Z
|
2022-03-18T20:56:48.000Z
|
data/scripts/templates/object/draft_schematic/droid/component/shared_personality_module_stupid.py
|
apathyboy/swganh
|
665128efe9154611dec4cb5efc61d246dd095984
|
[
"MIT"
] | null | null | null |
data/scripts/templates/object/draft_schematic/droid/component/shared_personality_module_stupid.py
|
apathyboy/swganh
|
665128efe9154611dec4cb5efc61d246dd095984
|
[
"MIT"
] | 20
|
2015-04-04T16:35:59.000Z
|
2022-03-24T14:54:37.000Z
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Intangible()
result.template = "object/draft_schematic/droid/component/shared_personality_module_stupid.iff"
result.attribute_template_id = -1
result.stfName("string_id_table","")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
| 27.647059
| 96
| 0.738298
|
7747abebceb38857d4e6fbd84bddad8ff85bb53d
| 5,254
|
py
|
Python
|
DQM/L1TMonitorClient/python/L1TStage2uGMTClient_cff.py
|
PKUfudawei/cmssw
|
8fbb5ce74398269c8a32956d7c7943766770c093
|
[
"Apache-2.0"
] | 1
|
2021-11-30T16:24:46.000Z
|
2021-11-30T16:24:46.000Z
|
DQM/L1TMonitorClient/python/L1TStage2uGMTClient_cff.py
|
PKUfudawei/cmssw
|
8fbb5ce74398269c8a32956d7c7943766770c093
|
[
"Apache-2.0"
] | 4
|
2021-11-29T13:57:56.000Z
|
2022-03-29T06:28:36.000Z
|
DQM/L1TMonitorClient/python/L1TStage2uGMTClient_cff.py
|
PKUfudawei/cmssw
|
8fbb5ce74398269c8a32956d7c7943766770c093
|
[
"Apache-2.0"
] | 1
|
2022-02-27T06:12:26.000Z
|
2022-02-27T06:12:26.000Z
|
import FWCore.ParameterSet.Config as cms
from DQMServices.Core.DQMEDHarvester import DQMEDHarvester
from DQM.L1TMonitor.L1TStage2uGMT_cff import ignoreBins
# directory path shortening
ugmtDqmDir = 'L1T/L1TStage2uGMT'
ugmtMuCpyDqmDir = ugmtDqmDir+'/uGMTMuonCopies'
ugmtZSDqmDir = ugmtDqmDir+'/zeroSuppression'
# input histograms
errHistNumStr = 'errorSummaryNum'
errHistDenStr = 'errorSummaryDen'
# Muons
l1tStage2uGMTMuonVsuGMTMuonCopy1RatioClient = DQMEDHarvester("L1TStage2RatioClient",
monitorDir = cms.untracked.string(ugmtMuCpyDqmDir+'/uGMTMuonCopy1'),
inputNum = cms.untracked.string(ugmtMuCpyDqmDir+'/uGMTMuonCopy1/'+errHistNumStr),
inputDen = cms.untracked.string(ugmtMuCpyDqmDir+'/uGMTMuonCopy1/'+errHistDenStr),
ratioName = cms.untracked.string('mismatchRatio'),
ratioTitle = cms.untracked.string('Summary of mismatch rates between uGMT muons and uGMT muon copy 1'),
yAxisTitle = cms.untracked.string('# mismatch / # total'),
binomialErr = cms.untracked.bool(True),
ignoreBin = cms.untracked.vint32()
)
l1tStage2uGMTMuonVsuGMTMuonCopy2RatioClient = l1tStage2uGMTMuonVsuGMTMuonCopy1RatioClient.clone(
monitorDir = ugmtMuCpyDqmDir+'/uGMTMuonCopy2',
inputNum = ugmtMuCpyDqmDir+'/uGMTMuonCopy2/'+errHistNumStr,
inputDen = ugmtMuCpyDqmDir+'/uGMTMuonCopy2/'+errHistDenStr,
ratioTitle = 'Summary of mismatch rates between uGMT muons and uGMT muon copy 2'
)
l1tStage2uGMTMuonVsuGMTMuonCopy3RatioClient = l1tStage2uGMTMuonVsuGMTMuonCopy1RatioClient.clone(
monitorDir = ugmtMuCpyDqmDir+'/uGMTMuonCopy3',
inputNum = ugmtMuCpyDqmDir+'/uGMTMuonCopy3/'+errHistNumStr,
inputDen = ugmtMuCpyDqmDir+'/uGMTMuonCopy3/'+errHistDenStr,
ratioTitle = 'Summary of mismatch rates between uGMT muons and uGMT muon copy 3'
)
l1tStage2uGMTMuonVsuGMTMuonCopy4RatioClient = l1tStage2uGMTMuonVsuGMTMuonCopy1RatioClient.clone(
monitorDir = ugmtMuCpyDqmDir+'/uGMTMuonCopy4',
inputNum = ugmtMuCpyDqmDir+'/uGMTMuonCopy4/'+errHistNumStr,
inputDen = ugmtMuCpyDqmDir+'/uGMTMuonCopy4/'+errHistDenStr,
ratioTitle = 'Summary of mismatch rates between uGMT muons and uGMT muon copy 4'
)
l1tStage2uGMTMuonVsuGMTMuonCopy5RatioClient = l1tStage2uGMTMuonVsuGMTMuonCopy1RatioClient.clone(
monitorDir = ugmtMuCpyDqmDir+'/uGMTMuonCopy5',
inputNum = ugmtMuCpyDqmDir+'/uGMTMuonCopy5/'+errHistNumStr,
inputDen = ugmtMuCpyDqmDir+'/uGMTMuonCopy5/'+errHistDenStr,
ratioTitle = 'Summary of mismatch rates between uGMT muons and uGMT muon copy 5'
)
# RegionalMuonCands
l1tStage2BmtfOutVsuGMTInRatioClient = l1tStage2uGMTMuonVsuGMTMuonCopy1RatioClient.clone(
monitorDir = ugmtDqmDir+'/BMTFoutput_vs_uGMTinput',
inputNum = ugmtDqmDir+'/BMTFoutput_vs_uGMTinput/'+errHistNumStr,
inputDen = ugmtDqmDir+'/BMTFoutput_vs_uGMTinput/'+errHistDenStr,
ratioTitle = 'Summary of mismatch rates between BMTF output muons and uGMT input muons from BMTF',
ignoreBin = ignoreBins['Bmtf']
)
l1tStage2OmtfOutVsuGMTInRatioClient = l1tStage2uGMTMuonVsuGMTMuonCopy1RatioClient.clone(
monitorDir = ugmtDqmDir+'/OMTFoutput_vs_uGMTinput',
inputNum = ugmtDqmDir+'/OMTFoutput_vs_uGMTinput/'+errHistNumStr,
inputDen = ugmtDqmDir+'/OMTFoutput_vs_uGMTinput/'+errHistDenStr,
ratioTitle = 'Summary of mismatch rates between OMTF output muons and uGMT input muons from OMTF',
ignoreBin = ignoreBins['Omtf']
)
l1tStage2EmtfOutVsuGMTInRatioClient = l1tStage2uGMTMuonVsuGMTMuonCopy1RatioClient.clone(
monitorDir = ugmtDqmDir+'/EMTFoutput_vs_uGMTinput',
inputNum = ugmtDqmDir+'/EMTFoutput_vs_uGMTinput/'+errHistNumStr,
inputDen = ugmtDqmDir+'/EMTFoutput_vs_uGMTinput/'+errHistDenStr,
ratioTitle = 'Summary of mismatch rates between EMTF output muons and uGMT input muons from EMTF',
ignoreBin = ignoreBins['Emtf']
)
# zero suppression
l1tStage2uGMTZeroSuppRatioClient = l1tStage2uGMTMuonVsuGMTMuonCopy1RatioClient.clone(
monitorDir = ugmtZSDqmDir+'/AllEvts',
inputNum = ugmtZSDqmDir+'/AllEvts/'+errHistNumStr,
inputDen = ugmtZSDqmDir+'/AllEvts/'+errHistDenStr,
ratioTitle = 'Summary of bad zero suppression rates',
yAxisTitle = '# fail / # total'
)
l1tStage2uGMTZeroSuppFatEvtsRatioClient = l1tStage2uGMTZeroSuppRatioClient.clone(
monitorDir = ugmtZSDqmDir+'/FatEvts',
inputNum = ugmtZSDqmDir+'/FatEvts/'+errHistNumStr,
inputDen = ugmtZSDqmDir+'/FatEvts/'+errHistDenStr,
ratioTitle = 'Summary of bad zero suppression rates'
)
# sequences
l1tStage2uGMTMuonCompClient = cms.Sequence(
l1tStage2uGMTMuonVsuGMTMuonCopy1RatioClient
+ l1tStage2uGMTMuonVsuGMTMuonCopy2RatioClient
+ l1tStage2uGMTMuonVsuGMTMuonCopy3RatioClient
+ l1tStage2uGMTMuonVsuGMTMuonCopy4RatioClient
+ l1tStage2uGMTMuonVsuGMTMuonCopy5RatioClient
)
l1tStage2uGMTRegionalMuonCandCompClient = cms.Sequence(
l1tStage2BmtfOutVsuGMTInRatioClient
+ l1tStage2OmtfOutVsuGMTInRatioClient
+ l1tStage2EmtfOutVsuGMTInRatioClient
)
l1tStage2uGMTZeroSuppCompClient = cms.Sequence(
l1tStage2uGMTZeroSuppRatioClient
+ l1tStage2uGMTZeroSuppFatEvtsRatioClient
)
l1tStage2uGMTClient = cms.Sequence(
l1tStage2uGMTMuonCompClient
+ l1tStage2uGMTRegionalMuonCandCompClient
+ l1tStage2uGMTZeroSuppCompClient
)
| 47.333333
| 107
| 0.792729
|
2c0e103b213a5d47efa9f4830d27a55830bb9699
| 574
|
py
|
Python
|
tests/unit/v0x04/test_controller2switch/test_role_reply.py
|
josemauro/python-openflow
|
0537c626f3aeb4c53995b65e0783f09ad5e63101
|
[
"MIT"
] | 48
|
2016-06-09T14:36:22.000Z
|
2021-11-11T16:05:19.000Z
|
tests/unit/v0x04/test_controller2switch/test_role_reply.py
|
josemauro/python-openflow
|
0537c626f3aeb4c53995b65e0783f09ad5e63101
|
[
"MIT"
] | 338
|
2016-05-06T18:42:36.000Z
|
2021-04-29T17:57:09.000Z
|
tests/unit/v0x04/test_controller2switch/test_role_reply.py
|
josemauro/python-openflow
|
0537c626f3aeb4c53995b65e0783f09ad5e63101
|
[
"MIT"
] | 46
|
2016-05-24T15:32:56.000Z
|
2021-06-01T12:16:17.000Z
|
"""RoleReply message tests."""
from pyof.v0x04.controller2switch.role_reply import RoleReply
from tests.unit.test_struct import TestStruct
class TestRoleReply(TestStruct):
"""Test the RoleReply message."""
@classmethod
def setUpClass(cls):
"""Configure raw file and its object in parent class (TestDump)."""
super().setUpClass()
super().set_raw_dump_file('v0x04', 'ofpt_role_reply')
super().set_raw_dump_object(RoleReply, xid=3, role=0,
generation_id=0)
super().set_minimum_size(24)
| 33.764706
| 75
| 0.660279
|
ee15ff6899878f7d4bc8aab9efac3e1067fe2c2e
| 9,744
|
py
|
Python
|
orchestra/contrib/domains/admin.py
|
udm88/django-orchestra
|
49c84f13a8f92427b01231615136549fb5be3a78
|
[
"Unlicense"
] | 68
|
2015-02-09T10:28:44.000Z
|
2022-03-12T11:08:36.000Z
|
orchestra/contrib/domains/admin.py
|
ferminhg/django-orchestra
|
49c84f13a8f92427b01231615136549fb5be3a78
|
[
"Unlicense"
] | 17
|
2015-05-01T18:10:03.000Z
|
2021-03-19T21:52:55.000Z
|
orchestra/contrib/domains/admin.py
|
ferminhg/django-orchestra
|
49c84f13a8f92427b01231615136549fb5be3a78
|
[
"Unlicense"
] | 29
|
2015-03-31T04:51:03.000Z
|
2022-02-17T02:58:50.000Z
|
from django.contrib import admin
from django.core.urlresolvers import reverse
from django.db import models
from django.db.models.functions import Concat, Coalesce
from django.templatetags.static import static
from django.utils.translation import ugettext, ugettext_lazy as _
from orchestra.admin import ExtendedModelAdmin
from orchestra.admin.utils import admin_link, change_url
from orchestra.contrib.accounts.actions import list_accounts
from orchestra.contrib.accounts.admin import AccountAdminMixin
from orchestra.utils import apps
from orchestra.utils.html import get_on_site_link
from . import settings
from .actions import view_zone, edit_records, set_soa
from .filters import TopDomainListFilter, HasWebsiteFilter, HasAddressFilter
from .forms import RecordForm, RecordInlineFormSet, BatchDomainCreationAdminForm
from .models import Domain, Record
class RecordInline(admin.TabularInline):
model = Record
form = RecordForm
formset = RecordInlineFormSet
verbose_name_plural = _("Extra records")
class DomainInline(admin.TabularInline):
model = Domain
fields = ('domain_link', 'display_records', 'account_link')
readonly_fields = ('domain_link', 'display_records', 'account_link')
extra = 0
verbose_name_plural = _("Subdomains")
domain_link = admin_link('__str__')
domain_link.short_description = _("Name")
account_link = admin_link('account')
def display_records(self, domain):
return ', '.join([record.type for record in domain.records.all()])
display_records.short_description = _("Declared records")
def has_add_permission(self, *args, **kwargs):
return False
def get_queryset(self, request):
""" Order by structured name and imporve performance """
qs = super(DomainInline, self).get_queryset(request)
return qs.select_related('account').prefetch_related('records')
class DomainAdmin(AccountAdminMixin, ExtendedModelAdmin):
list_display = (
'structured_name', 'display_is_top', 'display_websites', 'display_addresses', 'account_link'
)
add_fields = ('name', 'account')
fields = ('name', 'account_link', 'display_websites', 'display_addresses')
readonly_fields = (
'account_link', 'top_link', 'display_websites', 'display_addresses', 'implicit_records'
)
inlines = (RecordInline, DomainInline)
list_filter = (TopDomainListFilter, HasWebsiteFilter, HasAddressFilter)
change_readonly_fields = ('name', 'serial')
search_fields = ('name', 'account__username', 'records__value')
add_form = BatchDomainCreationAdminForm
actions = (edit_records, set_soa, list_accounts)
change_view_actions = (view_zone, edit_records)
top_link = admin_link('top')
def structured_name(self, domain):
if domain.is_top:
return domain.name
return ' '*4 + domain.name
structured_name.short_description = _("name")
structured_name.allow_tags = True
structured_name.admin_order_field = 'structured_name'
def display_is_top(self, domain):
return domain.is_top
display_is_top.short_description = _("Is top")
display_is_top.boolean = True
display_is_top.admin_order_field = 'top'
def display_websites(self, domain):
if apps.isinstalled('orchestra.contrib.websites'):
websites = domain.websites.all()
if websites:
links = []
for website in websites:
site_link = get_on_site_link(website.get_absolute_url())
admin_url = change_url(website)
title = _("Edit website")
link = '<a href="%s" title="%s">%s %s</a>' % (
admin_url, title, website.name, site_link)
links.append(link)
return '<br>'.join(links)
add_url = reverse('admin:websites_website_add')
add_url += '?account=%i&domains=%i' % (domain.account_id, domain.pk)
            image = '<img src="%s">' % static('orchestra/images/add.png')
add_link = '<a href="%s" title="%s">%s</a>' % (
add_url, _("Add website"), image
)
return _("No website %s") % (add_link)
return '---'
display_websites.admin_order_field = 'websites__name'
display_websites.short_description = _("Websites")
display_websites.allow_tags = True
def display_addresses(self, domain):
if apps.isinstalled('orchestra.contrib.mailboxes'):
add_url = reverse('admin:mailboxes_address_add')
add_url += '?account=%i&domain=%i' % (domain.account_id, domain.pk)
            image = '<img src="%s">' % static('orchestra/images/add.png')
add_link = '<a href="%s" title="%s">%s</a>' % (
add_url, _("Add address"), image
)
addresses = domain.addresses.all()
if addresses:
url = reverse('admin:mailboxes_address_changelist')
url += '?domain=%i' % addresses[0].domain_id
title = '\n'.join([address.email for address in addresses])
return '<a href="%s" title="%s">%s</a> %s' % (url, title, len(addresses), add_link)
return _("No address %s") % (add_link)
return '---'
display_addresses.short_description = _("Addresses")
display_addresses.admin_order_field = 'addresses__count'
display_addresses.allow_tags = True
def implicit_records(self, domain):
defaults = []
types = set(domain.records.values_list('type', flat=True))
ttl = settings.DOMAINS_DEFAULT_TTL
lines = []
for record in domain.get_default_records():
line = '{name} {ttl} IN {type} {value}'.format(
name=domain.name,
ttl=ttl,
type=record.type,
value=record.value
)
if not domain.record_is_implicit(record, types):
line = '<strike>%s</strike>' % line
if record.type is Record.SOA:
lines.insert(0, line)
else:
lines.append(line)
return '<br>'.join(lines)
implicit_records.short_description = _("Implicit records")
implicit_records.allow_tags = True
def get_fieldsets(self, request, obj=None):
""" Add SOA fields when domain is top """
fieldsets = super(DomainAdmin, self).get_fieldsets(request, obj)
if obj:
fieldsets += (
(_("Implicit records"), {
'classes': ('collapse',),
'fields': ('implicit_records',),
}),
)
if obj.is_top:
fieldsets += (
(_("SOA"), {
'classes': ('collapse',),
'description': _(
"SOA (Start of Authority) records are used to determine how the "
"zone propagates to the secondary nameservers."),
'fields': ('serial', 'refresh', 'retry', 'expire', 'min_ttl'),
}),
)
else:
existing = fieldsets[0][1]['fields']
if 'top_link' not in existing:
fieldsets[0][1]['fields'].insert(2, 'top_link')
return fieldsets
def get_inline_instances(self, request, obj=None):
inlines = super(DomainAdmin, self).get_inline_instances(request, obj)
if not obj or not obj.is_top:
return [inline for inline in inlines if type(inline) != DomainInline]
return inlines
def get_queryset(self, request):
""" Order by structured name and imporve performance """
qs = super(DomainAdmin, self).get_queryset(request)
qs = qs.select_related('top', 'account')
if request.method == 'GET':
qs = qs.annotate(
structured_id=Coalesce('top__id', 'id'),
structured_name=Concat('top__name', 'name')
).order_by('-structured_id', 'structured_name')
if apps.isinstalled('orchestra.contrib.websites'):
qs = qs.prefetch_related('websites__domains')
if apps.isinstalled('orchestra.contrib.mailboxes'):
qs = qs.annotate(models.Count('addresses'))
return qs
def save_model(self, request, obj, form, change):
""" batch domain creation support """
super(DomainAdmin, self).save_model(request, obj, form, change)
self.extra_domains = []
if not change:
for name in form.extra_names:
domain = Domain.objects.create(name=name, account_id=obj.account_id)
self.extra_domains.append(domain)
def save_related(self, request, form, formsets, change):
""" batch domain creation support """
super(DomainAdmin, self).save_related(request, form, formsets, change)
if not change:
# Clone records to extra_domains, if any
for formset in formsets:
if formset.model is Record:
for domain in self.extra_domains:
# Reset pk value of the record instances to force creation of new ones
for record_form in formset.forms:
record = record_form.instance
if record.pk:
record.pk = None
formset.instance = domain
form.instance = domain
self.save_formset(request, form, formset, change)
admin.site.register(Domain, DomainAdmin)
| 42.736842
| 100
| 0.603038
|
1b6164e03ab202642513de1d626a1e1ed4e16b7b
| 4,890
|
py
|
Python
|
boa3/model/builtin/method/isinstancemethod.py
|
DanPopa46/neo3-boa
|
e4ef340744b5bd25ade26f847eac50789b97f3e9
|
[
"Apache-2.0"
] | null | null | null |
boa3/model/builtin/method/isinstancemethod.py
|
DanPopa46/neo3-boa
|
e4ef340744b5bd25ade26f847eac50789b97f3e9
|
[
"Apache-2.0"
] | null | null | null |
boa3/model/builtin/method/isinstancemethod.py
|
DanPopa46/neo3-boa
|
e4ef340744b5bd25ade26f847eac50789b97f3e9
|
[
"Apache-2.0"
] | null | null | null |
from typing import Any, Dict, List, Optional, Tuple, Union
from boa3.model.builtin.method.builtinmethod import IBuiltinMethod
from boa3.model.expression import IExpression
from boa3.model.type.itype import IType
from boa3.model.variable import Variable
from boa3.neo.vm.opcode.Opcode import Opcode
class IsInstanceMethod(IBuiltinMethod):
def __init__(self, target_type: IType = None):
from boa3.model.type.type import Type
identifier = 'isinstance'
args: Dict[str, Variable] = {
'x': Variable(Type.any),
'A_tuple': None
}
super().__init__(identifier, args, return_type=Type.bool)
from boa3.model.type.collection.sequence.tupletype import TupleType
from boa3.model.type.annotation.uniontype import UnionType
if not isinstance(target_type, IType):
instances = [Type.none]
elif isinstance(target_type, TupleType) and isinstance(target_type.item_type, UnionType):
instances = target_type.item_type.union_types
else:
instances = [target_type]
self._instances_type: List[IType] = instances
def set_instance_type(self, value: List[IType]):
new_list = []
for tpe in value:
if isinstance(tpe, IType):
if not any(tpe.raw_identifier == other.raw_identifier for other in new_list):
new_list.append(tpe)
self._instances_type = new_list
@property
def identifier(self) -> str:
from boa3.model.type.type import Type
if (len(self._instances_type) == 0
or (len(self._instances_type) == 1 and self._instances_type[0] in (None, Type.none))
):
return self._identifier
types = list({tpe.raw_identifier for tpe in self._instances_type})
types.sort()
return '-{0}_of_{1}'.format(self._identifier, '_or_'.join(types))
def args_to_be_generated(self) -> List[int]:
args = [name for name, symbol in self.args.items() if isinstance(symbol, Variable)]
return [list(self.args).index(key) for key in args]
@property
def is_supported(self) -> bool:
from boa3.model.type.classtype import ClassType
return not any(isinstance(param, ClassType) and len(param.is_instance_opcodes()) == 0
for param in self._instances_type)
def not_supported_str(self, callable_id: str) -> str:
types = (self._instances_type[0].identifier if len(self._instances_type) == 1
else '({0})'.format(', '.join([arg.identifier for arg in self._instances_type])))
return '{0}({1}, {2})'.format(callable_id, self.arg_x, types)
@property
def arg_x(self) -> Variable:
return self.args['x']
def validate_parameters(self, *params: Union[IExpression, IType]) -> bool:
if len(params) != 2:
return False
return not any(not isinstance(param, (IExpression, IType)) for param in params)
@property
def opcode(self) -> List[Tuple[Opcode, bytes]]:
if len(self._instances_type) == 0:
return [
(Opcode.ISNULL, b'')
]
else:
opcodes = []
from boa3.model.type.type import Type
from boa3.neo.vm.type.Integer import Integer
types = self._instances_type.copy()
jmps = []
for check_instance in types[:-1]:
opcodes.append((Opcode.DUP, b''))
opcodes.extend(check_instance.is_instance_opcodes())
jmps.append(len(opcodes))
opcodes.append((Opcode.JMPIF, b''))
opcodes.extend(types[-1].is_instance_opcodes())
last_index = len(opcodes)
if len(types) > 1:
opcodes.extend([
(Opcode.JMP, Integer(4).to_byte_array(min_length=1, signed=True)),
(Opcode.DROP, b''),
])
last_index = len(opcodes)
opcodes.append((Opcode.PUSH1, b''))
jmp_to = 0
for index in reversed(jmps):
for pos in range(index + 1, last_index):
last_op, last_data = opcodes[pos - 1]
op, data = opcodes[pos]
jmp_to += len(last_data) + len(op)
jmp_to += 1
last_index = index + 1
opcodes[index] = opcodes[index][0], Integer(jmp_to).to_byte_array(min_length=1, signed=True)
return opcodes
@property
def _args_on_stack(self) -> int:
return 1
@property
def _body(self) -> Optional[str]:
return
def build(self, value: Any) -> IBuiltinMethod:
if isinstance(value, list) and self.validate_parameters(*value):
return IsInstanceMethod(value[-1])
return super().build(value)
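
# For illustration: with instance types [int, str] the `identifier` property
# above would yield a name of the form '-isinstance_of_<t1>_or_<t2>', where
# the exact <t*> strings depend on boa3's raw type identifiers.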
| 35.693431
| 108
| 0.594479
|
83ada63b8ebb346679f33b7c9bd8b9bc6ee20574
| 2,637
|
py
|
Python
|
tests/formatter/test_tox_section.py
|
Julian/tox-ini-fmt
|
2d251f349d5f30fe62a55a38e119347a182d4cf4
|
[
"MIT"
] | null | null | null |
tests/formatter/test_tox_section.py
|
Julian/tox-ini-fmt
|
2d251f349d5f30fe62a55a38e119347a182d4cf4
|
[
"MIT"
] | null | null | null |
tests/formatter/test_tox_section.py
|
Julian/tox-ini-fmt
|
2d251f349d5f30fe62a55a38e119347a182d4cf4
|
[
"MIT"
] | null | null | null |
import pytest
from tox_ini_fmt.formatter import format_tox_ini
from tox_ini_fmt.formatter.tox_section import order_env_list
def test_no_tox_section(tox_ini):
tox_ini.write_text("")
assert format_tox_ini(tox_ini) == "\n"
def test_format_envlist_simple(tox_ini):
tox_ini.write_text("[tox]\nenvlist=py39,py38\n")
outcome = format_tox_ini(tox_ini)
assert outcome == "[tox]\nenvlist =\n py39\n py38\n"
def test_format_envlist_start_newline(tox_ini):
ok = "[tox]\nenvlist =\n py39\n py38\n"
tox_ini.write_text(ok)
outcome = format_tox_ini(tox_ini)
assert outcome == ok
def test_format_envlist_generator(tmp_path):
path = tmp_path / "tox.ini"
path.write_text("[tox]\nenvlist={py36,py37}-django{20,21},{py36,py37}-mango{20,21},py38\n")
outcome = format_tox_ini(path)
assert outcome == "[tox]\nenvlist =\n py38\n {py37, py36}-django{21, 20}\n {py37, py36}-mango{21, 20}\n"
def test_tox_section_order(tox_ini):
tox_ini.write_text(
"[tox]\nskip_missing_interpreters=true\nisolated_build=true\nminversion=3.14\nskipsdist=false\nenvlist=py37"
)
outcome = format_tox_ini(tox_ini)
assert (
outcome == "[tox]\nenvlist =\n py37\nisolated_build = true\nskipsdist = false\n"
"skip_missing_interpreters = true\nminversion = 3.14\n"
)
@pytest.mark.parametrize(
"key",
(
"isolated_build",
"skipsdist",
"skip_missing_interpreters",
),
)
@pytest.mark.parametrize(
"value, result",
[
("True", "true"),
("False", "false"),
("TRUE", "true"),
("FALSE", "false"),
],
)
def test_tox_fmt_boolean(tox_ini, key, value, result):
tox_ini.write_text(f"[tox]\n{key}={value}")
outcome = format_tox_ini(tox_ini)
expected = f"[tox]\n{key} = {result}\n"
assert outcome == expected
@pytest.mark.parametrize(
"arg, outcome",
[
([], []),
(["py38", "py37"], ["py38", "py37"]),
(["py37", "py38"], ["py38", "py37"]),
(["py", "py37", "pypy3", "py38", "pypy2", "pypy"], ["py38", "py37", "py", "pypy3", "pypy2", "pypy"]),
(["py38-dpkg", "py38", "py37-dpkg", "py37"], ["py38-dpkg", "py38", "py37-dpkg", "py37"]),
(["py37-dpkg", "py37", "py38-dpkg", "py38"], ["py38-dpkg", "py38", "py37-dpkg", "py37"]),
(["py37", "py37-dpkg", "py38", "py38-dpkg"], ["py38", "py38-dpkg", "py37", "py37-dpkg"]),
(["Jython", "jython36", "jython", "Jython27", "py38"], ["py38", "jython36", "Jython27", "Jython", "jython"]),
],
)
def test_order_env_list(arg, outcome):
order_env_list(arg)
assert arg == outcome
| 31.771084
| 117
| 0.61168
|
a207113b6d8d2a2de7d57d9bc79f2b4b5d963f0f
| 4,168
|
py
|
Python
|
lnbits/extensions/satspay/views_api.py
|
blackcoffeexbt/lnbits-legend
|
a9f2877af77ea56d1900e2b5bc1c21b9b7ac2f64
|
[
"MIT"
] | null | null | null |
lnbits/extensions/satspay/views_api.py
|
blackcoffeexbt/lnbits-legend
|
a9f2877af77ea56d1900e2b5bc1c21b9b7ac2f64
|
[
"MIT"
] | null | null | null |
lnbits/extensions/satspay/views_api.py
|
blackcoffeexbt/lnbits-legend
|
a9f2877af77ea56d1900e2b5bc1c21b9b7ac2f64
|
[
"MIT"
] | null | null | null |
from http import HTTPStatus
import httpx
from fastapi import Query
from fastapi.params import Depends
from starlette.exceptions import HTTPException
from lnbits.decorators import WalletTypeInfo, get_key_type, require_admin_key
from lnbits.extensions.satspay import satspay_ext
from .crud import (
    check_address_balance,
    create_charge,
    create_mempool,
    delete_charge,
    get_charge,
    get_charges,
    get_mempool,
    update_charge,
    update_mempool,
)
from .models import CreateCharge
#############################CHARGES##########################
@satspay_ext.post("/api/v1/charge")
@satspay_ext.put("/api/v1/charge/{charge_id}")
async def api_charge_create_or_update(
data: CreateCharge,
wallet: WalletTypeInfo = Depends(require_admin_key),
charge_id=None,
):
if not charge_id:
charge = await create_charge(user=wallet.wallet.user, data=data)
return charge.dict()
else:
charge = await update_charge(charge_id=charge_id, data=data)
return charge.dict()
@satspay_ext.get("/api/v1/charges")
async def api_charges_retrieve(wallet: WalletTypeInfo = Depends(get_key_type)):
try:
return [
{
**charge.dict(),
**{"time_elapsed": charge.time_elapsed},
**{"paid": charge.paid},
}
for charge in await get_charges(wallet.wallet.user)
]
    except Exception:
        return ""
@satspay_ext.get("/api/v1/charge/{charge_id}")
async def api_charge_retrieve(
charge_id, wallet: WalletTypeInfo = Depends(get_key_type)
):
charge = await get_charge(charge_id)
if not charge:
raise HTTPException(
status_code=HTTPStatus.NOT_FOUND, detail="Charge does not exist."
)
return {
**charge.dict(),
**{"time_elapsed": charge.time_elapsed},
**{"paid": charge.paid},
}
@satspay_ext.delete("/api/v1/charge/{charge_id}")
async def api_charge_delete(charge_id, wallet: WalletTypeInfo = Depends(get_key_type)):
charge = await get_charge(charge_id)
if not charge:
raise HTTPException(
status_code=HTTPStatus.NOT_FOUND, detail="Charge does not exist."
)
await delete_charge(charge_id)
raise HTTPException(status_code=HTTPStatus.NO_CONTENT)
#############################BALANCE##########################
@satspay_ext.get("/api/v1/charges/balance/{charge_id}")
async def api_charges_balance(charge_id):
charge = await check_address_balance(charge_id)
if not charge:
raise HTTPException(
status_code=HTTPStatus.NOT_FOUND, detail="Charge does not exist."
)
if charge.paid and charge.webhook:
async with httpx.AsyncClient() as client:
try:
r = await client.post(
charge.webhook,
json={
"id": charge.id,
"description": charge.description,
"onchainaddress": charge.onchainaddress,
"payment_request": charge.payment_request,
"payment_hash": charge.payment_hash,
"time": charge.time,
"amount": charge.amount,
"balance": charge.balance,
"paid": charge.paid,
"timestamp": charge.timestamp,
"completelink": charge.completelink,
},
timeout=40,
)
except AssertionError:
charge.webhook = None
return charge.dict()
#############################MEMPOOL##########################
@satspay_ext.put("/api/v1/mempool")
async def api_update_mempool(
endpoint: str = Query(...), wallet: WalletTypeInfo = Depends(get_key_type)
):
mempool = await update_mempool(endpoint, user=wallet.wallet.user)
return mempool.dict()
@satspay_ext.get("/api/v1/mempool")
async def api_get_mempool(wallet: WalletTypeInfo = Depends(get_key_type)):
mempool = await get_mempool(wallet.wallet.user)
if not mempool:
mempool = await create_mempool(user=wallet.wallet.user)
return mempool.dict()
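
# Example request against the charge endpoint (host, key and payload are
# hypothetical):
#   curl -X POST https://lnbits.example/satspay/api/v1/charge \
#        -H "X-Api-Key: <admin key>" -H "Content-Type: application/json" \
#        -d '{"description": "demo", "amount": 1000}'
# Once paid, /api/v1/charges/balance/{charge_id} POSTs the charge state to
# `charge.webhook`, if one is configured.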
| 29.771429
| 87
| 0.599568
|
449240175d70a5d1fb34bc53bd86918f80fca4c0
| 3,837
|
py
|
Python
|
geneparse/index/__main__.py
|
legaultmarc/geneparse
|
5a844df77ded5adc765a086a8d346fce6ba01f3d
|
[
"MIT"
] | 4
|
2018-11-09T11:10:24.000Z
|
2021-07-23T22:17:58.000Z
|
geneparse/index/__main__.py
|
legaultmarc/geneparse
|
5a844df77ded5adc765a086a8d346fce6ba01f3d
|
[
"MIT"
] | 5
|
2017-05-02T15:28:01.000Z
|
2018-04-16T18:29:15.000Z
|
geneparse/index/__main__.py
|
legaultmarc/geneparse
|
5a844df77ded5adc765a086a8d346fce6ba01f3d
|
[
"MIT"
] | 1
|
2017-05-12T17:58:32.000Z
|
2017-05-12T17:58:32.000Z
|
"""Simple script to index genotype files of different format."""
# This file is part of geneparse.
#
# The MIT License (MIT)
#
# Copyright (c) 2017 Pharmacogenomics Centre
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import sys
import logging
import argparse
import subprocess
from .impute2 import generate_index as impute2_index
# Logging configuration
logging.basicConfig(
level=logging.INFO,
format="[%(asctime)s %(levelname)s] %(message)s",
datefmt="%Y-%m-%d %H:%M:%S",
)
logger = logging.getLogger("geneparse-indexer")
def main():
args = parse_args()
# IMPUTE2
if args.impute2:
for fn in args.impute2:
index_impute2(fn)
# BGEN
if args.bgen:
for fn in args.bgen:
index_bgen(fn, legacy=args.legacy)
def index_impute2(fn):
"""Indexes an IMPUTE2 file.
Args:
fn (str): The name of the IMPUTE2 file.
"""
logger.info("Indexing {} (IMPUTE2)".format(fn))
impute2_index(fn, cols=[0, 1, 2], names=["chrom", "name", "pos"], sep=" ")
logger.info("Index generated")
def index_bgen(fn, legacy=False):
"""Indexes a BGEN file.
Args:
        fn (str): The name of the BGEN file.
        legacy (bool): If True, pass '-with-rowid' to bgenix for
            compatibility with SQLite versions prior to 3.8.2.
    """
logger.info("Indexing {} (BGEN) using 'bgenix'{}".format(
fn, " (legacy mode)" if legacy else "",
))
command = ["bgenix", "-g", fn, "-index"]
if legacy:
command.append("-with-rowid")
try:
logger.info("Executing '{}'".format(" ".join(command)))
subprocess.Popen(command).communicate()
except FileNotFoundError:
logger.error("Cannot find 'bgenix', impossible to index {}".format(fn))
sys.exit(1)
logger.info("Index generated")
def parse_args():
"""Parses the arguments and options."""
parser = argparse.ArgumentParser(
prog="geneparse-indexer",
description="Genotype file indexer."
)
# IMPUTE2 files
group = parser.add_argument_group("IMPUTE2 index")
group.add_argument(
"--impute2", metavar="IMPUTE2", type=str, nargs="+",
help="Index an IMPUTE2 genotype file format. The file can be plain "
"text or bgzipped.",
)
# BGEN files
group = parser.add_argument_group("BGEN index")
group.add_argument(
"--bgen", metavar="BGEN", type=str, nargs="+",
help="Index a BGEN genotype file. This requires 'bgenix' to be in the "
"PATH.",
)
group.add_argument(
"--legacy", action="store_true",
help="Index the file using the '-with-rowid' option. This flag "
"enables compatibility with SQLITE prior to version 3.8.2. See "
"https://bitbucket.org/gavinband/bgen/wiki/bgenix for more "
"information.",
)
return parser.parse_args()
if __name__ == "__main__":
main()
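
# Typical invocations (file names are hypothetical):
#   geneparse-indexer --impute2 chr1.impute2.gz
#   geneparse-indexer --bgen study.bgen --legacy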
| 29.744186
| 79
| 0.658327
|
cc6d2b8302bbbf06df69ee4c4ad7d77c90460b3f
| 354
|
py
|
Python
|
nidaqmx_examples/ci_count_edges.py
|
stafak/nidaqmx-python
|
f354d7971b21074c120c6f298dbbf4a5e0e4f4f4
|
[
"MIT"
] | 252
|
2017-03-22T02:43:16.000Z
|
2022-03-27T14:44:44.000Z
|
nidaqmx_examples/ci_count_edges.py
|
stafak/nidaqmx-python
|
f354d7971b21074c120c6f298dbbf4a5e0e4f4f4
|
[
"MIT"
] | 133
|
2017-03-21T20:57:59.000Z
|
2022-03-31T16:08:12.000Z
|
nidaqmx_examples/ci_count_edges.py
|
stafak/nidaqmx-python
|
f354d7971b21074c120c6f298dbbf4a5e0e4f4f4
|
[
"MIT"
] | 124
|
2017-04-01T18:35:24.000Z
|
2022-03-25T06:30:00.000Z
|
import nidaqmx
import pprint
pp = pprint.PrettyPrinter(indent=4)
with nidaqmx.Task() as task:
task.ci_channels.add_ci_count_edges_chan("Dev1/ctr0")
print('1 Channel 1 Sample Read: ')
data = task.read()
pp.pprint(data)
print('1 Channel N Samples Read: ')
data = task.read(number_of_samples_per_channel=8)
pp.pprint(data)
| 20.823529
| 57
| 0.69774
|
484fddc2bb5f74a8b2793d6df08fb5bf51084fda
| 1,156
|
py
|
Python
|
.history/my_classes/FirstClassFunctions/LambdaExpressions_20210704151550.py
|
minefarmer/deep-Dive-1
|
b0675b853180c5b5781888266ea63a3793b8d855
|
[
"Unlicense"
] | null | null | null |
.history/my_classes/FirstClassFunctions/LambdaExpressions_20210704151550.py
|
minefarmer/deep-Dive-1
|
b0675b853180c5b5781888266ea63a3793b8d855
|
[
"Unlicense"
] | null | null | null |
.history/my_classes/FirstClassFunctions/LambdaExpressions_20210704151550.py
|
minefarmer/deep-Dive-1
|
b0675b853180c5b5781888266ea63a3793b8d855
|
[
"Unlicense"
] | null | null | null |
"""[Lambda Expressions]
Lambda expressions are simply another way to create functions anonymous functions
keyword \ parameter list optional
\ \ the : is required, even for zero arguments
\ \ / / this expression is evaluated and returned when the lambda function is called. (think of it as "the body" of the function)
lambda [parameter list]: expression
\
the expression returns a function object
that evaluates and returns the expression when it is called
Examples
from tkinter import Y
from unittest import FunctionTestCase
lambda x: x**2
lambda x, y: x + y
lambda : 'hello'
lambda s: s[::-1].upper()
type(lambda x: x**2) -> function
Note that these expressions are function objects, but are not "named"
-> anonymous Functions
lambdas, or anonymous functions, are NOT equivalent to closures
Assigning a Lambda to a Variable name
"""
my_func = lambda x: x**2
type(my_func) -> fuunction
my_func(3) -> 9
my_func(4) -> 16
identical to:
def my_func(x):
return x**2
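
# A common real-world use is a throwaway key function:
sorted(['bb', 'a', 'ccc'], key=lambda s: len(s))  # -> ['a', 'bb', 'ccc']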
| 19.931034
| 161
| 0.628893
|
1c8b2a839fa5f9141fc650631ea5f05ad4921f2e
| 529
|
py
|
Python
|
slu/slu/utils/decorators.py
|
Vernacular-ai/slu-service
|
c61af67ead471e47202036779eeda124e57d9850
|
[
"MIT"
] | 4
|
2021-09-07T12:15:34.000Z
|
2022-03-18T03:39:08.000Z
|
slu/slu/utils/decorators.py
|
Vernacular-ai/slu-service
|
c61af67ead471e47202036779eeda124e57d9850
|
[
"MIT"
] | 28
|
2021-04-13T09:08:25.000Z
|
2021-07-28T17:17:04.000Z
|
slu/slu/utils/decorators.py
|
skit-ai/dialogy-template-simple-transformers
|
82748f49f63212ab578c199545122091d19e6939
|
[
"MIT"
] | 4
|
2021-03-10T08:34:27.000Z
|
2021-08-04T06:21:35.000Z
|
import functools
from slu import constants as const
def task_guard(func):
    @functools.wraps(func)
    def wrapper(self, task_name: str, *args, **kwargs):
supported_tasks = {const.CLASSIFICATION, const.NER}
if task_name not in supported_tasks:
raise ValueError(f"Task should be one of {supported_tasks}")
use_task = self.task_by_name(task_name).use
if use_task:
value = func(self, task_name, *args, **kwargs)
return value
else:
return None
return wrapper
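
# Hypothetical usage sketch - the class and method names below are
# illustrative; the decorator only requires `self.task_by_name(name).use`:
#
# class Config:
#     def task_by_name(self, name):
#         ...
#
#     @task_guard
#     def task_args(self, task_name: str):
#         return {"name": task_name}
#
# Config().task_args(const.CLASSIFICATION)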
| 24.045455
| 72
| 0.63138
|
a3f185ca9131bda81241d1bd083349fd53ba3374
| 4,296
|
py
|
Python
|
virtac/mirror_objects.py
|
dls-controls/atip
|
7d3f79a9ca23abb94b5348142aa0d7f8d6094ca3
|
[
"Apache-2.0"
] | null | null | null |
virtac/mirror_objects.py
|
dls-controls/atip
|
7d3f79a9ca23abb94b5348142aa0d7f8d6094ca3
|
[
"Apache-2.0"
] | 14
|
2019-06-10T17:23:37.000Z
|
2021-09-20T13:09:14.000Z
|
virtac/mirror_objects.py
|
dls-controls/atip
|
7d3f79a9ca23abb94b5348142aa0d7f8d6094ca3
|
[
"Apache-2.0"
] | 1
|
2019-06-10T12:14:14.000Z
|
2019-06-10T12:14:14.000Z
|
import numpy
class summate(object):
"""This class is designed to be passed instead of a mirror record, when its
set method is then called it takes the sum of all the input records and
sets it to the output record.
"""
def __init__(self, input_records, output_record):
"""
Args:
input_records (list): A list of records to take values from.
output_record (pythonSoftIoc.RecordWrapper): The record to set the
sum to.
"""
self.input_records = input_records
self.output_record = output_record
self.name = output_record.name
def set(self, value=None):
"""An imitation of the set method of Soft-IOC records, that sums the
values of the held input records and then sets it to the output record.
        N.B. The initial value passed by the call is discarded.
"""
value = sum([record.get() for record in self.input_records])
self.output_record.set(value)
class collate(object):
"""This class is designed to be passed instead of a mirror record, when its
set method is then called it gets the values of all the input records and
combines them in order before setting the combined array to the output
waveform record.
"""
def __init__(self, input_records, output_record):
"""
Args:
input_records (list): A list of records to take values from.
output_record (pythonSoftIoc.RecordWrapper): The record to set the
combined array to.
"""
self.input_records = input_records
self.output_record = output_record
self.name = output_record.name
def set(self, value=None):
"""An imitation of the set method of Soft-IOC records, that combines
the values of the held input records and then sets the resulting array
to the held output record.
        N.B. The initial value passed by the call is discarded.
"""
value = numpy.array([record.get() for record in self.input_records])
self.output_record.set(value)
class transform(object):
"""This class is designed to be passed instead of a mirror record, when its
set method is then called it applies the held transformation and then sets
the new value to the held output record.
"""
def __init__(self, transformation, output_record):
"""
Args:
transformation (callable): The transformation to be applied.
output_record (pythonSoftIoc.RecordWrapper): The record to set the
transformed value to.
"""
if not callable(transformation):
raise TypeError(
"Transformation should be a callable, {0} is not.".format(
transformation
)
)
self.output_record = output_record
self.transformation = transformation
self.name = output_record.name
def set(self, value):
"""An imitation of the set method of Soft-IOC records, that applies a
transformation to the value before setting it to the output record.
"""
value = numpy.asarray(value, dtype=bool)
value = numpy.asarray(self.transformation(value), dtype=int)
self.output_record.set(value)
class refresher(object):
"""This class is designed to be passed instead of a mirror record, when its
set method is then called it refreshes the held PV on the held server.
"""
def __init__(self, server, output_pv):
"""
Args:
server (atip_server.ATIPServer): The server object on which to
refresh the PV.
output_pv (str): The name of the record to refresh.
"""
self.server = server
self.output_pv = output_pv
self.name = output_pv + ":REFRESH"
def set(self, value=None):
"""An imitation of the set method of Soft-IOC records, that refreshes
the held output records.
        N.B. The initial value passed by the call is discarded.
"""
self.server.refresh_record(self.output_pv)
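
# Minimal usage sketch (the record objects are hypothetical pythonSoftIoc
# records exposing get()/set()):
#
# total = summate([rec_a, rec_b], total_rec)
# total.set()  # any passed value is ignored; rec_a.get() + rec_b.get()
#              # is written to total_rec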
| 38.357143
| 79
| 0.612197
|
e33ec8f606c139beab30fbe3c3d90983dff476c4
| 5,711
|
py
|
Python
|
siuba/sql/dialects/sqlite.py
|
Techzune/siuba
|
575bffe016c40ccd4045d800f1c542e43a77aa50
|
[
"MIT"
] | null | null | null |
siuba/sql/dialects/sqlite.py
|
Techzune/siuba
|
575bffe016c40ccd4045d800f1c542e43a77aa50
|
[
"MIT"
] | null | null | null |
siuba/sql/dialects/sqlite.py
|
Techzune/siuba
|
575bffe016c40ccd4045d800f1c542e43a77aa50
|
[
"MIT"
] | null | null | null |
# sqlvariant, allow defining 3 namespaces to override defaults
from ..translate import (
SqlColumn, SqlColumnAgg, extend_base,
SqlTranslator,
sql_not_impl,
win_cumul,
win_agg,
annotate,
wrap_annotate
)
from .base import base_nowin
#from .postgresql import PostgresqlColumn as SqlColumn, PostgresqlColumnAgg as SqlColumnAgg
from . import _dt_generics as _dt
import sqlalchemy.sql.sqltypes as sa_types
from sqlalchemy import sql
from sqlalchemy.sql import func as fn
# Custom dispatching in call trees ============================================
# Note that aggs do not inherit SqlColumnAgg, since we disable aggregate functions
# for sqlite. Could add them in, as recent versions support a wide range of aggs.
class SqliteColumn(SqlColumn): pass
class SqliteColumnAgg(SqlColumnAgg, SqliteColumn): pass
# Translations ================================================================
# fix some annotations --------------------------------------------------------
# Note: this is adapted from the postgres dialect, but there are 2 key
# differences from postgresql (which always returns a float):
# * sqlite date parts are returned as floats
# * sqlite time parts are returned as integers
def returns_float(func_names):
# TODO: MC-NOTE - shift all translations to directly register
# TODO: MC-NOTE - make an AliasAnnotated class or something, that signals
# it is using another method, but w/ an updated annotation.
from siuba.ops import ALL_OPS
for name in func_names:
generic = ALL_OPS[name]
f_concrete = generic.dispatch(SqlColumn)
f_annotated = wrap_annotate(f_concrete, result_type="float")
generic.register(SqliteColumn, f_annotated)
# detect first and last date (similar to the mysql dialect) -------------------
@annotate(return_type="float")
def sql_extract(name):
if name == "quarter":
        # division in sqlite automatically rounds down,
        # so for jan: 1 + 2 = 3, and 3 / 3 = 1, i.e. Q1
return lambda _, col: (fn.strftime("%m", col) + 2) / 3
return lambda _, col: fn.extract(name, col)
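# A hypothetical sanity check of the quarter arithmetic above, run directly
# against sqlite3 (illustrative only, not part of the dialect):
def _demo_quarter_arithmetic():
    import sqlite3
    con = sqlite3.connect(":memory:")
    # April: ('04' + 2) / 3 coerces to (4 + 2) / 3 == 2, i.e. Q2
    assert con.execute(
        "SELECT (strftime('%m', '2021-04-15') + 2) / 3"
    ).fetchone()[0] == 2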
@_dt.sql_is_last_day_of.register
def _sql_is_last_day_of(codata: SqliteColumn, col, period):
valid_periods = {"month", "year"}
if period not in valid_periods:
raise ValueError(f"Period must be one of {valid_periods}")
incr = f"+1 {period}"
target_date = fn.date(col, f'start of {period}', incr, "-1 day")
return col == target_date
@_dt.sql_is_first_day_of.register
def _sql_is_first_day_of(codata: SqliteColumn, col, period):
valid_periods = {"month", "year"}
if period not in valid_periods:
raise ValueError(f"Period must be one of {valid_periods}")
target_date = fn.date(col, f'start of {period}')
return fn.date(col) == target_date
# date part of period calculations --------------------------------------------
def sql_days_in_month(_, col):
date_last_day = fn.date(col, 'start of month', '+1 month', '-1 day')
return fn.strftime("%d", date_last_day).cast(sa_types.Integer())
def sql_week_of_year(_, col):
# convert sqlite week to ISO week
# adapted from: https://stackoverflow.com/a/15511864
    # day-of-year (0-indexed) of the Thursday in this date's ISO week
    iso_day_of_year = (fn.strftime("%j", fn.date(col, "-3 days", "weekday 4")) - 1)
    return (iso_day_of_year / 7) + 1
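# A hypothetical check of the ISO-week arithmetic (illustrative only):
# 2016-01-04 is a Monday in ISO week 1 of 2016.
def _demo_iso_week():
    import sqlite3
    con = sqlite3.connect(":memory:")
    day_of_year = con.execute(
        "SELECT strftime('%j', date('2016-01-04', '-3 days', 'weekday 4')) - 1"
    ).fetchone()[0]
    assert day_of_year // 7 + 1 == 1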
# misc ------------------------------------------------------------------------
@annotate(result_type = "float")
def sql_round(_, col, n):
return sql.func.round(col, n)
def sql_func_truediv(_, x, y):
return sql.cast(x, sa_types.Float()) / y
def between(_, col, x, y):
res = col.between(x, y)
# tell sqlalchemy the result is a boolean. this causes it to be correctly
# converted from an integer to bool when the results are collected.
# note that this is consistent with what col == col returns
res.type = sa_types.Boolean()
return res
def sql_str_capitalize(_, col):
# capitalize first letter, then concatenate with lowercased rest
first_upper = fn.upper(fn.substr(col, 1, 1))
rest_lower = fn.lower(fn.substr(col, 2))
return first_upper.concat(rest_lower)
extend_base(
SqliteColumn,
between = between,
clip = sql_not_impl("sqlite does not have a least or greatest function."),
div = sql_func_truediv,
divide = sql_func_truediv,
rdiv = lambda _, x,y: sql_func_truediv(_, y, x),
__truediv__ = sql_func_truediv,
truediv = sql_func_truediv,
__rtruediv__ = lambda _, x, y: sql_func_truediv(_, y, x),
round = sql_round,
__round__ = sql_round,
**{
"str.title": sql_not_impl("TODO"),
"str.capitalize": sql_str_capitalize,
},
**{
"dt.quarter": sql_extract("quarter"),
"dt.is_quarter_start": sql_not_impl("TODO"),
"dt.is_quarter_end": sql_not_impl("TODO"),
"dt.days_in_month": sql_days_in_month,
"dt.daysinmonth": sql_days_in_month,
"dt.week": sql_week_of_year,
"dt.weekofyear": sql_week_of_year,
}
)
returns_float([
"dt.dayofweek",
"dt.weekday",
])
extend_base(
SqliteColumn,
# TODO: should check sqlite version, since < 3.25 can't use windows
cumsum = win_cumul("sum"),
quantile = sql_not_impl("sqlite does not support ordered set aggregates"),
sum = win_agg("sum"),
)
extend_base(
SqliteColumnAgg,
quantile = sql_not_impl("sqlite does not support ordered set aggregates"),
)
translator = SqlTranslator.from_mappings(
SqliteColumn, SqliteColumnAgg
)
| 31.727778
| 91
| 0.629312
|
dd910dee5d666326c88ae6f4e84a202cdb23ec49
| 1,355
|
py
|
Python
|
tests/hazmat/primitives/test_constant_time.py
|
glyph/cryptography
|
43cf688e885668198bc966b1cf3a4a425a60f1a6
|
[
"Apache-2.0"
] | 1
|
2020-08-26T01:36:57.000Z
|
2020-08-26T01:36:57.000Z
|
tests/hazmat/primitives/test_constant_time.py
|
Lukasa/cryptography
|
9341ddca88aebdd04f07adfefaa2ef8133ce2bca
|
[
"Apache-2.0"
] | 4
|
2021-03-22T02:00:19.000Z
|
2021-04-07T07:40:19.000Z
|
tests/hazmat/primitives/test_constant_time.py
|
majacQ/cryptography
|
add8bec357f09aba6609af16577111addec07ef7
|
[
"Apache-2.0"
] | null | null | null |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
import pytest
import six
from cryptography.hazmat.primitives import constant_time
class TestConstantTimeBytesEq(object):
def test_reject_unicode(self):
with pytest.raises(TypeError):
constant_time.bytes_eq(b"foo", six.u("foo"))
with pytest.raises(TypeError):
constant_time.bytes_eq(six.u("foo"), b"foo")
with pytest.raises(TypeError):
constant_time.bytes_eq(six.u("foo"), six.u("foo"))
def test_compares(self):
assert constant_time.bytes_eq(b"foo", b"foo") is True
assert constant_time.bytes_eq(b"foo", b"bar") is False
assert constant_time.bytes_eq(b"foobar", b"foo") is False
assert constant_time.bytes_eq(b"foo", b"foobar") is False
| 32.261905
| 69
| 0.717343
|
7e84d63c45f9b78429a504a9f8a2e1d84f1c1621
| 6,233
|
py
|
Python
|
deploy_config_generator/template.py
|
ApplauseAQI/applause-deploy-config-generator
|
46f957fbfe991677f920d5db74b0670385b6e505
|
[
"MIT"
] | null | null | null |
deploy_config_generator/template.py
|
ApplauseAQI/applause-deploy-config-generator
|
46f957fbfe991677f920d5db74b0670385b6e505
|
[
"MIT"
] | null | null | null |
deploy_config_generator/template.py
|
ApplauseAQI/applause-deploy-config-generator
|
46f957fbfe991677f920d5db74b0670385b6e505
|
[
"MIT"
] | null | null | null |
import jinja2
import json
import re
from deploy_config_generator.errors import TemplateUndefinedError
OMIT_TOKEN = '__OMIT__TOKEN__'
class UnsafeText(str):
__UNSAFE__ = True
# The name of the decorator changed in 3.x, so this allows us to support both
if hasattr(jinja2, 'contextfunction'):
jinja2_contextfunction = jinja2.contextfunction
elif hasattr(jinja2, 'pass_context'):
jinja2_contextfunction = jinja2.pass_context
else:
raise Exception('could not determine Jinja2 context decorator')
class Template(object):
def __init__(self, recursive=True, default_vars=None):
# Whether to recursively resolve vars
self._recursive = recursive
# Default vars
self._default_vars = default_vars
# Setup custom Jinja2 Environment instance with our own 'finalize' function,
# filters, and top-level functions. We use StrictUndefined to raise an exception
# when accessing an undefined var, so that we can report it to the user
self._env = jinja2.Environment(finalize=self.finalize, undefined=jinja2.StrictUndefined, keep_trailing_newline=True)
self._env.filters.update(FILTERS)
self._env.globals.update(GLOBALS)
@jinja2_contextfunction
def finalize(self, context, value):
'''
This function is called on rendered vars before outputting them. This allows
us to do recursive templating of vars (vars referencing other vars)
'''
# If the value appears to contain a template, render it and return the result
if self._recursive and isinstance(value, str):
if '{{' in value or '{%' in value:
return context.environment.from_string(value).render(context)
return value
def type_fixup(self, value):
'''
This function looks for a type header/footer (as added by the various output_*
Jinja filters) and converts as necessary
'''
if isinstance(value, str):
# This regex looks for a value like '__int__whatever__int__' and captures
# the value in the middle
matches = re.match(r'^__(?P<type>[a-z]+)__(.*)__(?P=type)__$', value)
if matches:
value_type = matches.group(1)
if value_type == 'int':
return int(matches.group(2))
if value_type == 'float':
return float(matches.group(2))
if value_type == 'bool':
if matches.group(2).lower() == 'true':
return True
return False
if value_type == 'complex':
                    # Parse the serialized complex value; complex() accepts
                    # the repr format (e.g. '(1+2j)') without using eval()
                    return complex(matches.group(2))
return value
def render_template(self, template, args=None):
'''
This function will recursively render templates in strings, dicts, and lists
'''
if args is None:
args = self._default_vars
if isinstance(template, UnsafeText):
return template
if isinstance(template, dict):
ret = {}
for k, v in template.items():
v = self.type_fixup(self.render_template(v, args))
if v == OMIT_TOKEN:
continue
ret[k] = v
return ret
elif isinstance(template, (list, tuple)):
ret = []
for i, v in enumerate(template):
v = self.type_fixup(self.render_template(v, args))
if v == OMIT_TOKEN:
continue
ret.append(v)
return ret
elif isinstance(template, str):
try:
return self._env.from_string(template).render(**args)
except jinja2.exceptions.UndefinedError as e:
raise TemplateUndefinedError('undefined value: %s in template: %s' % (str(e), template))
else:
return template
def evaluate_condition(self, condition, tmp_vars):
'''
This function uses Jinja to evaluate a conditional statement
'''
ret = self._env.from_string('{% if ' + condition + ' %}True{% else %}False{% endif %}').render(**tmp_vars)
if ret == 'True':
return True
return False
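# Illustrative sketch (not part of the original module): with the recursive
# 'finalize' above, a var that references another var resolves transparently.
# The var names below are made up for the example.
def _demo_recursive_vars():
    t = Template(default_vars={'host': 'db.local', 'url': 'postgres://{{ host }}/app'})
    assert t.render_template('{{ url }}') == 'postgres://db.local/app'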
def filter_output_int(arg):
return '__int__' + str(arg) + '__int__'
def filter_output_float(arg):
return '__float__' + str(arg) + '__float__'
def filter_output_bool(arg):
return '__bool__' + str(arg) + '__bool__'
def filter_output_complex(arg):
return '__complex__' + str(arg) + '__complex__'
def filter_to_json(arg, **args):
return json.dumps(arg, sort_keys=True, **args)
def filter_to_nice_json(arg, indent=2, prefix_indent=None, **args):
out = filter_to_json(arg, indent=indent, **args)
# Add extra indentation to all lines to account for being embedded in a
# larger JSON document
if prefix_indent:
out = '\n'.join([ (' ' * prefix_indent) + line for line in out.split('\n') ])
return out
def filter_default(arg, default):
'''
Custom version of default() filter that also returns the default when arg
is None, in addition to when arg is undefined
'''
if arg is None or isinstance(arg, (jinja2.Undefined, jinja2.StrictUndefined)):
return default
return arg
def filter_regex_replace(arg, pattern, replacement):
return re.sub(pattern, replacement, str(arg))
@jinja2_contextfunction
def evaluate_condition(context, condition, **kwargs):
tmp_vars = context.get_all()
tmp_vars.update(kwargs)
ret = context.environment.from_string('{% if ' + condition + ' %}True{% else %}False{% endif %}').render(**tmp_vars)
if ret == 'True':
return True
return False
FILTERS = {
'output_int': filter_output_int,
'output_float': filter_output_float,
'output_bool': filter_output_bool,
'output_complex': filter_output_complex,
'to_json': filter_to_json,
'to_nice_json': filter_to_nice_json,
'default': filter_default,
'regex_replace': filter_regex_replace,
}
GLOBALS = {
'evaluate_condition': evaluate_condition,
'omit': OMIT_TOKEN,
}
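# Illustrative sketch of the type round-trip (not part of the original
# module): the output_int filter tags the value and type_fixup() restores
# the native int when the rendered dict is assembled.
def _demo_type_fixup():
    t = Template(default_vars={'port': 8080})
    assert t.render_template({'port': '{{ port | output_int }}'}) == {'port': 8080}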
| 33.691892
| 124
| 0.623937
|
cec3e744b9011bded06e5803982058a5cc76e1c7
| 120
|
py
|
Python
|
funboost/factories/__init__.py
|
DJMIN/funboost
|
7570ca2909bb0b44a1080f5f98aa96c86d3da9d4
|
[
"Apache-2.0"
] | 333
|
2019-08-08T10:25:27.000Z
|
2022-03-30T07:32:04.000Z
|
funboost/factories/__init__.py
|
mooti-barry/funboost
|
2cd9530e2c4e5a52fc921070d243d402adbc3a0e
|
[
"Apache-2.0"
] | 38
|
2020-04-24T01:47:51.000Z
|
2021-12-20T07:22:15.000Z
|
funboost/factories/__init__.py
|
mooti-barry/funboost
|
2cd9530e2c4e5a52fc921070d243d402adbc3a0e
|
[
"Apache-2.0"
] | 84
|
2019-08-09T11:51:14.000Z
|
2022-03-02T06:29:09.000Z
|
# -*- coding: utf-8 -*-
# @Author : ydf
# @Time : 2019/8/8 0008 13:17
"""
Factory pattern: generates consumers and publishers for different middleware types according to broker_kind.
"""
| 17.142857
| 37
| 0.6
|
bb95cf8c5f03b060cb9c48e709808c33c92016ac
| 2,438
|
py
|
Python
|
Python OOP Retake Exam - 19 Dec 2020/problem1/bunker.py
|
DiyanKalaydzhiev23/OOP---Python
|
7ac424d5fb08a6bd28dc36593e45d949b3ac0cd0
|
[
"MIT"
] | null | null | null |
Python OOP Retake Exam - 19 Dec 2020/problem1/bunker.py
|
DiyanKalaydzhiev23/OOP---Python
|
7ac424d5fb08a6bd28dc36593e45d949b3ac0cd0
|
[
"MIT"
] | null | null | null |
Python OOP Retake Exam - 19 Dec 2020/problem1/bunker.py
|
DiyanKalaydzhiev23/OOP---Python
|
7ac424d5fb08a6bd28dc36593e45d949b3ac0cd0
|
[
"MIT"
] | null | null | null |
class Bunker:
def __init__(self):
self.survivors = []
self.supplies = []
self.medicine = []
@property
def food(self):
        food_supplies = [f for f in self.supplies if type(f).__name__ == "FoodSupply"]
if not food_supplies:
raise IndexError("There are no food supplies left!")
return food_supplies
@property
def water(self):
        water_supplies = [w for w in self.supplies if type(w).__name__ == "WaterSupply"]
if not water_supplies:
raise IndexError("There are no water supplies left!")
return water_supplies
@property
def painkillers(self):
        painkillers_supplies = [p for p in self.medicine if type(p).__name__ == "Painkillers"]
if not painkillers_supplies:
raise IndexError("There are no painkillers left!")
return painkillers_supplies
@property
def salves(self):
        salves_supplies = [s for s in self.medicine if type(s).__name__ == "Salves"]
if not salves_supplies:
raise IndexError("There are no salves left!")
return salves_supplies
def add_survivor(self, survivor):
        if any(s.name == survivor.name for s in self.survivors):
raise ValueError(f"Survivor with name {survivor.name} already exists.")
self.survivors.append(survivor)
def add_supply(self, supply):
self.supplies.append(supply)
def add_medicine(self, medicine):
self.medicine.append(medicine)
def heal(self, survivor, medicine_type):
if survivor.needs_healing:
for med in self.medicine[::-1]:
if type(med).__name__ == medicine_type:
med.apply(survivor)
self.medicine.remove(med)
return f"{survivor.name} healed successfully with {medicine_type}"
def sustain(self, survivor, sustenance_type):
if survivor.needs_sustenance:
for sup in self.supplies[::-1]:
if type(sup).__name__ == sustenance_type:
sup.apply(survivor)
self.supplies.remove(sup)
return f"{survivor.name} sustained successfully with {sustenance_type}"
def next_day(self):
for survivor in self.survivors:
survivor.needs -= survivor.age * 2
for survivor in self.survivors:
self.sustain(survivor, "FoodSupply")
self.sustain(survivor, "WaterSupply")
| 34.828571
| 91
| 0.611567
|
956c3546b55ad7f90fdd723d12e32d27ed3536e7
| 1,190
|
py
|
Python
|
pylearn2/sandbox/rnn/scripts/wordModel.py
|
capybaralet/pylearn2
|
78d78a99bc6114616a22a4d798c7fedfe4489c58
|
[
"BSD-3-Clause"
] | null | null | null |
pylearn2/sandbox/rnn/scripts/wordModel.py
|
capybaralet/pylearn2
|
78d78a99bc6114616a22a4d798c7fedfe4489c58
|
[
"BSD-3-Clause"
] | null | null | null |
pylearn2/sandbox/rnn/scripts/wordModel.py
|
capybaralet/pylearn2
|
78d78a99bc6114616a22a4d798c7fedfe4489c58
|
[
"BSD-3-Clause"
] | null | null | null |
import numpy as np
import theano as t
from scipy.spatial.distance import cosine
class WordModel():
    def __init__(self, model, word_dict, embeddings):
        self.model = model
        self.embeddings = embeddings
        self.word_dict = word_dict
        self.iword_dict = {v: k for k, v in word_dict.iteritems()}
    def closest(self, vec, n):
        words_ = []
        # rank every embedding by cosine distance to vec
        dists = [(cosine(vec, self.embeddings[i]), i)
                 for i in range(len(self.embeddings))]
        for k in range(n):
            index = min(dists)[1]
            # mask the found entry so the next min() yields the runner-up
            dists[index] = (float("inf"), index)
            words_.append(index)
        return words_
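    def closest_argsort(self, vec, n):
        # Illustrative alternative to closest() (not in the original file):
        # rank every embedding once with argsort instead of repeated min().
        dists = np.array([cosine(vec, e) for e in self.embeddings])
        return list(np.argsort(dists)[:n])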
def findClose(self, wordvec):
indices = self.closest(wordvec, 15)
close = [self.makeWord(i) for i in indices]
return close
def runIndex(self, i):
return self.embeddings[i]
def runString(self, string):
return self.runIndex(self.word_dict[string])
def displayStringRun(self,word):
close = self.findClose(self.runString(word))
print word, ":", close
def displayIndexRun(self, index):
close = self.findClose(self.runIndex(index))
print self.makeWord(index), ":", close
def makeWord(self, i):
w = self.iword_dict[i]
return w
| 27.674419
| 74
| 0.626891
|
fe6d2d3abe42abbef05b826d85eb3561665fa27a
| 2,378
|
py
|
Python
|
src/swarm_rescue/solutions/my_drone_random.py
|
filliat/swarm-rescue
|
ce323c90ccb4a25216c63abccd09d3f31eea1189
|
[
"MIT"
] | 5
|
2021-11-11T12:19:00.000Z
|
2022-03-09T08:47:37.000Z
|
src/swarm_rescue/solutions/my_drone_random.py
|
filliat/swarm-rescue
|
ce323c90ccb4a25216c63abccd09d3f31eea1189
|
[
"MIT"
] | 4
|
2021-11-06T17:19:11.000Z
|
2022-02-16T11:46:54.000Z
|
src/swarm_rescue/solutions/my_drone_random.py
|
filliat/swarm-rescue
|
ce323c90ccb4a25216c63abccd09d3f31eea1189
|
[
"MIT"
] | 3
|
2021-11-10T17:14:55.000Z
|
2021-11-22T16:32:17.000Z
|
"""
Simple random controller
The Drone will move forward and turn for a random angle when an obstacle is hit
"""
import random
import math
from typing import Optional
from spg_overlay.drone_abstract import DroneAbstract
from spg_overlay.misc_data import MiscData
from spg_overlay.utils import normalize_angle
class MyDroneRandom(DroneAbstract):
def __init__(self,
identifier: Optional[int] = None,
misc_data: Optional[MiscData] = None,
**kwargs):
super().__init__(identifier=identifier,
misc_data=misc_data,
should_display_lidar=False,
**kwargs)
self.counterStraight = 0
self.angleStopTurning = 0
self.isTurning = False
def define_message(self):
"""
Here, we don't need communication...
"""
pass
def process_touch_sensor(self):
"""
Returns True if the drone hits an obstacle
"""
touched = False
detection = max(self.touch().sensor_values)
if detection > 0.5:
touched = True
return touched
def control(self):
"""
The Drone will move forward and turn for a random angle when an obstacle is hit
"""
command_straight = {self.longitudinal_force: 1.0,
self.lateral_force: 0.0,
self.rotation_velocity: 0.0,
self.grasp: 0,
self.activate: 0}
command_turn = {self.longitudinal_force: 0.0,
self.lateral_force: 0.0,
self.rotation_velocity: 1.0,
self.grasp: 0,
self.activate: 0}
touched = self.process_touch_sensor()
self.counterStraight += 1
if touched and not self.isTurning and self.counterStraight > 10:
self.isTurning = True
self.angleStopTurning = random.uniform(-math.pi, math.pi)
diff_angle = normalize_angle(self.angleStopTurning - self.measured_angle())
if self.isTurning and abs(diff_angle) < 0.2:
self.isTurning = False
self.counterStraight = 0
if self.isTurning:
return command_turn
else:
return command_straight
| 30.101266
| 87
| 0.565601
|
1d828556d4934bfd395252f23350666340286512
| 1,012
|
py
|
Python
|
Lib/site-packages/asn1crypto/_errors.py
|
ldepaula3/TextAnalyticsApp
|
cd87f2017cf301266a82355d4c781de67b9c6ac9
|
[
"bzip2-1.0.6"
] | null | null | null |
Lib/site-packages/asn1crypto/_errors.py
|
ldepaula3/TextAnalyticsApp
|
cd87f2017cf301266a82355d4c781de67b9c6ac9
|
[
"bzip2-1.0.6"
] | null | null | null |
Lib/site-packages/asn1crypto/_errors.py
|
ldepaula3/TextAnalyticsApp
|
cd87f2017cf301266a82355d4c781de67b9c6ac9
|
[
"bzip2-1.0.6"
] | null | null | null |
# coding: utf-8
"""
Helper for formatting exception messages. Exports the following items:
- unwrap()
"""
from __future__ import unicode_literals, division, absolute_import, print_function
import re
import textwrap
def unwrap(string, *params):
"""
Takes a multi-line string and does the following:
- dedents
- converts newlines with text before and after into a single line
- strips leading and trailing whitespace
:param string:
The string to format
:param *params:
Params to interpolate into the string
:return:
The formatted string
"""
output = textwrap.dedent(string)
# Unwrap lines, taking into account bulleted lists, ordered lists and
# underlines consisting of = signs
if output.find('\n') != -1:
output = re.sub('(?<=\\S)\n(?=[^ \n\t\\d\\*\\-=])', ' ', output)
if params:
output = output % params
output = output.strip()
return output
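# Illustrative sketch (not part of the original module): unwrap() collapses a
# wrapped literal into one line and interpolates the params.
def _demo_unwrap():
    message = unwrap(
        '''
        the value must be an
        integer, not %s
        ''',
        'str'
    )
    assert message == 'the value must be an integer, not str'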
| 22
| 83
| 0.613636
|
130aa8597f404e035a48ce476abcfed0fb45a7dd
| 456
|
py
|
Python
|
plotly/validators/layout/angularaxis/_endpadding.py
|
faezs/plotly.py
|
6009b5b9c746e5d2a2849ad255a4eb234b551ed7
|
[
"MIT"
] | 2
|
2020-03-24T11:41:14.000Z
|
2021-01-14T07:59:43.000Z
|
plotly/validators/layout/angularaxis/_endpadding.py
|
faezs/plotly.py
|
6009b5b9c746e5d2a2849ad255a4eb234b551ed7
|
[
"MIT"
] | null | null | null |
plotly/validators/layout/angularaxis/_endpadding.py
|
faezs/plotly.py
|
6009b5b9c746e5d2a2849ad255a4eb234b551ed7
|
[
"MIT"
] | 4
|
2019-06-03T14:49:12.000Z
|
2022-01-06T01:05:12.000Z
|
import _plotly_utils.basevalidators
class EndpaddingValidator(_plotly_utils.basevalidators.NumberValidator):
def __init__(
self,
plotly_name='endpadding',
parent_name='layout.angularaxis',
**kwargs
):
super(EndpaddingValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type='plot',
role='style',
**kwargs
)
| 24
| 72
| 0.607456
|
d4e0c4b0a4ea353d3e07b393f58d2cb651c89e55
| 1,907
|
py
|
Python
|
juriscraper/opinions/united_states/state/me.py
|
mmantel/juriscraper
|
a6e4b8cfb6f69d728196664a9a47e1a0cf3e3d25
|
[
"BSD-2-Clause"
] | null | null | null |
juriscraper/opinions/united_states/state/me.py
|
mmantel/juriscraper
|
a6e4b8cfb6f69d728196664a9a47e1a0cf3e3d25
|
[
"BSD-2-Clause"
] | null | null | null |
juriscraper/opinions/united_states/state/me.py
|
mmantel/juriscraper
|
a6e4b8cfb6f69d728196664a9a47e1a0cf3e3d25
|
[
"BSD-2-Clause"
] | null | null | null |
"""Scraper for Supreme Court of Maine
CourtID: me
Court Short Name: Me.
Author: Brian W. Carver
Date created: June 20, 2014
History:
2014-06-25 (est): Added code for additional date formats.
2014-07-02: Was receiving InsanityException and tweaked date code to get some
missing dates.
2014-12-15: Fixes insanity exception by tweaking the XPaths.
"""
from lxml import html
from juriscraper.lib.string_utils import convert_date_string
from juriscraper.OpinionSite import OpinionSite
class Site(OpinionSite):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.court_id = self.__module__
self.url = "https://www.courts.maine.gov/courts/sjc/opinions.html"
self.path_root = '//table[contains(.//th[1], "Opinion")]'
    def _get_cell_path(self, cell_number: int, subpath: str = "") -> str:
        path = '//table[contains(.//th[1], "Opinion")]//td[%d]' % cell_number
        return f"{path}/{subpath}" if subpath else path
def _get_download_urls(self):
path = f"{self.path_root}//td[2]/a[1]/@href"
return list(self.html.xpath(path))
def _get_case_names(self):
case_names = []
path = f"{self.path_root}//td[2]/a[1]"
for e in self.html.xpath(path):
s = html.tostring(e, method="text", encoding="unicode")
case_names.append(s)
return case_names
def _get_case_dates(self):
dates = []
path = f"{self.path_root}//td[3]"
for cell in self.html.xpath(path):
date_string = cell.text_content().replace("Aguust", "August")
dates.append(convert_date_string(date_string))
return dates
def _get_precedential_statuses(self):
return ["Published"] * len(self.case_names)
def _get_neutral_citations(self):
path = f"{self.path_root}//td[1]//text()"
return list(self.html.xpath(path))
| 33.45614
| 79
| 0.64237
|
31191a14afeac5dbe1ef211cc9e8baec2fbeb83f
| 3,107
|
py
|
Python
|
Python/FaceRecognition.py
|
vermayash7980/Hacktoberfest2021
|
66e190608c5e3f9ad983ba8f707e499ca5bc6da0
|
[
"MIT"
] | 39
|
2021-10-03T05:40:26.000Z
|
2021-10-31T18:09:23.000Z
|
Python/FaceRecognition.py
|
vermayash7980/Hacktoberfest2021
|
66e190608c5e3f9ad983ba8f707e499ca5bc6da0
|
[
"MIT"
] | 26
|
2021-10-03T04:50:47.000Z
|
2021-10-16T07:39:22.000Z
|
Python/FaceRecognition.py
|
vermayash7980/Hacktoberfest2021
|
66e190608c5e3f9ad983ba8f707e499ca5bc6da0
|
[
"MIT"
] | 215
|
2021-10-03T04:35:47.000Z
|
2021-10-31T17:37:42.000Z
|
import face_recognition
import cv2
import numpy as np
# Get a reference to webcam #0 (the default one)
video_capture = cv2.VideoCapture(0)
# Load a sample picture and learn how to recognize it.
j_image = face_recognition.load_image_file("add_path_to_your_image")
j_face_encoding = face_recognition.face_encodings(j_image)[0]
# Create arrays of known face encodings and their names
known_face_encodings = [
j_face_encoding,
]
known_face_names = [
"JackJJ"
]
# Initialize some variables
face_locations = []
face_encodings = []
face_names = []
process_this_frame = True
while True:
# Grab a single frame of video
ret, frame = video_capture.read()
# Resize frame of video to 1/4 size for faster face recognition processing
small_frame = cv2.resize(frame, (0, 0), fx=0.25, fy=0.25)
# Convert the image from BGR color (which OpenCV uses) to RGB color (which face_recognition uses)
rgb_small_frame = small_frame[:, :, ::-1]
# Only process every other frame of video to save time
if process_this_frame:
# Find all the faces and face encodings in the current frame of video
face_locations = face_recognition.face_locations(rgb_small_frame)
face_encodings = face_recognition.face_encodings(rgb_small_frame, face_locations)
face_names = []
for face_encoding in face_encodings:
# See if the face is a match for the known face(s)
matches = face_recognition.compare_faces(known_face_encodings, face_encoding)
name = "Not JackJJ"
# # If a match was found in known_face_encodings, just use the first one.
# if True in matches:
# first_match_index = matches.index(True)
# name = known_face_names[first_match_index]
# Or instead, use the known face with the smallest distance to the new face
face_distances = face_recognition.face_distance(known_face_encodings, face_encoding)
best_match_index = np.argmin(face_distances)
if matches[best_match_index]:
name = known_face_names[best_match_index]
face_names.append(name)
process_this_frame = not process_this_frame
# Display the results
for (top, right, bottom, left), name in zip(face_locations, face_names):
# Scale back up face locations since the frame we detected in was scaled to 1/4 size
top *= 4
right *= 4
bottom *= 4
left *= 4
# Draw a box around the face
cv2.rectangle(frame, (left, top), (right, bottom), (0, 0, 255), 2)
# Draw a label with a name below the face
cv2.rectangle(frame, (left, bottom - 35), (right, bottom), (0, 0, 255), cv2.FILLED)
font = cv2.FONT_HERSHEY_DUPLEX
cv2.putText(frame, name, (left + 6, bottom - 6), font, 1.0, (255, 255, 255), 1)
# Display the resulting image
cv2.imshow('Video', frame)
# Hit 'q' on the keyboard to quit!
if cv2.waitKey(1) & 0xFF == ord('q'):
break
# Release handle to the webcam
video_capture.release()
cv2.destroyAllWindows()
| 34.142857
| 101
| 0.671709
|
be4b22ea4b1403a01a9c2e52b5a7f1d21edd9f8e
| 16,935
|
py
|
Python
|
magenta/music/midi_io_test.py
|
fanzhiyan/magenta
|
622c47c19bb84c6f57b286ed03b738516b2f27d6
|
[
"Apache-2.0"
] | 2
|
2019-10-19T00:21:16.000Z
|
2019-10-19T00:21:36.000Z
|
magenta/music/midi_io_test.py
|
fanzhiyan/magenta
|
622c47c19bb84c6f57b286ed03b738516b2f27d6
|
[
"Apache-2.0"
] | 1
|
2019-09-29T22:41:54.000Z
|
2019-09-29T22:41:54.000Z
|
magenta/music/midi_io_test.py
|
fanzhiyan/magenta
|
622c47c19bb84c6f57b286ed03b738516b2f27d6
|
[
"Apache-2.0"
] | 1
|
2019-10-05T02:04:09.000Z
|
2019-10-05T02:04:09.000Z
|
# Copyright 2019 The Magenta Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test to ensure correct midi input and output."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import os.path
import tempfile
from magenta.music import constants
from magenta.music import midi_io
from magenta.protobuf import music_pb2
import mido
import pretty_midi
import tensorflow as tf
# self.midi_simple_filename contains a c-major scale of 8 quarter notes each
# with a sustain of .95 of the entire note. Here are the first two notes dumped
# using mididump.py:
# midi.NoteOnEvent(tick=0, channel=0, data=[60, 100]),
# midi.NoteOnEvent(tick=209, channel=0, data=[60, 0]),
# midi.NoteOnEvent(tick=11, channel=0, data=[62, 100]),
# midi.NoteOnEvent(tick=209, channel=0, data=[62, 0]),
_SIMPLE_MIDI_FILE_VELO = 100
_SIMPLE_MIDI_FILE_NUM_NOTES = 8
_SIMPLE_MIDI_FILE_SUSTAIN = .95
# self.midi_complex_filename contains many instruments including percussion as
# well as control change and pitch bend events.
# self.midi_is_drum_filename contains 41 tracks, two of which are on channel 9.
# self.midi_event_order_filename contains notes ordered
# non-monotonically by pitch. Here are relevant events as printed by
# mididump.py:
# midi.NoteOnEvent(tick=0, channel=0, data=[1, 100]),
# midi.NoteOnEvent(tick=0, channel=0, data=[3, 100]),
# midi.NoteOnEvent(tick=0, channel=0, data=[2, 100]),
# midi.NoteOnEvent(tick=4400, channel=0, data=[3, 0]),
# midi.NoteOnEvent(tick=0, channel=0, data=[1, 0]),
# midi.NoteOnEvent(tick=0, channel=0, data=[2, 0]),
class MidiIoTest(tf.test.TestCase):
def setUp(self):
self.midi_simple_filename = os.path.join(
tf.resource_loader.get_data_files_path(), '../testdata/example.mid')
self.midi_complex_filename = os.path.join(
tf.resource_loader.get_data_files_path(),
'../testdata/example_complex.mid')
self.midi_is_drum_filename = os.path.join(
tf.resource_loader.get_data_files_path(),
'../testdata/example_is_drum.mid')
self.midi_event_order_filename = os.path.join(
tf.resource_loader.get_data_files_path(),
'../testdata/example_event_order.mid')
def CheckPrettyMidiAndSequence(self, midi, sequence_proto):
"""Compares PrettyMIDI object against a sequence proto.
Args:
midi: A pretty_midi.PrettyMIDI object.
sequence_proto: A tensorflow.magenta.Sequence proto.
"""
# Test time signature changes.
self.assertEqual(len(midi.time_signature_changes),
len(sequence_proto.time_signatures))
for midi_time, sequence_time in zip(midi.time_signature_changes,
sequence_proto.time_signatures):
self.assertEqual(midi_time.numerator, sequence_time.numerator)
self.assertEqual(midi_time.denominator, sequence_time.denominator)
self.assertAlmostEqual(midi_time.time, sequence_time.time)
# Test key signature changes.
self.assertEqual(len(midi.key_signature_changes),
len(sequence_proto.key_signatures))
for midi_key, sequence_key in zip(midi.key_signature_changes,
sequence_proto.key_signatures):
self.assertEqual(midi_key.key_number % 12, sequence_key.key)
self.assertEqual(midi_key.key_number // 12, sequence_key.mode)
self.assertAlmostEqual(midi_key.time, sequence_key.time)
# Test tempos.
midi_times, midi_qpms = midi.get_tempo_changes()
self.assertEqual(len(midi_times),
len(sequence_proto.tempos))
self.assertEqual(len(midi_qpms),
len(sequence_proto.tempos))
for midi_time, midi_qpm, sequence_tempo in zip(
midi_times, midi_qpms, sequence_proto.tempos):
self.assertAlmostEqual(midi_qpm, sequence_tempo.qpm)
self.assertAlmostEqual(midi_time, sequence_tempo.time)
# Test instruments.
seq_instruments = collections.defaultdict(
lambda: collections.defaultdict(list))
for seq_note in sequence_proto.notes:
seq_instruments[
(seq_note.instrument, seq_note.program, seq_note.is_drum)][
'notes'].append(seq_note)
for seq_bend in sequence_proto.pitch_bends:
seq_instruments[
(seq_bend.instrument, seq_bend.program, seq_bend.is_drum)][
'bends'].append(seq_bend)
for seq_control in sequence_proto.control_changes:
seq_instruments[
(seq_control.instrument, seq_control.program, seq_control.is_drum)][
'controls'].append(seq_control)
sorted_seq_instrument_keys = sorted(seq_instruments.keys())
if seq_instruments:
self.assertEqual(len(midi.instruments), len(seq_instruments))
else:
self.assertEqual(1, len(midi.instruments))
self.assertEqual(0, len(midi.instruments[0].notes))
self.assertEqual(0, len(midi.instruments[0].pitch_bends))
for midi_instrument, seq_instrument_key in zip(
midi.instruments, sorted_seq_instrument_keys):
seq_instrument_notes = seq_instruments[seq_instrument_key]['notes']
self.assertEqual(len(midi_instrument.notes), len(seq_instrument_notes))
for midi_note, sequence_note in zip(midi_instrument.notes,
seq_instrument_notes):
self.assertEqual(midi_note.pitch, sequence_note.pitch)
self.assertEqual(midi_note.velocity, sequence_note.velocity)
self.assertAlmostEqual(midi_note.start, sequence_note.start_time)
self.assertAlmostEqual(midi_note.end, sequence_note.end_time)
seq_instrument_pitch_bends = seq_instruments[seq_instrument_key]['bends']
self.assertEqual(len(midi_instrument.pitch_bends),
len(seq_instrument_pitch_bends))
for midi_pitch_bend, sequence_pitch_bend in zip(
midi_instrument.pitch_bends,
seq_instrument_pitch_bends):
self.assertEqual(midi_pitch_bend.pitch, sequence_pitch_bend.bend)
self.assertAlmostEqual(midi_pitch_bend.time, sequence_pitch_bend.time)
def CheckMidiToSequence(self, filename):
"""Test the translation from PrettyMIDI to Sequence proto."""
source_midi = pretty_midi.PrettyMIDI(filename)
sequence_proto = midi_io.midi_to_sequence_proto(source_midi)
self.CheckPrettyMidiAndSequence(source_midi, sequence_proto)
def CheckSequenceToPrettyMidi(self, filename):
"""Test the translation from Sequence proto to PrettyMIDI."""
source_midi = pretty_midi.PrettyMIDI(filename)
sequence_proto = midi_io.midi_to_sequence_proto(source_midi)
translated_midi = midi_io.sequence_proto_to_pretty_midi(sequence_proto)
self.CheckPrettyMidiAndSequence(translated_midi, sequence_proto)
def CheckReadWriteMidi(self, filename):
"""Test writing to a MIDI file and comparing it to the original Sequence."""
# TODO(deck): The input MIDI file is opened in pretty-midi and
# re-written to a temp file, sanitizing the MIDI data (reordering
# note ons, etc). Issue 85 in the pretty-midi GitHub
# (http://github.com/craffel/pretty-midi/issues/85) requests that
# this sanitization be available outside of the context of a file
# write. If that is implemented, this rewrite code should be
# modified or deleted.
# When writing to the temp file, use the file object itself instead of
# file.name to avoid the permission error on Windows.
with tempfile.NamedTemporaryFile(prefix='MidiIoTest') as rewrite_file:
original_midi = pretty_midi.PrettyMIDI(filename)
original_midi.write(rewrite_file) # Use file object
# Back the file position to top to reload the rewrite_file
rewrite_file.seek(0)
source_midi = pretty_midi.PrettyMIDI(rewrite_file) # Use file object
sequence_proto = midi_io.midi_to_sequence_proto(source_midi)
# Translate the NoteSequence to MIDI and write to a file.
with tempfile.NamedTemporaryFile(prefix='MidiIoTest') as temp_file:
midi_io.sequence_proto_to_midi_file(sequence_proto, temp_file.name)
# Read it back in and compare to source.
created_midi = pretty_midi.PrettyMIDI(temp_file) # Use file object
self.CheckPrettyMidiAndSequence(created_midi, sequence_proto)
def testSimplePrettyMidiToSequence(self):
self.CheckMidiToSequence(self.midi_simple_filename)
def testSimpleSequenceToPrettyMidi(self):
self.CheckSequenceToPrettyMidi(self.midi_simple_filename)
def testSimpleSequenceToPrettyMidi_DefaultTicksAndTempo(self):
source_midi = pretty_midi.PrettyMIDI(self.midi_simple_filename)
stripped_sequence_proto = midi_io.midi_to_sequence_proto(source_midi)
del stripped_sequence_proto.tempos[:]
stripped_sequence_proto.ClearField('ticks_per_quarter')
expected_sequence_proto = music_pb2.NoteSequence()
expected_sequence_proto.CopyFrom(stripped_sequence_proto)
expected_sequence_proto.tempos.add(
qpm=constants.DEFAULT_QUARTERS_PER_MINUTE)
expected_sequence_proto.ticks_per_quarter = constants.STANDARD_PPQ
translated_midi = midi_io.sequence_proto_to_pretty_midi(
stripped_sequence_proto)
self.CheckPrettyMidiAndSequence(translated_midi, expected_sequence_proto)
def testSimpleSequenceToPrettyMidi_MultipleTempos(self):
source_midi = pretty_midi.PrettyMIDI(self.midi_simple_filename)
multi_tempo_sequence_proto = midi_io.midi_to_sequence_proto(source_midi)
multi_tempo_sequence_proto.tempos.add(time=1.0, qpm=60)
multi_tempo_sequence_proto.tempos.add(time=2.0, qpm=120)
translated_midi = midi_io.sequence_proto_to_pretty_midi(
multi_tempo_sequence_proto)
self.CheckPrettyMidiAndSequence(translated_midi, multi_tempo_sequence_proto)
def testSimpleSequenceToPrettyMidi_FirstTempoNotAtZero(self):
source_midi = pretty_midi.PrettyMIDI(self.midi_simple_filename)
multi_tempo_sequence_proto = midi_io.midi_to_sequence_proto(source_midi)
del multi_tempo_sequence_proto.tempos[:]
multi_tempo_sequence_proto.tempos.add(time=1.0, qpm=60)
multi_tempo_sequence_proto.tempos.add(time=2.0, qpm=120)
translated_midi = midi_io.sequence_proto_to_pretty_midi(
multi_tempo_sequence_proto)
# Translating to MIDI adds an implicit DEFAULT_QUARTERS_PER_MINUTE tempo
# at time 0, so recreate the list with that in place.
del multi_tempo_sequence_proto.tempos[:]
multi_tempo_sequence_proto.tempos.add(
time=0.0, qpm=constants.DEFAULT_QUARTERS_PER_MINUTE)
multi_tempo_sequence_proto.tempos.add(time=1.0, qpm=60)
multi_tempo_sequence_proto.tempos.add(time=2.0, qpm=120)
self.CheckPrettyMidiAndSequence(translated_midi, multi_tempo_sequence_proto)
def testSimpleSequenceToPrettyMidi_DropEventsAfterLastNote(self):
source_midi = pretty_midi.PrettyMIDI(self.midi_simple_filename)
multi_tempo_sequence_proto = midi_io.midi_to_sequence_proto(source_midi)
# Add a final tempo long after the last note.
multi_tempo_sequence_proto.tempos.add(time=600.0, qpm=120)
# Translate without dropping.
translated_midi = midi_io.sequence_proto_to_pretty_midi(
multi_tempo_sequence_proto)
self.CheckPrettyMidiAndSequence(translated_midi, multi_tempo_sequence_proto)
# Translate dropping anything after the last note.
translated_midi = midi_io.sequence_proto_to_pretty_midi(
multi_tempo_sequence_proto, drop_events_n_seconds_after_last_note=0)
# The added tempo should have been dropped.
del multi_tempo_sequence_proto.tempos[-1]
self.CheckPrettyMidiAndSequence(translated_midi, multi_tempo_sequence_proto)
# Add a final tempo 15 seconds after the last note.
last_note_time = max([n.end_time for n in multi_tempo_sequence_proto.notes])
multi_tempo_sequence_proto.tempos.add(time=last_note_time + 15, qpm=120)
# Translate dropping anything 30 seconds after the last note, which should
# preserve the added tempo.
translated_midi = midi_io.sequence_proto_to_pretty_midi(
multi_tempo_sequence_proto, drop_events_n_seconds_after_last_note=30)
self.CheckPrettyMidiAndSequence(translated_midi, multi_tempo_sequence_proto)
def testEmptySequenceToPrettyMidi_DropEventsAfterLastNote(self):
source_sequence = music_pb2.NoteSequence()
# Translate without dropping.
translated_midi = midi_io.sequence_proto_to_pretty_midi(
source_sequence)
self.assertEqual(1, len(translated_midi.instruments))
self.assertEqual(0, len(translated_midi.instruments[0].notes))
# Translate dropping anything after 30 seconds.
translated_midi = midi_io.sequence_proto_to_pretty_midi(
source_sequence, drop_events_n_seconds_after_last_note=30)
self.assertEqual(1, len(translated_midi.instruments))
self.assertEqual(0, len(translated_midi.instruments[0].notes))
def testNonEmptySequenceWithNoNotesToPrettyMidi_DropEventsAfterLastNote(self):
source_sequence = music_pb2.NoteSequence()
source_sequence.tempos.add(time=0, qpm=120)
source_sequence.tempos.add(time=10, qpm=160)
source_sequence.tempos.add(time=40, qpm=240)
# Translate without dropping.
translated_midi = midi_io.sequence_proto_to_pretty_midi(
source_sequence)
self.CheckPrettyMidiAndSequence(translated_midi, source_sequence)
# Translate dropping anything after 30 seconds.
translated_midi = midi_io.sequence_proto_to_pretty_midi(
source_sequence, drop_events_n_seconds_after_last_note=30)
del source_sequence.tempos[-1]
self.CheckPrettyMidiAndSequence(translated_midi, source_sequence)
def testSimpleReadWriteMidi(self):
self.CheckReadWriteMidi(self.midi_simple_filename)
def testComplexPrettyMidiToSequence(self):
self.CheckMidiToSequence(self.midi_complex_filename)
def testComplexSequenceToPrettyMidi(self):
self.CheckSequenceToPrettyMidi(self.midi_complex_filename)
def testIsDrumDetection(self):
"""Verify that is_drum instruments are properly tracked.
self.midi_is_drum_filename is a MIDI file containing two tracks
set to channel 9 (is_drum == True). Each contains one NoteOn. This
test is designed to catch a bug where the second track would lose
is_drum, remapping the drum track to an instrument track.
"""
sequence_proto = midi_io.midi_file_to_sequence_proto(
self.midi_is_drum_filename)
with tempfile.NamedTemporaryFile(prefix='MidiDrumTest') as temp_file:
midi_io.sequence_proto_to_midi_file(sequence_proto, temp_file.name)
midi_data1 = mido.MidiFile(filename=self.midi_is_drum_filename)
# Use the file object when writing to the tempfile
# to avoid permission error.
midi_data2 = mido.MidiFile(file=temp_file)
# Count number of channel 9 Note Ons.
channel_counts = [0, 0]
for index, midi_data in enumerate([midi_data1, midi_data2]):
for event in midi_data:
if (event.type == 'note_on' and
event.velocity > 0 and event.channel == 9):
channel_counts[index] += 1
self.assertEqual(channel_counts, [2, 2])
def testInstrumentInfo_NoteSequenceToPrettyMidi(self):
source_sequence = music_pb2.NoteSequence()
source_sequence.notes.add(
pitch=60, start_time=0.0, end_time=0.5, velocity=80, instrument=0)
source_sequence.notes.add(
pitch=60, start_time=0.5, end_time=1.0, velocity=80, instrument=1)
instrument_info1 = source_sequence.instrument_infos.add()
instrument_info1.name = 'inst_0'
instrument_info1.instrument = 0
instrument_info2 = source_sequence.instrument_infos.add()
instrument_info2.name = 'inst_1'
instrument_info2.instrument = 1
translated_midi = midi_io.sequence_proto_to_pretty_midi(source_sequence)
translated_sequence = midi_io.midi_to_note_sequence(translated_midi)
self.assertEqual(
len(source_sequence.instrument_infos),
len(translated_sequence.instrument_infos))
self.assertEqual(source_sequence.instrument_infos[0].name,
translated_sequence.instrument_infos[0].name)
self.assertEqual(source_sequence.instrument_infos[1].name,
translated_sequence.instrument_infos[1].name)
def testComplexReadWriteMidi(self):
self.CheckReadWriteMidi(self.midi_complex_filename)
def testEventOrdering(self):
self.CheckReadWriteMidi(self.midi_event_order_filename)
if __name__ == '__main__':
tf.test.main()
| 44.683377
| 80
| 0.755536
|
5727a1731e1a69d842b0afa13e7bec33397820c6
| 1,113
|
py
|
Python
|
robosuite/__init__.py
|
melfm/robosuite
|
f66801a5f4cc5ef4ce6f517e805122e8bddba3dc
|
[
"MIT"
] | null | null | null |
robosuite/__init__.py
|
melfm/robosuite
|
f66801a5f4cc5ef4ce6f517e805122e8bddba3dc
|
[
"MIT"
] | null | null | null |
robosuite/__init__.py
|
melfm/robosuite
|
f66801a5f4cc5ef4ce6f517e805122e8bddba3dc
|
[
"MIT"
] | null | null | null |
from robosuite.environments.base import make
# Manipulation environments
from robosuite.environments.manipulation.reach import Reach
from robosuite.environments.manipulation.lift import Lift
from robosuite.environments.manipulation.stack import Stack
from robosuite.environments.manipulation.nut_assembly import NutAssembly
from robosuite.environments.manipulation.pick_place import PickPlace
from robosuite.environments.manipulation.door import Door
from robosuite.environments.manipulation.wipe import Wipe
from robosuite.environments.manipulation.two_arm_lift import TwoArmLift
from robosuite.environments.manipulation.two_arm_peg_in_hole import TwoArmPegInHole
from robosuite.environments.manipulation.two_arm_handover import TwoArmHandover
from robosuite.environments import ALL_ENVIRONMENTS
from robosuite.controllers import ALL_CONTROLLERS, load_controller_config
from robosuite.robots import ALL_ROBOTS
from robosuite.models.grippers import ALL_GRIPPERS
__version__ = "1.2.1"
__logo__ = """
; / ,--.
["] ["] ,< |__**|
/[_]\ [~]\/ |// |
] [ OOO /o|__|
"""
| 41.222222
| 83
| 0.799641
|
8b35b579d3c5377e3779e62bf1ada71d93e975f8
| 1,759
|
py
|
Python
|
mayan/apps/django_gpg/links.py
|
prezi/mayan-edms
|
e9bc10a056c3379b57115c6e83022f48c6298e1d
|
[
"Apache-2.0"
] | 4
|
2019-02-17T08:35:42.000Z
|
2019-03-28T06:02:11.000Z
|
mayan/apps/django_gpg/links.py
|
zhoubear/mayan-edms
|
e9bc10a056c3379b57115c6e83022f48c6298e1d
|
[
"Apache-2.0"
] | 1
|
2018-10-11T13:01:34.000Z
|
2018-10-11T13:01:34.000Z
|
mayan/apps/django_gpg/links.py
|
prezi/mayan-edms
|
e9bc10a056c3379b57115c6e83022f48c6298e1d
|
[
"Apache-2.0"
] | 3
|
2019-01-29T13:21:57.000Z
|
2019-10-27T03:20:15.000Z
|
from __future__ import unicode_literals
from django.utils.translation import ugettext_lazy as _
from navigation import Link
from .icons import icon_key_setup, icon_key_upload, icon_keyserver_search
from .permissions import (
permission_key_delete, permission_key_download, permission_key_receive,
permission_key_view, permission_key_upload, permission_keyserver_query
)
link_key_delete = Link(
args=('resolved_object.pk',), permissions=(permission_key_delete,),
tags='dangerous', text=_('Delete'), view='django_gpg:key_delete',
)
link_key_detail = Link(
args=('resolved_object.pk',), permissions=(permission_key_view,),
text=_('Details'), view='django_gpg:key_detail',
)
link_key_download = Link(
args=('resolved_object.pk',), permissions=(permission_key_download,),
text=_('Download'), view='django_gpg:key_download',
)
link_key_query = Link(
icon_class=icon_keyserver_search,
permissions=(permission_keyserver_query,), text=_('Query keyservers'),
view='django_gpg:key_query'
)
link_key_receive = Link(
args='object.key_id', keep_query=True,
permissions=(permission_key_receive,), text=_('Import'),
view='django_gpg:key_receive',
)
link_key_setup = Link(
icon_class=icon_key_setup, permissions=(permission_key_view,),
text=_('Key management'), view='django_gpg:key_public_list'
)
link_key_upload = Link(
icon_class=icon_key_upload, permissions=(permission_key_upload,),
text=_('Upload key'), view='django_gpg:key_upload'
)
link_private_keys = Link(
permissions=(permission_key_view,), text=_('Private keys'),
view='django_gpg:key_private_list'
)
link_public_keys = Link(
permissions=(permission_key_view,), text=_('Public keys'),
view='django_gpg:key_public_list'
)
| 34.490196
| 75
| 0.761228
|
46d29be69700e8d57a2560311a03b8dd61b46421
| 1,483
|
py
|
Python
|
stubs/micropython-pyboard-1_13-95/os.py
|
RonaldHiemstra/micropython-stubs
|
d97f879b01f6687baaebef1c7e26a80909c3cff3
|
[
"MIT"
] | 38
|
2020-10-18T21:59:44.000Z
|
2022-03-17T03:03:28.000Z
|
stubs/micropython-pyboard-1_13-95/os.py
|
RonaldHiemstra/micropython-stubs
|
d97f879b01f6687baaebef1c7e26a80909c3cff3
|
[
"MIT"
] | 176
|
2020-10-18T14:31:03.000Z
|
2022-03-30T23:22:39.000Z
|
stubs/micropython-pyboard-1_13-95/os.py
|
RonaldHiemstra/micropython-stubs
|
d97f879b01f6687baaebef1c7e26a80909c3cff3
|
[
"MIT"
] | 6
|
2020-12-28T21:11:12.000Z
|
2022-02-06T04:07:50.000Z
|
"""
Module: 'os' on pyboard 1.13.0-95
"""
# MCU: (sysname='pyboard', nodename='pyboard', release='1.13.0', version='v1.13-95-g0fff2e03f on 2020-10-03', machine='PYBv1.1 with STM32F405RG')
# Stubber: 1.3.4
class VfsFat:
''
def chdir():
pass
def getcwd():
pass
def ilistdir():
pass
def mkdir():
pass
def mkfs():
pass
def mount():
pass
def open():
pass
def remove():
pass
def rename():
pass
def rmdir():
pass
def stat():
pass
def statvfs():
pass
def umount():
pass
class VfsLfs2:
''
def chdir():
pass
def getcwd():
pass
def ilistdir():
pass
def mkdir():
pass
def mkfs():
pass
def mount():
pass
def open():
pass
def remove():
pass
def rename():
pass
def rmdir():
pass
def stat():
pass
def statvfs():
pass
def umount():
pass
def chdir():
pass
def dupterm():
pass
def getcwd():
pass
def ilistdir():
pass
def listdir():
pass
def mkdir():
pass
def mount():
pass
def remove():
pass
def rename():
pass
def rmdir():
pass
sep = '/'
def stat():
pass
def statvfs():
pass
def sync():
pass
def umount():
pass
def uname():
pass
def unlink():
pass
def urandom():
pass
| 10.443662
| 145
| 0.475388
|
934ad576305543f6c76d5bf5739c4e4248c4aea1
| 5,894
|
py
|
Python
|
dns/rrset.py
|
liyongyue/dnsspider
|
ab29fb240c45bf16e146e96acff41aea29591f51
|
[
"0BSD"
] | null | null | null |
dns/rrset.py
|
liyongyue/dnsspider
|
ab29fb240c45bf16e146e96acff41aea29591f51
|
[
"0BSD"
] | null | null | null |
dns/rrset.py
|
liyongyue/dnsspider
|
ab29fb240c45bf16e146e96acff41aea29591f51
|
[
"0BSD"
] | null | null | null |
# Copyright (C) 2003-2005 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""DNS RRsets (an RRset is a named rdataset)"""
import dns.name
import dns.rdata
import dns.rdataset
import dns.rdataclass
import dns.rdatatype
import dns.renderer
class RRset(dns.rdataset.Rdataset):
"""A DNS RRset (named rdataset).
RRset inherits from Rdataset, and RRsets can be treated as
Rdatasets in most cases. There are, however, a few notable
exceptions. RRsets have different to_wire() and to_text() method
arguments, reflecting the fact that RRsets always have an owner
name.
"""
__slots__ = ['name', 'deleting']
def __init__(self, name, rdclass, rdtype, covers=dns.rdatatype.NONE,
deleting=None):
"""Create a new RRset."""
super(RRset, self).__init__(rdclass, rdtype)
self.name = name
self.deleting = deleting
def _clone(self):
obj = super(RRset, self)._clone()
obj.name = self.name
obj.deleting = self.deleting
return obj
def __repr__(self):
if self.covers == 0:
ctext = ''
else:
ctext = '(' + dns.rdatatype.to_text(self.covers) + ')'
        if self.deleting is not None:
            dtext = ' delete=' + dns.rdataclass.to_text(self.deleting)
        else:
            dtext = ''
return '<DNS ' + str(self.name) + ' ' + \
dns.rdataclass.to_text(self.rdclass) + ' ' + \
dns.rdatatype.to_text(self.rdtype) + ctext + dtext + ' RRset>'
def __str__(self):
return self.to_text()
def __eq__(self, other):
"""Two RRsets are equal if they have the same name and the same
rdataset
@rtype: bool"""
if not isinstance(other, RRset):
return False
if self.name != other.name:
return False
return super(RRset, self).__eq__(other)
def match(self, name, rdclass, rdtype, covers, deleting=None):
"""Returns True if this rrset matches the specified class, type,
covers, and deletion state."""
if not super(RRset, self).match(rdclass, rdtype, covers):
return False
if self.name != name or self.deleting != deleting:
return False
return True
def to_text(self, origin=None, relativize=True, **kw):
"""Convert the RRset into DNS master file format.
@see: L{dns.name.Name.choose_relativity} for more information
on how I{origin} and I{relativize} determine the way names
are emitted.
Any additional keyword arguments are passed on to the rdata
to_text() method.
@param origin: The origin for relative names, or None.
@type origin: dns.name.Name object
@param relativize: True if names should names be relativized
@type relativize: bool"""
return super(RRset, self).to_text(self.name, origin, relativize,
self.deleting, **kw)
def to_wire(self, file, compress=None, origin=None, **kw):
"""Convert the RRset to wire format."""
return super(RRset, self).to_wire(self.name, file, compress, origin,
self.deleting, **kw)
def to_rdataset(self):
"""Convert an RRset into an Rdataset.
@rtype: dns.rdataset.Rdataset object
"""
return dns.rdataset.from_rdata_list(self.ttl, list(self))
def from_text_list(name, ttl, rdclass, rdtype, text_rdatas):
"""Create an RRset with the specified name, TTL, class, and type, and with
the specified list of rdatas in text format.
@rtype: dns.rrset.RRset object
"""
if isinstance(name, (str, unicode)):
name = dns.name.from_text(name, None)
if isinstance(rdclass, str):
rdclass = dns.rdataclass.from_text(rdclass)
if isinstance(rdtype, str):
rdtype = dns.rdatatype.from_text(rdtype)
r = RRset(name, rdclass, rdtype)
r.update_ttl(ttl)
for t in text_rdatas:
rd = dns.rdata.from_text(r.rdclass, r.rdtype, t)
r.add(rd)
return r
def from_text(name, ttl, rdclass, rdtype, *text_rdatas):
"""Create an RRset with the specified name, TTL, class, and type and with
the specified rdatas in text format.
@rtype: dns.rrset.RRset object
"""
return from_text_list(name, ttl, rdclass, rdtype, text_rdatas)
def from_rdata_list(name, ttl, rdatas):
"""Create an RRset with the specified name and TTL, and with
the specified list of rdata objects.
@rtype: dns.rrset.RRset object
"""
if isinstance(name, (str, unicode)):
name = dns.name.from_text(name, None)
if len(rdatas) == 0:
raise ValueError, "rdata list must not be empty"
r = None
    for rd in rdatas:
        if r is None:
            r = RRset(name, rd.rdclass, rd.rdtype)
            r.update_ttl(ttl)
        r.add(rd)
return r
def from_rdata(name, ttl, *rdatas):
"""Create an RRset with the specified name and TTL, and with
the specified rdata objects.
@rtype: dns.rrset.RRset object
"""
return from_rdata_list(name, ttl, rdatas)
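# Illustrative usage sketch of the factory functions above (not part of the
# original module):
def _demo_from_text():
    rrset = from_text('www.example.', 300, 'IN', 'A', '10.0.0.1', '10.0.0.2')
    assert rrset.name == dns.name.from_text('www.example.', None)
    assert len(rrset) == 2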
| 33.488636
| 78
| 0.63149
|
c3319360008b388b322269d636fda398aa9315a4
| 7,084
|
py
|
Python
|
HTML/style.py
|
alexandratutino/Style-2
|
769d1b49fe96fb89162acde20e448ec905fd0b6f
|
[
"Apache-2.0"
] | null | null | null |
HTML/style.py
|
alexandratutino/Style-2
|
769d1b49fe96fb89162acde20e448ec905fd0b6f
|
[
"Apache-2.0"
] | null | null | null |
HTML/style.py
|
alexandratutino/Style-2
|
769d1b49fe96fb89162acde20e448ec905fd0b6f
|
[
"Apache-2.0"
] | null | null | null |
#!flask/bin/python
from flask import Flask, render_template, request, redirect, url_for
app = Flask(__name__)
@app.route("/steinbeck")
def steinbeck():
return render_template("steinbeck.html", awl=request.args.get("awl"), asl=request.args.get("asl"),
nr=request.args.get("nr"), vr=request.args.get("vr"), avr=request.args.get("avr"),
ajr=request.args.get("ajr"))
@app.route("/twain")
def twain():
return render_template("twain.html", awl=request.args.get("awl"), asl=request.args.get("asl"),
nr=request.args.get("nr"), vr=request.args.get("vr"), avr=request.args.get("avr"),
ajr=request.args.get("ajr"))
@app.route("/shelley")
def shelley():
return render_template("shelley.html", awl=request.args.get("awl"), asl=request.args.get("asl"),
nr=request.args.get("nr"), vr=request.args.get("vr"), avr=request.args.get("avr"),
ajr=request.args.get("ajr"))
@app.route("/austen")
def austen():
return render_template("austen.html", awl=request.args.get("awl"), asl=request.args.get("asl"),
nr=request.args.get("nr"), vr=request.args.get("vr"), avr=request.args.get("avr"),
ajr=request.args.get("ajr"))
@app.route("/grimm")
def grimm():
return render_template("grimm.html", awl=request.args.get("awl"), asl=request.args.get("asl"),
nr=request.args.get("nr"), vr=request.args.get("vr"), avr=request.args.get("avr"),
ajr=request.args.get("ajr"))
@app.route("/asimov")
def asimov():
return render_template("asimov.html", awl=request.args.get("awl"), asl=request.args.get("asl"),
nr=request.args.get("nr"), vr=request.args.get("vr"), avr=request.args.get("avr"),
ajr=request.args.get("ajr"))
@app.route("/lovecraft")
def lovecraft():
return render_template("lovecraft.html", awl=request.args.get("awl"), asl=request.args.get("asl"),
nr=request.args.get("nr"), vr=request.args.get("vr"), avr=request.args.get("avr"),
ajr=request.args.get("ajr"))
@app.route("/fitzgerald")
def fitzgerald():
return render_template("fitzgerald.html", awl=request.args.get("awl"), asl=request.args.get("asl"),
nr=request.args.get("nr"), vr=request.args.get("vr"), avr=request.args.get("avr"),
ajr=request.args.get("ajr"))
@app.route("/hemingway")
def hemingway():
return render_template("hemingway.html", awl=request.args.get("awl"), asl=request.args.get("asl"),
nr=request.args.get("nr"), vr=request.args.get("vr"), avr=request.args.get("avr"),
ajr=request.args.get("ajr"))
@app.route("/poe")
def poe():
return render_template("poe.html", awl=request.args.get("awl"), asl=request.args.get("asl"),
nr=request.args.get("nr"), vr=request.args.get("vr"), avr=request.args.get("avr"),
ajr=request.args.get("ajr"))
@app.route("/lewis")
def lewis():
return render_template("lewis.html", awl=request.args.get("awl"), asl=request.args.get("asl"),
nr=request.args.get("nr"), vr=request.args.get("vr"), avr=request.args.get("avr"),
ajr=request.args.get("ajr"))
@app.route("/clarke")
def clarke():
return render_template("clarke.html", awl=request.args.get("awl"), asl=request.args.get("asl"),
nr=request.args.get("nr"), vr=request.args.get("vr"), avr=request.args.get("avr"),
ajr=request.args.get("ajr"))
@app.route("/christie")
def christie():
return render_template("christie.html", awl=request.args.get("awl"), asl=request.args.get("asl"),
nr=request.args.get("nr"), vr=request.args.get("vr"), avr=request.args.get("avr"),
ajr=request.args.get("ajr"))
@app.route("/style")
def get_input():
return render_template('homePage.html')
@app.route("/style", methods=["GET", "POST"])
def result():
data = request.form.get('text')
style = [data, "1", "2", "3", "4", "5", "6"]
if style[0] == "John Steinbeck":
return redirect(url_for("steinbeck", awl=style[1], asl=style[2], nr=style[3], vr=style[4], avr=style[5],
ajr=style[6]))
elif style[0] == "Mark Twain":
return redirect(url_for("twain", awl=style[1], asl=style[2], nr=style[3], vr=style[4], avr=style[5],
ajr=style[6]))
elif style[0] == "Mary Shelley":
return redirect(url_for("shelley", awl=style[1], asl=style[2], nr=style[3], vr=style[4], avr=style[5],
ajr=style[6]))
elif style[0] == "Jane Austen":
return redirect(url_for("austen", awl=style[1], asl=style[2], nr=style[3], vr=style[4], avr=style[5],
ajr=style[6]))
elif style[0] == "Jacob Grimm and Wilhelm Grimm":
return redirect(url_for("grimm", awl=style[1], asl=style[2], nr=style[3], vr=style[4], avr=style[5],
ajr=style[6]))
elif style[0] == "Issac Asimov":
return redirect(url_for("asimov", awl=style[1], asl=style[2], nr=style[3], vr=style[4], avr=style[5],
ajr=style[6]))
elif style[0] == "H.P lovecraft":
return redirect(url_for("lovecraft", awl=style[1], asl=style[2], nr=style[3], vr=style[4], avr=style[5],
ajr=style[6]))
elif style[0] == "F Scott Fitzgerald":
return redirect(url_for("fitzgerald", awl=style[1], asl=style[2], nr=style[3], vr=style[4], avr=style[5],
ajr=style[6]))
elif style[0] == "Ernest Hemingway":
return redirect(url_for("hemingway", awl=style[1], asl=style[2], nr=style[3], vr=style[4], avr=style[5],
ajr=style[6]))
elif style[0] == "Edgar Allan Poe":
return redirect(url_for("poe", awl=style[1], asl=style[2], nr=style[3], vr=style[4], avr=style[5],
ajr=style[6]))
elif style[0] == "CS Lewis":
return redirect(url_for("lewis", awl=style[1], asl=style[2], nr=style[3], vr=style[4], avr=style[5],
ajr=style[6]))
elif style[0] == "Arthur C Clark":
return redirect(url_for("clarke", awl=style[1], asl=style[2], nr=style[3], vr=style[4], avr=style[5],
ajr=style[6]))
elif style[0] == "Agatha Christie":
return redirect(url_for("christie", awl=style[1], asl=style[2], nr=style[3], vr=style[4], avr=style[5],
ajr=style[6]))
if __name__ == '__main__':
app.run(debug=True)
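# Illustrative requests against the running app (a sketch; assumes the default
# Flask development server on localhost:5000):
#     curl http://localhost:5000/style                                # home page form
#     curl -iX POST -d "text=Mark Twain" http://localhost:5000/style  # 302 -> /twain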
| 47.543624
| 114
| 0.539526
|
9fe625030b2993cf97f255b1d20b5726be2bc457
| 1,357
|
py
|
Python
|
migrations/versions/5b1c342f1989_move_language_col_from_user_to_.py
|
Amaimersion/yandex-disk-telegram-bot
|
d9735d288c12b3961dc8143f50328434dc2069e4
|
[
"MIT"
] | 15
|
2020-05-12T23:23:38.000Z
|
2022-02-22T00:44:06.000Z
|
migrations/versions/5b1c342f1989_move_language_col_from_user_to_.py
|
zYxDevs/yandex-disk-telegram-bot
|
d9735d288c12b3961dc8143f50328434dc2069e4
|
[
"MIT"
] | 5
|
2020-08-03T08:57:37.000Z
|
2022-01-31T08:30:03.000Z
|
migrations/versions/5b1c342f1989_move_language_col_from_user_to_.py
|
zYxDevs/yandex-disk-telegram-bot
|
d9735d288c12b3961dc8143f50328434dc2069e4
|
[
"MIT"
] | 7
|
2020-08-15T20:24:56.000Z
|
2021-09-26T21:43:46.000Z
|
"""Move Language col from User to UserSettings
Revision ID: 5b1c342f1989
Revises: 2806d4dc492a
Create Date: 2021-06-24 10:51:58.562458
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '5b1c342f1989'
down_revision = '2806d4dc492a'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('user_settings', schema=None) as batch_op:
batch_op.add_column(sa.Column('language', sa.Enum('EN', name='supportedlanguage'), nullable=True, comment='Preferred language of user'))
with op.batch_alter_table('users', schema=None) as batch_op:
batch_op.drop_column('language')
op.execute(
"UPDATE user_settings SET language = 'EN'"
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('users', schema=None) as batch_op:
batch_op.add_column(sa.Column('language', sa.Enum('EN', name='supportedlanguage'), nullable=True, comment='Preferred language of user'))
with op.batch_alter_table('user_settings', schema=None) as batch_op:
batch_op.drop_column('language')
op.execute(
"UPDATE users SET language = 'EN'"
)
# ### end Alembic commands ###
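# Illustrative invocation (a sketch; assumes the standard Alembic CLI is
# configured for this migrations directory):
#     alembic upgrade 5b1c342f1989     # apply this revision
#     alembic downgrade 2806d4dc492a   # revert to the previous revision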
| 28.87234
| 144
| 0.694915
|
a62ef6d8b50db964cb8b80b2e17f64fa885fe18d
| 5,051
|
py
|
Python
|
src/genie/libs/parser/nxos/tests/ShowIpStaticRoute/cli/equal/golden_output_1_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 204
|
2018-06-27T00:55:27.000Z
|
2022-03-06T21:12:18.000Z
|
src/genie/libs/parser/nxos/tests/ShowIpStaticRoute/cli/equal/golden_output_1_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 468
|
2018-06-19T00:33:18.000Z
|
2022-03-31T23:23:35.000Z
|
src/genie/libs/parser/nxos/tests/ShowIpStaticRoute/cli/equal/golden_output_1_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 309
|
2019-01-16T20:21:07.000Z
|
2022-03-30T12:56:41.000Z
|
expected_output = {
'vrf':{
'default':{
'address_family': {
'ipv4': {
'routes': {
'10.4.1.1/32': {
'route': '10.4.1.1/32',
'next_hop': {
'next_hop_list': {
1: {
'index': 1,
'active': True,
'next_hop': '10.1.3.1',
'next_hop_netmask': '32',
'outgoing_interface': 'Ethernet1/2',
},
2: {
'index': 2,
'active': True,
'next_hop': '10.186.3.1',
'next_hop_netmask': '32',
'outgoing_interface': 'Ethernet1/3',
},
},
},
},
'10.16.2.2/32': {
'route': '10.16.2.2/32',
'next_hop': {
'next_hop_list': {
1: {
'index': 1,
'active': True,
'next_hop': '10.2.3.2',
'next_hop_netmask': '32',
'outgoing_interface': 'Ethernet1/4',
},
2: {
'index': 2,
'active': True,
'next_hop': '10.229.3.2',
'next_hop_netmask': '32',
'outgoing_interface': 'Ethernet1/1',
},
},
},
},
},
},
},
},
'VRF1': {
'address_family': {
'ipv4': {
'routes': {
'10.4.1.1/32': {
'route': '10.4.1.1/32',
'next_hop': {
'next_hop_list': {
1: {
'index': 1,
'active': True,
'next_hop': '0.0.0.0',
'next_hop_netmask': '32',
'outgoing_interface': 'Null0',
},
2: {
'index': 2,
'active': False,
'next_hop': '10.1.3.1',
'next_hop_netmask': '32',
'outgoing_interface': 'Ethernet1/2',
},
},
},
},
'10.16.2.2/32': {
'route': '10.16.2.2/32',
'next_hop': {
'next_hop_list': {
1: {
'index': 1,
'active': False,
'rnh_active': True,
'next_hop': '10.229.3.2',
'next_hop_netmask': '32',
},
2: {
'index': 2,
'active': False,
'next_hop': '10.229.3.2',
'next_hop_netmask': '32',
'outgoing_interface': 'Ethernet1/1',
},
3: {
'index': 3,
'active': False,
'rnh_active': True,
'next_hop': '10.154.3.2',
'next_hop_netmask': '32',
},
},
},
},
},
},
},
},
},
}
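# Illustrative use of this fixture (a sketch; genieparser's test harness performs
# the comparison automatically based on this file's location):
#     parsed = ShowIpStaticRoute(device=device).parse()
#     assert parsed == expected_output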
| 44.699115
| 77
| 0.179172
|
1d270389293a3b4776beb9edccc810ae35af3ad5
| 3,782
|
py
|
Python
|
src/test-apps/happy/tests/standalone/wdmNext/test_weave_wdm_next_mutual_subscribe_40.py
|
robszewczyk/openweave-core
|
f452cc55859daea83b3ce7af158c8e78b05cc3bc
|
[
"Apache-2.0"
] | 249
|
2017-09-18T17:48:34.000Z
|
2022-02-02T06:46:21.000Z
|
src/test-apps/happy/tests/standalone/wdmNext/test_weave_wdm_next_mutual_subscribe_40.py
|
robszewczyk/openweave-core
|
f452cc55859daea83b3ce7af158c8e78b05cc3bc
|
[
"Apache-2.0"
] | 501
|
2017-11-10T11:25:32.000Z
|
2022-02-01T10:43:13.000Z
|
src/test-apps/happy/tests/standalone/wdmNext/test_weave_wdm_next_mutual_subscribe_40.py
|
robszewczyk/openweave-core
|
f452cc55859daea83b3ce7af158c8e78b05cc3bc
|
[
"Apache-2.0"
] | 116
|
2017-09-20T07:06:55.000Z
|
2022-01-08T13:41:15.000Z
|
#!/usr/bin/env python3
#
# Copyright (c) 2016-2017 Nest Labs, Inc.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# @file
# Calls Weave WDM mutual subscribe between nodes.
# F32: Mutual Subscribe: Root path. Null Version. Mutate data in initiator and responder. Publisher in responder aborts
#
from __future__ import absolute_import
from __future__ import print_function
import unittest
import set_test_path
from weave_wdm_next_test_base import weave_wdm_next_test_base
import WeaveUtilities
class test_weave_wdm_next_mutual_subscribe_40(weave_wdm_next_test_base):
def test_weave_wdm_next_mutual_subscribe_40(self):
wdm_next_args = {}
wdm_next_args['wdm_option'] = "mutual_subscribe"
wdm_next_args['total_client_count'] = 2
wdm_next_args['final_client_status'] = 4
wdm_next_args['timer_client_period'] = 4000
wdm_next_args['test_client_iterations'] = 1
wdm_next_args['test_client_delay'] = 15000
wdm_next_args['enable_client_flip'] = 1
wdm_next_args['total_server_count'] = 2
wdm_next_args['final_server_status'] = 3
wdm_next_args['timer_server_period'] = 5000
wdm_next_args['enable_server_flip'] = 1
        wdm_next_args['client_log_check'] = [(r'Handler\[0\] \[(ALIVE|CONFM)\] bound mutual subscription is going away', wdm_next_args['test_client_iterations']),
                                             (r'Handler\[0\] \[(ALIVE|CONFM)\] TerminateSubscription ', wdm_next_args['test_client_iterations']),
                                             ('Client->kEvent_OnNotificationProcessed', wdm_next_args['test_client_iterations'] * (wdm_next_args['total_server_count'] + 1)),
                                             (r'Client\[0\] moving to \[ FREE\] Ref\(0\)', wdm_next_args['test_client_iterations']),
                                             (r'Handler\[0\] Moving to \[ FREE\] Ref\(0\)', wdm_next_args['test_client_iterations'])]
        wdm_next_args['server_log_check'] = [(r'Client\[0\] \[(ALIVE|CONFM)\] bound mutual subscription is going away', wdm_next_args['test_client_iterations']),
                                             ('Client->kEvent_OnNotificationProcessed', wdm_next_args['test_client_iterations'] * (wdm_next_args['total_client_count'] + 1)),
                                             (r'Handler\[0\] \[(ALIVE|CONFM)\] AbortSubscription Ref\(\d+\)', wdm_next_args['test_client_iterations']),
                                             (r'Client\[0\] moving to \[ FREE\] Ref\(0\)', wdm_next_args['test_client_iterations']),
                                             (r'Handler\[0\] Moving to \[ FREE\] Ref\(0\)', wdm_next_args['test_client_iterations'])]
wdm_next_args['test_tag'] = self.__class__.__name__[19:].upper()
wdm_next_args['test_case_name'] = ['F32: Mutual Subscribe: Root path. Null Version. Mutate data in initiator and responder. Publisher in responder aborts']
print('test file: ' + self.__class__.__name__)
print("weave-wdm-next test F32")
super(test_weave_wdm_next_mutual_subscribe_40, self).weave_wdm_next_test_base(wdm_next_args)
if __name__ == "__main__":
WeaveUtilities.run_unittest()
| 50.426667
| 173
| 0.655473
|
81bc892a9f17d1bf3012da6f92ea0cb874b711b7
| 897
|
py
|
Python
|
_0_download_to_csv/_1_to_csv.py
|
JesseAldridge/github_quality
|
a5730338d6aeaf16d869c78ae7cb3c60b27d368c
|
[
"MIT"
] | 1
|
2015-10-26T17:54:30.000Z
|
2015-10-26T17:54:30.000Z
|
_0_download_to_csv/_1_to_csv.py
|
JesseAldridge/github_quality
|
a5730338d6aeaf16d869c78ae7cb3c60b27d368c
|
[
"MIT"
] | null | null | null |
_0_download_to_csv/_1_to_csv.py
|
JesseAldridge/github_quality
|
a5730338d6aeaf16d869c78ae7cb3c60b27d368c
|
[
"MIT"
] | null | null | null |
import json, csv, glob, os
download_dir_path = os.path.expanduser('~/repoq-downloads')
column_labels = [
'full_name',
'disabled',
'has_projects',
'stargazers_count',
'subscribers_count',
'self_issue_count',
'pushed_at',
'pull_count',
'network_count',
'forks',
'watchers',
'open_issues',
'has_wiki',
'size',
'archived',
'fork',
'has_downloads',
'issue_count',
'language',
'created_at',
'mirror_url',
'has_pages',
'updated_at',
'id',
'description',
]
rows = []
for json_path in glob.glob(os.path.join(download_dir_path, '*.txt')):
with open(json_path) as f:
json_text = f.read()
repo_dict = json.loads(json_text)
rows.append([repo_dict.get(field) for field in column_labels])
with open('out.csv', 'w') as f:
writer = csv.writer(f, lineterminator='\n')
writer.writerow(column_labels)
for row in rows:
writer.writerow(row)
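# Quick sanity check (illustrative addition, not in the original script):
# read the file back and report how many repos were written.
with open('out.csv') as f:
    print('wrote %d rows' % sum(1 for _ in csv.DictReader(f)))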
| 19.933333
| 69
| 0.661093
|
84dd33719c4b2ff1fc5e7e3f4563071d11c5e2aa
| 5,078
|
py
|
Python
|
final/listview/listview_container.py
|
JosuX/Albie
|
417bb0065139e118bfacd45c58ac0272ee37ea90
|
[
"MIT"
] | null | null | null |
final/listview/listview_container.py
|
JosuX/Albie
|
417bb0065139e118bfacd45c58ac0272ee37ea90
|
[
"MIT"
] | null | null | null |
final/listview/listview_container.py
|
JosuX/Albie
|
417bb0065139e118bfacd45c58ac0272ee37ea90
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'listview_container.ui'
#
# Created by: PyQt5 UI code generator 5.15.1
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_AlbiePhotography(object):
def setupUi(self, AlbiePhotography):
AlbiePhotography.setObjectName("AlbiePhotography")
AlbiePhotography.resize(960, 540)
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap("../ALL PICTURES/logo1.jpg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
AlbiePhotography.setWindowIcon(icon)
AlbiePhotography.setStyleSheet("\n"
"QMainWindow{\n"
"background-color: qradialgradient(spread:pad, cx:0.5, cy:0.5, radius:1.805, fx:0.5, fy:0.5, stop:0.0497512 rgba(166, 166, 166, 255), stop:0.880597 rgba(117, 117, 109, 255));\n"
"}")
self.centralwidget = QtWidgets.QWidget(AlbiePhotography)
self.centralwidget.setObjectName("centralwidget")
self.gridLayout = QtWidgets.QGridLayout(self.centralwidget)
self.gridLayout.setObjectName("gridLayout")
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.setObjectName("horizontalLayout")
self.pushButton = QtWidgets.QPushButton(self.centralwidget)
self.pushButton.setMaximumSize(QtCore.QSize(50, 16777215))
self.pushButton.setStyleSheet("QPushButton:hover { color: white }\n"
"QPushButton:hover {font-size:32px}\n"
"QPushButton{\n"
"background-color: rgba(255, 255, 255, 0);\n"
"\n"
"}\n"
"QPushButton{\n"
"image:url(C:/Users/Lenovo/Desktop/PYTHON/denne/ALL PICTURES/back button.png);\n"
" color: rgb(255, 255, 255);\n"
" hover-font: 20px;\n"
" border-radius: 50px;\n"
" border-color: beige;\n"
" font: 18pt \"Phenomena\";\n"
" min-width: 0em;\n"
" padding: 0px;\n"
"}")
self.pushButton.setText("")
self.pushButton.setObjectName("pushButton")
self.horizontalLayout.addWidget(self.pushButton)
self.label_2 = QtWidgets.QLabel(self.centralwidget)
self.label_2.setText("")
self.label_2.setObjectName("label_2")
self.horizontalLayout.addWidget(self.label_2)
self.label_6 = QtWidgets.QLabel(self.centralwidget)
self.label_6.setText("")
self.label_6.setObjectName("label_6")
self.horizontalLayout.addWidget(self.label_6)
self.label_title = QtWidgets.QLabel(self.centralwidget)
self.label_title.setMinimumSize(QtCore.QSize(330, 40))
self.label_title.setMaximumSize(QtCore.QSize(300, 40))
font = QtGui.QFont()
font.setFamily("Microsoft JhengHei UI Light")
font.setPointSize(14)
self.label_title.setFont(font)
self.label_title.setStyleSheet("image:url(C:/Users/Lenovo/Desktop/PYTHON/denne/ALL PICTURES/window_3.png);")
self.label_title.setText("")
self.label_title.setPixmap(QtGui.QPixmap("../PYTHON/denne/connectwithus.png"))
self.label_title.setScaledContents(True)
self.label_title.setAlignment(QtCore.Qt.AlignCenter)
self.label_title.setObjectName("label_title")
self.horizontalLayout.addWidget(self.label_title)
self.label = QtWidgets.QLabel(self.centralwidget)
self.label.setText("")
self.label.setObjectName("label")
self.horizontalLayout.addWidget(self.label)
self.label_4 = QtWidgets.QLabel(self.centralwidget)
self.label_4.setText("")
self.label_4.setObjectName("label_4")
self.horizontalLayout.addWidget(self.label_4)
self.label_3 = QtWidgets.QLabel(self.centralwidget)
self.label_3.setMaximumSize(QtCore.QSize(50, 16777215))
self.label_3.setText("")
self.label_3.setObjectName("label_3")
self.horizontalLayout.addWidget(self.label_3)
self.gridLayout.addLayout(self.horizontalLayout, 0, 0, 1, 1)
self.container_lisview = QtWidgets.QListView(self.centralwidget)
self.container_lisview.setStyleSheet("")
self.container_lisview.setFrameShape(QtWidgets.QFrame.NoFrame)
self.container_lisview.setFrameShadow(QtWidgets.QFrame.Plain)
self.container_lisview.setDragDropMode(QtWidgets.QAbstractItemView.DragDrop)
self.container_lisview.setObjectName("container_lisview")
self.gridLayout.addWidget(self.container_lisview, 1, 0, 1, 1)
AlbiePhotography.setCentralWidget(self.centralwidget)
self.retranslateUi(AlbiePhotography)
QtCore.QMetaObject.connectSlotsByName(AlbiePhotography)
def retranslateUi(self, AlbiePhotography):
_translate = QtCore.QCoreApplication.translate
AlbiePhotography.setWindowTitle(_translate("AlbiePhotography", "Albie Photography"))
if __name__ == "__main__":
import sys
app = QtWidgets.QApplication(sys.argv)
AlbiePhotography = QtWidgets.QMainWindow()
ui = Ui_AlbiePhotography()
ui.setupUi(AlbiePhotography)
AlbiePhotography.show()
sys.exit(app.exec_())
| 44.938053
| 177
| 0.706774
|
80addb330ca7dca0fad59b279b0a30b3edaef161
| 2,117
|
py
|
Python
|
test/integration/src/py/hand_setup.py
|
emeseight/sifnode
|
cf52c0cf622fd5113c0a1a8f704e5f62ef3ae4c3
|
[
"Apache-2.0"
] | 85
|
2020-10-24T07:35:55.000Z
|
2022-03-27T10:27:55.000Z
|
test/integration/src/py/hand_setup.py
|
emeseight/sifnode
|
cf52c0cf622fd5113c0a1a8f704e5f62ef3ae4c3
|
[
"Apache-2.0"
] | 1,760
|
2020-10-24T07:37:17.000Z
|
2022-03-31T17:59:44.000Z
|
test/integration/src/py/hand_setup.py
|
emeseight/sifnode
|
cf52c0cf622fd5113c0a1a8f704e5f62ef3ae4c3
|
[
"Apache-2.0"
] | 115
|
2020-10-31T07:32:38.000Z
|
2022-03-29T21:12:25.000Z
|
import logging
import os
import sys
import burn_lock_functions
import test_utilities
from burn_lock_functions import EthereumToSifchainTransferRequest
from integration_env_credentials import sifchain_cli_credentials_for_test
from test_utilities import get_required_env_var, SifchaincliCredentials, get_optional_env_var, \
ganache_owner_account
logging.basicConfig(
level=logging.DEBUG,
format="%(asctime)s [%(levelname)s] %(message)s",
handlers=[logging.StreamHandler(sys.stdout)]
)
logging.debug("starting")
smart_contracts_dir = get_required_env_var("SMART_CONTRACTS_DIR")
ethereum_address = get_optional_env_var(
"ETHEREUM_ADDRESS",
ganache_owner_account(smart_contracts_dir)
)
def build_request() -> (EthereumToSifchainTransferRequest, SifchaincliCredentials):
new_account_key = 'user1'
credentials = sifchain_cli_credentials_for_test(new_account_key)
new_addr = burn_lock_functions.create_new_sifaddr(credentials=credentials, keyname=new_account_key)
credentials.from_key = new_addr["name"]
request = EthereumToSifchainTransferRequest(
sifchain_address=new_addr["address"],
smart_contracts_dir=smart_contracts_dir,
ethereum_address=ethereum_address,
ethereum_private_key_env_var="ETHEREUM_PRIVATE_KEY",
bridgebank_address=get_required_env_var("BRIDGE_BANK_ADDRESS"),
ethereum_network=(os.environ.get("ETHEREUM_NETWORK") or ""),
amount=9 * 10 ** 18,
ceth_amount=2 * (10 ** 16)
)
return request, credentials
# if there's an existing user1 key, just remove it. Otherwise, adding a duplicate key will just hang
try:
test_utilities.get_shell_output(f"sifnoded keys delete user1 --home /home/vagrant/.sifnoded --keyring-backend test -o json")
except:
logging.debug("no key to delete, this is normal in a fresh environment")
request, credentials = build_request()
burn_lock_functions.transfer_ethereum_to_sifchain(request)
test_utilities.get_sifchain_addr_balance(request.sifchain_address, request.sifnoded_node, "ceth")
logging.info(f"created account for key {credentials.from_key}")
| 38.490909
| 128
| 0.787435
|
9c53983c39a7afc87490acd44a1f8d705b74cda0
| 2,537
|
py
|
Python
|
src/Object/obj_converter.py
|
hegedenes/shader-combinators
|
61a483e259e86f556fc642a13e5067b48be5e676
|
[
"BSD-3-Clause"
] | null | null | null |
src/Object/obj_converter.py
|
hegedenes/shader-combinators
|
61a483e259e86f556fc642a13e5067b48be5e676
|
[
"BSD-3-Clause"
] | null | null | null |
src/Object/obj_converter.py
|
hegedenes/shader-combinators
|
61a483e259e86f556fc642a13e5067b48be5e676
|
[
"BSD-3-Clause"
] | null | null | null |
import sys
import os
name = sys.argv[1]
def from_f(f):
x = f.split('/')
#return '('+name.lower()+'Vertices !! ('+x[0]+'-1), '+name.lower()+'TexCoords !! ('+x[1]+'-1), '+name.lower()+'Normals !! ('+x[2]+'-1))'
return [int(x[0]), int(x[1]), int(x[2])]
def to_str(line):
if line[0] in ['v','vn']:
return 'V3 ('+line[1]+') ('+line[2]+') ('+line[3]+')'
if line[0] in ['vt']:
return 'V2 ('+line[1]+') ('+line[2]+')'
if line[0] == 'f':
# return ', '.join([from_f(line[1]),from_f(line[2]),from_f(line[3])])
return [from_f(line[1]), from_f(line[2]), from_f(line[3])]
return str(line)
file_in = open(name + '.obj', 'r')
file_out = open(name + '.hs', 'w')
file_out.write(
"""{-# LANGUAGE OverloadedStrings #-}
module Object."""+name+""" ("""+name.lower()+"""Mesh) where
import LambdaCube.GL
import LambdaCube.GL.Mesh
import qualified Data.Trie as T
import qualified Data.Vector.Storable as SV
"""
)
data = {}
for line in file_in:
x = line.split()
key = x[0]
if key == '#':
continue
if not key in data.keys():
data[key] = []
data[key].append(to_str(x))
file_out.write(
name.lower() + """Vertices :: [V3F]
""" + name.lower() + """Vertices =
[ """
)
vertices = []
for line in data['f']:
for [pos,tex,norm] in line:
vertices.append(data['v'][pos-1])
file_out.write('\n , '.join(vertices))
file_out.write(
"""
]
"""
)
file_out.write(
name.lower() + """Normals :: [V3F]
""" + name.lower() + """Normals =
[ """
)
normals = []
for line in data['f']:
for [pos,tex,norm] in line:
normals.append(data['vn'][norm-1])
file_out.write('\n , '.join(normals))
file_out.write(
"""
]
"""
)
texcoords = []
for line in data['f']:
for [pos,tex,norm] in line:
texcoords.append(data['vt'][tex-1])
file_out.write(
name.lower() + """TexCoords :: [V2F]
""" + name.lower() + """TexCoords =
[ """
)
file_out.write('\n , '.join(texcoords))
file_out.write(
"""
]
"""
)
file_out.write(
name.lower() + """Mesh :: Mesh
""" + name.lower() + """Mesh = Mesh
{ mAttributes = T.fromList
[ ("position", A_V3F $ SV.fromList """ + name.lower() + """Vertices)
, ("texture_uv", A_V2F $ SV.fromList """ + name.lower() + """TexCoords)
, ("normal", A_V3F $ SV.fromList """ + name.lower() + """Normals)
]
, mPrimitive = P_Triangles
, mGPUData = Nothing
}
main = do
saveMesh \""""+name+""".mesh\" """ + name.lower() + """Mesh
"""
)
file_in.close()
file_out.close()
os.system('runhaskell ' + name + '.hs')
os.remove(name + '.hs')
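# Usage (illustrative): python obj_converter.py teapot
# Reads teapot.obj, writes a temporary teapot.hs, and runs it with runhaskell to
# emit teapot.mesh (requires GHC/runhaskell plus the LambdaCube libraries imported
# by the generated module).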
| 19.075188
| 138
| 0.552621
|
d6310d81ab87f7334e767668217d51054439a89c
| 25,893
|
py
|
Python
|
pandas/io/json.py
|
springcoil/pandas
|
945075ad78cef652039feb50d60092b0580604e6
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null |
pandas/io/json.py
|
springcoil/pandas
|
945075ad78cef652039feb50d60092b0580604e6
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null |
pandas/io/json.py
|
springcoil/pandas
|
945075ad78cef652039feb50d60092b0580604e6
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null |
# pylint: disable-msg=E1101,W0613,W0603
import os
import copy
from collections import defaultdict
import numpy as np
import pandas.json as _json
from pandas.tslib import iNaT
from pandas.compat import long, u
from pandas import compat, isnull
from pandas import Series, DataFrame, to_datetime
from pandas.io.common import get_filepath_or_buffer
from pandas.core.common import AbstractMethodError
import pandas.core.common as com
loads = _json.loads
dumps = _json.dumps
### interface to/from ###
def to_json(path_or_buf, obj, orient=None, date_format='epoch',
double_precision=10, force_ascii=True, date_unit='ms',
default_handler=None):
if isinstance(obj, Series):
s = SeriesWriter(
obj, orient=orient, date_format=date_format,
double_precision=double_precision, ensure_ascii=force_ascii,
date_unit=date_unit, default_handler=default_handler).write()
elif isinstance(obj, DataFrame):
s = FrameWriter(
obj, orient=orient, date_format=date_format,
double_precision=double_precision, ensure_ascii=force_ascii,
date_unit=date_unit, default_handler=default_handler).write()
else:
raise NotImplementedError("'obj' should be a Series or a DataFrame")
if isinstance(path_or_buf, compat.string_types):
with open(path_or_buf, 'w') as fh:
fh.write(s)
elif path_or_buf is None:
return s
else:
path_or_buf.write(s)
class Writer(object):
def __init__(self, obj, orient, date_format, double_precision,
ensure_ascii, date_unit, default_handler=None):
self.obj = obj
if orient is None:
orient = self._default_orient
self.orient = orient
self.date_format = date_format
self.double_precision = double_precision
self.ensure_ascii = ensure_ascii
self.date_unit = date_unit
self.default_handler = default_handler
self.is_copy = None
self._format_axes()
def _format_axes(self):
raise AbstractMethodError(self)
def write(self):
return dumps(
self.obj,
orient=self.orient,
double_precision=self.double_precision,
ensure_ascii=self.ensure_ascii,
date_unit=self.date_unit,
iso_dates=self.date_format == 'iso',
default_handler=self.default_handler)
class SeriesWriter(Writer):
_default_orient = 'index'
def _format_axes(self):
if not self.obj.index.is_unique and self.orient == 'index':
raise ValueError("Series index must be unique for orient="
"'%s'" % self.orient)
class FrameWriter(Writer):
_default_orient = 'columns'
def _format_axes(self):
""" try to axes if they are datelike """
if not self.obj.index.is_unique and self.orient in (
'index', 'columns'):
raise ValueError("DataFrame index must be unique for orient="
"'%s'." % self.orient)
if not self.obj.columns.is_unique and self.orient in (
'index', 'columns', 'records'):
raise ValueError("DataFrame columns must be unique for orient="
"'%s'." % self.orient)
def read_json(path_or_buf=None, orient=None, typ='frame', dtype=True,
convert_axes=True, convert_dates=True, keep_default_dates=True,
numpy=False, precise_float=False, date_unit=None):
"""
Convert a JSON string to pandas object
Parameters
----------
filepath_or_buffer : a valid JSON string or file-like
The string could be a URL. Valid URL schemes include http, ftp, s3, and
file. For file URLs, a host is expected. For instance, a local file
could be ``file://localhost/path/to/table.json``
orient
* `Series`
- default is ``'index'``
- allowed values are: ``{'split','records','index'}``
- The Series index must be unique for orient ``'index'``.
* `DataFrame`
- default is ``'columns'``
- allowed values are: {'split','records','index','columns','values'}
- The DataFrame index must be unique for orients 'index' and
'columns'.
- The DataFrame columns must be unique for orients 'index',
'columns', and 'records'.
* The format of the JSON string
- split : dict like
``{index -> [index], columns -> [columns], data -> [values]}``
- records : list like
``[{column -> value}, ... , {column -> value}]``
- index : dict like ``{index -> {column -> value}}``
- columns : dict like ``{column -> {index -> value}}``
- values : just the values array
typ : type of object to recover (series or frame), default 'frame'
dtype : boolean or dict, default True
If True, infer dtypes, if a dict of column to dtype, then use those,
if False, then don't infer dtypes at all, applies only to the data.
convert_axes : boolean, default True
Try to convert the axes to the proper dtypes.
convert_dates : boolean, default True
List of columns to parse for dates; If True, then try to parse
datelike columns default is True; a column label is datelike if
* it ends with ``'_at'``,
* it ends with ``'_time'``,
* it begins with ``'timestamp'``,
* it is ``'modified'``, or
* it is ``'date'``
keep_default_dates : boolean, default True.
If parsing dates, then parse the default datelike columns
numpy : boolean, default False
Direct decoding to numpy arrays. Supports numeric data only, but
non-numeric column and index labels are supported. Note also that the
JSON ordering MUST be the same for each term if numpy=True.
precise_float : boolean, default False.
Set to enable usage of higher precision (strtod) function when
decoding string to double values. Default (False) is to use fast but
less precise builtin functionality
date_unit : string, default None
The timestamp unit to detect if converting dates. The default behaviour
is to try and detect the correct precision, but if this is not desired
then pass one of 's', 'ms', 'us' or 'ns' to force parsing only seconds,
milliseconds, microseconds or nanoseconds respectively.
Returns
-------
result : Series or DataFrame
"""
filepath_or_buffer, _, _ = get_filepath_or_buffer(path_or_buf)
if isinstance(filepath_or_buffer, compat.string_types):
try:
exists = os.path.exists(filepath_or_buffer)
# if the filepath is too long will raise here
# 5874
except (TypeError,ValueError):
exists = False
if exists:
with open(filepath_or_buffer, 'r') as fh:
json = fh.read()
else:
json = filepath_or_buffer
elif hasattr(filepath_or_buffer, 'read'):
json = filepath_or_buffer.read()
else:
json = filepath_or_buffer
obj = None
if typ == 'frame':
obj = FrameParser(json, orient, dtype, convert_axes, convert_dates,
keep_default_dates, numpy, precise_float,
date_unit).parse()
if typ == 'series' or obj is None:
if not isinstance(dtype, bool):
dtype = dict(data=dtype)
obj = SeriesParser(json, orient, dtype, convert_axes, convert_dates,
keep_default_dates, numpy, precise_float,
date_unit).parse()
return obj
class Parser(object):
_STAMP_UNITS = ('s', 'ms', 'us', 'ns')
_MIN_STAMPS = {
's': long(31536000),
'ms': long(31536000000),
'us': long(31536000000000),
'ns': long(31536000000000000)}
def __init__(self, json, orient, dtype=True, convert_axes=True,
convert_dates=True, keep_default_dates=False, numpy=False,
precise_float=False, date_unit=None):
self.json = json
if orient is None:
orient = self._default_orient
self.orient = orient
self.dtype = dtype
if orient == "split":
numpy = False
if date_unit is not None:
date_unit = date_unit.lower()
if date_unit not in self._STAMP_UNITS:
raise ValueError('date_unit must be one of %s' %
(self._STAMP_UNITS,))
self.min_stamp = self._MIN_STAMPS[date_unit]
else:
self.min_stamp = self._MIN_STAMPS['s']
self.numpy = numpy
self.precise_float = precise_float
self.convert_axes = convert_axes
self.convert_dates = convert_dates
self.date_unit = date_unit
self.keep_default_dates = keep_default_dates
self.obj = None
def check_keys_split(self, decoded):
"checks that dict has only the appropriate keys for orient='split'"
bad_keys = set(decoded.keys()).difference(set(self._split_keys))
if bad_keys:
bad_keys = ", ".join(bad_keys)
raise ValueError(u("JSON data had unexpected key(s): %s") %
com.pprint_thing(bad_keys))
def parse(self):
# try numpy
numpy = self.numpy
if numpy:
self._parse_numpy()
else:
self._parse_no_numpy()
if self.obj is None:
return None
if self.convert_axes:
self._convert_axes()
self._try_convert_types()
return self.obj
def _convert_axes(self):
""" try to convert axes """
for axis in self.obj._AXIS_NUMBERS.keys():
new_axis, result = self._try_convert_data(
axis, self.obj._get_axis(axis), use_dtypes=False,
convert_dates=True)
if result:
setattr(self.obj, axis, new_axis)
def _try_convert_types(self):
raise AbstractMethodError(self)
def _try_convert_data(self, name, data, use_dtypes=True,
convert_dates=True):
""" try to parse a ndarray like into a column by inferring dtype """
# don't try to coerce, unless a force conversion
if use_dtypes:
if self.dtype is False:
return data, False
elif self.dtype is True:
pass
else:
# dtype to force
dtype = (self.dtype.get(name)
if isinstance(self.dtype, dict) else self.dtype)
if dtype is not None:
try:
dtype = np.dtype(dtype)
return data.astype(dtype), True
except:
return data, False
if convert_dates:
new_data, result = self._try_convert_to_date(data)
if result:
return new_data, True
result = False
if data.dtype == 'object':
# try float
try:
data = data.astype('float64')
result = True
except:
pass
if data.dtype.kind == 'f':
if data.dtype != 'float64':
# coerce floats to 64
try:
data = data.astype('float64')
result = True
except:
pass
        # don't coerce 0-len data
if len(data) and (data.dtype == 'float' or data.dtype == 'object'):
# coerce ints if we can
try:
new_data = data.astype('int64')
if (new_data == data).all():
data = new_data
result = True
except:
pass
# coerce ints to 64
if data.dtype == 'int':
            # widen platform ints to int64
try:
data = data.astype('int64')
result = True
except:
pass
return data, result
def _try_convert_to_date(self, data):
""" try to parse a ndarray like into a date column
try to coerce object in epoch/iso formats and
        integer/float in epoch formats, return a boolean if parsing
was successful """
# no conversion on empty
if not len(data):
return data, False
new_data = data
if new_data.dtype == 'object':
try:
new_data = data.astype('int64')
except:
pass
# ignore numbers that are out of range
if issubclass(new_data.dtype.type, np.number):
in_range = (isnull(new_data.values) | (new_data > self.min_stamp) |
(new_data.values == iNaT))
if not in_range.all():
return data, False
date_units = (self.date_unit,) if self.date_unit else self._STAMP_UNITS
for date_unit in date_units:
try:
new_data = to_datetime(new_data, errors='raise',
unit=date_unit)
except OverflowError:
continue
except:
break
return new_data, True
return data, False
def _try_convert_dates(self):
raise AbstractMethodError(self)
class SeriesParser(Parser):
_default_orient = 'index'
_split_keys = ('name', 'index', 'data')
def _parse_no_numpy(self):
json = self.json
orient = self.orient
if orient == "split":
decoded = dict((str(k), v)
for k, v in compat.iteritems(loads(
json,
precise_float=self.precise_float)))
self.check_keys_split(decoded)
self.obj = Series(dtype=None, **decoded)
else:
self.obj = Series(
loads(json, precise_float=self.precise_float), dtype=None)
def _parse_numpy(self):
json = self.json
orient = self.orient
if orient == "split":
decoded = loads(json, dtype=None, numpy=True,
precise_float=self.precise_float)
decoded = dict((str(k), v) for k, v in compat.iteritems(decoded))
self.check_keys_split(decoded)
self.obj = Series(**decoded)
elif orient == "columns" or orient == "index":
self.obj = Series(*loads(json, dtype=None, numpy=True,
labelled=True,
precise_float=self.precise_float))
else:
self.obj = Series(loads(json, dtype=None, numpy=True,
precise_float=self.precise_float))
def _try_convert_types(self):
if self.obj is None:
return
obj, result = self._try_convert_data(
'data', self.obj, convert_dates=self.convert_dates)
if result:
self.obj = obj
class FrameParser(Parser):
_default_orient = 'columns'
_split_keys = ('columns', 'index', 'data')
def _parse_numpy(self):
json = self.json
orient = self.orient
if orient == "columns":
args = loads(json, dtype=None, numpy=True, labelled=True,
precise_float=self.precise_float)
if args:
args = (args[0].T, args[2], args[1])
self.obj = DataFrame(*args)
elif orient == "split":
decoded = loads(json, dtype=None, numpy=True,
precise_float=self.precise_float)
decoded = dict((str(k), v) for k, v in compat.iteritems(decoded))
self.check_keys_split(decoded)
self.obj = DataFrame(**decoded)
elif orient == "values":
self.obj = DataFrame(loads(json, dtype=None, numpy=True,
precise_float=self.precise_float))
else:
self.obj = DataFrame(*loads(json, dtype=None, numpy=True,
labelled=True,
precise_float=self.precise_float))
def _parse_no_numpy(self):
json = self.json
orient = self.orient
if orient == "columns":
self.obj = DataFrame(
loads(json, precise_float=self.precise_float), dtype=None)
elif orient == "split":
decoded = dict((str(k), v)
for k, v in compat.iteritems(loads(
json,
precise_float=self.precise_float)))
self.check_keys_split(decoded)
self.obj = DataFrame(dtype=None, **decoded)
elif orient == "index":
self.obj = DataFrame(
loads(json, precise_float=self.precise_float), dtype=None).T
else:
self.obj = DataFrame(
loads(json, precise_float=self.precise_float), dtype=None)
def _process_converter(self, f, filt=None):
""" take a conversion function and possibly recreate the frame """
if filt is None:
filt = lambda col, c: True
needs_new_obj = False
new_obj = dict()
for i, (col, c) in enumerate(self.obj.iteritems()):
if filt(col, c):
new_data, result = f(col, c)
if result:
c = new_data
needs_new_obj = True
new_obj[i] = c
if needs_new_obj:
# possibly handle dup columns
new_obj = DataFrame(new_obj, index=self.obj.index)
new_obj.columns = self.obj.columns
self.obj = new_obj
def _try_convert_types(self):
if self.obj is None:
return
if self.convert_dates:
self._try_convert_dates()
self._process_converter(
lambda col, c: self._try_convert_data(col, c, convert_dates=False))
def _try_convert_dates(self):
if self.obj is None:
return
# our columns to parse
convert_dates = self.convert_dates
if convert_dates is True:
convert_dates = []
convert_dates = set(convert_dates)
def is_ok(col):
""" return if this col is ok to try for a date parse """
if not isinstance(col, compat.string_types):
return False
col_lower = col.lower()
if (col_lower.endswith('_at') or
col_lower.endswith('_time') or
col_lower == 'modified' or
col_lower == 'date' or
col_lower == 'datetime' or
col_lower.startswith('timestamp')):
return True
return False
self._process_converter(
lambda col, c: self._try_convert_to_date(c),
lambda col, c: ((self.keep_default_dates and is_ok(col))
or col in convert_dates))
#----------------------------------------------------------------------
# JSON normalization routines
def nested_to_record(ds, prefix="", level=0):
"""a simplified json_normalize
converts a nested dict into a flat dict ("record"), unlike json_normalize,
it does not attempt to extract a subset of the data.
Parameters
----------
ds : dict or list of dicts
Returns
-------
d - dict or list of dicts, matching `ds`
Examples
--------
IN[52]: nested_to_record(dict(flat1=1,dict1=dict(c=1,d=2),
nested=dict(e=dict(c=1,d=2),d=2)))
Out[52]:
{'dict1.c': 1,
'dict1.d': 2,
'flat1': 1,
'nested.d': 2,
'nested.e.c': 1,
'nested.e.d': 2}
"""
singleton = False
if isinstance(ds, dict):
ds = [ds]
singleton = True
new_ds = []
for d in ds:
new_d = copy.deepcopy(d)
for k, v in d.items():
# each key gets renamed with prefix
if level == 0:
newkey = str(k)
else:
newkey = prefix + '.' + str(k)
            # only dicts get recurse-flattened
# only at level>1 do we rename the rest of the keys
if not isinstance(v, dict):
if level != 0: # so we skip copying for top level, common case
v = new_d.pop(k)
new_d[newkey] = v
continue
else:
v = new_d.pop(k)
new_d.update(nested_to_record(v, newkey, level+1))
new_ds.append(new_d)
if singleton:
return new_ds[0]
return new_ds
def json_normalize(data, record_path=None, meta=None,
meta_prefix=None,
record_prefix=None):
"""
"Normalize" semi-structured JSON data into a flat table
Parameters
----------
data : dict or list of dicts
Unserialized JSON objects
record_path : string or list of strings, default None
Path in each object to list of records. If not passed, data will be
assumed to be an array of records
meta : list of paths (string or list of strings)
Fields to use as metadata for each record in resulting table
record_prefix : string, default None
        If not None, prefix each record field name with this string,
        e.g. a prefix of 'foo.bar.' yields foo.bar.field
meta_prefix : string, default None
Returns
-------
frame : DataFrame
Examples
--------
>>> data = [{'state': 'Florida',
... 'shortname': 'FL',
... 'info': {
... 'governor': 'Rick Scott'
... },
... 'counties': [{'name': 'Dade', 'population': 12345},
... {'name': 'Broward', 'population': 40000},
... {'name': 'Palm Beach', 'population': 60000}]},
... {'state': 'Ohio',
... 'shortname': 'OH',
... 'info': {
... 'governor': 'John Kasich'
... },
... 'counties': [{'name': 'Summit', 'population': 1234},
... {'name': 'Cuyahoga', 'population': 1337}]}]
>>> from pandas.io.json import json_normalize
>>> result = json_normalize(data, 'counties', ['state', 'shortname',
... ['info', 'governor']])
>>> result
name population info.governor state shortname
0 Dade 12345 Rick Scott Florida FL
1 Broward 40000 Rick Scott Florida FL
2 Palm Beach 60000 Rick Scott Florida FL
3 Summit 1234 John Kasich Ohio OH
4 Cuyahoga 1337 John Kasich Ohio OH
"""
def _pull_field(js, spec):
result = js
if isinstance(spec, list):
for field in spec:
result = result[field]
else:
result = result[spec]
return result
# A bit of a hackjob
if isinstance(data, dict):
data = [data]
if record_path is None:
if any([isinstance(x, dict) for x in compat.itervalues(data[0])]):
# naive normalization, this is idempotent for flat records
# and potentially will inflate the data considerably for
# deeply nested structures:
            # {VeryLong: {b: 1, c: 2}} -> {VeryLong.b: 1, VeryLong.c: 2}
#
# TODO: handle record value which are lists, at least error
# reasonably
data = nested_to_record(data)
return DataFrame(data)
elif not isinstance(record_path, list):
record_path = [record_path]
if meta is None:
meta = []
elif not isinstance(meta, list):
meta = [meta]
for i, x in enumerate(meta):
if not isinstance(x, list):
meta[i] = [x]
# Disastrously inefficient for now
records = []
lengths = []
meta_vals = defaultdict(list)
meta_keys = ['.'.join(val) for val in meta]
def _recursive_extract(data, path, seen_meta, level=0):
if len(path) > 1:
for obj in data:
for val, key in zip(meta, meta_keys):
if level + 1 == len(val):
seen_meta[key] = _pull_field(obj, val[-1])
_recursive_extract(obj[path[0]], path[1:],
seen_meta, level=level+1)
else:
for obj in data:
recs = _pull_field(obj, path[0])
# For repeating the metadata later
lengths.append(len(recs))
for val, key in zip(meta, meta_keys):
if level + 1 > len(val):
meta_val = seen_meta[key]
else:
meta_val = _pull_field(obj, val[level:])
meta_vals[key].append(meta_val)
records.extend(recs)
_recursive_extract(data, record_path, {}, level=0)
result = DataFrame(records)
if record_prefix is not None:
result.rename(columns=lambda x: record_prefix + x, inplace=True)
# Data types, a problem
for k, v in compat.iteritems(meta_vals):
if meta_prefix is not None:
k = meta_prefix + k
if k in result:
raise ValueError('Conflicting metadata name %s, '
'need distinguishing prefix ' % k)
result[k] = np.array(v).repeat(lengths)
return result
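# Illustrative round trip (a sketch, not part of the module):
#     df = DataFrame({'a': [1, 2], 'b': ['x', 'y']})
#     s = df.to_json(orient='split')       # DataFrame.to_json delegates to to_json above
#     df2 = read_json(s, orient='split')
#     assert df.equals(df2)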
| 33.281491
| 79
| 0.543351
|
2bcca943fbd5e23655352bac497913ac5109a5ff
| 1,150
|
py
|
Python
|
cortical_breach_detection/metrics.py
|
benjamindkilleen/SPIE-2022_cortical-breach-anticipation
|
0238bf8242d3957ab6f840601138f94df722e8c0
|
[
"MIT"
] | 2
|
2022-02-25T07:08:57.000Z
|
2022-02-27T16:50:45.000Z
|
cortical_breach_detection/metrics.py
|
benjamindkilleen/SPIE-2022_cortical-breach-anticipation
|
0238bf8242d3957ab6f840601138f94df722e8c0
|
[
"MIT"
] | null | null | null |
cortical_breach_detection/metrics.py
|
benjamindkilleen/SPIE-2022_cortical-breach-anticipation
|
0238bf8242d3957ab6f840601138f94df722e8c0
|
[
"MIT"
] | null | null | null |
import logging
import torch
import torchmetrics
from torch import nn
log = logging.getLogger(__name__)
class AggregateMetrics(nn.Module):
def __init__(self, *metrics):
super().__init__()
self.trues = []
self.preds = []
self.metrics = metrics
def forward(self, y_pred, y_true, mode="train"):
self.trues.append(y_true.cpu())
self.preds.append(y_pred.detach().cpu())
trues = torch.cat(self.trues, dim=0)
preds = torch.cat(self.preds, dim=0)
results = {}
if len(trues.unique()) != 1:
for metric in self.metrics:
try:
results.update(
{
f"{mode}/{metric.__class__.__name__.lower()}": metric(
preds.cpu(), trues.cpu()
)
}
)
except ValueError:
log.error(f"{metric.__class__.__name__} metric failed.")
continue
return results
def reset(self):
self.trues = []
self.preds = []
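# Illustrative usage (a sketch; assumes a recent torchmetrics where Accuracy is
# constructed this way -- the exact signature varies across versions):
#     agg = AggregateMetrics(torchmetrics.Accuracy(task="binary"))
#     results = agg(y_pred, y_true, mode="val")   # dict like {"val/accuracy": ...}
#     agg.reset()                                 # clear accumulated batches between epochs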
| 26.744186
| 82
| 0.482609
|
c5b0f278af3f34bdb545f3c5c17232450f0191f8
| 353
|
py
|
Python
|
Eager/apimgt/adaptor_factory.py
|
UCSB-CS-RACELab/eager-appscale
|
d58fe64bb867ef58af19c1d84a5e1ec68ecddd3d
|
[
"Apache-2.0"
] | 3
|
2016-06-12T01:18:49.000Z
|
2018-07-16T18:20:23.000Z
|
Eager/apimgt/adaptor_factory.py
|
UCSB-CS-RACELab/eager-appscale
|
d58fe64bb867ef58af19c1d84a5e1ec68ecddd3d
|
[
"Apache-2.0"
] | null | null | null |
Eager/apimgt/adaptor_factory.py
|
UCSB-CS-RACELab/eager-appscale
|
d58fe64bb867ef58af19c1d84a5e1ec68ecddd3d
|
[
"Apache-2.0"
] | 1
|
2020-05-25T02:59:15.000Z
|
2020-05-25T02:59:15.000Z
|
from apimgt.wso2am_14 import WSO2APIManager14Adaptor
CONF_AM = 'api_manager'
CONF_AM_PROVIDER = 'provider'
def get_adaptor(conf):
provider_name = conf[CONF_AM][CONF_AM_PROVIDER]
if provider_name == 'wso2am1.4':
return WSO2APIManager14Adaptor(conf[CONF_AM])
else:
raise Exception('Unknown API Manager provider: {0}'.format(provider_name))
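# Illustrative call (a sketch; only the keys read above are shown -- a real WSO2
# config will need whatever WSO2APIManager14Adaptor expects):
#     adaptor = get_adaptor({'api_manager': {'provider': 'wso2am1.4'}})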
| 32.090909
| 78
| 0.776204
|
ce674cd52f34bd02677715cbd6b4e8b331a7bcac
| 14,528
|
py
|
Python
|
skimage/filters/tests/test_thresholding.py
|
portugueslab/scikit-image
|
0fa3bcb118bb208a0cc7d3e8b96cd96c1ce7a75b
|
[
"BSD-3-Clause"
] | null | null | null |
skimage/filters/tests/test_thresholding.py
|
portugueslab/scikit-image
|
0fa3bcb118bb208a0cc7d3e8b96cd96c1ce7a75b
|
[
"BSD-3-Clause"
] | null | null | null |
skimage/filters/tests/test_thresholding.py
|
portugueslab/scikit-image
|
0fa3bcb118bb208a0cc7d3e8b96cd96c1ce7a75b
|
[
"BSD-3-Clause"
] | null | null | null |
import numpy as np
from scipy import ndimage as ndi
import skimage
from skimage import data
from skimage._shared._warnings import expected_warnings
from skimage.filters.thresholding import (threshold_local,
threshold_adaptive,
threshold_otsu,
threshold_li,
threshold_yen,
threshold_isodata,
threshold_niblack,
threshold_sauvola,
threshold_mean,
threshold_triangle,
threshold_minimum,
_mean_std)
from skimage._shared import testing
from skimage._shared.testing import assert_equal, assert_almost_equal
class TestSimpleImage():
def setup(self):
self.image = np.array([[0, 0, 1, 3, 5],
[0, 1, 4, 3, 4],
[1, 2, 5, 4, 1],
[2, 4, 5, 2, 1],
[4, 5, 1, 0, 0]], dtype=int)
def test_otsu(self):
assert threshold_otsu(self.image) == 2
def test_otsu_negative_int(self):
image = self.image - 2
assert threshold_otsu(image) == 0
def test_otsu_float_image(self):
image = np.float64(self.image)
assert 2 <= threshold_otsu(image) < 3
def test_li(self):
assert int(threshold_li(self.image)) == 2
def test_li_negative_int(self):
image = self.image - 2
assert int(threshold_li(image)) == 0
def test_li_float_image(self):
image = np.float64(self.image)
assert 2 <= threshold_li(image) < 3
def test_li_constant_image(self):
with testing.raises(ValueError):
threshold_li(np.ones((10, 10)))
def test_yen(self):
assert threshold_yen(self.image) == 2
def test_yen_negative_int(self):
image = self.image - 2
assert threshold_yen(image) == 0
def test_yen_float_image(self):
image = np.float64(self.image)
assert 2 <= threshold_yen(image) < 3
def test_yen_arange(self):
image = np.arange(256)
assert threshold_yen(image) == 127
def test_yen_binary(self):
image = np.zeros([2, 256], dtype=np.uint8)
image[0] = 255
assert threshold_yen(image) < 1
def test_yen_blank_zero(self):
image = np.zeros((5, 5), dtype=np.uint8)
assert threshold_yen(image) == 0
def test_yen_blank_max(self):
image = np.empty((5, 5), dtype=np.uint8)
image.fill(255)
assert threshold_yen(image) == 255
def test_isodata(self):
assert threshold_isodata(self.image) == 2
assert threshold_isodata(self.image, return_all=True) == [2]
def test_isodata_blank_zero(self):
image = np.zeros((5, 5), dtype=np.uint8)
assert threshold_isodata(image) == 0
assert threshold_isodata(image, return_all=True) == [0]
def test_isodata_linspace(self):
image = np.linspace(-127, 0, 256)
assert -63.8 < threshold_isodata(image) < -63.6
assert_almost_equal(threshold_isodata(image, return_all=True),
[-63.74804688, -63.25195312])
def test_isodata_16bit(self):
np.random.seed(0)
imfloat = np.random.rand(256, 256)
assert 0.49 < threshold_isodata(imfloat, nbins=1024) < 0.51
assert all(0.49 < threshold_isodata(imfloat, nbins=1024,
return_all=True))
def test_threshold_local_equals_adaptive(self):
def func(arr):
return arr.sum() / arr.shape[0]
with expected_warnings(['deprecated', 'return value']):
thresholded_original = threshold_adaptive(self.image, 3,
method='generic',
param=func)
threshold_new = threshold_local(self.image, 3, method='generic',
param=func)
assert_equal(thresholded_original, self.image > threshold_new)
def test_threshold_adaptive_generic(self):
def func(arr):
return arr.sum() / arr.shape[0]
ref = np.array(
[[False, False, False, False, True],
[False, False, True, False, True],
[False, False, True, True, False],
[False, True, True, False, False],
[ True, True, False, False, False]]
)
with expected_warnings(['deprecated', 'return value']):
out = threshold_adaptive(self.image, 3, method='generic',
param=func)
assert_equal(ref, out)
def test_threshold_local_gaussian(self):
ref = np.array(
[[False, False, False, False, True],
[False, False, True, False, True],
[False, False, True, True, False],
[False, True, True, False, False],
[ True, True, False, False, False]]
)
out = threshold_local(self.image, 3, method='gaussian')
assert_equal(ref, self.image > out)
out = threshold_local(self.image, 3, method='gaussian',
param=1./3.)
assert_equal(ref, self.image > out)
def test_threshold_local_mean(self):
ref = np.array(
[[False, False, False, False, True],
[False, False, True, False, True],
[False, False, True, True, False],
[False, True, True, False, False],
[ True, True, False, False, False]]
)
out = threshold_local(self.image, 3, method='mean')
assert_equal(ref, self.image > out)
def test_threshold_local_median(self):
ref = np.array(
[[False, False, False, False, True],
[False, False, True, False, False],
[False, False, True, False, False],
[False, False, True, True, False],
[False, True, False, False, False]]
)
out = threshold_local(self.image, 3, method='median')
assert_equal(ref, self.image > out)
def test_threshold_niblack(self):
ref = np.array(
[[False, False, False, True, True],
[False, True, True, True, True],
[False, True, True, True, False],
[False, True, True, True, True],
[True, True, False, False, False]]
)
thres = threshold_niblack(self.image, window_size=3, k=0.5)
out = self.image > thres
assert_equal(ref, out)
def test_threshold_sauvola(self):
ref = np.array(
[[False, False, False, True, True],
[False, False, True, True, True],
[False, False, True, True, False],
[False, True, True, True, False],
[True, True, False, False, False]]
)
thres = threshold_sauvola(self.image, window_size=3, k=0.2, r=128)
out = self.image > thres
assert_equal(ref, out)
def test_otsu_camera_image():
camera = skimage.img_as_ubyte(data.camera())
assert 86 < threshold_otsu(camera) < 88
def test_otsu_coins_image():
coins = skimage.img_as_ubyte(data.coins())
assert 106 < threshold_otsu(coins) < 108
def test_otsu_coins_image_as_float():
coins = skimage.img_as_float(data.coins())
assert 0.41 < threshold_otsu(coins) < 0.42
def test_otsu_astro_image():
img = skimage.img_as_ubyte(data.astronaut())
with expected_warnings(['grayscale']):
assert 109 < threshold_otsu(img) < 111
def test_otsu_one_color_image():
img = np.ones((10, 10), dtype=np.uint8)
with testing.raises(ValueError):
threshold_otsu(img)
def test_li_camera_image():
camera = skimage.img_as_ubyte(data.camera())
assert 63 < threshold_li(camera) < 65
def test_li_coins_image():
coins = skimage.img_as_ubyte(data.coins())
assert 95 < threshold_li(coins) < 97
def test_li_coins_image_as_float():
coins = skimage.img_as_float(data.coins())
assert 0.37 < threshold_li(coins) < 0.38
def test_li_astro_image():
img = skimage.img_as_ubyte(data.astronaut())
assert 66 < threshold_li(img) < 68
def test_yen_camera_image():
camera = skimage.img_as_ubyte(data.camera())
assert 197 < threshold_yen(camera) < 199
def test_yen_coins_image():
coins = skimage.img_as_ubyte(data.coins())
assert 109 < threshold_yen(coins) < 111
def test_yen_coins_image_as_float():
coins = skimage.img_as_float(data.coins())
assert 0.43 < threshold_yen(coins) < 0.44
def test_adaptive_even_block_size_error():
img = data.camera()
with testing.raises(ValueError):
threshold_local(img, block_size=4)
def test_isodata_camera_image():
camera = skimage.img_as_ubyte(data.camera())
threshold = threshold_isodata(camera)
assert np.floor((camera[camera <= threshold].mean() +
camera[camera > threshold].mean()) / 2.0) == threshold
assert threshold == 87
assert threshold_isodata(camera, return_all=True) == [87]
def test_isodata_coins_image():
coins = skimage.img_as_ubyte(data.coins())
threshold = threshold_isodata(coins)
assert np.floor((coins[coins <= threshold].mean() +
coins[coins > threshold].mean()) / 2.0) == threshold
assert threshold == 107
assert threshold_isodata(coins, return_all=True) == [107]
def test_isodata_moon_image():
moon = skimage.img_as_ubyte(data.moon())
threshold = threshold_isodata(moon)
assert np.floor((moon[moon <= threshold].mean() +
moon[moon > threshold].mean()) / 2.0) == threshold
assert threshold == 86
thresholds = threshold_isodata(moon, return_all=True)
for threshold in thresholds:
assert np.floor((moon[moon <= threshold].mean() +
moon[moon > threshold].mean()) / 2.0) == threshold
assert_equal(thresholds, [86, 87, 88, 122, 123, 124, 139, 140])
def test_isodata_moon_image_negative_int():
moon = skimage.img_as_ubyte(data.moon()).astype(np.int32)
moon -= 100
threshold = threshold_isodata(moon)
assert np.floor((moon[moon <= threshold].mean() +
moon[moon > threshold].mean()) / 2.0) == threshold
assert threshold == -14
thresholds = threshold_isodata(moon, return_all=True)
for threshold in thresholds:
assert np.floor((moon[moon <= threshold].mean() +
moon[moon > threshold].mean()) / 2.0) == threshold
assert_equal(thresholds, [-14, -13, -12, 22, 23, 24, 39, 40])
def test_isodata_moon_image_negative_float():
moon = skimage.img_as_ubyte(data.moon()).astype(np.float64)
moon -= 100
assert -14 < threshold_isodata(moon) < -13
thresholds = threshold_isodata(moon, return_all=True)
assert_almost_equal(thresholds,
[-13.83789062, -12.84179688, -11.84570312, 22.02148438,
23.01757812, 24.01367188, 38.95507812, 39.95117188])
def test_threshold_minimum():
camera = skimage.img_as_ubyte(data.camera())
threshold = threshold_minimum(camera)
assert_equal(threshold, 76)
astronaut = skimage.img_as_ubyte(data.astronaut())
threshold = threshold_minimum(astronaut)
assert_equal(threshold, 114)
def test_threshold_minimum_synthetic():
img = np.arange(25*25, dtype=np.uint8).reshape((25, 25))
img[0:9, :] = 50
img[14:25, :] = 250
threshold = threshold_minimum(img)
assert_equal(threshold, 95)
def test_threshold_minimum_failure():
img = np.zeros((16*16), dtype=np.uint8)
with testing.raises(RuntimeError):
threshold_minimum(img)
def test_mean():
img = np.zeros((2, 6))
img[:, 2:4] = 1
img[:, 4:] = 2
assert(threshold_mean(img) == 1.)
def test_triangle_uint_images():
assert(threshold_triangle(np.invert(data.text())) == 151)
assert(threshold_triangle(data.text()) == 104)
assert(threshold_triangle(data.coins()) == 80)
assert(threshold_triangle(np.invert(data.coins())) == 175)
def test_triangle_float_images():
text = data.text()
int_bins = text.max() - text.min() + 1
# Set nbins to match the uint case and threshold as float.
assert(round(threshold_triangle(
        text.astype(np.float64), nbins=int_bins)) == 104)
# Check that rescaling image to floats in unit interval is equivalent.
assert(round(threshold_triangle(text / 255., nbins=int_bins) * 255) == 104)
# Repeat for inverted image.
assert(round(threshold_triangle(
        np.invert(text).astype(np.float64), nbins=int_bins)) == 151)
assert (round(threshold_triangle(
np.invert(text) / 255., nbins=int_bins) * 255) == 151)
def test_triangle_flip():
# Depending on the skewness, the algorithm flips the histogram.
# We check that the flip doesn't affect too much the result.
img = data.camera()
inv_img = np.invert(img)
t = threshold_triangle(inv_img)
t_inv_img = inv_img > t
t_inv_inv_img = np.invert(t_inv_img)
t = threshold_triangle(img)
t_img = img > t
# Check that most of the pixels are identical
# See numpy #7685 for a future np.testing API
unequal_pos = np.where(t_img.ravel() != t_inv_inv_img.ravel())
    assert len(unequal_pos[0]) / t_img.size < 1e-2
def test_mean_std_2d():
image = np.random.rand(256, 256)
window_size = 11
m, s = _mean_std(image, w=window_size)
mean_kernel = np.ones((window_size,) * 2) / window_size**2
expected_m = ndi.convolve(image, mean_kernel, mode='mirror')
np.testing.assert_allclose(m, expected_m)
expected_s = ndi.generic_filter(image, np.std, size=window_size,
mode='mirror')
np.testing.assert_allclose(s, expected_s)
def test_mean_std_3d():
image = np.random.rand(40, 40, 40)
window_size = 5
mean_kernel = np.ones((window_size,) * 3) / window_size**3
m, s = _mean_std(image, w=window_size)
expected_m = ndi.convolve(image, mean_kernel, mode='mirror')
np.testing.assert_allclose(m, expected_m)
expected_s = ndi.generic_filter(image, np.std, size=window_size,
mode='mirror')
np.testing.assert_allclose(s, expected_s)
| 34.264151
| 79
| 0.598293
|
53fc0cc7b8a1e19b2603e5919fe50cbdb09d740e
| 306
|
py
|
Python
|
src/wai/common/_functions.py
|
waikato-datamining/wai-common
|
bf3d7ae6e01bcb7ffe9f5c2b5d10a05908a68c34
|
[
"MIT"
] | null | null | null |
src/wai/common/_functions.py
|
waikato-datamining/wai-common
|
bf3d7ae6e01bcb7ffe9f5c2b5d10a05908a68c34
|
[
"MIT"
] | 8
|
2020-07-01T02:11:31.000Z
|
2020-12-17T01:57:17.000Z
|
src/wai/common/_functions.py
|
waikato-datamining/wai-common
|
bf3d7ae6e01bcb7ffe9f5c2b5d10a05908a68c34
|
[
"MIT"
] | null | null | null |
from collections.abc import Hashable
from typing import Any
def is_hashable(obj: Any) -> bool:
"""
Checks if the given object is hashable.
:param obj: The object to check.
:return: True if the object is hashable,
False if not.
"""
return isinstance(obj, Hashable)
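# A quick illustration (not part of the original module): mutable containers
# such as lists are unhashable, while strings and tuples of hashables are:
#
#     is_hashable([1, 2])    # False
#     is_hashable("abc")     # True
#     is_hashable((1, "a"))  # True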
| 21.857143
| 47
| 0.640523
|
11151ef7666dcaec889aa6e610e2442d73e2ccf0
| 356
|
py
|
Python
|
frappe/www/error.py
|
ssuda777/frappe
|
d3f3df2ce15154aecc1d9d6d07d947e72c2e8c6e
|
[
"MIT"
] | 1
|
2021-12-18T18:37:29.000Z
|
2021-12-18T18:37:29.000Z
|
frappe/www/error.py
|
JMBodz/frappe
|
eb218a06d1cbfc3a8f1cc00ba8dac2c927d2f71d
|
[
"MIT"
] | 3
|
2021-02-27T11:50:14.000Z
|
2021-05-03T06:48:49.000Z
|
frappe/www/error.py
|
JMBodz/frappe
|
eb218a06d1cbfc3a8f1cc00ba8dac2c927d2f71d
|
[
"MIT"
] | 2
|
2021-09-02T09:51:55.000Z
|
2021-09-07T04:55:42.000Z
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
import frappe
no_cache = 1
def get_context(context):
if frappe.flags.in_migrate: return
context.http_status_code = 500
	print(frappe.get_traceback())
return {"error": frappe.get_traceback().replace("<", "<").replace(">", ">") }
| 27.384615
| 84
| 0.716292
|
af05a20138af62725e604eac325f44cbd995fb4f
| 398
|
py
|
Python
|
lib/appengine-ndb-experiment/appengine_config.py
|
ortiz1605/webapp-improved
|
0e6218dcd3ba2e0ba0c6a6c87ba4fbe1eab287c4
|
[
"Apache-2.0"
] | 15
|
2015-01-18T17:30:31.000Z
|
2019-10-25T17:14:41.000Z
|
lib/appengine-ndb-experiment/appengine_config.py
|
ortiz1605/webapp-improved
|
0e6218dcd3ba2e0ba0c6a6c87ba4fbe1eab287c4
|
[
"Apache-2.0"
] | null | null | null |
lib/appengine-ndb-experiment/appengine_config.py
|
ortiz1605/webapp-improved
|
0e6218dcd3ba2e0ba0c6a6c87ba4fbe1eab287c4
|
[
"Apache-2.0"
] | 2
|
2015-06-17T23:01:13.000Z
|
2015-07-08T23:10:19.000Z
|
import logging
def webapp_add_wsgi_middleware(app):
  try:
    from google.appengine.ext.appstats import recording
  except ImportError as err:
    logging.info('Failed to import recording: %s', err)
else:
app = recording.appstats_wsgi_middleware(app)
return app
appstats_KEY_DISTANCE = 10
appstats_MAX_REPR = 1000
appstats_MAX_STACK = 20
appstats_FILTER_LIST = [
  {'PATH_INFO': r'!^/favicon\.ico$'},
]
| 23.411765
| 55
| 0.736181
|
b5d1f82c2954a7ef35fe4f5376f76c4161679982
| 8,817
|
py
|
Python
|
stactools_threedep/stactools/threedep/metadata.py
|
jamesvrt/stactools
|
99c2ae556aad61cb854bd0edd8000a7ac42cfa06
|
[
"Apache-2.0"
] | null | null | null |
stactools_threedep/stactools/threedep/metadata.py
|
jamesvrt/stactools
|
99c2ae556aad61cb854bd0edd8000a7ac42cfa06
|
[
"Apache-2.0"
] | null | null | null |
stactools_threedep/stactools/threedep/metadata.py
|
jamesvrt/stactools
|
99c2ae556aad61cb854bd0edd8000a7ac42cfa06
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import annotations
import datetime
import math
from typing import Union, Optional
from xml.etree import ElementTree
from xml.etree.ElementTree import Element
from shapely.geometry import box, mapping
from pystac import Asset, MediaType, Link
from stactools.core import io
from stactools.core.io import ReadHrefModifier
from stactools.core.projection import reproject_geom
from stactools.threedep.constants import THREEDEP_CRS, THREEDEP_EPSG, DEFAULT_BASE
from stactools.threedep import utils
class Metadata:
"""3DEP file metadata."""
@classmethod
def from_href(
cls,
href: str,
read_href_modifier: Optional[ReadHrefModifier] = None) -> Metadata:
"""Creates a metadata from an href to the XML metadata file."""
text = io.read_text(href, read_href_modifier)
element_tree = ElementTree.fromstring(text)
return cls(element_tree)
@classmethod
def from_product_and_id(cls,
product: str,
id: str,
                            base: Optional[str] = None) -> Metadata:
"""Creates a Metadata from a product and id."""
if base is None:
base = DEFAULT_BASE
href = utils.path(product, id, extension="xml", base=base)
return cls.from_href(href)
def __init__(self, xml: Element):
"""Creates a new metadata object from XML metadata."""
self.title = xml.findtext("./idinfo/citation/citeinfo/title")
self.description = xml.findtext("./idinfo/descript/abstract")
bounding = xml.find("./idinfo/spdom/bounding")
self.minx = bounding.findtext("./westbc")
self.miny = bounding.findtext("./southbc")
self.maxx = bounding.findtext("./eastbc")
self.maxy = bounding.findtext("./northbc")
self.pubdate = xml.findtext("./idinfo/citation/citeinfo/pubdate")
self.begdate = xml.findtext(
"./idinfo/timeperd/timeinfo/rngdates/begdate")
self.enddate = xml.findtext(
"./idinfo/timeperd/timeinfo/rngdates/enddate")
self.current = xml.findtext("./idinfo/timeperd/current")
self.rowcount = xml.findtext("./spdoinfo/rastinfo/rowcount")
self.colcount = xml.findtext("./spdoinfo/rastinfo/colcount")
self.latres = xml.findtext("./spref/horizsys/geograph/latres")
self.longres = xml.findtext("./spref/horizsys/geograph/longres")
tiff_href = xml.findtext(
"./distinfo/stdorder/digform/digtopt/onlinopt/computer/networka/networkr"
)
parts = tiff_href.split('/')[-4:]
self.product = parts[0]
self.id = parts[2]
@property
def stac_id(self) -> str:
"""Returns the STAC ID of this metadata.
This is the id plus the product, e.g. if the filename of the tif is
"USGS_1_n40w105.tif", then the STAC id is "n40w105-1".
"""
return "{}-{}".format(self.id, self.product)
@property
def geometry(self) -> dict:
"""Returns this item's geometry in WGS84."""
original_bbox = [
float(self.minx),
float(self.miny),
float(self.maxx),
float(self.maxy)
]
return reproject_geom(THREEDEP_CRS, "EPSG:4326",
mapping(box(*original_bbox)))
@property
def datetime(self) -> Union[datetime.datetime, None]:
"""Returns the collection publication datetime."""
if self.current == "publication date":
return _format_date(self.pubdate)
else:
raise NotImplementedError
@property
def start_datetime(self) -> Union[datetime.datetime, None]:
"""Returns the start datetime for this record.
This can be a while ago, since the national elevation dataset was
originally derived from direct survey data.
"""
return _format_date(self.begdate)
@property
def end_datetime(self) -> Union[datetime.datetime, None]:
"""Returns the end datetime for this record."""
return _format_date(self.enddate, end_of_year=True)
@property
def gsd(self) -> float:
"""Returns the nominal ground sample distance from these metadata."""
if self.product == "1":
return 30
elif self.product == "13":
return 10
else:
raise NotImplementedError
def data_asset(self, base: str = DEFAULT_BASE) -> Asset:
"""Returns the data asset (aka the tiff file)."""
return Asset(href=self._asset_href_with_extension(base, "tif"),
title=self.title,
description=self.description,
media_type=MediaType.COG,
roles=["data"])
def metadata_asset(self, base: str = DEFAULT_BASE) -> Asset:
"""Returns the data asset (aka the tiff file)."""
return Asset(href=self._asset_href_with_extension(base, "xml"),
media_type=MediaType.XML,
roles=["metadata"])
def thumbnail_asset(self, base: str = DEFAULT_BASE) -> Asset:
"""Returns the thumbnail asset."""
return Asset(href=self._asset_href_with_extension(base, "jpg"),
media_type=MediaType.JPEG,
roles=["thumbnail"])
def gpkg_asset(self, base: str = DEFAULT_BASE) -> Asset:
"""Returns the geopackage asset."""
return Asset(href=self._asset_href_with_extension(base,
"gpkg",
id_only=True),
media_type=MediaType.GEOPACKAGE,
roles=["metadata"])
def via_link(self, base: str = DEFAULT_BASE) -> Link:
"""Returns the via link for this file."""
return Link("via", self._asset_href_with_extension(base, "xml"))
@property
def projection_extension_dict(self) -> dict:
"""Returns a dictionary of values to be applied to the projection extension."""
shape = [int(self.rowcount), int(self.colcount)]
transform = [
float(self.longres),
0.0,
float(self.minx),
0.0,
-float(self.latres),
float(self.maxy),
0.0,
0.0,
1.0,
]
return {
"epsg": THREEDEP_EPSG,
"shape": shape,
"transform": transform,
}
@property
def region(self) -> str:
"""Returns this objects 3dep "region".
Region is defined as a 10x10 lat/lon box that nominally contains this item.
E.g. for n41w106, the region would be n40w110. This is used mostly for
creating subcatalogs for STACBrowser.
"""
        n_or_s = self.id[0]
lat = float(self.id[1:3])
if n_or_s == "s":
lat = -lat
lat = math.floor(lat / 10) * 10
e_or_w = self.id[3]
lon = float(self.id[4:])
if e_or_w == "w":
lon = -lon
lon = math.floor(lon / 10) * 10
return f"{n_or_s}{abs(lat)}{e_or_w}{abs(lon)}"
def _asset_href_with_extension(self,
base: str,
extension: str,
id_only: bool = False) -> str:
if base is None:
base = DEFAULT_BASE
return utils.path(self.product,
self.id,
base=base,
extension=extension,
id_only=id_only)
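# A brief usage sketch (illustrative only; "n40w105" is an example tile id,
# and network access to the default base URL is assumed):
#
#     metadata = Metadata.from_product_and_id("1", "n40w105")
#     geometry = metadata.geometry       # WGS84 GeoJSON-like dict
#     cog_asset = metadata.data_asset()  # COG asset for the tile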
def _format_date(date: str,
end_of_year: bool = False) -> Union[datetime.datetime, None]:
if len(date) == 4:
year = int(date)
if end_of_year:
month = 12
day = 31
else:
month = 1
day = 1
if year < 1800 or year > datetime.date.today().year:
return None # There's some bad metadata in the USGS records
else:
return datetime.datetime(year,
month,
day,
0,
0,
0,
tzinfo=datetime.timezone.utc)
elif len(date) == 8:
year = int(date[0:4])
month = int(date[4:6])
day = int(date[6:8])
return datetime.datetime(year,
month,
day,
0,
0,
0,
tzinfo=datetime.timezone.utc)
else:
return None
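# Behaviour sketch for _format_date (illustrative values):
#
#     _format_date("2013")                     -> 2013-01-01 00:00:00+00:00
#     _format_date("2013", end_of_year=True)   -> 2013-12-31 00:00:00+00:00
#     _format_date("20130215")                 -> 2013-02-15 00:00:00+00:00
#     _format_date("13")                       -> None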
| 36.891213
| 87
| 0.540887
|
b7051989265ca6e31c438b2a00e63e339e49efc8
| 2,445
|
py
|
Python
|
homeassistant/components/tradfri/switch.py
|
DoctorU/core
|
5b218d7e1c4164e32d41473977459cbaf23adf42
|
[
"Apache-2.0"
] | 5
|
2017-01-26T16:33:09.000Z
|
2018-07-20T13:50:47.000Z
|
homeassistant/components/tradfri/switch.py
|
DoctorU/core
|
5b218d7e1c4164e32d41473977459cbaf23adf42
|
[
"Apache-2.0"
] | 87
|
2020-07-06T22:22:54.000Z
|
2022-03-31T06:01:46.000Z
|
homeassistant/components/tradfri/switch.py
|
DoctorU/core
|
5b218d7e1c4164e32d41473977459cbaf23adf42
|
[
"Apache-2.0"
] | 3
|
2021-05-31T15:32:08.000Z
|
2021-08-10T22:08:42.000Z
|
"""Support for IKEA Tradfri switches."""
from __future__ import annotations
from collections.abc import Callable
from typing import Any, cast
from pytradfri.command import Command
from homeassistant.components.switch import SwitchEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .base_class import TradfriBaseDevice
from .const import CONF_GATEWAY_ID, DEVICES, DOMAIN, KEY_API
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Load Tradfri switches based on a config entry."""
gateway_id = config_entry.data[CONF_GATEWAY_ID]
tradfri_data = hass.data[DOMAIN][config_entry.entry_id]
api = tradfri_data[KEY_API]
devices = tradfri_data[DEVICES]
switches = [dev for dev in devices if dev.has_socket_control]
if switches:
async_add_entities(
TradfriSwitch(switch, api, gateway_id) for switch in switches
)
class TradfriSwitch(TradfriBaseDevice, SwitchEntity):
"""The platform class required by Home Assistant."""
def __init__(
self,
device: Command,
api: Callable[[Command | list[Command]], Any],
gateway_id: str,
) -> None:
"""Initialize a switch."""
super().__init__(device, api, gateway_id)
self._attr_unique_id = f"{gateway_id}-{device.id}"
def _refresh(self, device: Command) -> None:
"""Refresh the switch data."""
super()._refresh(device)
# Caching of switch control and switch object
self._device_control = device.socket_control
self._device_data = device.socket_control.sockets[0]
@property
def is_on(self) -> bool:
"""Return true if switch is on."""
if not self._device_data:
return False
return cast(bool, self._device_data.state)
async def async_turn_off(self, **kwargs: Any) -> None:
"""Instruct the switch to turn off."""
if not self._device_control:
return None
await self._api(self._device_control.set_state(False))
async def async_turn_on(self, **kwargs: Any) -> None:
"""Instruct the switch to turn on."""
if not self._device_control:
return None
await self._api(self._device_control.set_state(True))
| 32.6
| 73
| 0.689571
|
4b8e951228e4274e022d776fb35c1aca15ee861b
| 7,141
|
py
|
Python
|
easy_postgres/connection.py
|
natiiix/postgresql
|
aaa9e4b7229df6fee30ac3e83c5cc3501d5c0f4c
|
[
"MIT"
] | null | null | null |
easy_postgres/connection.py
|
natiiix/postgresql
|
aaa9e4b7229df6fee30ac3e83c5cc3501d5c0f4c
|
[
"MIT"
] | 1
|
2019-09-30T09:55:52.000Z
|
2019-09-30T10:16:21.000Z
|
easy_postgres/connection.py
|
natiiix/postgresql
|
aaa9e4b7229df6fee30ac3e83c5cc3501d5c0f4c
|
[
"MIT"
] | null | null | null |
"""Module containing the Connection class."""
import psycopg2
from .dictionary import Dictionary
from .transaction import Transaction
class Connection:
"""
Hopefully an easier-to-use wrapper for the psycopg2 Connection class.
Single-item tuples are replaced with the value of the only item.
    This applies to all tuple-returning methods: `one`, `all` and `iter`.
The primary use of this feature are single-column `SELECT` queries and
`INSERT` and `UPDATE` statements with a single-value `RETURNING` clause.
"""
def __init__(self, dsn, autocommit=True):
"""
Initialize a new PostgreSQL connection using a DSN.
Raises:
psycopg2.OperationalError - If DSN is not valid.
"""
# Setting `conn` to `None` will make sure it is never undefined.
self.conn = None
self.conn = psycopg2.connect(dsn)
self.conn.autocommit = autocommit
def __enter__(self):
"""
Return reference to the connection object.
This method is called when entering a
block statement such as `with` or `try`.
"""
return self
def __exit__(self, exc_type, exc_value, exc_traceback):
"""
Commit or rollback changes and close the connection.
Changes will be committed unless an exception was raised.
This method is called when leaving a block statement.
"""
if exc_type or exc_value or exc_traceback:
self.rollback()
else:
self.commit()
self.close()
def __del__(self):
"""
Close the connection.
This method is called when the object is being deleted.
This can happen virtually anytime, so please make
no assumptions about when it is going to be called.
"""
self.close()
def __str__(self):
"""Convert the connection to a string."""
return str(self.conn)
def __repr__(self):
"""Get a string representation of the connection."""
return repr(self.conn)
def close(self):
"""
Close the connection.
        It is preferable to use the `with` statement instead
of manually closing the connection via this method.
"""
self.conn.close()
def rollback(self):
"""Roll-back any changes made since the last commit."""
self.conn.rollback()
def commit(self):
"""Commit all changes made using this connection."""
self.conn.commit()
def cursor(self):
"""Get a new `psycopg2` cursor. Use this for a more direct access."""
return self.conn.cursor()
def transaction(self):
"""
Create a new pseudo-transaction.
This method is a shorthand for calling the `Transaction` constructor.
You should always only create a single `Transaction` at a time.
For correct functionality, please use this in a `with` statement.
"""
return Transaction(self)
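    # For example (sketch; the table and column names are placeholders):
    #
    #     with conn.transaction():
    #         conn.run("INSERT INTO t (n) VALUES (%s)", 1)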
def run(self, query, *args, **kwargs):
"""Run the SQL query and return `None`."""
return self._exec(query, _fetch_none, None, *args, **kwargs)
def one(self, query, *args, **kwargs):
"""
Run the SQL query and return single row as a direct value or a tuple.
If a single column is returned, the value will
be returned instead of a single-item tuple.
`None` will be returned in case the SQL query
did not return exactly one row.
"""
return self._exec(query, _fetch_one, _row_tuple, *args, **kwargs)
def one_dict(self, query, *args, **kwargs):
"""
Run the SQL query and return a single row as a dictionary.
`None` will be returned in case the SQL query
did not return exactly one row.
"""
return self._exec(query, _fetch_one, _row_dict, *args, **kwargs)
def all(self, query, *args, **kwargs):
"""
Run the SQL query and return a list of values or tuples.
If a single column is returned, the value will
be returned instead of a single-item tuple.
"""
return self._exec(query, _fetch_all, _row_tuple, *args, **kwargs)
def all_dict(self, query, *args, **kwargs):
"""Run the SQL query and return a list of dictionaries."""
return self._exec(query, _fetch_all, _row_dict, *args, **kwargs)
def iter(self, query, *args, **kwargs):
"""
Run the SQL query and return a generator of values or tuples.
If a single column is returned, the value will
be returned instead of a single-item tuple.
"""
return self._exec(query, _fetch_iter, _row_tuple, *args, **kwargs)
def iter_dict(self, query, *args, **kwargs):
"""Run the SQL query and return a generator of dictionaries."""
return self._exec(query, _fetch_iter, _row_dict, *args, **kwargs)
def _exec(self, query, fetch_callback, row_callback, *args, **kwargs):
"""Run the SQL query and apply specified callbacks to the results."""
# Handle variable-length argument list
if len(args) == 1:
first = args[0]
if isinstance(first, (tuple, dict)):
params = first
elif isinstance(first, list):
params = tuple(first)
else:
params = (first,)
else:
params = tuple(args)
# Handle keyword arguments
if kwargs:
# Convert parameters into a dictionary if they aren't one already
if not isinstance(params, dict):
params = {i: v for i, v in enumerate(params)}
params.update(kwargs)
# Execute the query
cur = self.cursor()
cur.execute(query, params)
# Handle query results
result = fetch_callback(cur, row_callback)
        # Closing the cursor immediately would break the generator
if fetch_callback is not _fetch_iter:
cur.close()
return result
def _fetch_none(_, __):
"""Fetch no rows and return `None`."""
return None
def _fetch_one(cursor, row_callback):
"""Fetch exactly one row and return it or `None`."""
return row_callback(cursor.fetchone(), cursor) \
if cursor.rowcount == 1 else None
def _fetch_all(cursor, row_callback):
"""Fetch all rows and return them all at once."""
return [row_callback(r, cursor) for r in cursor.fetchall()] \
if cursor.rowcount else []
def _fetch_iter(cursor, row_callback):
"""
Fetch rows one by one and yield them as a generator.
Once the generator runs out of rows, it will close the cursor.
"""
for _ in range(cursor.rowcount):
yield row_callback(cursor.fetchone(), cursor)
cursor.close()
def _row_tuple(row, _):
"""Extract single value from row or return the whole tuple."""
return row[0] if len(row) == 1 else row
def _row_dict(row, cursor):
"""Convert the row into a smart dictionary and return it."""
return Dictionary({column.name: row[i] for i, column in enumerate(cursor.description)})
| 31.047826
| 91
| 0.61728
|
8b8d9e6c4e4eee6e9b9960ae45f09503a0b1e600
| 1,054
|
py
|
Python
|
setuptools/tests/test_test.py
|
eacheson/setuptools
|
247de8a2fbb2d1c34369ee83588100dbbe5f82a2
|
[
"MIT"
] | null | null | null |
setuptools/tests/test_test.py
|
eacheson/setuptools
|
247de8a2fbb2d1c34369ee83588100dbbe5f82a2
|
[
"MIT"
] | null | null | null |
setuptools/tests/test_test.py
|
eacheson/setuptools
|
247de8a2fbb2d1c34369ee83588100dbbe5f82a2
|
[
"MIT"
] | null | null | null |
import pytest
from jaraco import path
from setuptools.command.test import test
from setuptools.dist import Distribution
from .textwrap import DALS
@pytest.mark.usefixtures('tmpdir_cwd')
def test_tests_are_run_once(capfd):
params = dict(
name='foo',
packages=['dummy'],
)
files = {
'setup.py':
'from setuptools import setup; setup('
+ ','.join(f'{name}={params[name]!r}' for name in params)
+ ')',
'dummy': {
'__init__.py': '',
'test_dummy.py': DALS(
"""
import unittest
class TestTest(unittest.TestCase):
def test_test(self):
print('Foo')
"""
),
},
}
path.build(files)
dist = Distribution(params)
dist.script_name = 'setup.py'
cmd = test(dist)
cmd.ensure_finalized()
cmd.run()
out, err = capfd.readouterr()
assert out.endswith('Foo\n')
assert len(out.split('Foo')) == 2
| 25.095238
| 69
| 0.520873
|
e612e308787acddfb4ed9cbb0807701a19c04748
| 42
|
py
|
Python
|
gonzo/webapp/tests.py
|
paulcwatts/1hph
|
407337955121c3f59b4621acb392e6c23b57ae8e
|
[
"BSD-3-Clause"
] | 1
|
2016-10-03T05:29:12.000Z
|
2016-10-03T05:29:12.000Z
|
gonzo/webapp/tests.py
|
paulcwatts/1hph
|
407337955121c3f59b4621acb392e6c23b57ae8e
|
[
"BSD-3-Clause"
] | null | null | null |
gonzo/webapp/tests.py
|
paulcwatts/1hph
|
407337955121c3f59b4621acb392e6c23b57ae8e
|
[
"BSD-3-Clause"
] | null | null | null |
from gonzo.webapp.account.tests import *
| 14
| 40
| 0.785714
|
ba8d8a5d33baecf2aafaee4cbdc1d9749392a289
| 1,062
|
py
|
Python
|
pdc/apps/changeset/migrations/0001_initial.py
|
tzhaoredhat/automation
|
a1867dc2d3591fdae1fa7f80d457c25f9705070e
|
[
"MIT"
] | 18
|
2015-12-15T17:56:18.000Z
|
2021-04-10T13:49:48.000Z
|
pdc/apps/changeset/migrations/0001_initial.py
|
tzhaoredhat/automation
|
a1867dc2d3591fdae1fa7f80d457c25f9705070e
|
[
"MIT"
] | 303
|
2015-11-18T07:37:06.000Z
|
2021-05-26T12:34:01.000Z
|
pdc/apps/changeset/migrations/0001_initial.py
|
tzhaoredhat/automation
|
a1867dc2d3591fdae1fa7f80d457c25f9705070e
|
[
"MIT"
] | 27
|
2015-11-19T20:33:54.000Z
|
2021-03-25T08:15:28.000Z
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015 Red Hat
# Licensed under The MIT License (MIT)
# http://opensource.org/licenses/MIT
#
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Change',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('target_class', models.CharField(max_length=200)),
('target_id', models.PositiveIntegerField()),
('old_value', models.TextField()),
('new_value', models.TextField()),
],
),
migrations.CreateModel(
name='Changeset',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('committed_on', models.DateTimeField(auto_now_add=True)),
],
),
]
| 29.5
| 114
| 0.57533
|
6e958156c948203f569ad53856fb187f9ddb20ff
| 2,514
|
py
|
Python
|
Code/detectAndDisplay.py
|
ngbla/memoire_bigdata_face
|
dd9cf3d198f3921cd7fc183902732fd877473edf
|
[
"MIT"
] | null | null | null |
Code/detectAndDisplay.py
|
ngbla/memoire_bigdata_face
|
dd9cf3d198f3921cd7fc183902732fd877473edf
|
[
"MIT"
] | null | null | null |
Code/detectAndDisplay.py
|
ngbla/memoire_bigdata_face
|
dd9cf3d198f3921cd7fc183902732fd877473edf
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Jun 23 05:29:05 2021
@author: ngbla
"""
from __future__ import print_function
import cv2 as cv
import argparse
# Begin function
def detectAndDisplay(frame):
#face_cascade = cv.CascadeClassifier('haarcascade_frontalface_default.xml')
#face_cascade = cv.CascadeClassifier('webcammer-master/haarcascade_frontalface_default.xml')
#eyes_cascade = cv.CascadeClassifier('webcammer-master/haarcascade_eye.xml')
parser = argparse.ArgumentParser(description='Code for Cascade Classifier tutorial.')
parser.add_argument('--face_cascade', help='Path to face cascade.', default='webcammer-master/haarcascade_frontalface_alt.xml')
parser.add_argument('--eyes_cascade', help='Path to eyes cascade.', default='webcammer-master/haarcascade_eye_tree_eyeglasses.xml')
    parser.add_argument('--camera', help='Camera device number.', type=int, default=0)
args = parser.parse_args()
face_cascade_name = args.face_cascade
eyes_cascade_name = args.eyes_cascade
face_cascade = cv.CascadeClassifier()
eyes_cascade = cv.CascadeClassifier()
    #-- 1. Load the cascades (check that the XML cascade files exist)
if not face_cascade.load(cv.samples.findFile(face_cascade_name)):
print('--(!)Error loading face cascade')
exit(0)
if not eyes_cascade.load(cv.samples.findFile(eyes_cascade_name)):
print('--(!)Error loading eyes cascade')
exit(0)
    #-- Convert the image to grayscale
frame_gray = cv.cvtColor(frame, cv.COLOR_BGR2GRAY)
frame_gray = cv.equalizeHist(frame_gray)
#-- Detect faces
faces = face_cascade.detectMultiScale(frame_gray)
for (x,y,w,h) in faces:
center = (x + w//2, y + h//2)
#frame = cv.ellipse(frame, center, (w//2, h//2), 0, 0, 360, (255, 0, 255), 4)
frame = cv.rectangle(frame,(x,y),(x+w,y+h),(255, 0, 255),2)
        #-- Display the person's name
        text = "Face name"
        cv.putText(frame, text, (x, y),
                   cv.FONT_HERSHEY_SIMPLEX, 0.45, (0, 0, 255), 2)
faceROI = frame_gray[y:y+h,x:x+w]
#-- In each face, detect eyes
eyes = eyes_cascade.detectMultiScale(faceROI)
for (x2,y2,w2,h2) in eyes:
eye_center = (x + x2 + w2//2, y + y2 + h2//2)
radius = int(round((w2 + h2)*0.25))
frame = cv.circle(frame, eye_center, radius, (255, 0, 0 ), 4)
cv.imshow('Capture - Face detection', frame)
# End function
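# A minimal capture-loop sketch showing how detectAndDisplay might be driven.
# This block is an illustration, not part of the original script; it assumes
# the default webcam at index 0 and quits on the ESC key.
if __name__ == '__main__':
    cap = cv.VideoCapture(0)
    if not cap.isOpened():
        print('--(!)Error opening video capture')
        exit(0)
    while True:
        ret, frame = cap.read()
        if not ret:
            print('--(!) No captured frame -- Break!')
            break
        detectAndDisplay(frame)
        if cv.waitKey(10) == 27:  # ESC
            break
    cap.release()
    cv.destroyAllWindows()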
| 41.213115
| 135
| 0.656325
|