Dataset columns (one row per source file):

| Column | Type (range) |
|---|---|
| hexsha | string (length 40) |
| size | int64 (3 to 1.03M) |
| ext | string (10 classes) |
| lang | string (1 class) |
| max_stars_repo_path | string (length 3 to 972) |
| max_stars_repo_name | string (length 6 to 130) |
| max_stars_repo_head_hexsha | string (length 40 to 78) |
| max_stars_repo_licenses | list (length 1 to 10) |
| max_stars_count | int64 (1 to 191k, nullable) |
| max_stars_repo_stars_event_min_datetime | string (length 24, nullable) |
| max_stars_repo_stars_event_max_datetime | string (length 24, nullable) |
| max_issues_repo_path | string (length 3 to 972) |
| max_issues_repo_name | string (length 6 to 130) |
| max_issues_repo_head_hexsha | string (length 40 to 78) |
| max_issues_repo_licenses | list (length 1 to 10) |
| max_issues_count | int64 (1 to 116k, nullable) |
| max_issues_repo_issues_event_min_datetime | string (length 24, nullable) |
| max_issues_repo_issues_event_max_datetime | string (length 24, nullable) |
| max_forks_repo_path | string (length 3 to 972) |
| max_forks_repo_name | string (length 6 to 130) |
| max_forks_repo_head_hexsha | string (length 40 to 78) |
| max_forks_repo_licenses | list (length 1 to 10) |
| max_forks_count | int64 (1 to 105k, nullable) |
| max_forks_repo_forks_event_min_datetime | string (length 24, nullable) |
| max_forks_repo_forks_event_max_datetime | string (length 24, nullable) |
| content | string (length 3 to 1.03M) |
| avg_line_length | float64 (1.13 to 941k) |
| max_line_length | int64 (2 to 941k) |
| alphanum_fraction | float64 (0 to 1) |
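The columns above describe one row per source file. A minimal sketch of iterating such rows, assuming the table can be loaded with the Hugging Face `datasets` library (the dataset identifier below is a placeholder, not part of this dump):

```python
# Hypothetical loading/filtering sketch: the keys match the column names above,
# but the dataset identifier is a placeholder.
from datasets import load_dataset

ds = load_dataset("your-org/your-code-dataset", split="train", streaming=True)

for row in ds:
    # keep small, reasonably starred Python files
    if row["lang"] == "Python" and row["size"] < 10_000 and (row["max_stars_count"] or 0) >= 5:
        print(row["max_stars_repo_name"], row["max_stars_repo_path"], row["alphanum_fraction"])
        break
```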
Row 1: hexsha `7ee137f194bc07b4f04dd287337983f434793b54`, size 208, ext `py`, lang Python. Path `files.py` in repo `CodeBreaker444/python3_basics-snippets-cheatsheets` (head `ac62760602e168e485d0b930df7d5d14a854de6f`, licenses ["MIT"]); 5 stars, star events from 2018-01-31T00:58:07.000Z to 2021-09-27T19:39:58.000Z; issue and fork counts and event dates: null. Content:
import os

# open (or create) the file for writing in binary mode
file = open("test.txt", "wb")
print(file.mode)  # 'wb'
print(file.name)  # 'test.txt'
file.write(bytes("Hello i am ready\n", 'UTF-8'))
file.close()

# reopen in text mode for reading and writing
file = open("test.txt", "r+")
print(file.read())
file.close()
# os.remove("test.txt")  # uncomment to delete the file afterwards
Row 1 stats: avg_line_length 23.111111, max_line_length 48, alphanum_fraction 0.653846.
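The snippet above opens and closes its file handles manually. A minimal alternative sketch using context managers, which close the file automatically even if an error occurs (same filename and text as the snippet above):

```python
# Same sequence as the snippet above, rewritten with `with` blocks so the
# handles are closed automatically.
with open("test.txt", "wb") as f:
    print(f.mode, f.name)  # wb test.txt
    f.write("Hello i am ready\n".encode("UTF-8"))

with open("test.txt", "r+") as f:
    print(f.read())

# os.remove("test.txt")  # optional cleanup, as in the original snippet (requires `import os`)
```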
Row 2: hexsha `1e7c71a8ee5e5dab7c627ceaa397fe780b40dc68`, size 62,267, ext `py`, lang Python. Path `python/ccxt/base/exchange.py` in repo `angyvolin/ccxt` (head `180208c273c567242938cf2ad14fddb84c798624`, licenses ["MIT"]); star, issue, and fork counts and event dates: null. Content:
# -*- coding: utf-8 -*-
"""Base exchange class"""
# -----------------------------------------------------------------------------
__version__ = '1.17.214'
# -----------------------------------------------------------------------------
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import NotSupported
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import DDoSProtection
from ccxt.base.errors import RequestTimeout
from ccxt.base.errors import ExchangeNotAvailable
from ccxt.base.errors import InvalidAddress
# -----------------------------------------------------------------------------
from ccxt.base.decimal_to_precision import decimal_to_precision
from ccxt.base.decimal_to_precision import DECIMAL_PLACES
# -----------------------------------------------------------------------------
__all__ = [
'Exchange',
]
# -----------------------------------------------------------------------------
# Python 2 & 3
import logging
import base64
import calendar
import collections
import datetime
from email.utils import parsedate
import functools
import gzip
import hashlib
import hmac
import io
import json
import math
from numbers import Number
import re
from requests import Session
from requests.utils import default_user_agent
from requests.exceptions import HTTPError, Timeout, TooManyRedirects, RequestException
# import socket
from ssl import SSLError
# import sys
import time
import uuid
import zlib
from decimal import Decimal
# -----------------------------------------------------------------------------
try:
basestring # basestring was removed in python 3.0
except NameError:
basestring = str
# -----------------------------------------------------------------------------
try:
import urllib.parse as _urlencode # Python 3
except ImportError:
import urllib as _urlencode # Python 2
# -----------------------------------------------------------------------------
# web3/0x imports
try:
# from web3.auto import w3
from web3 import Web3, HTTPProvider
from web3.utils.encoding import hex_encode_abi_type
except ImportError:
Web3 = HTTPProvider = None # web3/0x not supported in Python 2
# -----------------------------------------------------------------------------
class Exchange(object):
"""Base exchange class"""
id = None
version = None
certified = False
# rate limiter settings
enableRateLimit = False
rateLimit = 2000 # milliseconds = seconds * 1000
timeout = 10000 # milliseconds = seconds * 1000
asyncio_loop = None
aiohttp_proxy = None
session = None # Session () by default
logger = None # logging.getLogger(__name__) by default
userAgent = None
userAgents = {
'chrome': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.94 Safari/537.36',
'chrome39': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.71 Safari/537.36',
}
verbose = False
markets = None
symbols = None
fees = {
'trading': {
'fee_loaded': False,
'percentage': True, # subclasses should rarely have to redefine this
},
'funding': {
'fee_loaded': False,
'withdraw': {},
'deposit': {},
},
}
ids = None
tickers = None
api = None
parseJsonResponse = True
proxy = ''
origin = '*' # CORS origin
proxies = None
apiKey = ''
secret = ''
password = ''
uid = ''
privateKey = '' # a "0x"-prefixed hexstring private key for a wallet
walletAddress = '' # the wallet address "0x"-prefixed hexstring
twofa = False
marketsById = None
markets_by_id = None
currencies_by_id = None
precision = None
limits = None
exceptions = None
headers = None
balance = None
orderbooks = None
orders = None
trades = None
transactions = None
currencies = None
    options = None  # Python does not allow defining properties at runtime with setattr
requiredCredentials = {
'apiKey': True,
'secret': True,
'uid': False,
'login': False,
'password': False,
'twofa': False, # 2-factor authentication (one-time password key)
'privateKey': False, # a "0x"-prefixed hexstring private key for a wallet
'walletAddress': False, # the wallet address "0x"-prefixed hexstring
}
# API method metainfo
has = {
'publicAPI': True,
'privateAPI': True,
'CORS': False,
'cancelOrder': True,
'cancelOrders': False,
'createDepositAddress': False,
'createOrder': True,
'createMarketOrder': True,
'createLimitOrder': True,
'deposit': False,
'editOrder': 'emulated',
'fetchBalance': True,
'fetchClosedOrders': False,
'fetchCurrencies': False,
'fetchDepositAddress': False,
'fetchDeposits': False,
'fetchFundingFees': False,
'fetchL2OrderBook': True,
'fetchMarkets': True,
'fetchMyTrades': False,
'fetchOHLCV': 'emulated',
'fetchOpenOrders': False,
'fetchOrder': False,
'fetchOrderBook': True,
'fetchOrderBooks': False,
'fetchOrders': False,
'fetchTicker': True,
'fetchTickers': False,
'fetchTrades': True,
'fetchTradingFees': False,
'fetchTradingLimits': False,
'fetchTransactions': False,
'fetchWithdrawals': False,
'withdraw': False,
}
precisionMode = DECIMAL_PLACES
minFundingAddressLength = 1 # used in check_address
substituteCommonCurrencyCodes = True
lastRestRequestTimestamp = 0
lastRestPollTimestamp = 0
restRequestQueue = None
restPollerLoopIsRunning = False
rateLimitTokens = 16
rateLimitMaxTokens = 16
rateLimitUpdateTime = 0
last_http_response = None
last_json_response = None
last_response_headers = None
web3 = None
commonCurrencies = {
'XBT': 'BTC',
'BCC': 'BCH',
'DRK': 'DASH',
}
def __init__(self, config={}):
self.precision = dict() if self.precision is None else self.precision
self.limits = dict() if self.limits is None else self.limits
self.exceptions = dict() if self.exceptions is None else self.exceptions
self.headers = dict() if self.headers is None else self.headers
self.balance = dict() if self.balance is None else self.balance
self.orderbooks = dict() if self.orderbooks is None else self.orderbooks
self.orders = dict() if self.orders is None else self.orders
self.trades = dict() if self.trades is None else self.trades
self.transactions = dict() if self.transactions is None else self.transactions
self.currencies = dict() if self.currencies is None else self.currencies
        self.options = dict() if self.options is None else self.options  # Python does not allow defining properties at runtime with setattr
self.decimalToPrecision = self.decimal_to_precision = decimal_to_precision
# version = '.'.join(map(str, sys.version_info[:3]))
# self.userAgent = {
# 'User-Agent': 'ccxt/' + __version__ + ' (+https://github.com/ccxt/ccxt) Python/' + version
# }
self.userAgent = default_user_agent()
settings = self.deep_extend(self.describe(), config)
for key in settings:
if hasattr(self, key) and isinstance(getattr(self, key), dict):
setattr(self, key, self.deep_extend(getattr(self, key), settings[key]))
else:
setattr(self, key, settings[key])
if self.api:
self.define_rest_api(self.api, 'request')
if self.markets:
self.set_markets(self.markets)
# convert all properties from underscore notation foo_bar to camelcase notation fooBar
for name in dir(self):
            if name[0] != '_' and name[-1] != '_' and '_' in name:
parts = name.split('_')
camelcase = parts[0] + ''.join(self.capitalize(i) for i in parts[1:])
setattr(self, camelcase, getattr(self, name))
self.tokenBucket = self.extend({
'refillRate': 1.0 / self.rateLimit,
'delay': 1.0,
'capacity': 1.0,
'defaultCost': 1.0,
}, getattr(self, 'tokenBucket') if hasattr(self, 'tokenBucket') else {})
self.session = self.session if self.session else Session()
self.logger = self.logger if self.logger else logging.getLogger(__name__)
if Web3 and not self.web3:
# self.web3 = w3 if w3 else Web3(HTTPProvider())
self.web3 = Web3(HTTPProvider())
def __del__(self):
if self.session:
self.session.close()
def describe(self):
return {}
def define_rest_api(self, api, method_name, options={}):
delimiters = re.compile('[^a-zA-Z0-9]')
for api_type, methods in api.items():
for http_method, urls in methods.items():
for url in urls:
url = url.strip()
split_path = delimiters.split(url)
uppercase_method = http_method.upper()
lowercase_method = http_method.lower()
camelcase_method = lowercase_method.capitalize()
camelcase_suffix = ''.join([Exchange.capitalize(x) for x in split_path])
lowercase_path = [x.strip().lower() for x in split_path]
underscore_suffix = '_'.join([k for k in lowercase_path if len(k)])
camelcase = api_type + camelcase_method + Exchange.capitalize(camelcase_suffix)
underscore = api_type + '_' + lowercase_method + '_' + underscore_suffix.lower()
if 'suffixes' in options:
if 'camelcase' in options['suffixes']:
camelcase += options['suffixes']['camelcase']
if 'underscore' in options['suffixes']:
underscore += options['suffixes']['underscore']
partial = functools.partial(getattr(self, method_name), url, api_type, uppercase_method)
setattr(self, camelcase, partial)
setattr(self, underscore, partial)
def raise_error(self, exception_type, url=None, method=None, error=None, details=None):
if error:
error = str(error)
output = ' '.join([self.id] + [var for var in (url, method, error, details) if var is not None])
raise exception_type(output)
def throttle(self):
now = float(self.milliseconds())
elapsed = now - self.lastRestRequestTimestamp
if elapsed < self.rateLimit:
delay = self.rateLimit - elapsed
time.sleep(delay / 1000.0)
def fetch2(self, path, api='public', method='GET', params={}, headers=None, body=None):
"""A better wrapper over request for deferred signing"""
if self.enableRateLimit:
self.throttle()
self.lastRestRequestTimestamp = self.milliseconds()
request = self.sign(path, api, method, params, headers, body)
return self.fetch(request['url'], request['method'], request['headers'], request['body'])
def request(self, path, api='public', method='GET', params={}, headers=None, body=None):
return self.fetch2(path, api, method, params, headers, body)
@staticmethod
def gzip_deflate(response, text):
encoding = response.info().get('Content-Encoding')
if encoding in ('gzip', 'x-gzip', 'deflate'):
if encoding == 'deflate':
return zlib.decompress(text, -zlib.MAX_WBITS)
else:
return gzip.GzipFile('', 'rb', 9, io.BytesIO(text)).read()
return text
def handle_errors(self, code, reason, url, method, headers, body):
pass
def prepare_request_headers(self, headers=None):
headers = headers or {}
headers.update(self.headers)
if self.userAgent:
if type(self.userAgent) is str:
headers.update({'User-Agent': self.userAgent})
elif (type(self.userAgent) is dict) and ('User-Agent' in self.userAgent):
headers.update(self.userAgent)
if self.proxy:
headers.update({'Origin': self.origin})
headers.update({'Accept-Encoding': 'gzip, deflate'})
return headers
def fetch(self, url, method='GET', headers=None, body=None):
"""Perform a HTTP request and return decoded JSON data"""
request_headers = self.prepare_request_headers(headers)
url = self.proxy + url
if self.verbose:
print("\nRequest:", method, url, request_headers, body)
self.logger.debug("%s %s, Request: %s %s", method, url, request_headers, body)
if body:
body = body.encode()
self.session.cookies.clear()
response = None
try:
response = self.session.request(
method,
url,
data=body,
headers=request_headers,
timeout=int(self.timeout / 1000),
proxies=self.proxies
)
self.last_http_response = response.text
self.last_response_headers = response.headers
if self.verbose:
print("\nResponse:", method, url, str(response.status_code), str(response.headers), self.last_http_response)
self.logger.debug("%s %s, Response: %s %s %s", method, url, response.status_code, response.headers, self.last_http_response)
response.raise_for_status()
except Timeout as e:
self.raise_error(RequestTimeout, method, url, e)
except TooManyRedirects as e:
self.raise_error(ExchangeError, url, method, e)
except SSLError as e:
self.raise_error(ExchangeError, url, method, e)
except HTTPError as e:
self.handle_errors(response.status_code, response.reason, url, method, self.last_response_headers, self.last_http_response)
self.handle_rest_errors(e, response.status_code, self.last_http_response, url, method)
self.raise_error(ExchangeError, url, method, e, self.last_http_response)
except RequestException as e: # base exception class
self.raise_error(ExchangeError, url, method, e)
self.handle_errors(response.status_code, response.reason, url, method, None, self.last_http_response)
return self.handle_rest_response(self.last_http_response, url, method, headers, body)
def handle_rest_errors(self, exception, http_status_code, response, url, method='GET'):
error = None
if http_status_code in [418, 429]:
error = DDoSProtection
elif http_status_code in [404, 409, 500, 501, 502, 520, 521, 522, 525]:
error = ExchangeNotAvailable
elif http_status_code in [422]:
error = ExchangeError
elif http_status_code in [400, 403, 405, 503, 530]:
# special case to detect ddos protection
error = ExchangeNotAvailable
if response:
ddos_protection = re.search('(cloudflare|incapsula)', response, flags=re.IGNORECASE)
if ddos_protection:
error = DDoSProtection
elif http_status_code in [408, 504]:
error = RequestTimeout
elif http_status_code in [401, 511]:
error = AuthenticationError
if error:
self.raise_error(error, url, method, exception if exception else http_status_code, response)
def handle_rest_response(self, response, url, method='GET', headers=None, body=None):
try:
if self.parseJsonResponse:
last_json_response = json.loads(response) if len(response) > 1 else None
self.last_json_response = last_json_response
return last_json_response
else:
return response
except ValueError as e: # ValueError == JsonDecodeError
ddos_protection = re.search('(cloudflare|incapsula|overload|ddos)', response, flags=re.IGNORECASE)
exchange_not_available = re.search('(offline|busy|retry|wait|unavailable|maintain|maintenance|maintenancing)', response, flags=re.IGNORECASE)
if ddos_protection:
self.raise_error(DDoSProtection, method, url, None, response)
if exchange_not_available:
message = response + ' exchange downtime, exchange closed for maintenance or offline, DDoS protection or rate-limiting in effect'
self.raise_error(ExchangeNotAvailable, method, url, None, message)
self.raise_error(ExchangeError, method, url, e, response)
@staticmethod
def safe_float(dictionary, key, default_value=None):
value = default_value
try:
if isinstance(dictionary, list) and isinstance(key, int) and len(dictionary) > key:
value = float(dictionary[key])
else:
value = float(dictionary[key]) if (key is not None) and (key in dictionary) and (dictionary[key] is not None) else default_value
except ValueError as e:
value = default_value
return value
@staticmethod
def safe_string(dictionary, key, default_value=None):
return str(dictionary[key]) if key is not None and (key in dictionary) and dictionary[key] is not None else default_value
@staticmethod
def safe_integer(dictionary, key, default_value=None):
if key is None or (key not in dictionary):
return default_value
value = dictionary[key]
if isinstance(value, Number) or (isinstance(value, basestring) and value.isnumeric()):
return int(value)
return default_value
@staticmethod
def safe_value(dictionary, key, default_value=None):
return dictionary[key] if key is not None and (key in dictionary) and dictionary[key] is not None else default_value
# we're not using safe_floats with a list argument as we're trying to save some cycles here
# we're not using safe_float_3 either because those cases are too rare to deserve their own optimization
@staticmethod
def safe_float_2(dictionary, key1, key2, default_value=None):
return Exchange.safe_either(Exchange.safe_float, dictionary, key1, key2, default_value)
@staticmethod
def safe_string_2(dictionary, key1, key2, default_value=None):
return Exchange.safe_either(Exchange.safe_string, dictionary, key1, key2, default_value)
@staticmethod
def safe_integer_2(dictionary, key1, key2, default_value=None):
return Exchange.safe_either(Exchange.safe_integer, dictionary, key1, key2, default_value)
@staticmethod
def safe_value_2(dictionary, key1, key2, default_value=None):
return Exchange.safe_either(Exchange.safe_value, dictionary, key1, key2, default_value)
@staticmethod
def safe_either(method, dictionary, key1, key2, default_value=None):
"""A helper-wrapper for the safe_value_2() family."""
value = method(dictionary, key1)
return value if value is not None else method(dictionary, key2, default_value)
@staticmethod
def truncate(num, precision=0):
if precision > 0:
decimal_precision = math.pow(10, precision)
return math.trunc(num * decimal_precision) / decimal_precision
return int(Exchange.truncate_to_string(num, precision))
@staticmethod
def truncate_to_string(num, precision=0):
if precision > 0:
parts = ('{0:.%df}' % precision).format(Decimal(num)).split('.')
decimal_digits = parts[1][:precision].rstrip('0')
decimal_digits = decimal_digits if len(decimal_digits) else '0'
return parts[0] + '.' + decimal_digits
return ('%d' % num)
@staticmethod
def uuid():
return str(uuid.uuid4())
@staticmethod
def capitalize(string): # first character only, rest characters unchanged
# the native pythonic .capitalize() method lowercases all other characters
# which is an unwanted behaviour, therefore we use this custom implementation
# check it yourself: print('foobar'.capitalize(), 'fooBar'.capitalize())
if len(string) > 1:
return "%s%s" % (string[0].upper(), string[1:])
return string.upper()
@staticmethod
def keysort(dictionary):
return collections.OrderedDict(sorted(dictionary.items(), key=lambda t: t[0]))
@staticmethod
def extend(*args):
if args is not None:
result = None
if type(args[0]) is collections.OrderedDict:
result = collections.OrderedDict()
else:
result = {}
for arg in args:
result.update(arg)
return result
return {}
@staticmethod
def deep_extend(*args):
result = None
for arg in args:
if isinstance(arg, dict):
if not isinstance(result, dict):
result = {}
for key in arg:
result[key] = Exchange.deep_extend(result[key] if key in result else None, arg[key])
else:
result = arg
return result
@staticmethod
def filter_by(array, key, value=None):
if value:
grouped = Exchange.group_by(array, key)
if value in grouped:
return grouped[value]
return []
return array
@staticmethod
    def filterBy(array, key, value=None):
return Exchange.filter_by(array, key, value)
@staticmethod
def group_by(array, key):
result = {}
array = Exchange.to_array(array)
array = [entry for entry in array if (key in entry) and (entry[key] is not None)]
for entry in array:
if entry[key] not in result:
result[entry[key]] = []
result[entry[key]].append(entry)
return result
@staticmethod
def groupBy(array, key):
return Exchange.group_by(array, key)
@staticmethod
def index_by(array, key):
result = {}
if type(array) is dict:
array = Exchange.keysort(array).values()
for element in array:
if (key in element) and (element[key] is not None):
k = element[key]
result[k] = element
return result
@staticmethod
def sort_by(array, key, descending=False):
return sorted(array, key=lambda k: k[key] if k[key] is not None else "", reverse=descending)
@staticmethod
def array_concat(a, b):
return a + b
@staticmethod
def in_array(needle, haystack):
return needle in haystack
@staticmethod
def is_empty(object):
return not object
@staticmethod
def extract_params(string):
return re.findall(r'{([\w-]+)}', string)
@staticmethod
def implode_params(string, params):
for key in params:
string = string.replace('{' + key + '}', str(params[key]))
return string
@staticmethod
def url(path, params={}):
result = Exchange.implode_params(path, params)
query = Exchange.omit(params, Exchange.extract_params(path))
if query:
result += '?' + _urlencode.urlencode(query)
return result
@staticmethod
def urlencode(params={}):
if (type(params) is dict) or isinstance(params, collections.OrderedDict):
return _urlencode.urlencode(params)
return params
@staticmethod
def rawencode(params={}):
return _urlencode.unquote(Exchange.urlencode(params))
@staticmethod
def encode_uri_component(uri):
return _urlencode.quote(uri, safe="~()*!.'")
@staticmethod
def omit(d, *args):
result = d.copy()
for arg in args:
if type(arg) is list:
for key in arg:
if key in result:
del result[key]
else:
if arg in result:
del result[arg]
return result
@staticmethod
def unique(array):
return list(set(array))
@staticmethod
def pluck(array, key):
return [
element[key]
for element in array
if (key in element) and (element[key] is not None)
]
@staticmethod
def sum(*args):
return sum([arg for arg in args if isinstance(arg, (float, int))])
@staticmethod
def ordered(array):
return collections.OrderedDict(array)
@staticmethod
def aggregate(bidasks):
ordered = Exchange.ordered({})
for [price, volume] in bidasks:
if volume > 0:
ordered[price] = (ordered[price] if price in ordered else 0) + volume
result = []
items = list(ordered.items())
for price, volume in items:
result.append([price, volume])
return result
@staticmethod
def sec():
return Exchange.seconds()
@staticmethod
def msec():
return Exchange.milliseconds()
@staticmethod
def usec():
return Exchange.microseconds()
@staticmethod
def seconds():
return int(time.time())
@staticmethod
def milliseconds():
return int(time.time() * 1000)
@staticmethod
def microseconds():
return int(time.time() * 1000000)
@staticmethod
def iso8601(timestamp=None):
if timestamp is None:
return timestamp
if not isinstance(timestamp, int):
return None
if int(timestamp) < 0:
return None
try:
utc = datetime.datetime.utcfromtimestamp(timestamp // 1000)
return utc.strftime('%Y-%m-%dT%H:%M:%S.%f')[:-6] + "{:03d}".format(int(timestamp) % 1000) + 'Z'
except (TypeError, OverflowError, OSError):
return None
@staticmethod
def dmy(timestamp, infix='-'):
utc_datetime = datetime.datetime.utcfromtimestamp(int(round(timestamp / 1000)))
return utc_datetime.strftime('%m' + infix + '%d' + infix + '%Y')
@staticmethod
def ymd(timestamp, infix='-'):
utc_datetime = datetime.datetime.utcfromtimestamp(int(round(timestamp / 1000)))
return utc_datetime.strftime('%Y' + infix + '%m' + infix + '%d')
@staticmethod
def ymdhms(timestamp, infix=' '):
utc_datetime = datetime.datetime.utcfromtimestamp(int(round(timestamp / 1000)))
return utc_datetime.strftime('%Y-%m-%d' + infix + '%H:%M:%S')
@staticmethod
def parse_date(timestamp=None):
if timestamp is None:
return timestamp
if not isinstance(timestamp, str):
return None
if 'GMT' in timestamp:
try:
string = ''.join([str(value) for value in parsedate(timestamp)[:6]]) + '.000Z'
dt = datetime.datetime.strptime(string, "%Y%m%d%H%M%S.%fZ")
return calendar.timegm(dt.utctimetuple()) * 1000
except (TypeError, OverflowError, OSError):
return None
else:
return Exchange.parse8601(timestamp)
@staticmethod
def parse8601(timestamp=None):
if timestamp is None:
return timestamp
yyyy = '([0-9]{4})-?'
mm = '([0-9]{2})-?'
dd = '([0-9]{2})(?:T|[\\s])?'
h = '([0-9]{2}):?'
m = '([0-9]{2}):?'
s = '([0-9]{2})'
ms = '(\\.[0-9]{1,3})?'
tz = '(?:(\\+|\\-)([0-9]{2})\\:?([0-9]{2})|Z)?'
regex = r'' + yyyy + mm + dd + h + m + s + ms + tz
try:
match = re.search(regex, timestamp, re.IGNORECASE)
if match is None:
return None
yyyy, mm, dd, h, m, s, ms, sign, hours, minutes = match.groups()
ms = ms or '.000'
msint = int(ms[1:])
sign = sign or ''
sign = int(sign + '1')
hours = int(hours or 0) * sign
minutes = int(minutes or 0) * sign
offset = datetime.timedelta(hours=hours, minutes=minutes)
string = yyyy + mm + dd + h + m + s + ms + 'Z'
dt = datetime.datetime.strptime(string, "%Y%m%d%H%M%S.%fZ")
dt = dt + offset
return calendar.timegm(dt.utctimetuple()) * 1000 + msint
except (TypeError, OverflowError, OSError, ValueError):
return None
@staticmethod
def hash(request, algorithm='md5', digest='hex'):
h = hashlib.new(algorithm, request)
if digest == 'hex':
return h.hexdigest()
elif digest == 'base64':
return base64.b64encode(h.digest())
return h.digest()
@staticmethod
def hmac(request, secret, algorithm=hashlib.sha256, digest='hex'):
h = hmac.new(secret, request, algorithm)
if digest == 'hex':
return h.hexdigest()
elif digest == 'base64':
return base64.b64encode(h.digest())
return h.digest()
@staticmethod
def binary_concat(*args):
result = bytes()
for arg in args:
result = result + arg
return result
@staticmethod
def binary_to_string(s):
return s.decode('ascii')
@staticmethod
def base64urlencode(s):
return Exchange.decode(base64.urlsafe_b64encode(s)).replace('=', '')
@staticmethod
def jwt(request, secret, algorithm=hashlib.sha256, alg='HS256'):
header = Exchange.encode(Exchange.json({
'alg': alg,
'typ': 'JWT',
}))
encodedHeader = Exchange.base64urlencode(header)
encodedData = Exchange.base64urlencode(Exchange.encode(Exchange.json(request)))
token = encodedHeader + '.' + encodedData
hmac = Exchange.hmac(Exchange.encode(token), Exchange.encode(secret), algorithm, 'binary')
signature = Exchange.base64urlencode(hmac)
return token + '.' + signature
@staticmethod
def unjson(input):
return json.loads(input)
@staticmethod
def json(data, params=None):
return json.dumps(data, separators=(',', ':'))
@staticmethod
def is_json_encoded_object(input):
return (isinstance(input, basestring) and
(len(input) >= 2) and
((input[0] == '{') or (input[0] == '[')))
@staticmethod
def encode(string):
return string.encode()
@staticmethod
def decode(string):
return string.decode()
@staticmethod
def to_array(value):
return list(value.values()) if type(value) is dict else value
def nonce(self):
return Exchange.seconds()
def check_required_credentials(self):
keys = list(self.requiredCredentials.keys())
for key in keys:
if self.requiredCredentials[key] and not getattr(self, key):
self.raise_error(AuthenticationError, details='requires `' + key + '`')
def check_address(self, address):
"""Checks an address is not the same character repeated or an empty sequence"""
if address is None:
self.raise_error(InvalidAddress, details='address is None')
if all(letter == address[0] for letter in address) or len(address) < self.minFundingAddressLength or ' ' in address:
self.raise_error(InvalidAddress, details='address is invalid or has less than ' + str(self.minFundingAddressLength) + ' characters: "' + str(address) + '"')
return address
def account(self):
return {
'free': 0.0,
'used': 0.0,
'total': 0.0,
}
def common_currency_code(self, currency):
if not self.substituteCommonCurrencyCodes:
return currency
return self.safe_string(self.commonCurrencies, currency, currency)
def currency_id(self, commonCode):
if self.currencies:
if commonCode in self.currencies:
return self.currencies[commonCode]['id']
currencyIds = {v: k for k, v in self.commonCurrencies.items()}
return self.safe_string(currencyIds, commonCode, commonCode)
def fromWei(self, amount, unit='ether'):
if Web3 is None:
self.raise_error(NotSupported, details="ethereum web3 methods require Python 3: https://pythonclock.org")
return float(Web3.fromWei(int(amount), unit))
def toWei(self, amount, unit='ether'):
if Web3 is None:
self.raise_error(NotSupported, details="ethereum web3 methods require Python 3: https://pythonclock.org")
return str(Web3.toWei(int(amount), unit))
def precision_from_string(self, string):
parts = re.sub(r'0+$', '', string).split('.')
return len(parts[1]) if len(parts) > 1 else 0
def cost_to_precision(self, symbol, cost):
return ('{:.' + str(self.markets[symbol]['precision']['price']) + 'f}').format(float(cost))
def price_to_precision(self, symbol, price):
return ('{:.' + str(self.markets[symbol]['precision']['price']) + 'f}').format(float(price))
def amount_to_precision(self, symbol, amount):
return self.truncate(amount, self.markets[symbol]['precision']['amount'])
def amount_to_string(self, symbol, amount):
return self.truncate_to_string(amount, self.markets[symbol]['precision']['amount'])
def fee_to_precision(self, symbol, fee):
return ('{:.' + str(self.markets[symbol]['precision']['price']) + 'f}').format(float(fee))
def set_markets(self, markets, currencies=None):
values = list(markets.values()) if type(markets) is dict else markets
for i in range(0, len(values)):
values[i] = self.extend(
self.fees['trading'],
{'precision': self.precision, 'limits': self.limits},
values[i]
)
self.markets = self.index_by(values, 'symbol')
self.markets_by_id = self.index_by(values, 'id')
self.marketsById = self.markets_by_id
self.symbols = sorted(list(self.markets.keys()))
self.ids = sorted(list(self.markets_by_id.keys()))
if currencies:
self.currencies = self.deep_extend(currencies, self.currencies)
else:
base_currencies = [{
'id': market['baseId'] if 'baseId' in market else market['base'],
'numericId': market['baseNumericId'] if 'baseNumericId' in market else None,
'code': market['base'],
'precision': (
market['precision']['base'] if 'base' in market['precision'] else (
market['precision']['amount'] if 'amount' in market['precision'] else None
)
) if 'precision' in market else 8,
} for market in values if 'base' in market]
quote_currencies = [{
'id': market['quoteId'] if 'quoteId' in market else market['quote'],
'numericId': market['quoteNumericId'] if 'quoteNumericId' in market else None,
'code': market['quote'],
'precision': (
market['precision']['quote'] if 'quote' in market['precision'] else (
market['precision']['price'] if 'price' in market['precision'] else None
)
) if 'precision' in market else 8,
} for market in values if 'quote' in market]
currencies = self.sort_by(base_currencies + quote_currencies, 'code')
self.currencies = self.deep_extend(self.index_by(currencies, 'code'), self.currencies)
self.currencies_by_id = self.index_by(list(self.currencies.values()), 'id')
return self.markets
def load_markets(self, reload=False):
if not reload:
if self.markets:
if not self.markets_by_id:
return self.set_markets(self.markets)
return self.markets
markets = self.fetch_markets()
currencies = None
if self.has['fetchCurrencies']:
currencies = self.fetch_currencies()
return self.set_markets(markets, currencies)
def populate_fees(self):
if not (hasattr(self, 'markets') or hasattr(self, 'currencies')):
return
for currency, data in self.currencies.items(): # try load withdrawal fees from currencies
if 'fee' in data and data['fee'] is not None:
self.fees['funding']['withdraw'][currency] = data['fee']
self.fees['funding']['fee_loaded'] = True
# find a way to populate trading fees from markets
def load_fees(self):
self.load_markets()
self.populate_fees()
if not (self.has['fetchTradingFees'] or self.has['fetchFundingFees']):
return self.fees
fetched_fees = self.fetch_fees()
if fetched_fees['funding']:
self.fees['funding']['fee_loaded'] = True
if fetched_fees['trading']:
self.fees['trading']['fee_loaded'] = True
self.fees = self.deep_extend(self.fees, fetched_fees)
return self.fees
def fetch_markets(self):
# markets are returned as a list
# currencies are returned as a dict
# this is for historical reasons
# and may be changed for consistency later
return self.to_array(self.markets)
def fetch_currencies(self, params={}):
# markets are returned as a list
# currencies are returned as a dict
# this is for historical reasons
# and may be changed for consistency later
return self.currencies
def fetch_fees(self):
trading = {}
funding = {}
try:
trading = self.fetch_trading_fees()
except AuthenticationError:
pass
except AttributeError:
pass
try:
funding = self.fetch_funding_fees()
except AuthenticationError:
pass
except AttributeError:
pass
return {
'trading': trading,
'funding': funding,
}
def create_order(self, symbol, type, side, amount, price=None, params={}):
self.raise_error(NotSupported, details='create_order() not implemented yet')
def cancel_order(self, id, symbol=None, params={}):
self.raise_error(NotSupported, details='cancel_order() not implemented yet')
def fetch_bids_asks(self, symbols=None, params={}):
self.raise_error(NotSupported, details='API does not allow to fetch all prices at once with a single call to fetch_bids_asks() for now')
def fetch_tickers(self, symbols=None, params={}):
self.raise_error(NotSupported, details='API does not allow to fetch all tickers at once with a single call to fetch_tickers() for now')
def fetch_order_status(self, id, market=None):
order = self.fetch_order(id)
return order['status']
def purge_cached_orders(self, before):
orders = self.to_array(self.orders)
orders = [order for order in orders if (order['status'] == 'open') or (order['timestamp'] >= before)]
self.orders = self.index_by(orders, 'id')
return self.orders
def fetch_order(self, id, symbol=None, params={}):
self.raise_error(NotSupported, details='fetch_order() is not implemented yet')
def fetch_orders(self, symbol=None, since=None, limit=None, params={}):
self.raise_error(NotSupported, details='fetch_orders() is not implemented yet')
def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
self.raise_error(NotSupported, details='fetch_open_orders() is not implemented yet')
def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
self.raise_error(NotSupported, details='fetch_closed_orders() is not implemented yet')
def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
self.raise_error(NotSupported, details='fetch_my_trades() is not implemented yet')
def fetch_order_trades(self, id, symbol=None, params={}):
self.raise_error(NotSupported, details='fetch_order_trades() is not implemented yet')
def parse_ohlcv(self, ohlcv, market=None, timeframe='1m', since=None, limit=None):
return ohlcv[0:6] if isinstance(ohlcv, list) else ohlcv
def parse_ohlcvs(self, ohlcvs, market=None, timeframe='1m', since=None, limit=None):
ohlcvs = self.to_array(ohlcvs)
num_ohlcvs = len(ohlcvs)
result = []
i = 0
while i < num_ohlcvs:
if limit and (len(result) >= limit):
break
ohlcv = self.parse_ohlcv(ohlcvs[i], market, timeframe, since, limit)
i = i + 1
if since and (ohlcv[0] < since):
continue
result.append(ohlcv)
return self.sort_by(result, 0)
def parse_bid_ask(self, bidask, price_key=0, amount_key=0):
return [float(bidask[price_key]), float(bidask[amount_key])]
def parse_bids_asks(self, bidasks, price_key=0, amount_key=1):
result = []
if len(bidasks):
if type(bidasks[0]) is list:
for bidask in bidasks:
if bidask[price_key] and bidask[amount_key]:
result.append(self.parse_bid_ask(bidask, price_key, amount_key))
elif type(bidasks[0]) is dict:
for bidask in bidasks:
if (price_key in bidask) and (amount_key in bidask) and (bidask[price_key] and bidask[amount_key]):
result.append(self.parse_bid_ask(bidask, price_key, amount_key))
else:
self.raise_error(ExchangeError, details='unrecognized bidask format: ' + str(bidasks[0]))
return result
def fetch_l2_order_book(self, symbol, limit=None, params={}):
orderbook = self.fetch_order_book(symbol, limit, params)
return self.extend(orderbook, {
'bids': self.sort_by(self.aggregate(orderbook['bids']), 0, True),
'asks': self.sort_by(self.aggregate(orderbook['asks']), 0),
})
def parse_order_book(self, orderbook, timestamp=None, bids_key='bids', asks_key='asks', price_key=0, amount_key=1):
return {
'bids': self.sort_by(self.parse_bids_asks(orderbook[bids_key], price_key, amount_key) if (bids_key in orderbook) and isinstance(orderbook[bids_key], list) else [], 0, True),
'asks': self.sort_by(self.parse_bids_asks(orderbook[asks_key], price_key, amount_key) if (asks_key in orderbook) and isinstance(orderbook[asks_key], list) else [], 0),
'timestamp': timestamp,
'datetime': self.iso8601(timestamp) if timestamp is not None else None,
'nonce': None,
}
def parse_balance(self, balance):
currencies = self.omit(balance, 'info').keys()
for account in ['free', 'used', 'total']:
balance[account] = {}
for currency in currencies:
balance[account][currency] = balance[currency][account]
return balance
def fetch_partial_balance(self, part, params={}):
balance = self.fetch_balance(params)
return balance[part]
def fetch_free_balance(self, params={}):
return self.fetch_partial_balance('free', params)
def fetch_used_balance(self, params={}):
return self.fetch_partial_balance('used', params)
def fetch_total_balance(self, params={}):
return self.fetch_partial_balance('total', params)
def load_trading_limits(self, symbols=None, reload=False, params={}):
if self.has['fetchTradingLimits']:
if reload or not('limitsLoaded' in list(self.options.keys())):
response = self.fetch_trading_limits(symbols)
limits = response['limits']
keys = list(limits.keys())
for i in range(0, len(keys)):
symbol = keys[i]
self.markets[symbol] = self.deep_extend(self.markets[symbol], {
'limits': limits[symbol],
})
self.options['limitsLoaded'] = self.milliseconds()
return self.markets
def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
if not self.has['fetchTrades']:
self.raise_error(NotSupported, details='fetch_ohlcv() not implemented yet')
self.load_markets()
trades = self.fetch_trades(symbol, since, limit, params)
return self.build_ohlcv(trades, timeframe, since, limit)
def fetchOHLCV(self, symbol, timeframe='1m', since=None, limit=None, params={}):
return self.fetch_ohlcv(symbol, timeframe, since, limit, params)
def convert_trading_view_to_ohlcv(self, ohlcvs):
result = []
for i in range(0, len(ohlcvs['t'])):
result.append([
ohlcvs['t'][i] * 1000,
ohlcvs['o'][i],
ohlcvs['h'][i],
ohlcvs['l'][i],
ohlcvs['c'][i],
ohlcvs['v'][i],
])
return result
def convert_ohlcv_to_trading_view(self, ohlcvs):
result = {
't': [],
'o': [],
'h': [],
'l': [],
'c': [],
'v': [],
}
for i in range(0, len(ohlcvs)):
result['t'].append(int(ohlcvs[i][0] / 1000))
result['o'].append(ohlcvs[i][1])
result['h'].append(ohlcvs[i][2])
result['l'].append(ohlcvs[i][3])
result['c'].append(ohlcvs[i][4])
result['v'].append(ohlcvs[i][5])
return result
def build_ohlcv(self, trades, timeframe='1m', since=None, limit=None):
ms = self.parse_timeframe(timeframe) * 1000
ohlcvs = []
(high, low, close, volume) = (2, 3, 4, 5)
num_trades = len(trades)
oldest = (num_trades - 1) if limit is None else min(num_trades - 1, limit)
for i in range(0, oldest):
trade = trades[i]
if (since is not None) and (trade['timestamp'] < since):
continue
opening_time = int(math.floor(trade['timestamp'] / ms) * ms) # Shift the edge of the m/h/d (but not M)
j = len(ohlcvs)
if (j == 0) or opening_time >= ohlcvs[j - 1][0] + ms:
# moved to a new timeframe -> create a new candle from opening trade
ohlcvs.append([
opening_time,
trade['price'],
trade['price'],
trade['price'],
trade['price'],
trade['amount'],
])
else:
# still processing the same timeframe -> update opening trade
ohlcvs[j - 1][high] = max(ohlcvs[j - 1][high], trade['price'])
ohlcvs[j - 1][low] = min(ohlcvs[j - 1][low], trade['price'])
ohlcvs[j - 1][close] = trade['price']
ohlcvs[j - 1][volume] += trade['amount']
return ohlcvs
def parse_timeframe(self, timeframe):
amount = int(timeframe[0:-1])
unit = timeframe[-1]
if 'y' in unit:
scale = 60 * 60 * 24 * 365
elif 'M' in unit:
scale = 60 * 60 * 24 * 30
elif 'w' in unit:
scale = 60 * 60 * 24 * 7
elif 'd' in unit:
scale = 60 * 60 * 24
elif 'h' in unit:
scale = 60 * 60
else:
scale = 60 # 1m by default
return amount * scale
def parse_trades(self, trades, market=None, since=None, limit=None):
array = self.to_array(trades)
array = [self.parse_trade(trade, market) for trade in array]
array = self.sort_by(array, 'timestamp')
symbol = market['symbol'] if market else None
return self.filter_by_symbol_since_limit(array, symbol, since, limit)
def parse_transactions(self, transactions, currency=None, since=None, limit=None):
array = self.to_array(transactions)
array = [self.parse_transaction(transaction, currency) for transaction in array]
array = self.sort_by(array, 'timestamp')
code = currency['code'] if currency else None
return self.filter_by_currency_since_limit(array, code, since, limit)
def parse_orders(self, orders, market=None, since=None, limit=None):
array = self.to_array(orders)
array = [self.parse_order(order, market) for order in array]
array = self.sort_by(array, 'timestamp')
symbol = market['symbol'] if market else None
return self.filter_by_symbol_since_limit(array, symbol, since, limit)
def filter_by_value_since_limit(self, array, field, value=None, since=None, limit=None):
array = self.to_array(array)
if value:
array = [entry for entry in array if entry[field] == value]
if since:
array = [entry for entry in array if entry['timestamp'] >= since]
if limit:
array = array[0:limit]
return array
def filter_by_symbol_since_limit(self, array, symbol=None, since=None, limit=None):
return self.filter_by_value_since_limit(array, 'symbol', symbol, since, limit)
def filter_by_currency_since_limit(self, array, code=None, since=None, limit=None):
return self.filter_by_value_since_limit(array, 'currency', code, since, limit)
def filter_by_since_limit(self, array, since=None, limit=None):
array = self.to_array(array)
if since:
array = [entry for entry in array if entry['timestamp'] >= since]
if limit:
array = array[0:limit]
return array
def filter_by_symbol(self, array, symbol=None):
array = self.to_array(array)
if symbol:
return [entry for entry in array if entry['symbol'] == symbol]
return array
def filter_by_array(self, objects, key, values=None, indexed=True):
objects = self.to_array(objects)
# return all of them if no values were passed in
if values is None:
return self.index_by(objects, key) if indexed else objects
result = []
for i in range(0, len(objects)):
value = objects[i][key] if key in objects[i] else None
if value in values:
result.append(objects[i])
return self.index_by(result, key) if indexed else result
def currency(self, code):
if not self.currencies:
self.raise_error(ExchangeError, details='Currencies not loaded')
if isinstance(code, basestring) and (code in self.currencies):
return self.currencies[code]
self.raise_error(ExchangeError, details='Does not have currency code ' + str(code))
def find_market(self, string):
if not self.markets:
self.raise_error(ExchangeError, details='Markets not loaded')
if isinstance(string, basestring):
if string in self.markets_by_id:
return self.markets_by_id[string]
if string in self.markets:
return self.markets[string]
return string
def find_symbol(self, string, market=None):
if market is None:
market = self.find_market(string)
if isinstance(market, dict):
return market['symbol']
return string
def market(self, symbol):
if not self.markets:
self.raise_error(ExchangeError, details='Markets not loaded')
if isinstance(symbol, basestring) and (symbol in self.markets):
return self.markets[symbol]
self.raise_error(ExchangeError, details='No market symbol ' + str(symbol))
def market_ids(self, symbols):
return [self.market_id(symbol) for symbol in symbols]
def market_id(self, symbol):
market = self.market(symbol)
return market['id'] if type(market) is dict else symbol
def calculate_fee(self, symbol, type, side, amount, price, takerOrMaker='taker', params={}):
market = self.markets[symbol]
rate = market[takerOrMaker]
cost = float(self.cost_to_precision(symbol, amount * price))
return {
'rate': rate,
'type': takerOrMaker,
'currency': market['quote'],
'cost': float(self.fee_to_precision(symbol, rate * cost)),
}
def edit_limit_buy_order(self, id, symbol, *args):
return self.edit_limit_order(id, symbol, 'buy', *args)
def edit_limit_sell_order(self, id, symbol, *args):
return self.edit_limit_order(id, symbol, 'sell', *args)
def edit_limit_order(self, id, symbol, *args):
return self.edit_order(id, symbol, 'limit', *args)
def edit_order(self, id, symbol, *args):
if not self.enableRateLimit:
self.raise_error(ExchangeError, details='edit_order() requires enableRateLimit = true')
self.cancel_order(id, symbol)
return self.create_order(symbol, *args)
def create_limit_order(self, symbol, *args):
return self.create_order(symbol, 'limit', *args)
def create_market_order(self, symbol, *args):
return self.create_order(symbol, 'market', *args)
def create_limit_buy_order(self, symbol, *args):
return self.create_order(symbol, 'limit', 'buy', *args)
def create_limit_sell_order(self, symbol, *args):
return self.create_order(symbol, 'limit', 'sell', *args)
def create_market_buy_order(self, symbol, amount, params={}):
return self.create_order(symbol, 'market', 'buy', amount, None, params)
def create_market_sell_order(self, symbol, amount, params={}):
return self.create_order(symbol, 'market', 'sell', amount, None, params)
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
raise NotSupported(self.id + ' sign() pure method must be redefined in derived classes')
# -------------------------------------------------------------------------
# web3 / 0x methods
def decryptAccountFromJSON(self, value, password):
return self.decryptAccount(json.loads(value) if isinstance(value, basestring) else value, password)
def decryptAccount(self, key, password):
return self.web3.eth.accounts.decrypt(key, password)
def decryptAccountFromPrivateKey(self, privateKey):
return self.web3.eth.accounts.privateKeyToAccount(privateKey)
def soliditySha3(self, array):
values = self.solidityValues(array)
types = self.solidityTypes(values)
return self.web3.soliditySha3(types, values).hex()
def soliditySha256(self, values):
types = self.solidityTypes(values)
solidity_values = self.solidityValues(values)
encoded_values = [hex_encode_abi_type(abi_type, value)[2:] for abi_type, value in zip(types, solidity_values)]
hex_string = '0x' + ''.join(encoded_values)
return '0x' + self.hash(self.encode(self.web3.toText(hex_string)), 'sha256')
def solidityTypes(self, array):
return ['address' if self.web3.isAddress(value) else 'uint256' for value in array]
def solidityValues(self, array):
return [self.web3.toChecksumAddress(value) if self.web3.isAddress(value) else int(value) for value in array]
def getZeroExOrderHash2(self, order):
return self.soliditySha3([
order['exchangeContractAddress'], # address
order['maker'], # address
order['taker'], # address
order['makerTokenAddress'], # address
order['takerTokenAddress'], # address
order['feeRecipient'], # address
order['makerTokenAmount'], # uint256
order['takerTokenAmount'], # uint256
order['makerFee'], # uint256
order['takerFee'], # uint256
order['expirationUnixTimestampSec'], # uint256
order['salt'], # uint256
])
def getZeroExOrderHash(self, order):
unpacked = [
self.web3.toChecksumAddress(order['exchangeContractAddress']), # { value: order.exchangeContractAddress, type: types_1.SolidityTypes.Address },
self.web3.toChecksumAddress(order['maker']), # { value: order.maker, type: types_1.SolidityTypes.Address },
self.web3.toChecksumAddress(order['taker']), # { value: order.taker, type: types_1.SolidityTypes.Address },
self.web3.toChecksumAddress(order['makerTokenAddress']), # { value: order.makerTokenAddress, type: types_1.SolidityTypes.Address },
self.web3.toChecksumAddress(order['takerTokenAddress']), # { value: order.takerTokenAddress, type: types_1.SolidityTypes.Address },
self.web3.toChecksumAddress(order['feeRecipient']), # { value: order.feeRecipient, type: types_1.SolidityTypes.Address },
int(order['makerTokenAmount']), # { value: bigNumberToBN(order.makerTokenAmount), type: types_1.SolidityTypes.Uint256, },
int(order['takerTokenAmount']), # { value: bigNumberToBN(order.takerTokenAmount), type: types_1.SolidityTypes.Uint256, },
int(order['makerFee']), # { value: bigNumberToBN(order.makerFee), type: types_1.SolidityTypes.Uint256, },
int(order['takerFee']), # { value: bigNumberToBN(order.takerFee), type: types_1.SolidityTypes.Uint256, },
int(order['expirationUnixTimestampSec']), # { value: bigNumberToBN(order.expirationUnixTimestampSec), type: types_1.SolidityTypes.Uint256, },
int(order['salt']), # { value: bigNumberToBN(order.salt), type: types_1.SolidityTypes.Uint256 },
]
types = [
'address', # { value: order.exchangeContractAddress, type: types_1.SolidityTypes.Address },
'address', # { value: order.maker, type: types_1.SolidityTypes.Address },
'address', # { value: order.taker, type: types_1.SolidityTypes.Address },
'address', # { value: order.makerTokenAddress, type: types_1.SolidityTypes.Address },
'address', # { value: order.takerTokenAddress, type: types_1.SolidityTypes.Address },
'address', # { value: order.feeRecipient, type: types_1.SolidityTypes.Address },
'uint256', # { value: bigNumberToBN(order.makerTokenAmount), type: types_1.SolidityTypes.Uint256, },
'uint256', # { value: bigNumberToBN(order.takerTokenAmount), type: types_1.SolidityTypes.Uint256, },
'uint256', # { value: bigNumberToBN(order.makerFee), type: types_1.SolidityTypes.Uint256, },
'uint256', # { value: bigNumberToBN(order.takerFee), type: types_1.SolidityTypes.Uint256, },
'uint256', # { value: bigNumberToBN(order.expirationUnixTimestampSec), type: types_1.SolidityTypes.Uint256, },
'uint256', # { value: bigNumberToBN(order.salt), type: types_1.SolidityTypes.Uint256 },
]
return self.web3.soliditySha3(types, unpacked).hex()
def signZeroExOrder(self, order):
orderHash = self.getZeroExOrderHash(order)
signature = self.signMessage(orderHash[-64:], self.privateKey)
return self.extend(order, {
'orderHash': orderHash,
'ecSignature': signature, # todo fix v if needed
})
def hashMessage(self, message):
message_bytes = bytes.fromhex(message)
return self.web3.sha3(b"\x19Ethereum Signed Message:\n" + str(len(message_bytes)).encode() + message_bytes).hex()
def signHash(self, hash, privateKey):
signature = self.web3.eth.account.signHash(hash[-64:], private_key=privateKey[-64:])
return {
'v': signature.v, # integer
'r': self.web3.toHex(signature.r), # '0x'-prefixed hex string
's': self.web3.toHex(signature.s), # '0x'-prefixed hex string
}
def signMessage(self, message, privateKey):
#
# The following comment is related to MetaMask, we use the upper type of signature prefix:
#
# z.ecSignOrderHashAsync ('0xcfdb0a485324ff37699b4c8557f6858f25916fc6fce5993b32fe018aea510b9f',
# '0x731fc101bbe102221c91c31ed0489f1ddfc439a3', {
# prefixType: 'ETH_SIGN',
# shouldAddPrefixBeforeCallingEthSign: true
# }).then ((e, r) => console.log (e,r))
#
# { ↓
# v: 28,
# r: "0xea7a68268b47c48d5d7a4c900e6f9af0015bf70951b3db2f1d835c5d544aaec2",
# s: "0x5d1db2a060c955c1fde4c967237b995c2361097405407b33c6046c8aeb3ccbdf"
# }
#
# --------------------------------------------------------------------
#
# z.ecSignOrderHashAsync ('0xcfdb0a485324ff37699b4c8557f6858f25916fc6fce5993b32fe018aea510b9f',
# '0x731fc101bbe102221c91c31ed0489f1ddfc439a3', {
# prefixType: 'NONE',
# shouldAddPrefixBeforeCallingEthSign: true
# }).then ((e, r) => console.log (e,r))
#
# { ↓
# v: 27,
# r: "0xc8c710022c57de4f529d448e9b40517dd9bfb49ff1eb245f5856664b865d14a6",
# s: "0x0740bb21f4f094fbbdbafa903bb8f057f82e0c6e4fe65d19a1daed4ed97cd394"
# }
#
message_hash = self.hashMessage(message)
signature = self.signHash(message_hash[-64:], privateKey[-64:])
return signature
Row 2 stats: avg_line_length 40.146357, max_line_length 185, alphanum_fraction 0.596865.
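The `Exchange` base class above is normally consumed through per-exchange subclasses, but several of its static helpers can be exercised directly. A minimal usage sketch, assuming the file above is importable as `ccxt.base.exchange` (its path in this row):

```python
# Usage sketch for a few static helpers defined in the Exchange class above.
from ccxt.base.exchange import Exchange

# deep_extend merges nested dicts, with later arguments taking precedence
merged = Exchange.deep_extend({'a': {'x': 1}}, {'a': {'y': 2}})
print(merged)  # {'a': {'x': 1, 'y': 2}}

# parse8601 converts an ISO 8601 string to a millisecond timestamp; iso8601 reverses it
ts = Exchange.parse8601('2018-01-31T00:58:07.000Z')
print(ts)                    # 1517360287000
print(Exchange.iso8601(ts))  # 2018-01-31T00:58:07.000Z

# safe_float returns a default instead of raising on missing or malformed keys
print(Exchange.safe_float({'price': '0.1'}, 'price'))  # 0.1
print(Exchange.safe_float({}, 'price', 0.0))           # 0.0
```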
Row 3: hexsha `42557a451eda1befec198667b0055a44bd2717e8`, size 8,315, ext `py`, lang Python. Path `tests/test_build.py` in repo `joelostblom/jupyter-book` (head `9a2c8e7149e0184c0d0924e7d3b54b7c720bfd5e`, licenses ["BSD-3-Clause"]); star and issue counts and event dates: null; 1 fork, fork events from 2021-07-02T17:34:18.000Z to 2021-07-02T17:34:18.000Z. Content:
from pathlib import Path
import pytest
from bs4 import BeautifulSoup
from click.testing import CliRunner
import sphinx
from jupyter_book import commands
def test_version(cli: CliRunner):
result = cli.invoke(commands.main, "--version")
assert result.exit_code == 0, result.output
assert "Jupyter Book" in result.output, result.output
def test_create(temp_with_override: Path, cli):
book = temp_with_override / "new_book"
result = cli.invoke(commands.create, book.as_posix())
assert result.exit_code == 0
assert book.joinpath("_config.yml").exists()
assert len(list(book.iterdir())) == 9
def test_create_from_cookiecutter(temp_with_override: Path, cli):
book = temp_with_override / "new_book"
result = cli.invoke(commands.create, [book.as_posix(), "--cookiecutter"])
assert result.exit_code == 0
# this test uses default cookiecutter prompt values
# note that default cookiecutter book name is "my_book"
assert book.joinpath("my_book", "my_book", "_config.yml").exists()
assert len(list(book.joinpath("my_book").iterdir())) == 7
assert len(list(book.joinpath("my_book", ".github", "workflows").iterdir())) == 1
assert len(list(book.joinpath("my_book", "my_book").iterdir())) == 8
def test_build_from_template(temp_with_override, cli):
"""Test building the book template and a few test configs."""
# Create the book from the template
book = temp_with_override / "new_book"
_ = cli.invoke(commands.create, book.as_posix())
build_result = cli.invoke(commands.build, book.as_posix())
assert build_result.exit_code == 0, build_result.output
html = book.joinpath("_build", "html")
assert html.joinpath("index.html").exists()
assert html.joinpath("intro.html").exists()
def test_custom_config(cli, build_resources):
"""Test a variety of custom configuration values."""
books, _ = build_resources
config = books.joinpath("config")
result = cli.invoke(commands.build, config.as_posix())
assert result.exit_code == 0
html = config.joinpath("_build", "html", "index.html").read_text(encoding="utf8")
soup = BeautifulSoup(html, "html.parser")
assert '<h1 class="site-logo" id="site-title">TEST PROJECT NAME</h1>' in html
assert '<div class="sphinx-tabs docutils container">' in html
assert '<link rel="stylesheet" type="text/css" href="_static/mycss.css" />' in html
assert '<script src="_static/js/myjs.js"></script>' in html
# Check that our comments engines were correctly added
assert soup.find("script", attrs={"kind": "hypothesis"})
assert soup.find("script", attrs={"kind": "utterances"})
@pytest.mark.parametrize("toc", ["_toc.yml", "_toc_startwithlist.yml"])
def test_toc_builds(cli, build_resources, toc):
"""Test building the book template with several different TOC files."""
books, tocs = build_resources
result = cli.invoke(
commands.build, [tocs.as_posix(), "--toc", (tocs / toc).as_posix(), "-W"]
)
assert result.exit_code == 0, result.output
def test_toc_rebuild(cli, build_resources):
"""Changes to the TOC should force a re-build of pages. Also tests for changes
to the relative ordering of content pages.
"""
_, tocs = build_resources
toc = tocs / "_toc_simple.yml"
index_html = tocs.joinpath("_build", "html", "index.html")
# Not using -W because we expect warnings for pages not listed in TOC
result = cli.invoke(commands.build, [tocs.as_posix(), "--toc", toc.as_posix()])
html = BeautifulSoup(index_html.read_text(encoding="utf8"), "html.parser")
tags = html.find_all("a", "reference internal")
assert result.exit_code == 0
assert tags[1].attrs["href"] == "content1.html"
assert tags[2].attrs["href"] == "content2.html"
toc.write_text("- file: index\n- file: content2\n- file: content1\n")
result = cli.invoke(commands.build, [tocs.as_posix(), "--toc", toc.as_posix()])
assert result.exit_code == 0
html = BeautifulSoup(index_html.read_text(encoding="utf8"), "html.parser")
tags = html.find_all("a", "reference internal")
# The rendered TOC should reflect the order in the modified _toc.yml
assert tags[1].attrs["href"] == "content2.html"
assert tags[2].attrs["href"] == "content1.html"
@pytest.mark.parametrize(
"toc,msg",
[
("_toc_emptysections.yml", "Found an empty section"),
("_toc_urlwithouttitle.yml", "`url:` link should"),
("_toc_url.yml", "Rename `url:` to `file:`"),
("_toc_wrongkey.yml", "Unknown key in `_toc.yml`"),
],
)
def test_corrupt_toc(build_resources, cli, toc, msg):
books, tocs = build_resources
with pytest.raises(RuntimeError):
result = cli.invoke(
commands.build, [tocs.as_posix(), "--toc", (tocs / toc).as_posix(), "-W"]
)
assert result.exit_code == 1
assert msg in result.output
raise result.exception
def test_build_errors(build_resources, cli):
books, tocs = build_resources
path = books.joinpath("mybook").absolute()
# Bad builder
result = cli.invoke(commands.build, [path.as_posix(), "--builder", "blah"])
assert result.exit_code == 2
# No table of contents message
p_notoc = books.joinpath("notoc")
with pytest.raises(RuntimeError):
result = cli.invoke(commands.build, [p_notoc.as_posix()])
assert result.exit_code == 1
assert "Couldn't find a Table of Contents file" in str(result.exception)
raise result.exception
# Test error on warnings and book error message
p_syntax = books.joinpath("sphinx_syntaxerr")
with pytest.raises(RuntimeError):
result = cli.invoke(commands.build, [p_syntax.as_posix(), "-W"])
assert result.exit_code == 1
assert "There was an error in building your book" in str(result.exception)
raise result.exception
# Config file path does not exist
with pytest.raises(IOError):
result = cli.invoke(
commands.build, [p_syntax.as_posix(), "--config", "non_existent_path"]
)
assert result.exit_code == 1
assert "Config file path given, but not found" in str(result.exception)
raise result.exception
def test_build_docs(docs, cli):
"""Test building the documentation book."""
html = docs.joinpath("_build", "html")
result = cli.invoke(commands.build, [docs.as_posix()])
assert result.exit_code == 0
assert html.joinpath("index.html").exists()
assert html.joinpath("intro.html").exists()
assert html.joinpath("content", "citations.html").exists()
def test_build_page(pages, cli):
"""Test building a page."""
page = pages.joinpath("single_page.ipynb")
html = pages.joinpath("_build", "_page", "single_page", "html")
index = html.joinpath("index.html")
result = cli.invoke(commands.build, [page.as_posix()])
assert result.exit_code == 0
assert html.joinpath("single_page.html").exists()
assert not html.joinpath("extra_page.html").exists()
assert 'url=single_page.html" />' in index.read_text(encoding="utf8")
def test_build_page_nested(build_resources, cli):
"""Test building a page."""
books, _ = build_resources
src = books.joinpath("nested")
page = src.joinpath("contents", "markdown.md")
html = src.joinpath("_build", "_page", "contents-markdown", "html")
index = html.joinpath("index.html")
result = cli.invoke(commands.build, [page.as_posix()])
assert result.exit_code == 0
assert html.joinpath("markdown.html").exists()
assert not html.joinpath("extra_page.html").exists()
assert 'url=markdown.html" />' in index.read_text(encoding="utf8")
@pytest.mark.skipif(sphinx.version_info[0] == 2, reason="randomly fails on CI")
def test_execution_timeout(pages, build_resources, cli):
"""Testing timeout execution for a page."""
books, _ = build_resources
path_page = pages.joinpath("loop_unrun.ipynb")
path_c = books.joinpath("config", "_config_timeout.yml")
path_html = pages.joinpath("_build", "_page", "loop_unrun", "html")
result = cli.invoke(
commands.build, [path_page.as_posix(), "--config", path_c.as_posix()]
)
assert "Execution Failed" in result.stdout
assert path_html.joinpath("reports", "loop_unrun.log").exists()
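# A rough command-line sketch of what the tests above exercise; the book path and
# page name below are placeholders, not values taken from the test fixtures:
#
#     jupyter-book create my_book/
#     jupyter-book build my_book/
#     jupyter-book build my_book/single_page.ipynb   # builds a single page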
| 40.560976 | 87 | 0.678172 |
414ede1a67253d42970e133b115f9e0ed12bc56b | 5,947 | py | Python | pyInstaller/dash/dash_html_components/Iframe.py | rianawillers/dash-lineplot | b72c3e4799d39bdc33bbcae2202fdefd6f4af00e | ["MIT"] | 3 | 2020-02-04T21:39:20.000Z | 2020-11-17T19:07:07.000Z | pyInstaller/dash/dash_html_components/Iframe.py | rianawillers/dash-lineplot | b72c3e4799d39bdc33bbcae2202fdefd6f4af00e | ["MIT"] | 12 | 2020-06-06T01:22:26.000Z | 2022-03-12T00:13:42.000Z | pyInstaller/dash/dash_html_components/Iframe.py | rianawillers/dash-lineplot | b72c3e4799d39bdc33bbcae2202fdefd6f4af00e | ["MIT"] | 1 | 2020-01-25T20:08:59.000Z | 2020-01-25T20:08:59.000Z |
# AUTO GENERATED FILE - DO NOT EDIT
from dash.development.base_component import Component, _explicitize_args
class Iframe(Component):
"""An Iframe component.
Iframe is a wrapper for the <iframe> HTML5 element.
For detailed attribute info see:
https://developer.mozilla.org/en-US/docs/Web/HTML/Element/iframe
Keyword arguments:
- children (a list of or a singular dash component, string or number; optional): The children of this component
- id (string; optional): The ID of this component, used to identify dash components
in callbacks. The ID needs to be unique across all of the
components in an app.
- n_clicks (number; default 0): An integer that represents the number of times
that this element has been clicked on.
- n_clicks_timestamp (number; default -1): An integer that represents the time (in ms since 1970)
at which n_clicks changed. This can be used to tell
which button was changed most recently.
- key (string; optional): A unique identifier for the component, used to improve
performance by React.js while rendering components
See https://reactjs.org/docs/lists-and-keys.html for more info
- role (string; optional): The ARIA role attribute
- data-* (string; optional): A wildcard data attribute
- aria-* (string; optional): A wildcard aria attribute
- height (string | number; optional): Specifies the height of elements listed here. For all other elements, use the CSS height property. Note: In some instances, such as <div>, this is a legacy attribute, in which case the CSS height property should be used instead.
- name (string; optional): Name of the element. For example used by the server to identify the fields in form submits.
- sandbox (string; optional): Stops a document loaded in an iframe from using certain features (such as submitting forms or opening new windows).
- src (string; optional): The URL of the embeddable content.
- srcDoc (string; optional)
- width (string | number; optional): For the elements listed here, this establishes the element's width. Note: For all other instances, such as <div>, this is a legacy attribute, in which case the CSS width property should be used instead.
- accessKey (string; optional): Defines a keyboard shortcut to activate or add focus to the element.
- className (string; optional): Often used with CSS to style elements with common properties.
- contentEditable (string; optional): Indicates whether the element's content is editable.
- contextMenu (string; optional): Defines the ID of a <menu> element which will serve as the element's context menu.
- dir (string; optional): Defines the text direction. Allowed values are ltr (Left-To-Right) or rtl (Right-To-Left)
- draggable (string; optional): Defines whether the element can be dragged.
- hidden (a value equal to: 'hidden', 'HIDDEN' | boolean; optional): Prevents rendering of given element, while keeping child elements, e.g. script elements, active.
- lang (string; optional): Defines the language used in the element.
- spellCheck (string; optional): Indicates whether spell checking is allowed for the element.
- style (dict; optional): Defines CSS styles which will override styles previously set.
- tabIndex (string; optional): Overrides the browser's default tab order and follows the one specified instead.
- title (string; optional): Text to be displayed in a tooltip when hovering over the element.
- loading_state (dict; optional): Object that holds the loading state object coming from dash-renderer. loading_state has the following type: dict containing keys 'is_loading', 'prop_name', 'component_name'.
Those keys have the following types:
- is_loading (boolean; optional): Determines if the component is loading or not
- prop_name (string; optional): Holds which property is loading
- component_name (string; optional): Holds the name of the component that is loading"""
@_explicitize_args
def __init__(self, children=None, id=Component.UNDEFINED, n_clicks=Component.UNDEFINED, n_clicks_timestamp=Component.UNDEFINED, key=Component.UNDEFINED, role=Component.UNDEFINED, height=Component.UNDEFINED, name=Component.UNDEFINED, sandbox=Component.UNDEFINED, src=Component.UNDEFINED, srcDoc=Component.UNDEFINED, width=Component.UNDEFINED, accessKey=Component.UNDEFINED, className=Component.UNDEFINED, contentEditable=Component.UNDEFINED, contextMenu=Component.UNDEFINED, dir=Component.UNDEFINED, draggable=Component.UNDEFINED, hidden=Component.UNDEFINED, lang=Component.UNDEFINED, spellCheck=Component.UNDEFINED, style=Component.UNDEFINED, tabIndex=Component.UNDEFINED, title=Component.UNDEFINED, loading_state=Component.UNDEFINED, **kwargs):
self._prop_names = ['children', 'id', 'n_clicks', 'n_clicks_timestamp', 'key', 'role', 'data-*', 'aria-*', 'height', 'name', 'sandbox', 'src', 'srcDoc', 'width', 'accessKey', 'className', 'contentEditable', 'contextMenu', 'dir', 'draggable', 'hidden', 'lang', 'spellCheck', 'style', 'tabIndex', 'title', 'loading_state']
self._type = 'Iframe'
self._namespace = 'dash_html_components'
self._valid_wildcard_attributes = ['data-', 'aria-']
self.available_properties = ['children', 'id', 'n_clicks', 'n_clicks_timestamp', 'key', 'role', 'data-*', 'aria-*', 'height', 'name', 'sandbox', 'src', 'srcDoc', 'width', 'accessKey', 'className', 'contentEditable', 'contextMenu', 'dir', 'draggable', 'hidden', 'lang', 'spellCheck', 'style', 'tabIndex', 'title', 'loading_state']
self.available_wildcard_properties = ['data-', 'aria-']
_explicit_args = kwargs.pop('_explicit_args')
_locals = locals()
_locals.update(kwargs) # For wildcard attrs
args = {k: _locals[k] for k in _explicit_args if k != 'children'}
for k in []:
if k not in args:
raise TypeError(
'Required argument `' + k + '` was not specified.')
super(Iframe, self).__init__(children=children, **args)
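# --- Hypothetical usage sketch (not part of the auto-generated component code) ---
# A minimal Dash app embedding an Iframe; the URL and sizes are assumptions chosen
# only for illustration.
if __name__ == '__main__':
    import dash
    import dash_html_components as html

    app = dash.Dash(__name__)
    app.layout = html.Div([
        html.Iframe(src='https://example.com', width='100%', height=400),
    ])
    app.run_server(debug=True)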
| 84.957143 | 749 | 0.740541 |
42e0288d869232372c634f2a8773dd922c1a41f1 | 3,715 | py | Python | dash_daq/PowerButton.py | luiztauffer/dash-daq | 4975093449bdc4d7ff4cd366ac82a847cdf24c34 | ["MIT"] | null | null | null | dash_daq/PowerButton.py | luiztauffer/dash-daq | 4975093449bdc4d7ff4cd366ac82a847cdf24c34 | ["MIT"] | null | null | null | dash_daq/PowerButton.py | luiztauffer/dash-daq | 4975093449bdc4d7ff4cd366ac82a847cdf24c34 | ["MIT"] | null | null | null |
# AUTO GENERATED FILE - DO NOT EDIT
from dash.development.base_component import Component, _explicitize_args
class PowerButton(Component):
"""A PowerButton component.
A power button component can be
turned on and off.
Keyword arguments:
- id (string; optional):
The ID used to identify this compnent in Dash callbacks.
- className (string; optional):
Class to apply to the root component element.
- color (string; optional):
The indicator color to display when power button is on.
- disabled (boolean; optional):
If True, power button cannot be clicked.
- label (dict; optional):
Description to be displayed alongside the button. To control
styling, pass an object with label and style properties.
`label` is a string | dict with keys:
- label (string; optional)
- style (dict; optional)
- labelPosition (a value equal to: 'top', 'bottom'; default 'top'):
Where the button label is positioned.
- on (boolean; default False):
Whether or not the power button is on.
- persisted_props (list of a value equal to: 'on's; default ['on']):
Properties whose user interactions will persist after refreshing
the component or the page. Since only `on` is allowed this prop
can normally be ignored.
- persistence (boolean | string | number; optional):
Used to allow user interactions in this component to be persisted
when the component - or the page - is refreshed. If `persisted` is
truthy and hasn't changed from its previous value, a `value` that
the user has changed while using the app will keep that change, as
long as the new `value` also matches what was given originally.
Used in conjunction with `persistence_type`.
- persistence_type (a value equal to: 'local', 'session', 'memory'; default 'local'):
Where persisted user changes will be stored: memory: only kept in
memory, reset on page refresh. local: window.localStorage, data is
kept after the browser quit. session: window.sessionStorage, data
is cleared once the browser quit.
- size (number; default 48):
The size (diameter) of the power button in pixels.
- style (dict; optional):
Style to apply to the root component element.
- theme (dict; default light):
Theme configuration to be set by a ThemeProvider."""
@_explicitize_args
def __init__(self, id=Component.UNDEFINED, on=Component.UNDEFINED, color=Component.UNDEFINED, size=Component.UNDEFINED, disabled=Component.UNDEFINED, theme=Component.UNDEFINED, label=Component.UNDEFINED, labelPosition=Component.UNDEFINED, className=Component.UNDEFINED, style=Component.UNDEFINED, persistence=Component.UNDEFINED, persisted_props=Component.UNDEFINED, persistence_type=Component.UNDEFINED, **kwargs):
self._prop_names = ['id', 'className', 'color', 'disabled', 'label', 'labelPosition', 'on', 'persisted_props', 'persistence', 'persistence_type', 'size', 'style', 'theme']
self._type = 'PowerButton'
self._namespace = 'dash_daq'
self._valid_wildcard_attributes = []
self.available_properties = ['id', 'className', 'color', 'disabled', 'label', 'labelPosition', 'on', 'persisted_props', 'persistence', 'persistence_type', 'size', 'style', 'theme']
self.available_wildcard_properties = []
_explicit_args = kwargs.pop('_explicit_args')
_locals = locals()
_locals.update(kwargs) # For wildcard attrs
args = {k: _locals[k] for k in _explicit_args if k != 'children'}
for k in []:
if k not in args:
raise TypeError(
'Required argument `' + k + '` was not specified.')
super(PowerButton, self).__init__(**args)
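# --- Hypothetical usage sketch (not part of the auto-generated component code) ---
# A minimal Dash app showing a PowerButton; the id, label, and initial state are
# assumptions chosen only for illustration.
if __name__ == '__main__':
    import dash
    import dash_html_components as html
    import dash_daq as daq

    app = dash.Dash(__name__)
    app.layout = html.Div([
        daq.PowerButton(id='power', on=True, label='Power', labelPosition='bottom'),
    ])
    app.run_server(debug=True)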
| 43.705882 | 419 | 0.700673 |
adbf4983dc42973726f775ca74bd7dc6ef4270e1 | 9,366 | py | Python | pyscf/semiempirical/rmindo3_grad.py | mfkasim1/pyscf | 7be5e015b2b40181755c71d888449db936604660 | ["Apache-2.0"] | 1 | 2021-11-12T11:55:25.000Z | 2021-11-12T11:55:25.000Z | pyscf/semiempirical/rmindo3_grad.py | mfkasim1/pyscf | 7be5e015b2b40181755c71d888449db936604660 | ["Apache-2.0"] | 36 | 2018-08-22T19:44:03.000Z | 2020-05-09T10:02:36.000Z | pyscf/semiempirical/rmindo3_grad.py | mfkasim1/pyscf | 7be5e015b2b40181755c71d888449db936604660 | ["Apache-2.0"] | 4 | 2018-02-14T16:28:28.000Z | 2019-08-12T16:40:30.000Z |
#!/usr/bin/env python
import numpy
from pyscf import lib
from pyscf import gto
from pyscf.grad import rhf as rhf_grad
from pyscf.semiempirical import mindo3
from pyscf.semiempirical import mopac_param
def grad_nuc(mol, atmlst=None):
atom_charges = mol.atom_charges()
atom_coords = mol.atom_coords()
natm = atom_charges.size
dR = (atom_coords[:,None,:] - atom_coords) * lib.param.BOHR
distances_in_AA = numpy.linalg.norm(dR, axis=2)
# numerically exclude atomic self-interaction terms
distances_in_AA[numpy.diag_indices(mol.natm)] = 1e60
# one atom is H, another atom is N or O
where_NO = (atom_charges == 7) | (atom_charges == 8)
mask = (atom_charges[:,None] == 1) & where_NO
mask = mask | mask.T
scale = alpha = mindo3._get_alpha(atom_charges[:,None], atom_charges)
scale[mask] *= numpy.exp(-distances_in_AA[mask])
scale[~mask] = numpy.exp(-alpha[~mask] * distances_in_AA[~mask])
z_eff = mopac_param.CORE[atom_charges]
gamma = mindo3._get_gamma(mol)
gamma1 = _get_gamma1_half(mol)
#gs = .5 * numpy.einsum('i,sxij,j->sx', z_eff, _get_gamma1(mol), z_eff)
gs = numpy.einsum('i,xij,j->ix', z_eff, gamma1, z_eff)
alpha = mindo3._get_alpha(atom_charges[:,None], atom_charges)
div = numpy.zeros((natm,natm))
div[mask] = (-alpha[mask] * numpy.exp(-distances_in_AA[mask])
/ distances_in_AA[mask])
div[~mask] = (-alpha[~mask] * numpy.exp(-alpha[~mask] * distances_in_AA[~mask])
/ distances_in_AA[~mask])
# scale1 = numpy.zeros((natm,3,natm,natm))
# for i in range(natm):
# v = dR[i] * div[i,:,None]
# scale1[i,:,i] = v.T
# scale1[i,:,:,i] = v.T
#
# gs += .5 * numpy.einsum('i,j,sxij,ij->sx', z_eff, z_eff, scale1,
# mopac_param.E2/distances_in_AA - gamma)
gs += numpy.einsum('i,j,ijx,ij,ij->ix', z_eff, z_eff, dR, div,
mopac_param.E2/distances_in_AA - gamma)
div = -mopac_param.E2 / distances_in_AA**3
div[numpy.diag_indices(natm)] = 0
# t1 = numpy.zeros((natm,3,natm,natm))
# for i in range(natm):
# v = dR[i] * div[i,:,None]
# t1[i,:,i] = v.T
# t1[i,:,:,i] = v.T
#
# gs += .5 * numpy.einsum('i,j,ij,sxij->sx', z_eff, z_eff, scale,
# t1-_get_gamma1(mol))
t1 = numpy.einsum('ijx,ij->xij', dR, div)
gs += numpy.einsum('i,j,ij,xij->ix', z_eff, z_eff, scale, t1-gamma1)
if atmlst is not None:
gs = gs[atmlst]
gs *= lib.param.BOHR
return gs
def _get_gamma1(mol):
natm = mol.natm
gamma1_half = _get_gamma1_half(mol)
gamma1 = numpy.zeros((natm,3,natm,natm))
for i in range(natm):
gamma1[i,:,i] = gamma1[i,:,:,i] = gamma1_half[:,i]
return gamma1
def _get_gamma1_half(mol):
'''gamma1_half[:,i,:] == gamma1[i,:,i,:] == gamma1[i,:,:,i]'''
atom_charges = mol.atom_charges()
atom_coords = mol.atom_coords()
natm = atom_charges.size
dR = (atom_coords[:,None,:] - atom_coords) * lib.param.BOHR
distances_in_AA = numpy.linalg.norm(dR, axis=2)
rho = numpy.array([mopac_param.E2/mopac_param.F03[z] for z in atom_charges])
div = -mopac_param.E2 / (distances_in_AA**2 + (rho[:,None] + rho)**2*.25)**1.5
div[numpy.diag_indices(natm)] = 0 # remove self-interaction terms
gamma1_dense = numpy.einsum('ijx,ij->xij', dR, div)
return gamma1_dense
def hcore_generator(mf_grad, mol=None):
mol = mf_grad.base._mindo_mol
nao = mol.nao
atom_charges = mol.atom_charges()
basis_atom_charges = atom_charges[mol._bas[:,gto.ATOM_OF]]
natm = atom_charges.size
basis_ip = []
for i, z in enumerate(basis_atom_charges):
l = mol.bas_angular(i)
if l == 0:
basis_ip.append(mopac_param.VS[z])
else:
basis_ip.append(mopac_param.VP[z])
ao_atom_charges = mindo3._to_ao_labels(mol, basis_atom_charges)
ao_ip = mindo3._to_ao_labels(mol, basis_ip)
# Off-diagonal terms
hcore = mol.intor('int1e_ipovlp', comp=3)
hcore *= ao_ip[:,None] + ao_ip
hcore *= mindo3._get_beta0(ao_atom_charges[:,None], ao_atom_charges)
# int1e_ipovlp is computed in atomic unit. Scale it to AA
hcore *= 1./ lib.param.BOHR
# Nuclear attraction
gamma1 = _get_gamma1_half(mol)
z_eff = mopac_param.CORE[atom_charges]
aoslices = mol.aoslice_by_atom()
def hcore_deriv(atm_id):
gamma1p = numpy.zeros((3,natm,natm))
gamma1p[:,atm_id] = gamma1p[:,:,atm_id] = gamma1[:,atm_id]
vnuc = numpy.einsum('xij,j->xi', gamma1p, z_eff)
h1 = numpy.zeros((3,nao,nao))
for ia, (p0, p1) in enumerate(aoslices[:,2:]):
idx = numpy.arange(p0, p1)
h1[:,idx,idx] -= vnuc[:,ia,None]
p0, p1 = aoslices[atm_id,2:]
h1[:,p0:p1,:] -= hcore[:,p0:p1]
h1[:,:,p0:p1] -= hcore[:,p0:p1].transpose(0,2,1)
return h1
return hcore_deriv
def get_jk(mol, dm):
dm = numpy.asarray(dm)
dm_shape = dm.shape
nao = dm_shape[-1]
dm = dm.reshape(-1,nao,nao)
ndm = dm.shape[0]
vj = numpy.zeros((ndm,3,nao,nao))
vk = numpy.zeros((ndm,3,nao,nao))
# Two-center contributions to the J/K matrices
gamma1 = _get_gamma1_half(mol)
# Scaling integrals by .5 because rhf_grad.get_jk function only computes
# the bra-derivatives. gamma is the diagonal part of the ERIs. gamma1 is
# the full derivatives of the diagonal elements rho_ii, which include the
# derivatives of ket functions.
gamma1 *= .5
aoslices = mol.aoslice_by_atom()
pop_atom = [numpy.einsum('tii->t', dm[:,p0:p1,p0:p1])
for p0, p1 in aoslices[:,2:]]
vj_diag = numpy.einsum('xij,jt->txi', gamma1, pop_atom)
for ia, (p0, p1) in enumerate(aoslices[:,2:]):
idx = numpy.arange(p0, p1)
vj[:,:,idx,idx] += vj_diag[:,:,ia,numpy.newaxis]
for ja, (q0, q1) in enumerate(aoslices[:,2:]):
ksub = numpy.einsum('x,tij->txij', gamma1[:,ia,ja], dm[:,p0:p1,q0:q1])
vk[:,:,p0:p1,q0:q1] += ksub
if ndm == 1:
vj = vj[0]
vk = vk[0]
return vj, vk
class Gradients(rhf_grad.Gradients):
get_hcore = None
hcore_generator = hcore_generator
def get_ovlp(self, mol=None):
nao = self.base._mindo_mol.nao
return numpy.zeros((3,nao,nao))
def get_jk(self, mol=None, dm=None, hermi=0):
if dm is None: dm = self.base.make_rdm1()
vj, vk = get_jk(self.base._mindo_mol, dm)
return vj, vk
def grad_nuc(self, mol=None, atmlst=None):
mol = self.base._mindo_mol
return grad_nuc(mol, atmlst)
def grad_elec(self, mo_energy=None, mo_coeff=None, mo_occ=None, atmlst=None):
# grad_elec uses self.mol. However, self.mol points to the
# input molecule with the input basis. MINDO/3 basis (in _mindo_mol)
# should be passed to grad_elec.
with lib.temporary_env(self, mol=self.base._mindo_mol):
return rhf_grad.grad_elec(self, mo_energy, mo_coeff, mo_occ,
atmlst) * lib.param.BOHR
Grad = Gradients
if __name__ == '__main__':
from pyscf.data.nist import HARTREE2EV
mol = gto.Mole()
mol.atom = [
['O' , (0. , 0. , 0.)],
[1 , (0. , -0.757 , 0.587)],
[1 , (0. , 0.757 , 0.587)] ]
mol.verbose = 0
mol.build()
mfs = mindo3.RMINDO3(mol).set(conv_tol=1e-8).as_scanner()
mfs(mol)
print(mfs.e_tot - -341.50046431149383/HARTREE2EV)
mol1 = mol.copy()
mol1.set_geom_([['O' , (0. , 0. , 0.0001)],
[1 , (0. , -0.757 , 0.587)],
[1 , (0. , 0.757 , 0.587)]])
mol2 = mol.copy()
mindo_mol1 = mindo3._make_mindo_mol(mol1)
mol2.set_geom_([['O' , (0. , 0. ,-0.0001)],
[1 , (0. , -0.757 , 0.587)],
[1 , (0. , 0.757 , 0.587)]])
mindo_mol2 = mindo3._make_mindo_mol(mol2)
g1 = mfs.nuc_grad_method().kernel()
e1 = mfs(mol1)
e2 = mfs(mol2)
print(abs((e1-e2)/0.0002*lib.param.BOHR - g1[0,2]))
dgamma = _get_gamma1(mol)
gamma1 = mindo3._get_gamma(mol1)
gamma2 = mindo3._get_gamma(mol2)
print(abs((gamma1 - gamma2)/0.0002 - dgamma[0,2,:,:]).max())
dgd = _get_gamma1_half(mol)
print(abs(dgamma[0,:,0] - dgd[:,0]).max())
print(abs(dgamma[0,:,:,0] - dgd[:,0]).max())
denuc = grad_nuc(mol)[0,2]
enuc1 = mindo3.energy_nuc(mol1)
enuc2 = mindo3.energy_nuc(mol2)
print(abs((enuc1 - enuc2)/0.0002*lib.param.BOHR - denuc).max())
fcore = hcore_generator(mindo3.RMINDO3(mol).nuc_grad_method())
dh = fcore(0)[2]
h1 = mindo3.get_hcore(mindo_mol1)
h2 = mindo3.get_hcore(mindo_mol2)
print(abs((h1 - h2)/0.0002 - dh).max())
nao = mindo_mol1.nao
numpy.random.seed(1)
dm = numpy.random.random((nao,nao))
dm = dm + dm.T
djk = get_jk(mindo3.RMINDO3(mol)._mindo_mol, dm)
dj = numpy.zeros((nao,nao))
dk = numpy.zeros((nao,nao))
dj[0:4 ] = djk[0][2,0:4]
dj[:,0:4] += djk[0][2,0:4].T
dk[0:4 ] = djk[1][2,0:4]
dk[:,0:4] += djk[1][2,0:4].T
jk1 = mindo3.get_jk(mindo_mol1, dm)
jk2 = mindo3.get_jk(mindo_mol2, dm)
print(abs(numpy.einsum('ij,ji->', dm, (jk1[0] - jk2[0])/0.0002 - 2*dj)).max())
print(abs(numpy.einsum('ij,ji->', dm, (jk1[1] - jk2[1])/0.0002 - 2*dk)).max())
| 34.433824 | 83 | 0.595985 |
785af27ba506dcde617ca51b5415e918b4d72b82 | 2,810 | py | Python | src/primaires/joueur/commandes/module/__init__.py | stormi/tsunami | bdc853229834b52b2ee8ed54a3161a1a3133d926 | ["BSD-3-Clause"] | null | null | null | src/primaires/joueur/commandes/module/__init__.py | stormi/tsunami | bdc853229834b52b2ee8ed54a3161a1a3133d926 | ["BSD-3-Clause"] | null | null | null | src/primaires/joueur/commandes/module/__init__.py | stormi/tsunami | bdc853229834b52b2ee8ed54a3161a1a3133d926 | ["BSD-3-Clause"] | null | null | null |
# -*-coding:Utf-8 -*
# Copyright (c) 2010 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Package contenant la commande 'module' et ses sous-commandes.
Dans ce fichier se trouve la commande même.
"""
from primaires.interpreteur.commande.commande import Commande
from primaires.joueur.commandes.module.liste import PrmListe
from primaires.joueur.commandes.module.hotboot import PrmHotboot
class CmdModule(Commande):
"""Commande 'module'.
"""
def __init__(self):
"""Constructeur de la commande"""
Commande.__init__(self, "module", "module")
self.groupe = "administrateur"
self.nom_categorie = "groupes"
self.aide_courte = "manipulation des modules"
self.aide_longue = \
"Cette commande permet de manipuler les modules, connaître la " \
"liste des modules chargés, en redémarrer certains ou les " \
"reconfigurer pendant l'exécution. Cette commande doit être " \
"réservée aux administrateurs, ceux ayant un accès aux fichiers " \
"de configuration ou au code."
def ajouter_parametres(self):
"""Ajout des paramètres"""
prm_liste = PrmListe()
prm_hotboot = PrmHotboot()
self.ajouter_parametre(prm_liste)
self.ajouter_parametre(prm_hotboot)
| 42.575758 | 79 | 0.727402 |
a3ca279e204af31e5c267fe4b7ca0d73a7cc6f49 | 42,273 | py | Python | src/metpy/calc/kinematics.py | gerritholl/MetPy | 3f08b770485835982989f34aedb87791af250301 | ["BSD-3-Clause"] | null | null | null | src/metpy/calc/kinematics.py | gerritholl/MetPy | 3f08b770485835982989f34aedb87791af250301 | ["BSD-3-Clause"] | null | null | null | src/metpy/calc/kinematics.py | gerritholl/MetPy | 3f08b770485835982989f34aedb87791af250301 | ["BSD-3-Clause"] | null | null | null |
# Copyright (c) 2009,2017,2018,2019 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""Contains calculation of kinematic parameters (e.g. divergence or vorticity)."""
import numpy as np
from . import coriolis_parameter
from .tools import first_derivative, get_layer_heights, gradient
from .. import constants as mpconsts
from ..package_tools import Exporter
from ..units import check_units, units
from ..xarray import add_grid_arguments_from_xarray, preprocess_and_wrap
exporter = Exporter(globals())
@exporter.export
@add_grid_arguments_from_xarray
@preprocess_and_wrap(wrap_like='u')
@check_units('[speed]', '[speed]', dx='[length]', dy='[length]')
def vorticity(u, v, *, dx=None, dy=None, x_dim=-1, y_dim=-2):
r"""Calculate the vertical vorticity of the horizontal wind.
Parameters
----------
u : (..., M, N) `xarray.DataArray` or `pint.Quantity`
x component of the wind
v : (..., M, N) `xarray.DataArray` or `pint.Quantity`
y component of the wind
dx : `pint.Quantity`, optional
The grid spacing(s) in the x-direction. If an array, there should be one item less than
the size of `u` along the applicable axis. Optional if `xarray.DataArray` with
latitude/longitude coordinates used as input. Keyword-only argument.
dy : `pint.Quantity`, optional
The grid spacing(s) in the y-direction. If an array, there should be one item less than
the size of `u` along the applicable axis. Optional if `xarray.DataArray` with
latitude/longitude coordinates used as input. Keyword-only argument.
x_dim : int, optional
Axis number of x dimension. Defaults to -1 (implying [..., Y, X] order). Automatically
parsed from input if using `xarray.DataArray`. Keyword-only argument.
y_dim : int, optional
Axis number of y dimension. Defaults to -2 (implying [..., Y, X] order). Automatically
parsed from input if using `xarray.DataArray`. Keyword-only argument.
Returns
-------
(..., M, N) `xarray.DataArray` or `pint.Quantity`
vertical vorticity
.. versionchanged:: 1.0
Changed signature from ``(u, v, dx, dy)``
See Also
--------
divergence
"""
dudy = first_derivative(u, delta=dy, axis=y_dim)
dvdx = first_derivative(v, delta=dx, axis=x_dim)
return dvdx - dudy
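# A minimal usage sketch for `vorticity`; the grid values and spacings below are
# illustrative assumptions, not taken from this module:
#
#     import numpy as np
#     from metpy.units import units
#     u = np.random.rand(5, 5) * units('m/s')
#     v = np.random.rand(5, 5) * units('m/s')
#     zeta = vorticity(u, v, dx=10 * units.km, dy=10 * units.km)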
@exporter.export
@add_grid_arguments_from_xarray
@preprocess_and_wrap(wrap_like='u')
@check_units(dx='[length]', dy='[length]')
def divergence(u, v, *, dx=None, dy=None, x_dim=-1, y_dim=-2):
r"""Calculate the horizontal divergence of a vector.
Parameters
----------
u : (..., M, N) `xarray.DataArray` or `pint.Quantity`
x component of the vector
v : (..., M, N) `xarray.DataArray` or `pint.Quantity`
y component of the vector
dx : `pint.Quantity`, optional
The grid spacing(s) in the x-direction. If an array, there should be one item less than
the size of `u` along the applicable axis. Optional if `xarray.DataArray` with
latitude/longitude coordinates used as input. Keyword-only argument.
dy : `pint.Quantity`, optional
The grid spacing(s) in the y-direction. If an array, there should be one item less than
the size of `u` along the applicable axis. Optional if `xarray.DataArray` with
latitude/longitude coordinates used as input. Keyword-only argument.
x_dim : int, optional
Axis number of x dimension. Defaults to -1 (implying [..., Y, X] order). Automatically
parsed from input if using `xarray.DataArray`. Keyword-only argument.
y_dim : int, optional
Axis number of y dimension. Defaults to -2 (implying [..., Y, X] order). Automatically
parsed from input if using `xarray.DataArray`. Keyword-only argument.
Returns
-------
(..., M, N) `xarray.DataArray` or `pint.Quantity`
The horizontal divergence
.. versionchanged:: 1.0
Changed signature from ``(u, v, dx, dy)``
See Also
--------
vorticity
"""
dudx = first_derivative(u, delta=dx, axis=x_dim)
dvdy = first_derivative(v, delta=dy, axis=y_dim)
return dudx + dvdy
@exporter.export
@add_grid_arguments_from_xarray
@preprocess_and_wrap(wrap_like='u')
@check_units('[speed]', '[speed]', '[length]', '[length]')
def shearing_deformation(u, v, dx=None, dy=None, x_dim=-1, y_dim=-2):
r"""Calculate the shearing deformation of the horizontal wind.
Parameters
----------
u : (..., M, N) `xarray.DataArray` or `pint.Quantity`
x component of the wind
v : (..., M, N) `xarray.DataArray` or `pint.Quantity`
y component of the wind
dx : `pint.Quantity`, optional
The grid spacing(s) in the x-direction. If an array, there should be one item less than
the size of `u` along the applicable axis. Optional if `xarray.DataArray` with
latitude/longitude coordinates used as input.
dy : `pint.Quantity`, optional
The grid spacing(s) in the y-direction. If an array, there should be one item less than
the size of `u` along the applicable axis. Optional if `xarray.DataArray` with
latitude/longitude coordinates used as input.
x_dim : int, optional
Axis number of x dimension. Defaults to -1 (implying [..., Y, X] order). Automatically
parsed from input if using `xarray.DataArray`.
y_dim : int, optional
Axis number of y dimension. Defaults to -2 (implying [..., Y, X] order). Automatically
parsed from input if using `xarray.DataArray`.
Returns
-------
(..., M, N) `xarray.DataArray` or `pint.Quantity`
Shearing Deformation
.. versionchanged:: 1.0
Changed signature from ``(u, v, dx, dy)``
See Also
--------
stretching_deformation, total_deformation
"""
dudy = first_derivative(u, delta=dy, axis=y_dim)
dvdx = first_derivative(v, delta=dx, axis=x_dim)
return dvdx + dudy
@exporter.export
@add_grid_arguments_from_xarray
@preprocess_and_wrap(wrap_like='u')
@check_units('[speed]', '[speed]', '[length]', '[length]')
def stretching_deformation(u, v, dx=None, dy=None, x_dim=-1, y_dim=-2):
r"""Calculate the stretching deformation of the horizontal wind.
Parameters
----------
u : (..., M, N) `xarray.DataArray` or `pint.Quantity`
x component of the wind
v : (..., M, N) `xarray.DataArray` or `pint.Quantity`
y component of the wind
dx : `pint.Quantity`, optional
The grid spacing(s) in the x-direction. If an array, there should be one item less than
the size of `u` along the applicable axis. Optional if `xarray.DataArray` with
latitude/longitude coordinates used as input.
dy : `pint.Quantity`, optional
The grid spacing(s) in the y-direction. If an array, there should be one item less than
the size of `u` along the applicable axis. Optional if `xarray.DataArray` with
latitude/longitude coordinates used as input.
x_dim : int, optional
Axis number of x dimension. Defaults to -1 (implying [..., Y, X] order). Automatically
parsed from input if using `xarray.DataArray`.
y_dim : int, optional
Axis number of y dimension. Defaults to -2 (implying [..., Y, X] order). Automatically
parsed from input if using `xarray.DataArray`.
Returns
-------
(..., M, N) `xarray.DataArray` or `pint.Quantity`
Stretching Deformation
.. versionchanged:: 1.0
Changed signature from ``(u, v, dx, dy)``
See Also
--------
shearing_deformation, total_deformation
"""
dudx = first_derivative(u, delta=dx, axis=x_dim)
dvdy = first_derivative(v, delta=dy, axis=y_dim)
return dudx - dvdy
@exporter.export
@add_grid_arguments_from_xarray
@preprocess_and_wrap(wrap_like='u')
@check_units('[speed]', '[speed]', '[length]', '[length]')
def total_deformation(u, v, dx=None, dy=None, x_dim=-1, y_dim=-2):
r"""Calculate the horizontal total deformation of the horizontal wind.
Parameters
----------
u : (..., M, N) `xarray.DataArray` or `pint.Quantity`
x component of the wind
v : (..., M, N) `xarray.DataArray` or `pint.Quantity`
y component of the wind
dx : `pint.Quantity`, optional
The grid spacing(s) in the x-direction. If an array, there should be one item less than
the size of `u` along the applicable axis. Optional if `xarray.DataArray` with
latitude/longitude coordinates used as input.
dy : `pint.Quantity`, optional
The grid spacing(s) in the y-direction. If an array, there should be one item less than
the size of `u` along the applicable axis. Optional if `xarray.DataArray` with
latitude/longitude coordinates used as input.
x_dim : int, optional
Axis number of x dimension. Defaults to -1 (implying [..., Y, X] order). Automatically
parsed from input if using `xarray.DataArray`.
y_dim : int, optional
Axis number of y dimension. Defaults to -2 (implying [..., Y, X] order). Automatically
parsed from input if using `xarray.DataArray`.
Returns
-------
(..., M, N) `xarray.DataArray` or `pint.Quantity`
Total Deformation
Notes
-----
If inputs have more than two dimensions, they are assumed to follow the trailing
[..., Y, X] ordering unless ``x_dim`` and ``y_dim`` specify otherwise.
.. versionchanged:: 1.0
Changed signature from ``(u, v, dx, dy)``
See Also
--------
shearing_deformation, stretching_deformation
"""
dudy, dudx = gradient(u, deltas=(dy, dx), axes=(y_dim, x_dim))
dvdy, dvdx = gradient(v, deltas=(dy, dx), axes=(y_dim, x_dim))
return np.sqrt((dvdx + dudy)**2 + (dudx - dvdy)**2)
@exporter.export
@add_grid_arguments_from_xarray
@preprocess_and_wrap(wrap_like='scalar', broadcast=('scalar', 'u', 'v', 'w'))
def advection(
scalar,
u=None,
v=None,
w=None,
*,
dx=None,
dy=None,
dz=None,
x_dim=-1,
y_dim=-2,
vertical_dim=-3
):
r"""Calculate the advection of a scalar field by the wind.
Parameters
----------
scalar : `pint.Quantity` or `xarray.DataArray`
Array (with N-dimensions) with the quantity to be advected. Use `xarray.DataArray` to
have dimension ordering automatically determined, otherwise, use default
[..., Z, Y, X] ordering or specify \*_dim keyword arguments.
u, v, w : `pint.Quantity` or `xarray.DataArray` or None
N-dimensional arrays with units of velocity representing the flow, with a component of
the wind in each dimension. For 1D advection, use 1 positional argument (with `dx` for
grid spacing and `x_dim` to specify axis if not the default of -1) or use 1 applicable
keyword argument (u, v, or w) for proper physical dimension (with corresponding `d\*`
for grid spacing and `\*_dim` to specify axis). For 2D/horizontal advection, use 2
positional arguments in order for u and v winds respectively (with `dx` and `dy` for
grid spacings and `x_dim` and `y_dim` keyword arguments to specify axes), or specify u
and v as keyword arguments (grid spacings and axes likewise). For 3D advection,
likewise use 3 positional arguments in order for u, v, and w winds respectively or
specify u, v, and w as keyword arguments (either way, with `dx`, `dy`, `dz` for grid
spacings and `x_dim`, `y_dim`, and `vertical_dim` for axes).
dx, dy, dz: `pint.Quantity` or None, optional
Grid spacing in applicable dimension(s). If using arrays, each array should have one
item less than the size of `scalar` along the applicable axis. If `scalar` is an
`xarray.DataArray`, these are automatically determined from its coordinates, and are
therefore optional. Required if `scalar` is a `pint.Quantity`. These are keyword-only
arguments.
x_dim, y_dim, vertical_dim: int or None, optional
Axis number in applicable dimension(s). Defaults to -1, -2, and -3 respectively for
(..., Z, Y, X) dimension ordering. If `scalar` is an `xarray.DataArray`, these are
automatically determined from its coordinates. These are keyword-only arguments.
Returns
-------
`pint.Quantity` or `xarray.DataArray`
An N-dimensional array containing the advection at all grid points.
.. versionchanged:: 1.0
Changed signature from ``(scalar, wind, deltas)``
"""
return -sum(
wind * first_derivative(scalar, axis=axis, delta=delta)
for wind, delta, axis in (
(u, dx, x_dim),
(v, dy, y_dim),
(w, dz, vertical_dim)
)
if wind is not None
)
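# A minimal usage sketch for `advection` (2D horizontal advection of a scalar field);
# the field names and grid spacings are illustrative assumptions:
#
#     temperature_advection = advection(temperature, u, v,
#                                       dx=10 * units.km, dy=10 * units.km)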
@exporter.export
@add_grid_arguments_from_xarray
@preprocess_and_wrap(
wrap_like='potential_temperature',
broadcast=('potential_temperature', 'u', 'v')
)
@check_units('[temperature]', '[speed]', '[speed]', '[length]', '[length]')
def frontogenesis(potential_temperature, u, v, dx=None, dy=None, x_dim=-1, y_dim=-2):
r"""Calculate the 2D kinematic frontogenesis of a temperature field.
The implementation is a form of the Petterssen Frontogenesis and uses the formula
outlined in [Bluestein1993]_ pg.248-253
.. math:: F=\frac{1}{2}\left|\nabla \theta\right|[D cos(2\beta)-\delta]
* :math:`F` is 2D kinematic frontogenesis
* :math:`\theta` is potential temperature
* :math:`D` is the total deformation
* :math:`\beta` is the angle between the axis of dilatation and the isentropes
* :math:`\delta` is the divergence
Parameters
----------
potential_temperature : (..., M, N) `xarray.DataArray` or `pint.Quantity`
Potential temperature
u : (..., M, N) `xarray.DataArray` or `pint.Quantity`
x component of the wind
v : (..., M, N) `xarray.DataArray` or `pint.Quantity`
y component of the wind
dx : `pint.Quantity`, optional
The grid spacing(s) in the x-direction. If an array, there should be one item less than
the size of `u` along the applicable axis. Optional if `xarray.DataArray` with
latitude/longitude coordinates used as input.
dy : `pint.Quantity`, optional
The grid spacing(s) in the y-direction. If an array, there should be one item less than
the size of `u` along the applicable axis. Optional if `xarray.DataArray` with
latitude/longitude coordinates used as input.
x_dim : int, optional
Axis number of x dimension. Defaults to -1 (implying [..., Y, X] order). Automatically
parsed from input if using `xarray.DataArray`.
y_dim : int, optional
Axis number of y dimension. Defaults to -2 (implying [..., Y, X] order). Automatically
parsed from input if using `xarray.DataArray`.
Returns
-------
(..., M, N) `xarray.DataArray` or `pint.Quantity`
2D Frontogenesis in [temperature units]/m/s
Notes
-----
Conversion factor to go from [temperature units]/m/s to [temperature units/100km/3h]
:math:`1.08e4*1.e5`
.. versionchanged:: 1.0
Changed signature from ``(thta, u, v, dx, dy, dim_order='yx')``
"""
# Get gradients of potential temperature in both x and y
ddy_theta = first_derivative(potential_temperature, delta=dy, axis=y_dim)
ddx_theta = first_derivative(potential_temperature, delta=dx, axis=x_dim)
# Compute the magnitude of the potential temperature gradient
mag_theta = np.sqrt(ddx_theta**2 + ddy_theta**2)
# Get the shearing, stretching, and total deformation of the wind field
shrd = shearing_deformation(u, v, dx, dy, x_dim=x_dim, y_dim=y_dim)
strd = stretching_deformation(u, v, dx, dy, x_dim=x_dim, y_dim=y_dim)
tdef = total_deformation(u, v, dx, dy, x_dim=x_dim, y_dim=y_dim)
# Get the divergence of the wind field
div = divergence(u, v, dx=dx, dy=dy, x_dim=x_dim, y_dim=y_dim)
# Compute the angle (beta) between the wind field and the gradient of potential temperature
psi = 0.5 * np.arctan2(shrd, strd)
beta = np.arcsin((-ddx_theta * np.cos(psi) - ddy_theta * np.sin(psi)) / mag_theta)
return 0.5 * mag_theta * (tdef * np.cos(2 * beta) - div)
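# A minimal usage sketch for `frontogenesis`; the potential temperature field and
# grid spacings are illustrative assumptions:
#
#     fronto = frontogenesis(theta, u, v, dx=10 * units.km, dy=10 * units.km)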
@exporter.export
@add_grid_arguments_from_xarray
@preprocess_and_wrap(wrap_like=('height', 'height'), broadcast=('height', 'latitude'))
@check_units(dx='[length]', dy='[length]', latitude='[dimensionless]')
def geostrophic_wind(height, dx=None, dy=None, latitude=None, x_dim=-1, y_dim=-2):
r"""Calculate the geostrophic wind given from the height or geopotential.
Parameters
----------
height : (..., M, N) `xarray.DataArray` or `pint.Quantity`
The height or geopotential field.
dx : `pint.Quantity`, optional
The grid spacing(s) in the x-direction. If an array, there should be one item less than
the size of `u` along the applicable axis. Optional if `xarray.DataArray` with
latitude/longitude coordinates used as input.
dy : `pint.Quantity`, optional
The grid spacing(s) in the y-direction. If an array, there should be one item less than
the size of `u` along the applicable axis. Optional if `xarray.DataArray` with
latitude/longitude coordinates used as input.
latitude : `xarray.DataArray` or `pint.Quantity`
The latitude, which is used to calculate the Coriolis parameter. Its dimensions must
be broadcastable with those of height. Optional if `xarray.DataArray` with latitude
coordinate used as input. Note that an argument without units is treated as
dimensionless, which is equivalent to radians.
x_dim : int, optional
Axis number of x dimension. Defaults to -1 (implying [..., Y, X] order). Automatically
parsed from input if using `xarray.DataArray`.
y_dim : int, optional
Axis number of y dimension. Defaults to -2 (implying [..., Y, X] order). Automatically
parsed from input if using `xarray.DataArray`.
Returns
-------
A 2-item tuple of arrays
A tuple of the u-component and v-component of the geostrophic wind.
.. versionchanged:: 1.0
Changed signature from ``(heights, f, dx, dy)``
"""
f = coriolis_parameter(latitude)
if height.dimensionality['[length]'] == 2.0:
norm_factor = 1. / f
else:
norm_factor = mpconsts.g / f
dhdy = first_derivative(height, delta=dy, axis=y_dim)
dhdx = first_derivative(height, delta=dx, axis=x_dim)
return -norm_factor * dhdy, norm_factor * dhdx
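# A minimal usage sketch for `geostrophic_wind`; assumes a height field with length
# units and a broadcastable latitude, with illustrative grid spacings:
#
#     ug, vg = geostrophic_wind(height, dx=10 * units.km, dy=10 * units.km,
#                               latitude=45 * units.degrees)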
@exporter.export
@add_grid_arguments_from_xarray
@preprocess_and_wrap(
wrap_like=('height', 'height'),
broadcast=('height', 'u', 'v', 'latitude')
)
@check_units(
u='[speed]',
v='[speed]',
dx='[length]',
dy='[length]',
latitude='[dimensionless]'
)
def ageostrophic_wind(height, u, v, dx=None, dy=None, latitude=None, x_dim=-1, y_dim=-2):
r"""Calculate the ageostrophic wind given from the height or geopotential.
Parameters
----------
height : (M, N) ndarray
The height or geopotential field.
u : (..., M, N) `xarray.DataArray` or `pint.Quantity`
The u wind field.
v : (..., M, N) `xarray.DataArray` or `pint.Quantity`
The v wind field.
dx : `pint.Quantity`, optional
The grid spacing(s) in the x-direction. If an array, there should be one item less than
the size of `u` along the applicable axis. Optional if `xarray.DataArray` with
latitude/longitude coordinates used as input.
dy : `pint.Quantity`, optional
The grid spacing(s) in the y-direction. If an array, there should be one item less than
the size of `u` along the applicable axis. Optional if `xarray.DataArray` with
latitude/longitude coordinates used as input.
latitude : `xarray.DataArray` or `pint.Quantity`
The latitude, which is used to calculate the Coriolis parameter. Its dimensions must
be broadcastable with those of height. Optional if `xarray.DataArray` with latitude
coordinate used as input. Note that an argument without units is treated as
dimensionless, which is equivalent to radians.
x_dim : int, optional
Axis number of x dimension. Defaults to -1 (implying [..., Y, X] order). Automatically
parsed from input if using `xarray.DataArray`.
y_dim : int, optional
Axis number of y dimension. Defaults to -2 (implying [..., Y, X] order). Automatically
parsed from input if using `xarray.DataArray`.
Returns
-------
A 2-item tuple of arrays
A tuple of the u-component and v-component of the ageostrophic wind
.. versionchanged:: 1.0
Changed signature from ``(heights, f, dx, dy, u, v, dim_order='yx')``
"""
u_geostrophic, v_geostrophic = geostrophic_wind(
height,
dx,
dy,
latitude,
x_dim=x_dim,
y_dim=y_dim
)
return u - u_geostrophic, v - v_geostrophic
@exporter.export
def montgomery_streamfunction(height, temperature):
r"""Compute the Montgomery Streamfunction on isentropic surfaces.
The Montgomery Streamfunction is the streamfunction of the geostrophic wind on an
isentropic surface. This quantity is proportional to the geostrophic wind in isentropic
coordinates, and its gradient can be interpreted similarly to the pressure gradient in
isobaric coordinates.
Parameters
----------
height : `pint.Quantity` or `xarray.DataArray`
Array of geopotential height of isentropic surfaces
temperature : `pint.Quantity` or `xarray.DataArray`
Array of temperature on isentropic surfaces
Returns
-------
stream_func : `pint.Quantity` or `xarray.DataArray`
Notes
-----
The formula used is that from [Lackmann2011]_ p. 69.
.. math:: \Psi = gZ + C_pT
* :math:`\Psi` is Montgomery Streamfunction
* :math:`g` is avg. gravitational acceleration on Earth
* :math:`Z` is geopotential height of the isentropic surface
* :math:`C_p` is specific heat at constant pressure for dry air
* :math:`T` is temperature of the isentropic surface
See Also
--------
get_isentropic_pressure, dry_static_energy
"""
from . import dry_static_energy
return dry_static_energy(height, temperature)
@exporter.export
@preprocess_and_wrap()
@check_units('[length]', '[speed]', '[speed]', '[length]',
bottom='[length]', storm_u='[speed]', storm_v='[speed]')
def storm_relative_helicity(height, u, v, depth, *, bottom=None, storm_u=None, storm_v=None):
# Partially adapted from similar SharpPy code
r"""Calculate storm relative helicity.
Calculates storm relative helicity following [Markowski2010]_ pg.230-231
.. math:: \int\limits_0^d (\bar v - c) \cdot \bar\omega_{h} \,dz
This is applied to the data from a hodograph with the following summation:
.. math:: \sum_{n = 1}^{N-1} [(u_{n+1} - c_{x})(v_{n} - c_{y}) -
(u_{n} - c_{x})(v_{n+1} - c_{y})]
Parameters
----------
u : array-like
U component winds
v : array-like
V component winds
height : array-like
Atmospheric height, will be converted to AGL
depth : number
Depth of the layer
bottom : number
Height of layer bottom AGL (default is surface)
storm_u : number
U component of storm motion (default is 0 m/s)
storm_v : number
V component of storm motion (default is 0 m/s)
Returns
-------
`pint.Quantity`
Positive storm-relative helicity
`pint.Quantity`
Negative storm-relative helicity
`pint.Quantity`
Total storm-relative helicity
Notes
-----
Only functions on 1D profiles (not higher-dimension vertical cross sections or grids).
Since this function returns scalar values when given a profile, this will return Pint
Quantities even when given xarray DataArray profiles.
.. versionchanged:: 1.0
Renamed ``heights`` parameter to ``height`` and converted ``bottom``, ``storm_u``, and
``storm_v`` parameters to keyword-only arguments
"""
if bottom is None:
bottom = units.Quantity(0, 'm')
if storm_u is None:
storm_u = units.Quantity(0, 'm/s')
if storm_v is None:
storm_v = units.Quantity(0, 'm/s')
_, u, v = get_layer_heights(height, depth, u, v, with_agl=True, bottom=bottom)
storm_relative_u = u - storm_u
storm_relative_v = v - storm_v
int_layers = (storm_relative_u[1:] * storm_relative_v[:-1]
- storm_relative_u[:-1] * storm_relative_v[1:])
# Need to manually check for masked value because sum() on masked array with non-default
# mask will return a masked value rather than 0. See numpy/numpy#11736
positive_srh = int_layers[int_layers.magnitude > 0.].sum()
if np.ma.is_masked(positive_srh):
positive_srh = units.Quantity(0.0, 'meter**2 / second**2')
negative_srh = int_layers[int_layers.magnitude < 0.].sum()
if np.ma.is_masked(negative_srh):
negative_srh = units.Quantity(0.0, 'meter**2 / second**2')
return (positive_srh.to('meter ** 2 / second ** 2'),
negative_srh.to('meter ** 2 / second ** 2'),
(positive_srh + negative_srh).to('meter ** 2 / second ** 2'))
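# A minimal usage sketch for `storm_relative_helicity` on a 1D sounding profile;
# the depth and storm motion values are illustrative assumptions:
#
#     pos_srh, neg_srh, total_srh = storm_relative_helicity(
#         height, u, v, depth=3 * units.km,
#         storm_u=5 * units('m/s'), storm_v=5 * units('m/s'))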
@exporter.export
@add_grid_arguments_from_xarray
@preprocess_and_wrap(wrap_like='u', broadcast=('u', 'v', 'latitude'))
@check_units('[speed]', '[speed]', '[length]', '[length]')
def absolute_vorticity(u, v, dx=None, dy=None, latitude=None, x_dim=-1, y_dim=-2):
"""Calculate the absolute vorticity of the horizontal wind.
Parameters
----------
u : (..., M, N) `xarray.DataArray` or `pint.Quantity`
x component of the wind
v : (..., M, N) `xarray.DataArray` or `pint.Quantity`
y component of the wind
dx : `pint.Quantity`, optional
The grid spacing(s) in the x-direction. If an array, there should be one item less than
the size of `u` along the applicable axis. Optional if `xarray.DataArray` with
latitude/longitude coordinates used as input.
dy : `pint.Quantity`, optional
The grid spacing(s) in the y-direction. If an array, there should be one item less than
the size of `u` along the applicable axis. Optional if `xarray.DataArray` with
latitude/longitude coordinates used as input.
latitude : `pint.Quantity`, optional
Latitude of the wind data. Optional if `xarray.DataArray` with latitude/longitude
coordinates used as input. Note that an argument without units is treated as
dimensionless, which translates to radians.
x_dim : int, optional
Axis number of x dimension. Defaults to -1 (implying [..., Y, X] order). Automatically
parsed from input if using `xarray.DataArray`.
y_dim : int, optional
Axis number of y dimension. Defaults to -2 (implying [..., Y, X] order). Automatically
parsed from input if using `xarray.DataArray`.
Returns
-------
(..., M, N) `xarray.DataArray` or `pint.Quantity`
absolute vorticity
.. versionchanged:: 1.0
Changed signature from ``(u, v, dx, dy, lats, dim_order='yx')``
"""
f = coriolis_parameter(latitude)
relative_vorticity = vorticity(u, v, dx=dx, dy=dy, x_dim=x_dim, y_dim=y_dim)
return relative_vorticity + f
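# A minimal usage sketch for `absolute_vorticity`; the latitude value is an
# illustrative assumption and must be broadcastable with the wind components:
#
#     abs_vort = absolute_vorticity(u, v, dx=10 * units.km, dy=10 * units.km,
#                                   latitude=45 * units.degrees)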
@exporter.export
@add_grid_arguments_from_xarray
@preprocess_and_wrap(
wrap_like='potential_temperature',
broadcast=('potential_temperature', 'pressure', 'u', 'v', 'latitude')
)
@check_units('[temperature]', '[pressure]', '[speed]', '[speed]',
'[length]', '[length]', '[dimensionless]')
def potential_vorticity_baroclinic(
potential_temperature,
pressure,
u,
v,
dx=None,
dy=None,
latitude=None,
x_dim=-1,
y_dim=-2,
vertical_dim=-3
):
r"""Calculate the baroclinic potential vorticity.
.. math:: PV = -g \left(\frac{\partial u}{\partial p}\frac{\partial \theta}{\partial y}
- \frac{\partial v}{\partial p}\frac{\partial \theta}{\partial x}
+ \frac{\partial \theta}{\partial p}(\zeta + f) \right)
This formula is based on equation 4.5.93 [Bluestein1993]_
Parameters
----------
potential_temperature : (..., P, M, N) `xarray.DataArray` or `pint.Quantity`
potential temperature
pressure : (..., P, M, N) `xarray.DataArray` or `pint.Quantity`
vertical pressures
u : (..., P, M, N) `xarray.DataArray` or `pint.Quantity`
x component of the wind
v : (..., P, M, N) `xarray.DataArray` or `pint.Quantity`
y component of the wind
dx : `pint.Quantity`, optional
The grid spacing(s) in the x-direction. If an array, there should be one item less than
the size of `u` along the applicable axis. Optional if `xarray.DataArray` with
latitude/longitude coordinates used as input.
dy : `pint.Quantity`, optional
The grid spacing(s) in the y-direction. If an array, there should be one item less than
the size of `u` along the applicable axis. Optional if `xarray.DataArray` with
latitude/longitude coordinates used as input.
latitude : `pint.Quantity`, optional
Latitude of the wind data. Optional if `xarray.DataArray` with latitude/longitude
coordinates used as input. Note that an argument without units is treated as
dimensionless, which translates to radians.
x_dim : int, optional
Axis number of x dimension. Defaults to -1 (implying [..., Z, Y, X] order).
Automatically parsed from input if using `xarray.DataArray`.
y_dim : int, optional
Axis number of y dimension. Defaults to -2 (implying [..., Z, Y, X] order).
Automatically parsed from input if using `xarray.DataArray`.
vertical_dim : int, optional
Axis number of vertical dimension. Defaults to -3 (implying [..., Z, Y, X] order).
Automatically parsed from input if using `xarray.DataArray`.
Returns
-------
(..., P, M, N) `xarray.DataArray` or `pint.Quantity`
baroclinic potential vorticity
Notes
-----
The same function can be used for isobaric and isentropic PV analysis. Provide winds
for vorticity calculations on the desired isobaric or isentropic surface. At least three
layers of pressure/potential temperature are required in order to calculate the vertical
derivative (one above and below the desired surface). The first two terms will be zero if
isentropic level data is used, because the gradient of theta in both the x and y directions
is zero on an isentropic surface.
This function expects pressure/isentropic level to increase with increasing array element
(e.g., from higher in the atmosphere to closer to the surface). If the pressure array is
one-dimensional, and not given as `xarray.DataArray`, p[:, None, None] can be used to make
it appear multi-dimensional.
.. versionchanged:: 1.0
Changed signature from ``(potential_temperature, pressure, u, v, dx, dy, lats)``
"""
if (
np.shape(potential_temperature)[vertical_dim] < 3
or np.shape(pressure)[vertical_dim] < 3
or np.shape(potential_temperature)[vertical_dim] != np.shape(pressure)[vertical_dim]
):
raise ValueError('Length of potential temperature along the vertical axis '
'{} must be at least 3.'.format(vertical_dim))
avor = absolute_vorticity(u, v, dx, dy, latitude, x_dim=x_dim, y_dim=y_dim)
dthetadp = first_derivative(potential_temperature, x=pressure, axis=vertical_dim)
if (
(np.shape(potential_temperature)[y_dim] == 1)
and (np.shape(potential_temperature)[x_dim] == 1)
):
dthetady = units.Quantity(0, 'K/m') # axis=y_dim only has one dimension
dthetadx = units.Quantity(0, 'K/m') # axis=x_dim only has one dimension
else:
dthetady = first_derivative(potential_temperature, delta=dy, axis=y_dim)
dthetadx = first_derivative(potential_temperature, delta=dx, axis=x_dim)
dudp = first_derivative(u, x=pressure, axis=vertical_dim)
dvdp = first_derivative(v, x=pressure, axis=vertical_dim)
return (-mpconsts.g * (dudp * dthetady - dvdp * dthetadx
+ avor * dthetadp)).to('K * m**2 / (s * kg)')
@exporter.export
@add_grid_arguments_from_xarray
@preprocess_and_wrap(wrap_like='height', broadcast=('height', 'u', 'v', 'latitude'))
@check_units('[length]', '[speed]', '[speed]', '[length]', '[length]', '[dimensionless]')
def potential_vorticity_barotropic(
height,
u,
v,
dx=None,
dy=None,
latitude=None,
x_dim=-1,
y_dim=-2
):
r"""Calculate the barotropic (Rossby) potential vorticity.
.. math:: PV = \frac{f + \zeta}{H}
This formula is based on equation 7.27 [Hobbs2006]_.
Parameters
----------
height : (..., M, N) `xarray.DataArray` or `pint.Quantity`
atmospheric height
u : (..., M, N) `xarray.DataArray` or `pint.Quantity`
x component of the wind
v : (..., M, N) `xarray.DataArray` or `pint.Quantity`
y component of the wind
dx : `pint.Quantity`, optional
The grid spacing(s) in the x-direction. If an array, there should be one item less than
the size of `u` along the applicable axis. Optional if `xarray.DataArray` with
latitude/longitude coordinates used as input.
dy : `pint.Quantity`, optional
The grid spacing(s) in the y-direction. If an array, there should be one item less than
the size of `u` along the applicable axis. Optional if `xarray.DataArray` with
latitude/longitude coordinates used as input.
latitude : `pint.Quantity`, optional
Latitude of the wind data. Optional if `xarray.DataArray` with latitude/longitude
coordinates used as input. Note that an argument without units is treated as
dimensionless, which translates to radians.
x_dim : int, optional
Axis number of x dimension. Defaults to -1 (implying [..., Y, X] order). Automatically
parsed from input if using `xarray.DataArray`.
y_dim : int, optional
Axis number of y dimension. Defaults to -2 (implying [..., Y, X] order). Automatically
parsed from input if using `xarray.DataArray`.
Returns
-------
(..., M, N) `xarray.DataArray` or `pint.Quantity`
barotropic potential vorticity
.. versionchanged:: 1.0
Changed signature from ``(heights, u, v, dx, dy, lats, dim_order='yx')``
"""
avor = absolute_vorticity(u, v, dx, dy, latitude, x_dim=x_dim, y_dim=y_dim)
return (avor / height).to('meter**-1 * second**-1')
@exporter.export
@add_grid_arguments_from_xarray
@preprocess_and_wrap(
wrap_like=('u', 'u'),
broadcast=('u', 'v', 'u_geostrophic', 'v_geostrophic', 'latitude')
)
@check_units('[speed]', '[speed]', '[speed]', '[speed]', '[length]', '[length]',
'[dimensionless]')
def inertial_advective_wind(
u,
v,
u_geostrophic,
v_geostrophic,
dx=None,
dy=None,
latitude=None,
x_dim=-1,
y_dim=-2
):
r"""Calculate the inertial advective wind.
.. math:: \frac{\hat k}{f} \times (\vec V \cdot \nabla)\hat V_g
.. math:: \frac{\hat k}{f} \times \left[ \left( u \frac{\partial u_g}{\partial x} + v
\frac{\partial u_g}{\partial y} \right) \hat i + \left( u \frac{\partial v_g}
{\partial x} + v \frac{\partial v_g}{\partial y} \right) \hat j \right]
.. math:: \left[ -\frac{1}{f}\left(u \frac{\partial v_g}{\partial x} + v
\frac{\partial v_g}{\partial y} \right) \right] \hat i + \left[ \frac{1}{f}
\left( u \frac{\partial u_g}{\partial x} + v \frac{\partial u_g}{\partial y}
\right) \right] \hat j
This formula is based on equation 27 of [Rochette2006]_.
Parameters
----------
u : (..., M, N) `xarray.DataArray` or `pint.Quantity`
x component of the advecting wind
v : (..., M, N) `xarray.DataArray` or `pint.Quantity`
y component of the advecting wind
u_geostrophic : (..., M, N) `xarray.DataArray` or `pint.Quantity`
x component of the geostrophic (advected) wind
v_geostrophic : (..., M, N) `xarray.DataArray` or `pint.Quantity`
y component of the geostrophic (advected) wind
dx : `pint.Quantity`, optional
The grid spacing(s) in the x-direction. If an array, there should be one item less than
the size of `u` along the applicable axis. Optional if `xarray.DataArray` with
latitude/longitude coordinates used as input.
dy : `pint.Quantity`, optional
The grid spacing(s) in the y-direction. If an array, there should be one item less than
the size of `u` along the applicable axis. Optional if `xarray.DataArray` with
latitude/longitude coordinates used as input.
latitude : `pint.Quantity`, optional
Latitude of the wind data. Optional if `xarray.DataArray` with latitude/longitude
coordinates used as input. Note that an argument without units is treated as
dimensionless, which translates to radians.
x_dim : int, optional
Axis number of x dimension. Defaults to -1 (implying [..., Y, X] order). Automatically
parsed from input if using `xarray.DataArray`.
y_dim : int, optional
Axis number of y dimension. Defaults to -2 (implying [..., Y, X] order). Automatically
parsed from input if using `xarray.DataArray`.
Returns
-------
(..., M, N) `xarray.DataArray` or `pint.Quantity`
x component of inertial advective wind
(..., M, N) `xarray.DataArray` or `pint.Quantity`
y component of inertial advective wind
Notes
-----
Many forms of the inertial advective wind assume the advecting and advected
wind to both be the geostrophic wind. To do so, pass the x and y components
of the geostrophic wind for u and u_geostrophic/v and v_geostrophic.
.. versionchanged:: 1.0
Changed signature from ``(u, v, u_geostrophic, v_geostrophic, dx, dy, lats)``
"""
f = coriolis_parameter(latitude)
dugdy, dugdx = gradient(u_geostrophic, deltas=(dy, dx), axes=(y_dim, x_dim))
dvgdy, dvgdx = gradient(v_geostrophic, deltas=(dy, dx), axes=(y_dim, x_dim))
u_component = -(u * dvgdx + v * dvgdy) / f
v_component = (u * dugdx + v * dugdy) / f
return u_component, v_component
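# Minimal sketch of the note above: many applications use the geostrophic wind
# as both the advecting and the advected wind, which is simply a matter of
# passing the same components twice.
def _example_geostrophic_self_advection(u_g, v_g, dx, dy, latitude):
    return inertial_advective_wind(u_g, v_g, u_g, v_g, dx, dy, latitude)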
@exporter.export
@add_grid_arguments_from_xarray
@preprocess_and_wrap(
wrap_like=('u', 'u'),
broadcast=('u', 'v', 'temperature', 'pressure', 'static_stability')
)
@check_units('[speed]', '[speed]', '[temperature]', '[pressure]', '[length]', '[length]')
def q_vector(
u,
v,
temperature,
pressure,
dx=None,
dy=None,
static_stability=1,
x_dim=-1,
y_dim=-2
):
r"""Calculate Q-vector at a given pressure level using the u, v winds and temperature.
.. math:: \vec{Q} = (Q_1, Q_2)
= - \frac{R}{\sigma p}\left(
\frac{\partial \vec{v}_g}{\partial x} \cdot \nabla_p T,
\frac{\partial \vec{v}_g}{\partial y} \cdot \nabla_p T
\right)
    This formula follows equation 5.7.55 from [Bluestein1992]_, and can be used with
    the below form of the quasigeostrophic omega equation to assess vertical motion
([Bluestein1992]_ equation 5.7.54):
.. math:: \left( \nabla_p^2 + \frac{f_0^2}{\sigma} \frac{\partial^2}{\partial p^2}
\right) \omega =
- 2 \nabla_p \cdot \vec{Q} -
\frac{R}{\sigma p} \beta \frac{\partial T}{\partial x}
Parameters
----------
u : (..., M, N) `xarray.DataArray` or `pint.Quantity`
x component of the wind (geostrophic in QG-theory)
v : (..., M, N) `xarray.DataArray` or `pint.Quantity`
y component of the wind (geostrophic in QG-theory)
temperature : (..., M, N) `xarray.DataArray` or `pint.Quantity`
Array of temperature at pressure level
pressure : `pint.Quantity`
Pressure at level
dx : `pint.Quantity`, optional
The grid spacing(s) in the x-direction. If an array, there should be one item less than
the size of `u` along the applicable axis. Optional if `xarray.DataArray` with
latitude/longitude coordinates used as input.
dy : `pint.Quantity`, optional
The grid spacing(s) in the y-direction. If an array, there should be one item less than
the size of `u` along the applicable axis. Optional if `xarray.DataArray` with
latitude/longitude coordinates used as input.
static_stability : `pint.Quantity`, optional
The static stability at the pressure level. Defaults to 1 if not given to calculate
the Q-vector without factoring in static stability.
x_dim : int, optional
Axis number of x dimension. Defaults to -1 (implying [..., Y, X] order). Automatically
parsed from input if using `xarray.DataArray`.
y_dim : int, optional
Axis number of y dimension. Defaults to -2 (implying [..., Y, X] order). Automatically
parsed from input if using `xarray.DataArray`.
Returns
-------
tuple of (..., M, N) `xarray.DataArray` or `pint.Quantity`
The components of the Q-vector in the u- and v-directions respectively
.. versionchanged:: 1.0
Changed signature from ``(u, v, temperature, pressure, dx, dy, static_stability=1)``
See Also
--------
static_stability
"""
dudy, dudx = gradient(u, deltas=(dy, dx), axes=(y_dim, x_dim))
dvdy, dvdx = gradient(v, deltas=(dy, dx), axes=(y_dim, x_dim))
dtempdy, dtempdx = gradient(temperature, deltas=(dy, dx), axes=(y_dim, x_dim))
q1 = -mpconsts.Rd / (pressure * static_stability) * (dudx * dtempdx + dvdx * dtempdy)
q2 = -mpconsts.Rd / (pressure * static_stability) * (dudy * dtempdx + dvdy * dtempdy)
return q1.to_base_units(), q2.to_base_units()
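# Hedged usage sketch for q_vector() on a tiny synthetic grid. The wind and
# temperature values and the 700-hPa level are arbitrary; the scalar grid
# spacing is an assumption for brevity (real analyses would normally pass the
# full grid-spacing arrays), and static_stability keeps its default of 1.
def _example_q_vector():
    import numpy as np
    u = units.Quantity(np.array([[2., 4., 8.]] * 3), 'm/s')
    v = units.Quantity(np.zeros((3, 3)), 'm/s')
    temperature = units.Quantity(np.array([[273., 274., 275.]] * 3), 'K')
    pressure = units.Quantity(700., 'hPa')
    dx = dy = units.Quantity(100., 'km')
    return q_vector(u, v, temperature, pressure, dx, dy)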
| 40.375358
| 95
| 0.651811
|
598b5ecaf403fea94c4c8ff07343df7f3a784ba9
| 451
|
py
|
Python
|
flask_app/clock/views.py
|
julien-bonnefoy/website
|
a00d70697cc3a367dcdb32ca62ed29493029cf91
|
[
"Apache-2.0"
] | null | null | null |
flask_app/clock/views.py
|
julien-bonnefoy/website
|
a00d70697cc3a367dcdb32ca62ed29493029cf91
|
[
"Apache-2.0"
] | null | null | null |
flask_app/clock/views.py
|
julien-bonnefoy/website
|
a00d70697cc3a367dcdb32ca62ed29493029cf91
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
""" CLOCK VIEWS
BLUEPRINT: clock_bp
ROUTES FUNCTIONS: clock
OTHER FUNCTIONS:
"""
from flask import Blueprint, render_template
# Blueprint Configuration
clock_bp = Blueprint(
'clock_bp',
__name__,
template_folder='../templates/',
static_folder='../static/'
)
@clock_bp.route('/clock', methods=['GET'])
def clock():
return render_template(
'clock/clock.html',
title='Clock Clock 24',
)
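# Hedged wiring sketch: how this blueprint would typically be attached to the
# application. The factory below is illustrative only and is not part of the
# real app setup.
def _example_create_app():
    from flask import Flask
    app = Flask(__name__)
    app.register_blueprint(clock_bp)
    return app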
| 18.791667
| 44
| 0.656319
|
7f5641a63849cfec25fa2f560d50e92dc78576c3
| 8,529
|
py
|
Python
|
pydiffvg/save_svg.py
|
13952522076/diffvg
|
2c5af9ecf470b1c7071e821583e5ba09cb2c4622
|
[
"Apache-2.0"
] | null | null | null |
pydiffvg/save_svg.py
|
13952522076/diffvg
|
2c5af9ecf470b1c7071e821583e5ba09cb2c4622
|
[
"Apache-2.0"
] | null | null | null |
pydiffvg/save_svg.py
|
13952522076/diffvg
|
2c5af9ecf470b1c7071e821583e5ba09cb2c4622
|
[
"Apache-2.0"
] | null | null | null |
import torch
import pydiffvg
import xml.etree.ElementTree as etree
from xml.dom import minidom
def prettify(elem):
"""Return a pretty-printed XML string for the Element.
"""
rough_string = etree.tostring(elem, 'utf-8')
reparsed = minidom.parseString(rough_string)
return reparsed.toprettyxml(indent=" ")
def save_svg(filename, width, height, shapes, shape_groups, use_gamma = False, background=None):
root = etree.Element('svg')
root.set('version', '1.1')
root.set('xmlns', 'http://www.w3.org/2000/svg')
root.set('width', str(width))
root.set('height', str(height))
if background is not None:
print(f"setting background to {background}")
root.set('style', str(background))
defs = etree.SubElement(root, 'defs')
g = etree.SubElement(root, 'g')
if use_gamma:
f = etree.SubElement(defs, 'filter')
f.set('id', 'gamma')
f.set('x', '0')
f.set('y', '0')
f.set('width', '100%')
f.set('height', '100%')
gamma = etree.SubElement(f, 'feComponentTransfer')
gamma.set('color-interpolation-filters', 'sRGB')
feFuncR = etree.SubElement(gamma, 'feFuncR')
feFuncR.set('type', 'gamma')
feFuncR.set('amplitude', str(1))
feFuncR.set('exponent', str(1/2.2))
feFuncG = etree.SubElement(gamma, 'feFuncG')
feFuncG.set('type', 'gamma')
feFuncG.set('amplitude', str(1))
feFuncG.set('exponent', str(1/2.2))
feFuncB = etree.SubElement(gamma, 'feFuncB')
feFuncB.set('type', 'gamma')
feFuncB.set('amplitude', str(1))
feFuncB.set('exponent', str(1/2.2))
feFuncA = etree.SubElement(gamma, 'feFuncA')
feFuncA.set('type', 'gamma')
feFuncA.set('amplitude', str(1))
feFuncA.set('exponent', str(1/2.2))
g.set('style', 'filter:url(#gamma)')
# Store color
for i, shape_group in enumerate(shape_groups):
def add_color(shape_color, name):
if isinstance(shape_color, pydiffvg.LinearGradient):
lg = shape_color
color = etree.SubElement(defs, 'linearGradient')
color.set('id', name)
color.set('x1', str(lg.begin[0].item()/width))
color.set('y1', str(lg.begin[1].item()/height))
color.set('x2', str(lg.end[0].item()/width))
color.set('y2', str(lg.end[1].item()/height))
offsets = lg.offsets.data.cpu().numpy()
stop_colors = lg.stop_colors.data.cpu().numpy()
for j in range(offsets.shape[0]):
stop = etree.SubElement(color, 'stop')
stop.set('offset', str(offsets[j]))
                    c = stop_colors[j, :]  # use the numpy copy so formatting yields plain numbers
stop.set('stop-color', 'rgb({}, {}, {})'.format(\
int(255 * c[0]), int(255 * c[1]), int(255 * c[2])))
stop.set('stop-opacity', '{}'.format(c[3]))
if isinstance(shape_color, pydiffvg.RadialGradient):
lg = shape_color
color = etree.SubElement(defs, 'radialGradient')
color.set('id', name)
color.set('cx', str(lg.center[0].item()/width))
color.set('cy', str(lg.center[1].item()/height))
                # this only supports width == height
color.set('r', str(lg.radius[0].item()/width))
offsets = lg.offsets.data.cpu().numpy()
stop_colors = lg.stop_colors.data.cpu().numpy()
for j in range(offsets.shape[0]):
stop = etree.SubElement(color, 'stop')
stop.set('offset', str(offsets[j]))
                    c = stop_colors[j, :]  # use the numpy copy so formatting yields plain numbers
stop.set('stop-color', 'rgb({}, {}, {})'.format(\
int(255 * c[0]), int(255 * c[1]), int(255 * c[2])))
stop.set('stop-opacity', '{}'.format(c[3]))
if shape_group.fill_color is not None:
add_color(shape_group.fill_color, 'shape_{}_fill'.format(i))
if shape_group.stroke_color is not None:
add_color(shape_group.stroke_color, 'shape_{}_stroke'.format(i))
for i, shape_group in enumerate(shape_groups):
shape = shapes[shape_group.shape_ids[0]]
if isinstance(shape, pydiffvg.Circle):
shape_node = etree.SubElement(g, 'circle')
shape_node.set('r', str(shape.radius.item()))
shape_node.set('cx', str(shape.center[0].item()))
shape_node.set('cy', str(shape.center[1].item()))
elif isinstance(shape, pydiffvg.Polygon):
shape_node = etree.SubElement(g, 'polygon')
points = shape.points.data.cpu().numpy()
path_str = ''
for j in range(0, shape.points.shape[0]):
path_str += '{} {}'.format(points[j, 0], points[j, 1])
if j != shape.points.shape[0] - 1:
path_str += ' '
shape_node.set('points', path_str)
elif isinstance(shape, pydiffvg.Path):
shape_node = etree.SubElement(g, 'path')
num_segments = shape.num_control_points.shape[0]
num_control_points = shape.num_control_points.data.cpu().numpy()
points = shape.points.data.cpu().numpy()
num_points = shape.points.shape[0]
path_str = 'M {} {}'.format(points[0, 0], points[0, 1])
point_id = 1
for j in range(0, num_segments):
if num_control_points[j] == 0:
p = point_id % num_points
path_str += ' L {} {}'.format(\
points[p, 0], points[p, 1])
point_id += 1
elif num_control_points[j] == 1:
p1 = (point_id + 1) % num_points
path_str += ' Q {} {} {} {}'.format(\
points[point_id, 0], points[point_id, 1],
points[p1, 0], points[p1, 1])
point_id += 2
elif num_control_points[j] == 2:
p2 = (point_id + 2) % num_points
path_str += ' C {} {} {} {} {} {}'.format(\
points[point_id, 0], points[point_id, 1],
points[point_id + 1, 0], points[point_id + 1, 1],
points[p2, 0], points[p2, 1])
point_id += 3
shape_node.set('d', path_str)
elif isinstance(shape, pydiffvg.Rect):
shape_node = etree.SubElement(g, 'rect')
shape_node.set('x', str(shape.p_min[0].item()))
shape_node.set('y', str(shape.p_min[1].item()))
shape_node.set('width', str(shape.p_max[0].item() - shape.p_min[0].item()))
shape_node.set('height', str(shape.p_max[1].item() - shape.p_min[1].item()))
else:
assert(False)
shape_node.set('stroke-width', str(2 * shape.stroke_width.data.cpu().item()))
if shape_group.fill_color is not None:
if isinstance(shape_group.fill_color, pydiffvg.LinearGradient):
shape_node.set('fill', 'url(#shape_{}_fill)'.format(i))
elif isinstance(shape_group.fill_color, pydiffvg.RadialGradient):
shape_node.set('fill', 'url(#shape_{}_fill)'.format(i))
else:
c = shape_group.fill_color.data.cpu().numpy()
shape_node.set('fill', 'rgb({}, {}, {})'.format(\
int(255 * c[0]), int(255 * c[1]), int(255 * c[2])))
shape_node.set('opacity', str(c[3]))
else:
shape_node.set('fill', 'none')
if shape_group.stroke_color is not None:
if isinstance(shape_group.stroke_color, pydiffvg.LinearGradient):
shape_node.set('stroke', 'url(#shape_{}_stroke)'.format(i))
            elif isinstance(shape_group.stroke_color, pydiffvg.RadialGradient):
shape_node.set('stroke', 'url(#shape_{}_stroke)'.format(i))
else:
c = shape_group.stroke_color.data.cpu().numpy()
shape_node.set('stroke', 'rgb({}, {}, {})'.format(\
int(255 * c[0]), int(255 * c[1]), int(255 * c[2])))
shape_node.set('stroke-opacity', str(c[3]))
shape_node.set('stroke-linecap', 'round')
shape_node.set('stroke-linejoin', 'round')
with open(filename, "w") as f:
f.write(prettify(root))
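# Hedged usage sketch for save_svg() above: one filled circle on a 256x256
# canvas. The Circle/ShapeGroup constructor arguments follow common pydiffvg
# usage but are assumptions here; fill_color is RGBA in [0, 1].
def _example_save_svg(filename='example.svg'):
    circle = pydiffvg.Circle(radius=torch.tensor(40.0),
                             center=torch.tensor([128.0, 128.0]))
    group = pydiffvg.ShapeGroup(shape_ids=torch.tensor([0]),
                                fill_color=torch.tensor([0.3, 0.6, 0.3, 1.0]))
    save_svg(filename, 256, 256, [circle], [group])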
| 50.767857
| 96
| 0.528315
|
5507698333305cc7e9b4dcffbb38bac994bfb22c
| 6,529
|
py
|
Python
|
asclepias_broker/config.py
|
diegodelemos/asclepias-broker
|
363ebf620ceea8cd4bd387d414c9cfaca226ce35
|
[
"MIT"
] | null | null | null |
asclepias_broker/config.py
|
diegodelemos/asclepias-broker
|
363ebf620ceea8cd4bd387d414c9cfaca226ce35
|
[
"MIT"
] | null | null | null |
asclepias_broker/config.py
|
diegodelemos/asclepias-broker
|
363ebf620ceea8cd4bd387d414c9cfaca226ce35
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2018 CERN.
#
# Asclepias Broker is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Default configuration for Asclepias Broker."""
from __future__ import absolute_import, print_function
from datetime import timedelta
from invenio_app.config import APP_DEFAULT_SECURE_HEADERS
from invenio_records_rest.facets import terms_filter
from invenio_records_rest.utils import deny_all
from invenio_search.api import RecordsSearch
from asclepias_broker.search import enum_term_filter, nested_range_filter, \
nested_terms_filter
def _(x):
"""Identity function used to trigger string extraction."""
return x
# Rate limiting
# =============
RATELIMIT_STORAGE_URL = 'redis://localhost:6379/3'
# I18N
# ====
#: Default language
BABEL_DEFAULT_LANGUAGE = 'en'
#: Default time zone
BABEL_DEFAULT_TIMEZONE = 'Europe/Zurich'
# Base templates
# ==============
#: Global base template.
BASE_TEMPLATE = 'invenio_theme/page.html'
#: Cover page base template (used for e.g. login/sign-up).
COVER_TEMPLATE = 'invenio_theme/page_cover.html'
#: Footer base template.
FOOTER_TEMPLATE = 'invenio_theme/footer.html'
#: Header base template.
HEADER_TEMPLATE = 'invenio_theme/header.html'
#: Settings base template.
SETTINGS_TEMPLATE = 'invenio_theme/page_settings.html'
# Theme configuration
# ===================
#: Site name
THEME_SITENAME = _('Asclepias Broker')
#: Use default frontpage.
THEME_FRONTPAGE = False
# Email configuration
# ===================
#: Email address for support.
SUPPORT_EMAIL = "info@inveniosoftware.org"
#: Disable email sending by default.
MAIL_SUPPRESS_SEND = True
# Assets
# ======
#: Static files collection method (defaults to copying files).
COLLECT_STORAGE = 'flask_collect.storage.file'
# Accounts
# ========
SECURITY_EMAIL_SENDER = SUPPORT_EMAIL
SECURITY_EMAIL_SUBJECT_REGISTER = _(
"Welcome to Asclepias Broker!")
ACCOUNTS_SESSION_REDIS_URL = 'redis://localhost:6379/1'
SECURITY_REGISTERABLE = False
SECURITY_RECOVERABLE = False
SECURITY_CONFIRMABLE = False
SECURITY_CHANGEABLE = False
# Celery configuration
# ====================
BROKER_URL = 'amqp://guest:guest@mq:5672/'
CELERY_BROKER_URL = 'amqp://guest:guest@localhost:5672/'
CELERY_RESULT_BACKEND = 'redis://localhost:6379/2'
#: Scheduled tasks configuration (aka cronjobs).
CELERY_BEAT_SCHEDULE = {
'indexer': {
'task': 'invenio_indexer.tasks.process_bulk_queue',
'schedule': timedelta(minutes=5),
},
'accounts': {
'task': 'invenio_accounts.tasks.clean_session_table',
'schedule': timedelta(minutes=60),
},
}
# Database
# ========
SQLALCHEMY_DATABASE_URI = \
'postgresql+psycopg2://asclepias:asclepias@localhost/asclepias'
# Search
# ======
SEARCH_MAPPINGS = ['relationships']
# JSONSchemas
# ===========
JSONSCHEMAS_HOST = 'asclepias-broker.com'
# Flask configuration
# ===================
# See details on
# http://flask.pocoo.org/docs/0.12/config/#builtin-configuration-values
SECRET_KEY = 'CHANGE_ME'
#: Max upload size for form data via multipart/form-data
MAX_CONTENT_LENGTH = 100 * 1024 * 1024 # 100 MiB
# REST
# ====
#: Enable Cross-Origin Resource Sharing support.
REST_ENABLE_CORS = True
RECORDS_REST_ENDPOINTS = dict(
relid=dict(
pid_type='relid',
pid_minter='relid',
pid_fetcher='relid',
# TODO: Make our own search class
search_class=RecordsSearch,
indexer_class=None,
search_index='relationships',
search_type=None,
search_factory_imp='asclepias_broker.search.search_factory',
# Only the List GET view is available
create_permission_factory_imp=deny_all,
delete_permission_factory_imp=deny_all,
update_permission_factory_imp=deny_all,
read_permission_factory_imp=deny_all,
links_factory_imp=lambda p, **_: None,
record_serializers={
'application/json': ('invenio_records_rest.serializers'
':json_v1_response'),
},
# TODO: Implement marshmallow serializers
search_serializers={
'application/json': ('invenio_records_rest.serializers'
':json_v1_search'),
},
list_route='/relationships',
item_route='/relationships/<pid(relid):pid_value>',
default_media_type='application/json',
max_result_window=10000,
error_handlers=dict(),
),
)
RECORDS_REST_FACETS = dict(
relationships=dict(
aggs=dict(
type=dict(
terms=dict(field='Source.Type.Name')
),
),
# TODO: Investigate using a webargs-powered search_factory to better
# validate and build the query...
filters=dict(
id=nested_terms_filter('Target.Identifier.ID'),
scheme=nested_terms_filter('Target.Identifier.IDScheme'),
groupBy=enum_term_filter(
label='groupBy',
field='Grouping',
choices={'identity': 'identity', 'version': 'version'}
),
**{'from': nested_range_filter(
'from', 'History.LinkPublicationDate', op='gte')},
to=nested_range_filter(
'to', 'History.LinkPublicationDate', op='lte'),
relation=enum_term_filter(
label='relation',
field='RelationshipType',
choices={
'isCitedBy': 'Cites',
'isSupplementedBy': 'IsSupplementTo',
'isRelatedTo': 'IsRelatedTo'
}
),
type=terms_filter('Source.Type.Name'),
),
)
)
# TODO: See if this actually works
RECORDS_REST_SORT_OPTIONS = dict(
relationships=dict(
mostrecent=dict(
fields=['Source.PublicationDate'],
default_order='desc',
),
),
)
RECORDS_REST_DEFAULT_SORT = {
'relationships': {
'noquery': 'mostrecent'
}
}
APP_DEFAULT_SECURE_HEADERS['force_https'] = True
APP_DEFAULT_SECURE_HEADERS['session_cookie_secure'] = True
# Debug
# =====
# Flask-DebugToolbar is by default enabled when the application is running in
# debug mode. More configuration options are available at
# https://flask-debugtoolbar.readthedocs.io/en/latest/#configuration
#: Switches off interception of redirects by Flask-DebugToolbar.
DEBUG_TB_INTERCEPT_REDIRECTS = False
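# Hedged client-side sketch (illustrative, not part of this configuration):
# given RECORDS_REST_ENDPOINTS and RECORDS_REST_FACETS above, the relationships
# listing is a plain GET endpoint whose query parameters match the filter names
# defined in the facets. The DOI and the use of `requests` are assumptions.
def _example_relationships_query():
    import requests
    return requests.get(
        'https://' + JSONSCHEMAS_HOST + '/relationships',
        params={
            'id': '10.5281/zenodo.123456',  # hypothetical identifier
            'scheme': 'doi',
            'relation': 'isCitedBy',
            'groupBy': 'identity',
        },
    ).json()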
| 27.782979
| 77
| 0.65906
|
b44a1ad4bc67115b72b8566662c258e214525316
| 22,748
|
py
|
Python
|
nova/network/api.py
|
venusource/nova
|
0c6e6f180eebe71a3431abf726a0fd0c66578162
|
[
"Apache-2.0"
] | 7
|
2015-09-22T11:27:16.000Z
|
2015-11-02T12:33:46.000Z
|
nova/network/api.py
|
venusource/nova
|
0c6e6f180eebe71a3431abf726a0fd0c66578162
|
[
"Apache-2.0"
] | 2
|
2015-09-07T22:14:46.000Z
|
2020-08-12T08:51:56.000Z
|
nova/network/api.py
|
venusource/nova
|
0c6e6f180eebe71a3431abf726a0fd0c66578162
|
[
"Apache-2.0"
] | 4
|
2015-09-09T16:48:56.000Z
|
2022-03-15T20:52:57.000Z
|
# Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2013 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
from oslo.config import cfg
from nova import exception
from nova.i18n import _LI
from nova.network import base_api
from nova.network import floating_ips
from nova.network import model as network_model
from nova.network import rpcapi as network_rpcapi
from nova import objects
from nova.objects import base as obj_base
from nova.openstack.common import log as logging
from nova import policy
from nova import utils
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
def wrap_check_policy(func):
"""Check policy corresponding to the wrapped methods prior to execution."""
@functools.wraps(func)
def wrapped(self, context, *args, **kwargs):
action = func.__name__
check_policy(context, action)
return func(self, context, *args, **kwargs)
return wrapped
def check_policy(context, action):
target = {
'project_id': context.project_id,
'user_id': context.user_id,
}
_action = 'network:%s' % action
policy.enforce(context, _action, target)
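# Hedged illustration of the convention used by wrap_check_policy/check_policy
# above: a wrapped method named `get_all` is enforced as the policy action
# 'network:get_all' against a target built from the caller's project and user.
# The context object below is a hypothetical stand-in showing only the fields
# that check_policy reads.
def _example_policy_action(method_name='get_all'):
    class _Context(object):
        project_id = 'demo-project'
        user_id = 'demo-user'
    target = {'project_id': _Context.project_id, 'user_id': _Context.user_id}
    return 'network:%s' % method_name, target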
class API(base_api.NetworkAPI):
"""API for doing networking via the nova-network network manager.
This is a pluggable module - other implementations do networking via
other services (such as Neutron).
"""
def __init__(self, **kwargs):
self.network_rpcapi = network_rpcapi.NetworkAPI()
helper = utils.ExceptionHelper
# NOTE(vish): this local version of floating_manager has to convert
# ClientExceptions back since they aren't going over rpc.
self.floating_manager = helper(floating_ips.LocalManager())
super(API, self).__init__(**kwargs)
@wrap_check_policy
def get_all(self, context):
"""Get all the networks.
If it is an admin user then api will return all the
networks. If it is a normal user and nova Flat or FlatDHCP
networking is being used then api will return all
networks. Otherwise api will only return the networks which
belong to the user's project.
"""
if "nova.network.manager.Flat" in CONF.network_manager:
project_only = "allow_none"
else:
project_only = True
try:
return objects.NetworkList.get_all(context,
project_only=project_only)
except exception.NoNetworksFound:
return []
@wrap_check_policy
def get(self, context, network_uuid):
return objects.Network.get_by_uuid(context.elevated(), network_uuid)
@wrap_check_policy
def create(self, context, **kwargs):
return self.network_rpcapi.create_networks(context, **kwargs)
@wrap_check_policy
def delete(self, context, network_uuid):
return self.network_rpcapi.delete_network(context, network_uuid, None)
@wrap_check_policy
def disassociate(self, context, network_uuid):
network = self.get(context, network_uuid)
objects.Network.disassociate(context, network.id,
host=True, project=True)
@wrap_check_policy
def get_fixed_ip(self, context, id):
return objects.FixedIP.get_by_id(context, id)
@wrap_check_policy
def get_fixed_ip_by_address(self, context, address):
return objects.FixedIP.get_by_address(context, address)
@wrap_check_policy
def get_floating_ip(self, context, id):
if not utils.is_int_like(id):
raise exception.InvalidID(id=id)
return objects.FloatingIP.get_by_id(context, id)
@wrap_check_policy
def get_floating_ip_pools(self, context):
return objects.FloatingIP.get_pool_names(context)
@wrap_check_policy
def get_floating_ip_by_address(self, context, address):
return objects.FloatingIP.get_by_address(context, address)
@wrap_check_policy
def get_floating_ips_by_project(self, context):
return objects.FloatingIPList.get_by_project(context,
context.project_id)
@wrap_check_policy
def get_instance_id_by_floating_address(self, context, address):
fixed_ip = objects.FixedIP.get_by_floating_address(context, address)
if fixed_ip is None:
return None
else:
return fixed_ip.instance_uuid
@wrap_check_policy
def get_vifs_by_instance(self, context, instance):
vifs = objects.VirtualInterfaceList.get_by_instance_uuid(context,
instance.uuid)
for vif in vifs:
if vif.network_id is not None:
network = objects.Network.get_by_id(context, vif.network_id,
project_only='allow_none')
vif.net_uuid = network.uuid
return vifs
@wrap_check_policy
def get_vif_by_mac_address(self, context, mac_address):
vif = objects.VirtualInterface.get_by_address(context,
mac_address)
if vif.network_id is not None:
network = objects.Network.get_by_id(context, vif.network_id,
project_only='allow_none')
vif.net_uuid = network.uuid
return vif
@wrap_check_policy
def allocate_floating_ip(self, context, pool=None):
"""Adds (allocates) a floating ip to a project from a pool."""
return self.floating_manager.allocate_floating_ip(context,
context.project_id, False, pool)
@wrap_check_policy
def release_floating_ip(self, context, address,
affect_auto_assigned=False):
"""Removes (deallocates) a floating ip with address from a project."""
return self.floating_manager.deallocate_floating_ip(context, address,
affect_auto_assigned)
def disassociate_and_release_floating_ip(self, context, instance,
floating_ip):
"""Removes (deallocates) and deletes the floating ip.
This api call was added to allow this to be done in one operation
if using neutron.
"""
address = floating_ip['address']
if floating_ip.get('fixed_ip_id'):
try:
self.disassociate_floating_ip(context, instance, address)
except exception.FloatingIpNotAssociated:
msg = ("Floating ip %s has already been disassociated, "
"perhaps by another concurrent action.") % address
LOG.debug(msg)
# release ip from project
return self.release_floating_ip(context, address)
@wrap_check_policy
@base_api.refresh_cache
def associate_floating_ip(self, context, instance,
floating_address, fixed_address,
affect_auto_assigned=False):
"""Associates a floating ip with a fixed ip.
Ensures floating ip is allocated to the project in context.
Does not verify ownership of the fixed ip. Caller is assumed to have
checked that the instance is properly owned.
"""
orig_instance_uuid = self.floating_manager.associate_floating_ip(
context, floating_address, fixed_address, affect_auto_assigned)
if orig_instance_uuid:
msg_dict = dict(address=floating_address,
instance_id=orig_instance_uuid)
LOG.info(_LI('re-assign floating IP %(address)s from '
'instance %(instance_id)s'), msg_dict)
orig_instance = objects.Instance.get_by_uuid(context,
orig_instance_uuid)
# purge cached nw info for the original instance
base_api.update_instance_cache_with_nw_info(self, context,
orig_instance)
@wrap_check_policy
@base_api.refresh_cache
def disassociate_floating_ip(self, context, instance, address,
affect_auto_assigned=False):
"""Disassociates a floating ip from fixed ip it is associated with."""
return self.floating_manager.disassociate_floating_ip(context, address,
affect_auto_assigned)
@wrap_check_policy
@base_api.refresh_cache
def allocate_for_instance(self, context, instance, vpn,
requested_networks, macs=None,
security_groups=None,
dhcp_options=None):
"""Allocates all network structures for an instance.
:param context: The request context.
:param instance: nova.objects.instance.Instance object.
:param vpn: A boolean, if True, indicate a vpn to access the instance.
:param requested_networks: A dictionary of requested_networks,
Optional value containing network_id, fixed_ip, and port_id.
:param macs: None or a set of MAC addresses that the instance
should use. macs is supplied by the hypervisor driver (contrast
with requested_networks which is user supplied).
:param security_groups: None or security groups to allocate for
instance.
:param dhcp_options: None or a set of key/value pairs that should
determine the DHCP BOOTP response, eg. for PXE booting an instance
configured with the baremetal hypervisor. It is expected that these
are already formatted for the neutron v2 api.
See nova/virt/driver.py:dhcp_options_for_instance for an example.
:returns: network info as from get_instance_nw_info() below
"""
# NOTE(vish): We can't do the floating ip allocation here because
# this is called from compute.manager which shouldn't
# have db access so we do it on the other side of the
# rpc.
flavor = instance.get_flavor()
args = {}
args['vpn'] = vpn
args['requested_networks'] = requested_networks
args['instance_id'] = instance.uuid
args['project_id'] = instance.project_id
args['host'] = instance.host
args['rxtx_factor'] = flavor['rxtx_factor']
args['macs'] = macs
args['dhcp_options'] = dhcp_options
nw_info = self.network_rpcapi.allocate_for_instance(context, **args)
return network_model.NetworkInfo.hydrate(nw_info)
@wrap_check_policy
def deallocate_for_instance(self, context, instance,
requested_networks=None):
"""Deallocates all network structures related to instance."""
# NOTE(vish): We can't do the floating ip deallocation here because
# this is called from compute.manager which shouldn't
# have db access so we do it on the other side of the
# rpc.
if not isinstance(instance, obj_base.NovaObject):
instance = objects.Instance._from_db_object(context,
objects.Instance(), instance)
self.network_rpcapi.deallocate_for_instance(context, instance=instance,
requested_networks=requested_networks)
# NOTE(danms): Here for neutron compatibility
def allocate_port_for_instance(self, context, instance, port_id,
network_id=None, requested_ip=None):
raise NotImplementedError()
# NOTE(danms): Here for neutron compatibility
def deallocate_port_for_instance(self, context, instance, port_id):
raise NotImplementedError()
# NOTE(danms): Here for neutron compatibility
def list_ports(self, *args, **kwargs):
raise NotImplementedError()
# NOTE(danms): Here for neutron compatibility
def show_port(self, *args, **kwargs):
raise NotImplementedError()
@wrap_check_policy
@base_api.refresh_cache
def add_fixed_ip_to_instance(self, context, instance, network_id):
"""Adds a fixed ip to instance from specified network."""
flavor = instance.get_flavor()
args = {'instance_id': instance['uuid'],
'rxtx_factor': flavor['rxtx_factor'],
'host': instance['host'],
'network_id': network_id}
nw_info = self.network_rpcapi.add_fixed_ip_to_instance(
context, **args)
return network_model.NetworkInfo.hydrate(nw_info)
@wrap_check_policy
@base_api.refresh_cache
def remove_fixed_ip_from_instance(self, context, instance, address):
"""Removes a fixed ip from instance from specified network."""
flavor = instance.get_flavor()
args = {'instance_id': instance['uuid'],
'rxtx_factor': flavor['rxtx_factor'],
'host': instance['host'],
'address': address}
nw_info = self.network_rpcapi.remove_fixed_ip_from_instance(
context, **args)
return network_model.NetworkInfo.hydrate(nw_info)
@wrap_check_policy
def add_network_to_project(self, context, project_id, network_uuid=None):
"""Force adds another network to a project."""
self.network_rpcapi.add_network_to_project(context, project_id,
network_uuid)
@wrap_check_policy
def associate(self, context, network_uuid, host=base_api.SENTINEL,
project=base_api.SENTINEL):
"""Associate or disassociate host or project to network."""
network = self.get(context, network_uuid)
if host is not base_api.SENTINEL:
if host is None:
objects.Network.disassociate(context, network.id,
host=True, project=False)
else:
network.host = host
network.save()
if project is not base_api.SENTINEL:
if project is None:
objects.Network.disassociate(context, network.id,
host=False, project=True)
else:
objects.Network.associate(context, project,
network_id=network.id, force=True)
@wrap_check_policy
def get_instance_nw_info(self, context, instance, **kwargs):
"""Returns all network info related to an instance."""
result = self._get_instance_nw_info(context, instance)
# NOTE(comstud): Don't update API cell with new info_cache every
# time we pull network info for an instance. The periodic healing
# of info_cache causes too many cells messages. Healing the API
# will happen separately.
base_api.update_instance_cache_with_nw_info(self, context, instance,
result, update_cells=False)
return result
def _get_instance_nw_info(self, context, instance):
"""Returns all network info related to an instance."""
flavor = instance.get_flavor()
args = {'instance_id': instance['uuid'],
'rxtx_factor': flavor['rxtx_factor'],
'host': instance['host'],
'project_id': instance['project_id']}
nw_info = self.network_rpcapi.get_instance_nw_info(context, **args)
return network_model.NetworkInfo.hydrate(nw_info)
@wrap_check_policy
def validate_networks(self, context, requested_networks, num_instances):
"""validate the networks passed at the time of creating
the server.
Return the number of instances that can be successfully allocated
with the requested network configuration.
"""
if requested_networks:
self.network_rpcapi.validate_networks(context,
requested_networks)
# Neutron validation checks and returns how many of num_instances
# instances can be supported by the quota. For Nova network
# this is part of the subsequent quota check, so we just return
# the requested number in this case.
return num_instances
def create_pci_requests_for_sriov_ports(self, context,
pci_requests,
requested_networks):
"""Check requested networks for any SR-IOV port request.
Create a PCI request object for each SR-IOV port, and add it to the
pci_requests object that contains a list of PCI request object.
"""
# This is NOOP for Nova network since it doesn't support SR-IOV.
pass
@wrap_check_policy
def get_dns_domains(self, context):
"""Returns a list of available dns domains.
These can be used to create DNS entries for floating ips.
"""
return self.network_rpcapi.get_dns_domains(context)
@wrap_check_policy
def add_dns_entry(self, context, address, name, dns_type, domain):
"""Create specified DNS entry for address."""
args = {'address': address,
'name': name,
'dns_type': dns_type,
'domain': domain}
return self.network_rpcapi.add_dns_entry(context, **args)
@wrap_check_policy
def modify_dns_entry(self, context, name, address, domain):
"""Create specified DNS entry for address."""
args = {'address': address,
'name': name,
'domain': domain}
return self.network_rpcapi.modify_dns_entry(context, **args)
@wrap_check_policy
def delete_dns_entry(self, context, name, domain):
"""Delete the specified dns entry."""
args = {'name': name, 'domain': domain}
return self.network_rpcapi.delete_dns_entry(context, **args)
@wrap_check_policy
def delete_dns_domain(self, context, domain):
"""Delete the specified dns domain."""
return self.network_rpcapi.delete_dns_domain(context, domain=domain)
@wrap_check_policy
def get_dns_entries_by_address(self, context, address, domain):
"""Get entries for address and domain."""
args = {'address': address, 'domain': domain}
return self.network_rpcapi.get_dns_entries_by_address(context, **args)
@wrap_check_policy
def get_dns_entries_by_name(self, context, name, domain):
"""Get entries for name and domain."""
args = {'name': name, 'domain': domain}
return self.network_rpcapi.get_dns_entries_by_name(context, **args)
@wrap_check_policy
def create_private_dns_domain(self, context, domain, availability_zone):
"""Create a private DNS domain with nova availability zone."""
args = {'domain': domain, 'av_zone': availability_zone}
return self.network_rpcapi.create_private_dns_domain(context, **args)
@wrap_check_policy
def create_public_dns_domain(self, context, domain, project=None):
"""Create a public DNS domain with optional nova project."""
args = {'domain': domain, 'project': project}
return self.network_rpcapi.create_public_dns_domain(context, **args)
@wrap_check_policy
def setup_networks_on_host(self, context, instance, host=None,
teardown=False):
"""Setup or teardown the network structures on hosts related to
instance.
"""
host = host or instance['host']
        # NOTE(tr3buchet): host is passed in cases where we need to setup
        # or teardown the networks on a host which has been migrated to/from,
        # and instance['host'] is not yet or is no longer equal to that host
args = {'instance_id': instance['id'],
'host': host,
'teardown': teardown}
self.network_rpcapi.setup_networks_on_host(context, **args)
def _get_multi_addresses(self, context, instance):
try:
fixed_ips = objects.FixedIPList.get_by_instance_uuid(
context, instance['uuid'])
except exception.FixedIpNotFoundForInstance:
return False, []
addresses = []
for fixed in fixed_ips:
for floating in fixed.floating_ips:
addresses.append(floating.address)
return fixed_ips[0].network.multi_host, addresses
@wrap_check_policy
def migrate_instance_start(self, context, instance, migration):
"""Start to migrate the network of an instance."""
flavor = instance.get_flavor()
args = dict(
instance_uuid=instance['uuid'],
rxtx_factor=flavor['rxtx_factor'],
project_id=instance['project_id'],
source_compute=migration['source_compute'],
dest_compute=migration['dest_compute'],
floating_addresses=None,
)
multi_host, addresses = self._get_multi_addresses(context, instance)
if multi_host:
args['floating_addresses'] = addresses
args['host'] = migration['source_compute']
self.network_rpcapi.migrate_instance_start(context, **args)
@wrap_check_policy
def migrate_instance_finish(self, context, instance, migration):
"""Finish migrating the network of an instance."""
flavor = instance.get_flavor()
args = dict(
instance_uuid=instance['uuid'],
rxtx_factor=flavor['rxtx_factor'],
project_id=instance['project_id'],
source_compute=migration['source_compute'],
dest_compute=migration['dest_compute'],
floating_addresses=None,
)
multi_host, addresses = self._get_multi_addresses(context, instance)
if multi_host:
args['floating_addresses'] = addresses
args['host'] = migration['dest_compute']
self.network_rpcapi.migrate_instance_finish(context, **args)
| 41.97048
| 79
| 0.6361
|
b11bdf1b5bf73c81d6610521638b9269830a3ab0
| 4,592
|
py
|
Python
|
ltfx/functions/train.py
|
laguro-developers/tfx-utils
|
af85352ea0046ba60b429b37680da6878e7ee0e5
|
[
"MIT"
] | 1
|
2021-01-09T16:38:44.000Z
|
2021-01-09T16:38:44.000Z
|
ltfx/functions/train.py
|
laguro-developers/tfx-utils
|
af85352ea0046ba60b429b37680da6878e7ee0e5
|
[
"MIT"
] | null | null | null |
ltfx/functions/train.py
|
laguro-developers/tfx-utils
|
af85352ea0046ba60b429b37680da6878e7ee0e5
|
[
"MIT"
] | null | null | null |
import tensorflow as tf
import tensorflow_transform as tft
from tensorflow.python.ops import lookup_ops
from ltfx.models.resnet50 import build_resnet50
from ltfx.functions.input import _input_fn
from ltfx.functions.serving import _serving_receiver_fn
from ltfx.functions.eval import _eval_input_receiver_fn
def _train_fn(trainer_fn_args, schema, **kwargs):
transform_output = tft.TFTransformOutput(trainer_fn_args.transform_output)
vocabulary = transform_output.vocabulary_by_name('label_encoder')
input_fn = lambda: _input_fn(trainer_fn_args.train_files, transform_output,
kwargs['train_batch_size'],
kwargs['image_key'],
kwargs['label_key'])
eval_fn = lambda : _input_fn(trainer_fn_args.eval_files, transform_output,
kwargs['eval_batch_size'],
kwargs['image_key'],
kwargs['label_key'])
train_spec = tf.estimator.TrainSpec(input_fn, max_steps=trainer_fn_args.train_steps)
serving_fn = lambda: _serving_receiver_fn(transform_output, schema,
kwargs['raw_label_key'],
kwargs['image_key'],
kwargs['label_key'])
exporter = tf.estimator.FinalExporter(kwargs['model_name'], serving_fn)
eval_spec = tf.estimator.EvalSpec(eval_fn,
steps=trainer_fn_args.eval_steps,
exporters=[exporter],
name=kwargs['model_name'])
def model_fn(features, labels, mode):
model = build_resnet50(kwargs['image_key'],
image_height=kwargs['image_height'],
image_width=kwargs['image_width'],
number_of_classes=len(vocabulary),
weights=kwargs['pretrained'])
logits = model(features, training=False)
class_ids = tf.cast(tf.sort(tf.argsort(logits, axis=1)), tf.int64)
classes = lookup_ops.index_to_string_table_from_tensor(vocabulary_list=tuple(vocabulary),
name='class_string_lookup').lookup(class_ids)
predictions = {
'probabilities': tf.nn.softmax(logits),
'class_id': classes,
'logits': logits,
}
if mode == tf.estimator.ModeKeys.PREDICT:
return tf.estimator.EstimatorSpec(mode=mode,
predictions=predictions)
label_ids = lookup_ops.index_table_from_tensor(vocabulary_list=tuple(vocabulary),
name='class_id_lookup').lookup(labels)
optimizer = tf.compat.v1.train.GradientDescentOptimizer(learning_rate=.001)
loss = tf.keras.losses.CategoricalCrossentropy(from_logits=True,
reduction=tf.keras.losses.Reduction.NONE)(label_ids, logits)
loss = tf.reduce_sum(loss) * (1. / 4)
mean = tf.compat.v1.metrics.mean(loss)
accuracy = tf.compat.v1.metrics.accuracy(class_ids, label_ids)
tf.summary.scalar('accuracy', accuracy[1])
eval_metric_ops = {"accuracy": accuracy, "mean": mean}
train_op = optimizer.minimize(loss, tf.compat.v1.train.get_or_create_global_step())
if mode == tf.estimator.ModeKeys.EVAL:
return tf.estimator.EstimatorSpec(mode=mode,
predictions=predictions,
loss=loss,
eval_metric_ops=eval_metric_ops)
return tf.estimator.EstimatorSpec(
mode=mode,
loss=loss,
train_op=train_op,
predictions=predictions,
eval_metric_ops=eval_metric_ops)
run_config = tf.estimator.RunConfig(save_checkpoints_steps=999, keep_checkpoint_max=1)
run_config = run_config.replace(model_dir=trainer_fn_args.serving_model_dir)
estimator = tf.estimator.Estimator(model_fn=model_fn, config=run_config)
receiver_fn = lambda: _eval_input_receiver_fn(transform_output, schema, kwargs['image_key'], kwargs['label_key'])
return {
"estimator": estimator,
"train_spec": train_spec,
"eval_spec": eval_spec,
"eval_input_receiver_fn": receiver_fn
}
| 45.92
| 117
| 0.583841
|
1db4df96f4313ba5fc8ec70c3b73bdbad9653708
| 14,408
|
py
|
Python
|
libtbx/scheduling/process_pool.py
|
rimmartin/cctbx_project
|
644090f9432d9afc22cfb542fc3ab78ca8e15e5d
|
[
"BSD-3-Clause-LBNL"
] | 155
|
2016-11-23T12:52:16.000Z
|
2022-03-31T15:35:44.000Z
|
libtbx/scheduling/process_pool.py
|
rimmartin/cctbx_project
|
644090f9432d9afc22cfb542fc3ab78ca8e15e5d
|
[
"BSD-3-Clause-LBNL"
] | 590
|
2016-12-10T11:31:18.000Z
|
2022-03-30T23:10:09.000Z
|
libtbx/scheduling/process_pool.py
|
rimmartin/cctbx_project
|
644090f9432d9afc22cfb542fc3ab78ca8e15e5d
|
[
"BSD-3-Clause-LBNL"
] | 115
|
2016-11-15T08:17:28.000Z
|
2022-02-09T15:30:14.000Z
|
"""
Process Pool
Pools offer less control over execution, as it is not possible to know the
state of individual workers
Common methods:
has_results(): returns whether there are any results available
results(): return an iterator yielding ( identifier, result ) tuples
submit(target, args = (), kwargs = {}): submits job, return identifier
is_empty(): returns whether there are no more jobs or results waiting
is_full(): returns whether the number of currently processing jobs is at maximum
shutdown(): sets the number of workers to zero
resume(): starts workers as necessary
join(): finish processing all jobs, and then shutdown
terminate(): kills all processing
Pool methods:
job_count(): return number of jobs submitted (waiting + running)
worker_count(): return an approximate number of active workers
"""
from __future__ import absolute_import, division, print_function
import time
from collections import deque
from six.moves.queue import Empty, Full
from libtbx.scheduling import SchedulingError, identifier
from libtbx.scheduling import result
# Accounting
class process_register(object):
"""
A simple class that encapsulates job accounting
"""
def __init__(self):
self.running_on = {}
self.terminateds = deque()
self.requested_shutdowns = deque()
self.results = deque()
def process_count(self):
return len( self.running_on )
def record_process_startup(self, pid):
if pid in self.running_on:
raise SchedulingError("Existing worker with identical processID")
self.running_on[ pid ] = None
def record_job_start(self, jobid, pid):
if pid not in self.running_on:
raise SchedulingError("Unknown processID")
if self.running_on[ pid ] is not None:
raise SchedulingError("Attempt to start process on busy worker")
self.running_on[ pid ] = jobid
def record_job_finish(self, jobid, pid, value):
if pid not in self.running_on:
raise SchedulingError("Unknown processID")
if self.running_on[ pid ] != jobid:
raise SchedulingError("Inconsistent register information: jobid/pid mismatch")
self.running_on[ pid ] = None
self.results.append( ( jobid, value ) )
def record_process_shutdown(self, pid):
self.record_process_exit( pid = pid, container = self.requested_shutdowns )
def record_process_termination(self, pid):
self.record_process_exit( pid = pid, container = self.terminateds )
def record_process_exit(self, pid, container):
if pid not in self.running_on:
raise SchedulingError("Unknown processID")
if self.running_on[ pid ] is not None:
raise SchedulingError("Shutdown of busy worker")
container.append( pid )
del self.running_on[ pid ]
def record_process_crash(self, pid, exception, traceback):
if pid not in self.running_on:
raise SchedulingError("Unknown processID")
jobid = self.running_on[ pid ]
if jobid is not None:
self.results.append(
( jobid, result.error( exception = exception, traceback = traceback ) )
)
del self.running_on[ pid ]
self.terminateds.append( pid )
# Stand-alone functions for pickling
def job_started_event(register, data):
( jobid, pid ) = data
register.record_job_start( jobid = jobid, pid = pid )
def job_finished_event(register, data):
( jobid, pid, value ) = data
register.record_job_finish( jobid = jobid, pid = pid, value = value )
def worker_startup_event(register, data):
register.record_process_startup( pid = data )
def worker_shutdown_event(register, data):
register.record_process_shutdown( pid = data )
def worker_termination_event(register, data):
register.record_process_termination( pid = data )
def worker_crash_event(register, data):
( pid, exception, traceback ) = data
register.record_process_crash( pid = pid, exception = exception, traceback = traceback )
# Lifecycle controllers
class unlimited(object):
"""
A worker that is not supposed to die
"""
def active(self):
return True
def record_job_start(self):
pass
def record_job_end(self):
pass
class jobcount_limit(object):
"""
A worker that completes N jobs, and then exits
"""
def __init__(self, maxjobs):
self.jobs_to_go = maxjobs
def active(self):
return 0 < self.jobs_to_go
def record_job_start(self):
pass
def record_job_end(self):
self.jobs_to_go -= 1
class runtime_limit(object):
"""
A worker that exits when a time limit has been reached
"""
def __init__(self, seconds):
self.endtime = time.time() + seconds
def active(self):
return time.time() < self.endtime
def record_job_start(self):
pass
  def record_job_end(self):  # name must match what pool_process_cycle calls
pass
class wallclock_limit(object):
"""
A worker that exits when a wall clock time limit has been reached
"""
def __init__(self, seconds):
self.maxtime = time.clock() + seconds
def active(self):
return time.clock() < self.maxtime
def record_job_start(self):
pass
  def record_job_end(self):  # name must match what pool_process_cycle calls
pass
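# Hedged sketch of the lifecycle controllers above: a worker keeps accepting
# jobs while active() is true, so jobcount_limit(2) lets exactly two jobs run
# before the worker exits. This does not start a real pool.
def _example_lifecycle_controller():
  controller = jobcount_limit( maxjobs = 2 )
  completed = 0
  while controller.active():
    controller.record_job_start()
    # ... a submitted job would run here ...
    controller.record_job_end()
    completed += 1
  return completed # -> 2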
def pool_process_cycle(
pid,
inqueue,
outqueue,
waittime,
lifecycle,
termination_signal,
idle_timeout,
):
controller = lifecycle()
outqueue.put( ( worker_startup_event, pid ) )
last_activity = time.time()
while controller.active():
if last_activity + idle_timeout < time.time():
outqueue.put( ( worker_termination_event, pid ) )
break
try:
data = inqueue.get( timeout = waittime )
except Empty:
continue
if data == termination_signal:
outqueue.put( ( worker_shutdown_event, pid ) )
break
assert len( data ) == 4
( jobid, target, args, kwargs ) = data
outqueue.put( ( job_started_event, ( jobid, pid ) ) )
controller.record_job_start()
try:
value = target( *args, **kwargs )
except Exception as e:
res = result.error( exception = e, traceback = result.get_traceback_info() )
else:
res = result.success( value = value )
outqueue.put( ( job_finished_event, ( jobid, pid, res ) ) )
controller.record_job_end()
last_activity = time.time()
else:
outqueue.put( ( worker_termination_event, pid ) )
# Autoscaling
class constant_capacity(object):
"""
Keep pool size constant
"""
def __init__(self, capacity):
self.capacity = capacity
def __call__(self, manager):
if manager.worker_count() != self.capacity:
manager.set_worker_count( target = self.capacity )
class upscaling_capacity(object):
"""
Increase pool size when needed, rely on timeout for downscaling
"""
def __init__(self, minimum, maximum, buffering):
self.minimum = minimum
self.maximum = maximum
self.buffering = buffering
@property
def capacity(self):
return self.maximum
def __call__(self, manager):
ideal = max(
min( self.maximum, manager.job_count() + self.buffering ),
self.minimum,
)
worker_count = manager.worker_count()
if worker_count < ideal:
manager.set_worker_count( target = ideal )
class breathing_capacity(object):
"""
Varies pool size according to load
"""
def __init__(self, minimum, maximum, buffering, downsize_step = 2, downsize_delay = 5):
self.minimum = minimum
self.maximum = maximum
self.buffering = buffering
self.downsize_step = downsize_step
self.downsize_delay = downsize_delay
self.downsize_wait_start = None
@property
def capacity(self):
return self.maximum
def __call__(self, manager):
ideal = max(
min( self.maximum, manager.job_count() + self.buffering ),
self.minimum,
)
worker_count = manager.worker_count()
if worker_count < ideal:
manager.set_worker_count( target = ideal )
self.downsize_wait_start = None
elif ideal < worker_count:
if self.downsize_wait_start is None:
self.downsize_wait_start = time.time()
elif self.downsize_delay < time.time() - self.downsize_wait_start:
target = max( worker_count - self.downsize_step, ideal )
manager.set_worker_count( target = target )
self.downsize_wait_start = time.time()
else:
self.downsize_wait_start = None
class manager(object):
"""
Process pool
"""
TERMINATION_SIGNAL = None
def __init__(self,
inqueue,
outqueue,
job_factory,
autoscaling,
lifecycle,
waittime = 0.01,
stalltime = 2,
idle_timeout = 120,
):
self.job_factory = job_factory
self.autoscaling = autoscaling
self.lifecycle = lifecycle
self.inqueue = inqueue
self.outqueue = outqueue
self.waittime = waittime
self.stalltime = stalltime
self.idle_timeout = idle_timeout
self.register = process_register()
from itertools import count
self.pid_assigner = count()
self.process_numbered_as = {}
self.recycleds = deque()
self.terminatings = set()
self.unreporteds = deque()
self.outstanding_shutdown_requests = 0
self.running_jobs = set()
self.completed_results = deque()
self.manage()
def job_count(self):
return len( self.running_jobs )
def worker_count(self):
return max(
( len( self.process_numbered_as ) + len( self.terminatings )
- self.outstanding_shutdown_requests ),
0,
)
def is_empty(self):
return not self.running_jobs and not self.completed_results
def is_full(self):
return self.autoscaling.capacity <= self.job_count()
def has_results(self):
return self.completed_results
def results(self):
while not self.is_empty():
while not self.has_results():
self.wait()
self.poll()
self.manage()
yield self.completed_results.popleft()
def submit(self, target, args = (), kwargs = {}):
jobid = identifier()
self.outqueue.put( ( jobid, target, args, kwargs ) )
self.running_jobs.add( jobid )
return jobid
def shutdown(self):
self.set_worker_count( target = 0 )
def resume(self):
self.manage()
def join(self):
while self.running_jobs:
self.poll()
self.wait()
self.poll()
def terminate(self):
self.shutdown()
self.wait()
self.poll()
for process in self.process_numbered_as.values():
if process.is_alive():
if hasattr( process, "terminate" ): # Thread has no terminate
try:
process.terminate()
except Exception:
pass
while self.process_numbered_as:
self.poll()
self.wait()
# Internal methods
def start_process(self):
try:
pid = self.recycleds.popleft()
except IndexError:
pid = next(self.pid_assigner)
process = self.job_factory(
target = pool_process_cycle,
kwargs = {
"pid": pid,
"inqueue": self.outqueue,
"outqueue": self.inqueue,
"waittime": self.waittime,
"lifecycle": self.lifecycle,
"termination_signal": self.TERMINATION_SIGNAL,
"idle_timeout": self.idle_timeout,
},
)
process.start()
self.process_numbered_as[ pid ] = process
def stop_process(self):
self.outqueue.put( self.TERMINATION_SIGNAL )
self.outstanding_shutdown_requests += 1
def wait(self):
time.sleep( self.waittime )
def poll(self):
for (pid, process) in list(self.process_numbered_as.items()):
if not process.is_alive():
process.join()
exit_code = getattr( process, "exitcode", 0 ) # Thread has no "exitcode" attribute
if exit_code != 0:
data = (
pid,
result.get_exception( process = process, exit_code = exit_code ),
result.get_crash_info( process = process ),
)
self.unreporteds.append( ( worker_crash_event, data ) )
self.terminatings.add( pid )
del self.process_numbered_as[ pid ]
while self.unreporteds:
try:
self.inqueue.put( self.unreporteds[0], timeout = self.stalltime )
except Full:
break
self.unreporteds.popleft()
while True:
try:
( event, data ) = self.inqueue.get( timeout = self.waittime )
except Empty:
break
event( register = self.register, data = data )
while self.register.terminateds:
pid = self.register.terminateds[0]
try:
self.terminatings.remove( pid )
except KeyError:
break
self.register.terminateds.popleft()
self.recycleds.append( pid )
while self.register.requested_shutdowns:
pid = self.register.requested_shutdowns[0]
try:
self.terminatings.remove( pid )
except KeyError:
break
self.register.requested_shutdowns.popleft()
self.recycleds.append( pid )
assert 0 < self.outstanding_shutdown_requests
self.outstanding_shutdown_requests -= 1
while self.register.results:
( jobid, res ) = self.register.results.popleft()
self.completed_results.append( ( jobid, res ) )
self.running_jobs.remove( jobid )
def set_worker_count(self, target):
while self.worker_count() < target:
self.start_process()
while target < self.worker_count():
self.stop_process()
def manage(self):
self.autoscaling( manager = self )
class creator(object):
"""
Information to create and destroy a manager
"""
def __init__(
self,
job_factory,
inq_factory,
outq_factory,
autoscaling,
lifecycle,
waittime = 0.01,
stalltime = 2,
idle_timeout = 120,
):
self.job_factory = job_factory
self.inq_factory = inq_factory
self.outq_factory = outq_factory
self.autoscaling = autoscaling
self.lifecycle = lifecycle
self.waittime = waittime
self.stalltime = stalltime
self.idle_timeout = idle_timeout
def create(self):
return manager(
inqueue = self.inq_factory.create(),
outqueue = self.outq_factory.create(),
job_factory = self.job_factory,
autoscaling = self.autoscaling,
lifecycle = self.lifecycle,
waittime = self.waittime,
stalltime = self.stalltime,
idle_timeout = self.idle_timeout,
)
def destroy(self, manager):
manager.terminate()
manager.join()
self.inq_factory.destroy( manager.inqueue )
self.outq_factory.destroy( manager.outqueue )
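# Hedged end-to-end sketch of the submit()/results() cycle documented in the
# module docstring, using threads and plain queues as stand-ins for the libtbx
# job and queue factories (both are assumptions for illustration; real use
# would pass the factories shipped with libtbx.scheduling). The yielded `res`
# is the result wrapper produced by pool_process_cycle; unwrapping it is not
# shown here.
def _example_pool_roundtrip():
  import threading
  from six.moves.queue import Queue
  class _queue_factory(object): # hypothetical stand-in for a libtbx queue factory
    def create(self):
      return Queue()
    def destroy(self, q):
      pass
  pool_creator = creator(
    job_factory = threading.Thread,
    inq_factory = _queue_factory(),
    outq_factory = _queue_factory(),
    autoscaling = constant_capacity( capacity = 1 ),
    lifecycle = unlimited,
    )
  pool = pool_creator.create()
  jobid = pool.submit( target = pow, args = ( 2, 10 ) )
  for ( ident, res ) in pool.results():
    assert ident == jobid
    break
  pool_creator.destroy( pool )
  return res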
| 20.671449
| 90
| 0.665672
|
e7547417be62e339404962802bd1fa2b1e7e5daa
| 1,465
|
py
|
Python
|
day_ok/schedule/data_classes/lessons.py
|
bostud/day_ok
|
2bcee68252b698f5818808d1766fb3ec3f07fce8
|
[
"MIT"
] | null | null | null |
day_ok/schedule/data_classes/lessons.py
|
bostud/day_ok
|
2bcee68252b698f5818808d1766fb3ec3f07fce8
|
[
"MIT"
] | 16
|
2021-02-27T08:36:19.000Z
|
2021-04-07T11:43:31.000Z
|
day_ok/schedule/data_classes/lessons.py
|
bostud/day_ok
|
2bcee68252b698f5818808d1766fb3ec3f07fce8
|
[
"MIT"
] | null | null | null |
from typing import List, Optional
from dataclasses import dataclass
from datetime import datetime, date
from ..models import Teacher, Lessons
class Base:
def to_dict(self, key_prefix: str = '') -> dict:
return {
self.get_key_name(k, key_prefix): v
for k, v in self.__dict__.items()
}
@staticmethod
def get_key_name(key, key_prefix = '') -> str:
return f"{key_prefix}_{key}" if key_prefix else f"{key}"
@dataclass
class AddLessonsDC(Base):
classrooms: list
lessons_types: list
subjects: list
teachers: list
students: list
groups: list
weekdays: tuple
@dataclass
class EditLessonsDC(Base):
classroom: int
date_from_valid: datetime
time_start: datetime
lessons_type: int
teacher: int
parent: int
subject: int
group: int = None
student: int = None
weekdays_for_repeating: Optional[List[int]] = None
date_until_valid: datetime = None
change_all: bool = False
def to_dict(self, **kwargs) -> dict:
return super().to_dict('edit')
@dataclass
class LessonsScheduleTeachers(Base):
teacher: Teacher
date: date
lessons: List[Lessons]
@dataclass
class EmptyLessons(Base):
time_start: datetime.time
is_empty: bool = True
duration: int = 30
@dataclass
class LessonsDayWeekView(Base):
date_name: str
date: datetime.date
is_today: bool
lessons: List[Lessons]
head_color: str
| 20.347222
| 64
| 0.666894
|
fac39f104c0a7146fbdb377cf06ceb3842dedf2c
| 11,162
|
py
|
Python
|
chrome/test/functional/find_in_page.py
|
Gitman1989/chromium
|
2b1cceae1075ef012fb225deec8b4c8bbe4bc897
|
[
"BSD-3-Clause"
] | 2
|
2017-09-02T19:08:28.000Z
|
2021-11-15T15:15:14.000Z
|
chrome/test/functional/find_in_page.py
|
Gitman1989/chromium
|
2b1cceae1075ef012fb225deec8b4c8bbe4bc897
|
[
"BSD-3-Clause"
] | null | null | null |
chrome/test/functional/find_in_page.py
|
Gitman1989/chromium
|
2b1cceae1075ef012fb225deec8b4c8bbe4bc897
|
[
"BSD-3-Clause"
] | 1
|
2020-04-13T05:45:10.000Z
|
2020-04-13T05:45:10.000Z
|
#!/usr/bin/python
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import codecs
import os
import unittest
import pyauto_functional
import pyauto
import test_utils
class FindMatchTests(pyauto.PyUITest):
# Data dir where all find test data files are kept
find_test_data_dir = 'find_in_page'
def testCanFindMatchCount(self):
"""Verify Find match count for valid search"""
url = self.GetFileURLForPath(os.path.join(self.DataDir(), 'title1.html'))
self.NavigateToURL(url)
self.assertEqual(1, self.FindInPage('title')['match_count'])
def testCanFindMatchCountFail(self):
"""Verify Find match count for invalid search"""
url = self.GetFileURLForPath(os.path.join(self.DataDir(), 'title1.html'))
self.NavigateToURL(url)
self.assertEqual(0, self.FindInPage('blah')['match_count'])
def testFindIsNotCaseSensitive(self):
"""Verify that find is not case sensitive.
When performed manually, Find is case-insensitive. But since FindInPage is
case-sensitive by default, we confirm that we get a
different result when we turn off case matching.
"""
url = self.GetFileURLForPath(
os.path.join(self.DataDir(), 'find_in_page', 'largepage.html'))
self.NavigateToURL(url)
case_sensitive_result = self.FindInPage('The')['match_count']
case_insensitive_result = (self.FindInPage('The', match_case=False)
['match_count'])
self.assertTrue(case_insensitive_result >= case_sensitive_result)
def testLocalizationAndCaseOrder(self):
"""Verify that we check for localization.
Here we check the Turkish-i scenario where we verify that we
find both dotted and dotless I's.
"""
url = self.GetFileURLForPath(
os.path.join(self.DataDir(), 'find_in_page', 'turkish.html'))
self.NavigateToURL(url)
dotless = self.FindInPage(u'\u0131')['match_count']
dotted = self.FindInPage('i')['match_count']
capital_i_with_dot = self.FindInPage(u'\u0130')['match_count']
capital_i = self.FindInPage('I')['match_count']
self.assertNotEqual(dotless, dotted)
self.assertNotEqual(capital_i_with_dot, capital_i)
self.assertNotEqual(dotted, capital_i_with_dot)
def testSearchInTextAreas(self):
"""Verify search for text within various forms and text areas."""
urls = []
urls.append(self.GetFileURLForPath(
os.path.join(self.DataDir(), 'find_in_page', 'textintextarea.html')))
urls.append(self.GetFileURLForPath(
os.path.join(self.DataDir(), 'find_in_page', 'smalltextarea.html')))
urls.append(self.GetFileURLForPath(os.path.join(
self.DataDir(), 'find_in_page', 'populatedform.html')))
for url in urls:
self.NavigateToURL(url)
self.assertEqual(1, self.FindInPage('cat')['match_count'])
self.assertEqual(0, self.FindInPage('bat')['match_count'])
def testSearchWithinSpecialURL(self):
"""Verify search for text within special URLs such as chrome:history.
chrome://history, chrome://downloads, pyAuto Data directory
"""
zip_file = 'a_zip_file.zip'
self.NavigateToURL(self.GetFileURLForPath(self.DataDir()))
# search in Data directory
self.assertEqual(1,
self.FindInPage('downloads', tab_index=0)['match_count'])
# search in History page
self.AppendTab(pyauto.GURL('chrome://history'))
# the contents in the history page load asynchronously after tab loads
search_query = os.path.join('chrome', 'test', 'data')
self.WaitUntil(
lambda: self.FindInPage(search_query, tab_index=1)['match_count'],
expect_retval=1)
self.assertEqual(
1, self.FindInPage(search_query, tab_index=1)['match_count'])
# search in Downloads page
test_utils.DownloadFileFromDownloadsDataDir(self, zip_file)
self.AppendTab(pyauto.GURL('chrome://downloads'))
# the contents in the downloads page load asynchronously after tab loads
self.WaitUntil(
lambda: self.FindInPage(zip_file, tab_index=2)['match_count'],
expect_retval=2)
self.assertEqual(2,
self.FindInPage(zip_file, tab_index=2)['match_count'])
test_utils.RemoveDownloadedTestFile(self, zip_file)
def testFindNextAndPrevious(self):
"""Verify search selection coordinates.
The data file is set up such that the text occurrences fall on the same line,
and we verify their positions by comparing their relative coordinates.
"""
search_string = u'\u5728\u897f\u660c\u536b\u661f\u53d1'
url = self.GetFileURLForPath(os.path.join(
self.DataDir(), self.find_test_data_dir, 'specialchar.html'))
self.NavigateToURL(url)
first_find = self.FindInPage(search_string)
second_find = self.FindInPage(search_string, find_next=True)
# We have search occurrence in the same row, so top-bottom
# coordinates should be the same even for second search.
self.assertEqual(first_find['match_top'], second_find['match_top'],
'Words\' top coordinates should be same')
self.assertEqual(first_find['match_bottom'], second_find['match_bottom'],
'Words\' bottom coordinates should be same')
# And left-right coordinates should be in order.
self.assertTrue(first_find['match_left'] < second_find['match_left'],
'Second find left coordinate should be greater than '
'the first find left coordinate')
self.assertTrue(first_find['match_right'] < second_find['match_right'],
'Second find right coordinate should be greater than '
'the first find right coordinate')
first_find_reverse = self.FindInPage(
search_string, find_next=True, forward=False)
# We find next and we go back so find coordinates should be the same
# as previous ones.
self.assertEqual(first_find, first_find_reverse,
'First occurrence must be selected, since we went back')
def testSpecialChars(self):
"""Test find in page with unicode and special characters.
Searches page content, comments and meta data, and verifies that comments
and meta data are not searchable.
"""
search_string = u'\u5728\u897f\u660c\u536b\u661f\u53d1'
url = self.GetFileURLForPath(os.path.join(
self.DataDir(), self.find_test_data_dir, 'specialchar.html'))
self.NavigateToURL(url)
self.assertEqual(4, self.FindInPage(search_string)['match_count'])
search_string = u'240^*&%!#~!*&\u518d\u5c31\u8077\u624b\u5f53'
self.assertEqual(2, self.FindInPage(search_string)['match_count'])
# Find for the special chars in the comment and in the meta tag
search_string = u'\u4e2d\u65b0\u793e\u8bb0\u8005\u5b8b\u5409'\
u'\u6cb3\u6444\u4e2d\u65b0\u7f51'
self.assertEqual(0, self.FindInPage(search_string)['match_count'],
'Chrome should not find chars from comment or meta tags')
def testFindInLargePage(self):
"""Find in a very large page"""
url = self.GetFileURLForPath(os.path.join(
self.DataDir(), self.find_test_data_dir, 'largepage.html'))
self.NavigateToURL(url)
self.assertEqual(373, self.FindInPage('daughter of Prince')['match_count'])
def testFindLongString(self):
"""Find a very long string in a large page"""
url = self.GetFileURLForPath(os.path.join(
self.DataDir(), self.find_test_data_dir, 'largepage.html'))
self.NavigateToURL(url)
file = codecs.open(os.path.join(self.DataDir(), self.find_test_data_dir,
'LongFind.txt'), 'r', 'utf-8')
search = file.read()
self.assertEqual(1, self.FindInPage(search)['match_count'])
def testFindBigString(self):
"""Find a big font string in a page"""
url = self.GetFileURLForPath(os.path.join(
self.DataDir(), self.find_test_data_dir, 'BigText.html'))
self.NavigateToURL(url)
self.assertEqual(1, self.FindInPage('SomeLargeString')['match_count'])
def testVariousFindTests(self):
"""Test find in page for <span> style text, lists, html comments, etc."""
url = self.GetFileURLForPath(os.path.join(
self.DataDir(), self.find_test_data_dir, 'FindRandomTests.html'))
self.NavigateToURL(url)
search = 'has light blue eyes and my father has dark'
self.assertEqual(1, self.FindInPage(search)['match_count'],
'Failed to find text with <span> tag')
# Find for list items
search = 'Google\nApple\nandroid'
self.assertEqual(1, self.FindInPage(search)['match_count'],
'Failed to find the list items')
# Find HTML comments
self.assertEqual(0, self.FindInPage('example comment')['match_count'],
'We should not find HTML comments')
def testFindWholeFileContent(self):
"""Find the whole text file page and find count should be 1"""
find_test_file = os.path.join(self.DataDir(), self.find_test_data_dir,
'find_test.txt')
url = self.GetFileURLForPath(find_test_file)
self.NavigateToURL(url)
file = open(find_test_file)
search = file.read()
self.assertEqual(1, self.FindInPage(search)['match_count'],
'Failed to find the whole page')
def testSingleOccurrence(self):
"""Search Back and Forward on a single occurrence"""
url = self.GetFileURLForPath(os.path.join(
self.DataDir(), self.find_test_data_dir, 'FindRandomTests.html'))
self.NavigateToURL(url)
self.assertEqual(1, self.FindInPage('2010 Pro Bowl')['match_count'])
# First occurrence find
first_occurrence_dict = self.FindInPage('2010 Pro Bowl')
# Finding next occurrence
next_occurrence_dict = self.FindInPage('2010 Pro Bowl', find_next = True)
self.assertEqual(first_occurrence_dict, next_occurrence_dict,
'We have only one occurrence in this page so '
'first and next coordinates must be same')
# Doing a fake find so we have no previous search
self.FindInPage('ghgfjgfh201232rere')
first_occurrence_dict = self.FindInPage('2010 Pro Bowl')
# Finding previous occurrence
back_occurrence_dict = self.FindInPage('2010 Pro Bowl',
find_next = True, forward = False)
self.assertEqual(first_occurrence_dict, back_occurrence_dict,
'We have only one occurrence in this page so '
'first and back search coordinates must be same')
def testSearchInPDF(self):
"""Verify that we can find in a pdf file.
Only for Google Chrome builds (Chromium builds do not have internal pdf).
"""
# bail out if not a branded build
properties = self.GetBrowserInfo()['properties']
if properties['branding'] != 'Google Chrome':
return
url = self.GetFileURLForPath(os.path.join(
self.DataDir(), 'plugin', 'Embed.pdf'))
self.NavigateToURL(url)
search_count = self.FindInPage('adobe')['match_count']
self.assertEqual(8, search_count, 'Failed to find in the pdf file')
if __name__ == '__main__':
pyauto_functional.Main()
| 44.47012
| 79
| 0.687153
|
0b10804851ab7ad560b0b5f187ae112beb911167
| 139
|
py
|
Python
|
app/phising_Dataset_Generator.py
|
chimeyrock999/PhishingURLDetection
|
9b854845e90644b839d7ffd981f53fe005e2788d
|
[
"MIT"
] | 3
|
2021-06-26T11:19:36.000Z
|
2021-11-19T16:43:06.000Z
|
app/phising_Dataset_Generator.py
|
chimeyrock999/PhishingURLDetection
|
9b854845e90644b839d7ffd981f53fe005e2788d
|
[
"MIT"
] | null | null | null |
app/phising_Dataset_Generator.py
|
chimeyrock999/PhishingURLDetection
|
9b854845e90644b839d7ffd981f53fe005e2788d
|
[
"MIT"
] | null | null | null |
from feature_extractor import extract
extract.generate_dataset('dataset_gender/phising_urls.csv', 'dataset_gender/phising_dataset.csv', 1)
| 46.333333
| 100
| 0.856115
|
52df0b1919a9b73c5d2505b8994193cf73e65843
| 145
|
py
|
Python
|
src/namespace_metadata.py
|
sscu-budapest/dataset-template
|
cbaa42a16e8692bc8248780ae0609c0808514af6
|
[
"MIT"
] | null | null | null |
src/namespace_metadata.py
|
sscu-budapest/dataset-template
|
cbaa42a16e8692bc8248780ae0609c0808514af6
|
[
"MIT"
] | 2
|
2021-06-19T21:24:36.000Z
|
2021-09-23T16:10:10.000Z
|
src/namespace_metadata.py
|
sscu-budapest/dataset-template
|
cbaa42a16e8692bc8248780ae0609c0808514af6
|
[
"MIT"
] | 1
|
2021-09-29T12:13:15.000Z
|
2021-09-29T12:13:15.000Z
|
from sscutils import ScruTable, TableFeaturesBase
class RecordFeatures(TableFeaturesBase):
pass
record_table = ScruTable(RecordFeatures)
| 16.111111
| 49
| 0.82069
|
80782a15aa8f0cf867f76dc8b16f20474ef0e94f
| 4,594
|
py
|
Python
|
qcloud-dns.py
|
szlytlyt/certbot-dns-qcloud-hook
|
5f5123e7d53854de2adc1274f01e8a5e76d42ce6
|
[
"MIT"
] | null | null | null |
qcloud-dns.py
|
szlytlyt/certbot-dns-qcloud-hook
|
5f5123e7d53854de2adc1274f01e8a5e76d42ce6
|
[
"MIT"
] | null | null | null |
qcloud-dns.py
|
szlytlyt/certbot-dns-qcloud-hook
|
5f5123e7d53854de2adc1274f01e8a5e76d42ce6
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import hmac
import time
import random
import hashlib
import binascii
import requests
# @home2 https://setq.me/1024
class Client(object):
def __init__(self, secret_id, secret_key, host, uri, **params):
self.secret_id = secret_id
self.secret_key = secret_key
self.host = host
self.uri = uri
self.params = params
if sys.version_info.major > 2:
self.Py3 = True
self.secret_key = bytes(self.secret_key, 'utf-8')
else:
self.Py3 = False
def public_params(self):
params = {
'Nonce': random.randint(1, 9999),
'SecretId': self.secret_id,
'SignatureMethod': 'HmacSHA1',
'Timestamp': int(time.time()),
}
params.update(self.params)
return params
def sign(self, params, method='GET'):
params = params.copy()
params.update(self.public_params())
p = {}
for k in params:
if method == 'POST' and str(params[k])[0:1] == '@':
continue
p[k.replace('_', '.')] = params[k]
ps = '&'.join('%s=%s' % (k, p[k]) for k in sorted(p))
msg = '%s%s%s?%s' % (method.upper(), self.host, self.uri, ps)
if self.Py3:
msg = bytes(msg, 'utf-8')
hashed = hmac.new(self.secret_key, msg, hashlib.sha1)
base64 = binascii.b2a_base64(hashed.digest())[:-1]
if self.Py3:
base64 = base64.decode()
params['Signature'] = base64
return params
def send(self, params, method='GET'):
params = self.sign(params, method)
req_host = 'https://{}{}'.format(self.host, self.uri)
if method == 'GET':
resp = requests.get(req_host, params=params)
else:
resp = requests.post(req_host, data=params)
if resp.json()['code'] != 0:
print(" - qcloud API error message: {}".format(
resp.json()['message'].encode('utf-8')))
exit(-1)
return resp.json()
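# Sketch of how the signer is used by the Cns wrapper below (credentials are
# placeholders, not real values):
#
#   client = Client('AKIDxxxx', 'secretxxxx', 'cns.api.qcloud.com', '/v2/index.php')
#   signed = client.sign({'Action': 'RecordList', 'domain': 'example.com'})
#   # sign() merges the public params, sorts the keys, builds the string
#   # "GETcns.api.qcloud.com/v2/index.php?Action=RecordList&...&Timestamp=..."
#   # and stores its base64 HMAC-SHA1 digest as signed['Signature'].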
# View details at https://cloud.tencent.com/document/product/302/4032
class Cns:
def __init__(self, secret_id, secret_key):
host, uri = 'cns.api.qcloud.com', '/v2/index.php'
self.client = Client(secret_id, secret_key, host, uri)
def list(self, domain):
body = {
'Action': 'RecordList',
'domain': domain
}
return self.client.send(body)
def create(self, domain, name, _type, value):
body = {
'Action': 'RecordCreate',
'domain': domain,
'subDomain': name,
'recordType': _type,
'recordLine': '默认',
'value': value
}
return self.client.send(body)
def delete(self, domain, _id):
body = {
'Action': 'RecordDelete',
'domain': domain,
'recordId': _id
}
return self.client.send(body)
def separate(url):
Topdomain = ['edu', 'cn', 'com', 'net', 'co']
parts = url.split('.')
name = []
domain = []
for test in Topdomain:
try:
if parts.index(test):
name = parts[0:parts.index(test)-1]
domain = parts[parts.index(test)-1:]
break
except ValueError:
continue
return {
'name': ".".join(name),
'domain': ".".join(domain)
}
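# Example of the intended split (hypothetical domain):
#   separate('www.example.com') -> {'name': 'www', 'domain': 'example.com'}
# i.e. everything before the label preceding the recognised top-level part becomes
# the subdomain name, and the remainder is treated as the registrable domain.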
def run(secret_id, secret_key):
env = os.environ.copy()
cns = Cns(secret_id, secret_key)
option = sys.argv[1] # add|del
domain = separate(env['CERTBOT_DOMAIN'])['domain']
name = "_acme-challenge.{}".format(
separate(env['CERTBOT_DOMAIN'])['name'])
value = env['CERTBOT_VALIDATION']
print(' - {} {} {} {}'.format(option, domain, name, value))
if option == 'add':
cns.create(domain, name, 'TXT', value)
time.sleep(10) # Waiting for record to take effect
elif option == 'del':
for record in cns.list(domain)['data']['records']:
if record['name'] == name and record['value'] == value:
print(cns.delete(domain, record['id']))
else:
print(" - No records found")
if __name__ == '__main__':
# Create your secret_id and secret_key at https://console.cloud.tencent.com/cam/capi
secret_id = 'your secret_id'
secret_key = 'your secret_key'
if len(sys.argv) > 1:
run(secret_id, secret_key)
else:
print('using: qcloud-dns.py add|del')
exit(1)
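# Rough invocation sketch: certbot's manual DNS hooks export CERTBOT_DOMAIN and
# CERTBOT_VALIDATION, and this script reads the add|del verb from argv, so
# (assuming the script is executable and the credentials above are filled in)
# something like the following should work:
#
#   certbot certonly --manual --preferred-challenges dns \
#       --manual-auth-hook "/path/to/qcloud-dns.py add" \
#       --manual-cleanup-hook "/path/to/qcloud-dns.py del" \
#       -d example.com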
| 28.358025
| 88
| 0.53744
|
1bc079665bd2009203f917c92e3fd598f5a2d920
| 5,755
|
py
|
Python
|
src/tests/test_decoder.py
|
dailai/quick-nlp
|
d27ae90c0788d0899af7f45b323ca64ed0c33868
|
[
"MIT"
] | null | null | null |
src/tests/test_decoder.py
|
dailai/quick-nlp
|
d27ae90c0788d0899af7f45b323ca64ed0c33868
|
[
"MIT"
] | null | null | null |
src/tests/test_decoder.py
|
dailai/quick-nlp
|
d27ae90c0788d0899af7f45b323ca64ed0c33868
|
[
"MIT"
] | null | null | null |
from types import SimpleNamespace
import numpy as np
import pytest
from fastai.core import T, V, to_np, to_gpu
from numpy.testing import assert_allclose
from quicknlp.modules import EmbeddingRNNDecoder, RNNAttentionDecoder, Projection, AttentionProjection
from quicknlp.modules.basic_decoder import select_hidden_by_index, reshape_parent_indices
from quicknlp.utils import assert_dims
params_to_try = [
SimpleNamespace(ntokens=4, emb_size=20, nhid=32, nlayers=1, max_tokens=10, batch_size=2, num_beams=0,
attention=False),
SimpleNamespace(ntokens=4, emb_size=20, nhid=32, nlayers=1, max_tokens=10, batch_size=2, num_beams=1,
attention=False),
SimpleNamespace(ntokens=4, emb_size=20, nhid=32, nlayers=1, max_tokens=10, batch_size=4, num_beams=3,
attention=False),
SimpleNamespace(ntokens=4, emb_size=20, nhid=32, nlayers=1, max_tokens=10, batch_size=4, num_beams=0,
attention=True, att_hid=5),
SimpleNamespace(ntokens=4, emb_size=20, nhid=32, nlayers=1, max_tokens=10, batch_size=4, num_beams=1,
attention=True, att_hid=5),
SimpleNamespace(ntokens=4, emb_size=20, nhid=32, nlayers=1, max_tokens=10, batch_size=4, num_beams=3,
attention=True, att_hid=5),
SimpleNamespace(ntokens=4, emb_size=20, nhid=32, nlayers=3, max_tokens=10, batch_size=4, num_beams=0,
attention=True, att_hid=5),
SimpleNamespace(ntokens=4, emb_size=20, nhid=32, nlayers=3, max_tokens=10, batch_size=4, num_beams=1,
attention=True, att_hid=5),
SimpleNamespace(ntokens=4, emb_size=20, nhid=32, nlayers=3, max_tokens=10, batch_size=4, num_beams=3,
attention=True, att_hid=5)
]
ids = ["teacher_forcing", "greedy", "beam_search", "attention_tf", "attention_greedy", "attention_beam_search",
"attention_tf_three_layers", "attention_greedy_three_layers", "attention_beam_search_three_layers"]
@pytest.fixture(scope="session", params=params_to_try, ids=ids)
def decoder_params(request):
return request.param
@pytest.fixture(scope="session")
def rnn_decoder(decoder_params):
if decoder_params.attention:
decoder = RNNAttentionDecoder(cell_type="gru", ntoken=decoder_params.ntokens,
emb_sz=decoder_params.emb_size, nhid=decoder_params.nhid,
nlayers=decoder_params.nlayers,
pad_token=1, eos_token=2,
max_tokens=decoder_params.max_tokens)
decoder.projection_layer = AttentionProjection(n_out=decoder_params.ntokens,
n_in=decoder_params.emb_size,
att_nhid=decoder_params.att_hid,
tie_encoder=None,
dropout=0.0)
else:
decoder = EmbeddingRNNDecoder(cell_type="gru", ntoken=decoder_params.ntokens,
emb_sz=decoder_params.emb_size, nhid=decoder_params.nhid,
nlayers=decoder_params.nlayers,
pad_token=1, eos_token=2,
max_tokens=decoder_params.max_tokens)
decoder.projection_layer = Projection(n_out=decoder_params.ntokens,
n_in=decoder_params.emb_size, tie_encoder=None, dropout=0.0)
decoder = to_gpu(decoder)
decoder.reset(decoder_params.batch_size)
return decoder, decoder_params
def test_select_hidden_by_index():
bs, num_beams = 2, 3
# when I pass inputs to the select_hidden_by_index function with bs=2, num_beams = 3
inputs = np.array([2, 3, 4, 10, 11, 12]).reshape(1, 6, 1) # [ndir, bs, hd]
tr_inputs = [V(T(inputs))]
# and indices for every batch [bs, ndims]
indices = np.array([[0, 0, 1], [2, 2, 2]])
tr_indices = V(T(indices))
tr_indices = reshape_parent_indices(tr_indices.view(-1), bs=bs, num_beams=num_beams)
results = select_hidden_by_index(tr_inputs, tr_indices.view(-1))
# then I get the expected selected hidden states
expected = np.array([2, 2, 3, 12, 12, 12])
assert_allclose(actual=to_np(results[0]).ravel(), desired=expected)
@pytest.fixture()
def decoder_inputs(decoder_params):
batch_size = decoder_params.batch_size
inputs = np.zeros(batch_size, dtype=np.int).reshape(1, batch_size)
enc_inputs = np.random.rand(1, decoder_params.batch_size, decoder_params.emb_size)
vin = V(T(inputs))
ven = V(T(enc_inputs))
return vin, ven
def test_decoder(rnn_decoder, decoder_inputs):
dec_ins, keys = decoder_inputs
decoder, params = rnn_decoder
decoder.reset(params.batch_size)
hidden = decoder.hidden
decoder.projection_layer.keys = keys
raw_outputs, outputs = decoder(dec_ins, hidden=hidden, num_beams=params.num_beams)
assert params.nlayers == len(outputs)
if params.num_beams > 0:
assert_dims(outputs,
[params.nlayers, None, params.num_beams * params.batch_size, (params.nhid, params.ntokens)])
# actual beam outputs can be found in beam_outputs
assert decoder.beam_outputs is not None
assert_dims(decoder.beam_outputs, [None, params.batch_size, params.num_beams])
# the sl can go up to max_tokens + 1 (for the extra 0 token at the end)
assert 0 < decoder.beam_outputs.shape[0] <= params.max_tokens + 1
else:
assert_dims(outputs, [params.nlayers, None, params.batch_size, (params.nhid, params.ntokens)])
assert decoder.beam_outputs is None
| 50.929204
| 112
| 0.649522
|
baf230f60f94414e85a33e86afb5d46ed7780393
| 3,020
|
py
|
Python
|
setup.py
|
jma/invenio-circulation
|
d93be872ab8673eb9a37b0e183fe42ec3116c252
|
[
"MIT"
] | 1
|
2022-03-17T09:23:10.000Z
|
2022-03-17T09:23:10.000Z
|
setup.py
|
jma/invenio-circulation
|
d93be872ab8673eb9a37b0e183fe42ec3116c252
|
[
"MIT"
] | null | null | null |
setup.py
|
jma/invenio-circulation
|
d93be872ab8673eb9a37b0e183fe42ec3116c252
|
[
"MIT"
] | 1
|
2022-03-17T09:23:12.000Z
|
2022-03-17T09:23:12.000Z
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2018 CERN.
#
# Invenio-Circulation is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Invenio module for the circulation of bibliographic items."""
import os
from setuptools import find_packages, setup
readme = open('README.rst').read()
history = open('CHANGES.rst').read()
tests_require = [
'check-manifest>=0.25',
'coverage>=4.0',
'isort>=4.3.3',
'pydocstyle>=1.0.0',
'pytest-cache>=1.0',
'pytest-cov>=1.8.0',
'pytest-pep8>=1.0.6',
'pytest>=2.8.0',
]
extras_require = {
'docs': [
'Sphinx>=1.5.1',
],
'tests': tests_require,
}
extras_require['all'] = []
for reqs in extras_require.values():
extras_require['all'].extend(reqs)
setup_requires = [
'Babel>=1.3',
'pytest-runner>=2.6.2',
]
install_requires = [
'Flask-BabelEx>=0.9.2',
]
packages = find_packages()
# Get the version string. Cannot be done with import!
g = {}
with open(os.path.join('invenio_circulation', 'version.py'), 'rt') as fp:
exec(fp.read(), g)
version = g['__version__']
setup(
name='invenio-circulation',
version=version,
description=__doc__,
long_description=readme + '\n\n' + history,
keywords='invenio TODO',
license='MIT',
author='CERN',
author_email='info@inveniosoftware.org',
url='https://github.com/inveniosoftware/invenio-circulation',
packages=packages,
zip_safe=False,
include_package_data=True,
platforms='any',
entry_points={
'invenio_base.apps': [
'invenio_circulation = invenio_circulation:InvenioCirculation',
],
'invenio_i18n.translations': [
'messages = invenio_circulation',
],
# TODO: Edit these entry points to fit your needs.
# 'invenio_access.actions': [],
# 'invenio_admin.actions': [],
# 'invenio_assets.bundles': [],
# 'invenio_base.api_apps': [],
# 'invenio_base.api_blueprints': [],
# 'invenio_base.blueprints': [],
# 'invenio_celery.tasks': [],
# 'invenio_db.models': [],
# 'invenio_pidstore.minters': [],
# 'invenio_records.jsonresolver': [],
},
extras_require=extras_require,
install_requires=install_requires,
setup_requires=setup_requires,
tests_require=tests_require,
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Development Status :: 1 - Planning',
],
)
| 27.706422
| 80
| 0.615232
|
b590392bed0a24510abc8eb796698a8b7f5b17e1
| 1,794
|
py
|
Python
|
gravur/main.py
|
F483/gravur
|
575c268d9ac28aa0ba00f1e5109bd74c3b7b69a5
|
[
"MIT"
] | 3
|
2015-07-20T17:56:21.000Z
|
2017-10-22T05:52:13.000Z
|
gravur/main.py
|
F483/gravur
|
575c268d9ac28aa0ba00f1e5109bd74c3b7b69a5
|
[
"MIT"
] | null | null | null |
gravur/main.py
|
F483/gravur
|
575c268d9ac28aa0ba00f1e5109bd74c3b7b69a5
|
[
"MIT"
] | null | null | null |
# coding: utf-8
# Copyright (c) 2015 Fabian Barkhau <fabian.barkhau@gmail.com>
# License: MIT (see LICENSE file)
import os
import sys
import argparse
# kivy setup
os.environ["KIVY_NO_ARGS"] = "1"
#os.environ["KIVY_NO_CONSOLELOG"] = "1"
os.environ["KIVY_NO_FILELOG"] = "1"
# min supported resolution
from kivy.config import Config
Config.set('graphics', 'width', '320')
Config.set('graphics', 'height', '460')
# demo resolution
#from kivy.config import Config
#Config.set('graphics', 'width', '400')
#Config.set('graphics', 'height', '640')
from gravur.app import GravurApp
def _parse_args(args):
class ArgumentParser(argparse.ArgumentParser):
def error(self, message):
sys.stderr.write('error: %s\n' % message)
self.print_help()
sys.exit(2)
# setup parser
description = "Gravur bitcoin wallet."
parser = ArgumentParser(description=description)
default_app_home = os.path.join(os.path.expanduser("~"), ".gravur")
# wallet
default_app_wallet = os.path.join(default_app_home, "default.wallet")
parser.add_argument("--wallet", default=default_app_wallet, help="""
Encrypted wallet file location (created if non-existent).
Default: {0}
""".format(default_app_wallet))
# testnet
parser.add_argument('--testnet', action='store_true',
help="Use bitcoin testnet instead of mainnet.")
# dryrun
parser.add_argument('--dryrun', action='store_true',
help="Never publish anything to the blockchain.")
# tmp_wallet
parser.add_argument("temp_wallet", help="Temporary prototype wallet.")
return vars(parser.parse_args(args=args))
def main(args):
backend_config = _parse_args(args)
GravurApp(backend_config).run()
| 27.181818
| 74
| 0.672241
|
2885cea08d6b2216c8f19e5cec9ff707fd444ee8
| 1,217
|
py
|
Python
|
indonesian_dot/envs/tests/puzzle_tests.py
|
Ra-Ni/Indonesian-Dot-Solver
|
2baf507d23816b686f046f89d4c833728b25f2dc
|
[
"MIT"
] | null | null | null |
indonesian_dot/envs/tests/puzzle_tests.py
|
Ra-Ni/Indonesian-Dot-Solver
|
2baf507d23816b686f046f89d4c833728b25f2dc
|
[
"MIT"
] | null | null | null |
indonesian_dot/envs/tests/puzzle_tests.py
|
Ra-Ni/Indonesian-Dot-Solver
|
2baf507d23816b686f046f89d4c833728b25f2dc
|
[
"MIT"
] | 1
|
2020-03-18T15:23:24.000Z
|
2020-03-18T15:23:24.000Z
|
import unittest
import agents
from envs.puzzle import Puzzle
class MyTestCase(unittest.TestCase):
def test_bfs(self):
bfs = agents.make('bfs')
p = Puzzle('1010101110000000000000000')
p.max_depth = 7
p.max_length = 5
sol, search = p.traverse(bfs)
print(sol)
print()
print(search)
def test_dfs(self):
dfs = agents.make('dfs')
p = Puzzle('0000000000000000000000000000000000000000000100000000111000000001000000000000000000000000000000000000')
p.max_depth = 2
p.max_length = 2
sol, search = p.traverse(dfs)
print(sol)
print()
print(search)
def test_dfs2(self):
dfs = agents.make('dfs')
p = Puzzle('1010101110000000000000000')
p.max_depth = 7
p.max_length = 2
sol, search = p.traverse(dfs)
print(sol)
print()
print(search)
def test_a_star(self):
a_star = agents.make('a*')
p = Puzzle('111111111')
p.max_depth = 7
p.max_length = 2
sol, search = p.traverse(a_star)
print(sol)
print()
print(search)
if __name__ == '__main__':
unittest.main()
| 23.862745
| 122
| 0.578472
|
cfc528487c2f09d697eca1b0a7a489d118353bf2
| 6,825
|
py
|
Python
|
TEST_DATA.py
|
GundlackFelixDEV/pdf-template-service
|
ccd5133b00fed88ca60063fa4e22401c78bb18cb
|
[
"MIT"
] | null | null | null |
TEST_DATA.py
|
GundlackFelixDEV/pdf-template-service
|
ccd5133b00fed88ca60063fa4e22401c78bb18cb
|
[
"MIT"
] | null | null | null |
TEST_DATA.py
|
GundlackFelixDEV/pdf-template-service
|
ccd5133b00fed88ca60063fa4e22401c78bb18cb
|
[
"MIT"
] | null | null | null |
PROFILE = {
"reciepientsFullName": "Max Mustermann",
"lastName": "Mustermann",
"title": "Mr.",
"reciepientsAddress": "Musterstraße 11",
"zipCode": "123456",
"city": "Musterhausen",
"IBAN": "DE07123412341234123412"
}
NF_FORM = {
"proc_agency": "USAG Grafenwoehr,<br>HQUSAG Grafenwoer,<br>Tax Relieve Office MWW,<br>Tax Reliev, Unit 2810",
"ocr_OrderNr": "GR-NF1-189559",
"ocr_ValidFrom": "09-2021",
"ocr_ValidUntil": "09-2022",
}
INVOICE = {
"TotalAmount": 119,
"TotalRemonon": 105,
"TotalRefund": 14,
"InvoiceNr": "A-10000-01-2109-GR-NF1-189559.1",
"Period": "August 2021",
"Date": "1.10.2021"
}
RECEITS = [
{
"vendor": {
"name": "Muster Vendor",
"ustdid": "xxx-xxx-xxxxxx",
"RemononID": "48483",
"ocr_MerchantStreet": "Am Heuballenhof 28",
"ocr_MerchantCity": "Guenzierweiler",
"ocr_MerchantPostcode": "3718",
"ocr_VatNumber": "DE999999999",
"ocr_TaxNumber": "255/383/332"
},
"line_items": [{
"ocr_Quantity": 3,
"ocr_ProductName": "Playstation",
"ocr_GrossLineTotal": 119,
"remonon_GrossLineTotal": 105,
"refund": 14
}, {
"ocr_Quantity": 1,
"ocr_ProductName": "Other Product",
"ocr_GrossLineTotal": 119,
"remonon_GrossLineTotal": 105,
"refund": 14
}],
"TotalAmount": 119,
"TotalRemonon": 105,
"TotalRefund": 14,
"ocr_Date": "12.12.2021 - 18:29",
"romononID": "48483",
"invoiceNr": "X"
},
{
"vendor": {
"name": "Muster Vendor",
"ustdid": "xxx-xxx-xxxxxx",
"RemononID": "48483",
"ocr_MerchantStreet": "Am Heuballenhof 28",
"ocr_MerchantCity": "Guenzierweiler",
"ocr_MerchantPostcode": "3718",
"ocr_VatNumber": "DE999999999",
"ocr_TaxNumber": "255/383/332"
},
"line_items": [{
"ocr_Quantity": 3,
"ocr_ProductName": "Playstation",
"ocr_GrossLineTotal": 119,
"remonon_GrossLineTotal": 105,
"refund": 14
}, {
"ocr_Quantity": 1,
"ocr_ProductName": "Other Product",
"ocr_GrossLineTotal": 119,
"remonon_GrossLineTotal": 105,
"refund": 14
}],
"TotalAmount": 119,
"TotalRemonon": 105,
"TotalRefund": 14,
"ocr_Date": "12.12.2021 - 18:29",
"romononID": "48483",
"invoiceNr": "X"
},
{
"vendor": {
"name": "Muster Vendor",
"ustdid": "xxx-xxx-xxxxxx",
"RemononID": "48483",
"ocr_MerchantStreet": "Am Heuballenhof 28",
"ocr_MerchantCity": "Guenzierweiler",
"ocr_MerchantPostcode": "3718",
"ocr_VatNumber": "DE999999999",
"ocr_TaxNumber": "255/383/332"
},
"line_items": [{
"ocr_Quantity": 3,
"ocr_ProductName": "Playstation",
"ocr_GrossLineTotal": 119,
"remonon_GrossLineTotal": 105,
"refund": 14
}, {
"ocr_Quantity": 1,
"ocr_ProductName": "Other Product",
"ocr_GrossLineTotal": 119,
"remonon_GrossLineTotal": 105,
"refund": 14
}],
"TotalAmount": 119,
"TotalRemonon": 105,
"TotalRefund": 14,
"ocr_Date": "12.12.2021 - 18:29",
"romononID": "48483",
"invoiceNr": "X"
},
{
"vendor": {
"name": "Muster Vendor",
"ustdid": "xxx-xxx-xxxxxx",
"RemononID": "48483",
"ocr_MerchantStreet": "Am Heuballenhof 28",
"ocr_MerchantCity": "Guenzierweiler",
"ocr_MerchantPostcode": "3718",
"ocr_VatNumber": "DE999999999",
"ocr_TaxNumber": "255/383/332"
},
"line_items": [{
"ocr_Quantity": 3,
"ocr_ProductName": "Playstation",
"ocr_GrossLineTotal": 119,
"remonon_GrossLineTotal": 105,
"refund": 14
}, {
"ocr_Quantity": 1,
"ocr_ProductName": "Other Product",
"ocr_GrossLineTotal": 119,
"remonon_GrossLineTotal": 105,
"refund": 14
}],
"TotalAmount": 119,
"TotalRemonon": 105,
"TotalRefund": 14,
"ocr_Date": "12.12.2021 - 18:29",
"romononID": "48483",
"invoiceNr": "X"
},
{
"vendor": {
"name": "Muster Vendor",
"ustdid": "xxx-xxx-xxxxxx",
"RemononID": "48483",
"ocr_MerchantStreet": "Am Heuballenhof 28",
"ocr_MerchantCity": "Guenzierweiler",
"ocr_MerchantPostcode": "3718",
"ocr_VatNumber": "DE999999999",
"ocr_TaxNumber": "255/383/332"
},
"line_items": [{
"ocr_Quantity": 3,
"ocr_ProductName": "Playstation",
"ocr_GrossLineTotal": 119,
"remonon_GrossLineTotal": 105,
"refund": 14
}, {
"ocr_Quantity": 1,
"ocr_ProductName": "Other Product",
"ocr_GrossLineTotal": 119,
"remonon_GrossLineTotal": 105,
"refund": 14
}],
"TotalAmount": 119,
"TotalRemonon": 105,
"TotalRefund": 14,
"ocr_Date": "12.12.2021 - 18:29",
"romononID": "48483",
"invoiceNr": "X"
},{
"vendor": {
"name": "Muster Vendor",
"ustdid": "xxx-xxx-xxxxxx",
"RemononID": "48483",
"ocr_MerchantStreet": "Am Heuballenhof 28",
"ocr_MerchantCity": "Guenzierweiler",
"ocr_MerchantPostcode": "3718",
"ocr_VatNumber": "DE999999999",
"ocr_TaxNumber": "255/383/332"
},
"line_items": [{
"ocr_Quantity": 3,
"ocr_ProductName": "Playstation",
"ocr_GrossLineTotal": 119,
"remonon_GrossLineTotal": 105,
"refund": 14
}, {
"ocr_Quantity": 1,
"ocr_ProductName": "Other Product",
"ocr_GrossLineTotal": 119,
"remonon_GrossLineTotal": 105,
"refund": 14
}],
"TotalAmount": 119,
"TotalRemonon": 105,
"TotalRefund": 14,
"ocr_Date": "12.12.2021 - 18:29",
"romononID": "48483",
"invoiceNr": "X"
}
]
TESTINPUT = {
"title": f"Sammelrechnung {INVOICE['InvoiceNr']}",
"profile": PROFILE,
"nf_form": NF_FORM,
"invoice": INVOICE,
"receits": RECEITS
}
| 31.164384
| 117
| 0.490256
|
8593379da3374253072a96a363ddb10cea992979
| 5,664
|
py
|
Python
|
tests/unit/pillar_test.py
|
epoelke/salt
|
80ae64e54f9f336d3cdb6e03e42f2a50469ec8f2
|
[
"Apache-2.0"
] | 1
|
2016-12-20T20:11:21.000Z
|
2016-12-20T20:11:21.000Z
|
tests/unit/pillar_test.py
|
epoelke/salt
|
80ae64e54f9f336d3cdb6e03e42f2a50469ec8f2
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/pillar_test.py
|
epoelke/salt
|
80ae64e54f9f336d3cdb6e03e42f2a50469ec8f2
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
'''
tests.unit.pillar_test
~~~~~~~~~~~~~~~~~~~~~~
:codeauthor: :email:`Pedro Algarvio (pedro@algarvio.me)`
:copyright: © 2013 by the SaltStack Team, see AUTHORS for more details.
:license: Apache 2.0, see LICENSE for more details.
'''
import tempfile
# Import Salt Testing libs
from salttesting import skipIf, TestCase
from salttesting.helpers import ensure_in_syspath
from salttesting.mock import NO_MOCK, NO_MOCK_REASON, MagicMock, patch
ensure_in_syspath('../')
# Import salt libs
import salt.pillar
@skipIf(NO_MOCK, NO_MOCK_REASON)
class PillarTestCase(TestCase):
@patch('salt.pillar.compile_template')
def test_malformed_pillar_sls(self, compile_template):
opts = {
'renderer': 'json',
'state_top': '',
'pillar_roots': [],
'extension_modules': ''
}
grains = {
'os': 'Ubuntu',
'os_family': 'Debian',
'oscodename': 'raring',
'osfullname': 'Ubuntu',
'osrelease': '13.04',
'kernel': 'Linux'
}
pillar = salt.pillar.Pillar(opts, grains, 'mocked-minion', 'base')
# Mock getting the proper template files
pillar.client.get_state = MagicMock(
return_value={
'dest': '/path/to/pillar/files/foo.sls',
'source': 'salt://foo.sls'
}
)
# Template compilation returned a string
compile_template.return_value = 'BAHHH'
self.assertEqual(
pillar.render_pillar({'base': ['foo.sls']}),
({}, ['SLS \'foo.sls\' does not render to a dictionary'])
)
# Template compilation returned a list
compile_template.return_value = ['BAHHH']
self.assertEqual(
pillar.render_pillar({'base': ['foo.sls']}),
({}, ['SLS \'foo.sls\' does not render to a dictionary'])
)
# Template compilation returned a dictionary, which is what's expected
compile_template.return_value = {'foo': 'bar'}
self.assertEqual(
pillar.render_pillar({'base': ['foo.sls']}),
({'foo': 'bar'}, [])
)
# Test improper includes
compile_template.side_effect = [
{'foo': 'bar', 'include': 'blah'},
{'foo2': 'bar2'}
]
self.assertEqual(
pillar.render_pillar({'base': ['foo.sls']}),
({'foo': 'bar', 'include': 'blah'},
["Include Declaration in SLS 'foo.sls' is not formed as a list"])
)
# Test includes as a list, which is what's expected
compile_template.side_effect = [
{'foo': 'bar', 'include': ['blah']},
{'foo2': 'bar2'}
]
self.assertEqual(
pillar.render_pillar({'base': ['foo.sls']}),
({'foo': 'bar', 'foo2': 'bar2'}, [])
)
# Test includes as a list overriding data
compile_template.side_effect = [
{'foo': 'bar', 'include': ['blah']},
{'foo': 'bar2'}
]
self.assertEqual(
pillar.render_pillar({'base': ['foo.sls']}),
({'foo': 'bar2'}, [])
)
@patch('salt.pillar.salt.fileclient.get_file_client', autospec=True)
@patch('salt.pillar.salt.minion.Matcher', autospec=True)
def test_topfile_order(self, Matcher, get_file_client):
opts = {
'renderer': 'yaml',
'state_top': '',
'pillar_roots': [],
'extension_modules': '',
'environment': 'base',
}
grains = {
'os': 'Ubuntu',
'os_family': 'Debian',
'oscodename': 'raring',
'osfullname': 'Ubuntu',
'osrelease': '13.04',
'kernel': 'Linux'
}
# glob match takes precedence
self._setup_test_topfile_mocks(Matcher, get_file_client, 1, 2)
pillar = salt.pillar.Pillar(opts, grains, 'mocked-minion', 'base')
self.assertEqual(pillar.compile_pillar()['ssh'], 'bar')
# nodegroup match takes precedence
self._setup_test_topfile_mocks(Matcher, get_file_client, 2, 1)
pillar = salt.pillar.Pillar(opts, grains, 'mocked-minion', 'base')
self.assertEqual(pillar.compile_pillar()['ssh'], 'foo')
def _setup_test_topfile_mocks(self, Matcher, get_file_client,
nodegroup_order, glob_order):
# Write a simple topfile and two pillar state files
self.top_file = tempfile.NamedTemporaryFile()
self.top_file.write('''
base:
group:
- match: nodegroup
- order: {nodegroup_order}
- ssh
minion:
- order: {glob_order}
- ssh.minion
'''.format(nodegroup_order=nodegroup_order, glob_order=glob_order))
self.top_file.flush()
self.ssh_file = tempfile.NamedTemporaryFile()
self.ssh_file.write('''
ssh:
foo
''')
self.ssh_file.flush()
self.ssh_minion_file = tempfile.NamedTemporaryFile()
self.ssh_minion_file.write('''
ssh:
bar
''')
self.ssh_minion_file.flush()
# Setup Matcher mock
matcher = Matcher.return_value
matcher.confirm_top.return_value = True
# Setup fileclient mock
client = get_file_client.return_value
client.cache_file.return_value = self.top_file.name
def get_state(sls, env):
return {
'ssh': {'path': '', 'dest': self.ssh_file.name},
'ssh.minion': {'path': '', 'dest': self.ssh_minion_file.name},
}[sls]
client.get_state.side_effect = get_state
| 32.930233
| 78
| 0.558439
|
da3abf8a45b79a88d01ddee22f3413bbe76752f4
| 5,791
|
py
|
Python
|
envdsys/shared/data/datafile.py
|
NOAA-PMEL/envDataSystem
|
4db4a3569d2329658799a3eef06ce36dd5c0597d
|
[
"Unlicense"
] | 1
|
2021-11-06T19:22:53.000Z
|
2021-11-06T19:22:53.000Z
|
envdsys/shared/data/datafile.py
|
NOAA-PMEL/envDataSystem
|
4db4a3569d2329658799a3eef06ce36dd5c0597d
|
[
"Unlicense"
] | 25
|
2019-06-18T20:40:36.000Z
|
2021-07-23T20:56:48.000Z
|
envdsys/shared/data/datafile.py
|
NOAA-PMEL/envDataSystem
|
4db4a3569d2329658799a3eef06ce36dd5c0597d
|
[
"Unlicense"
] | null | null | null |
import asyncio
from shared.utilities.util import time_to_next
from shared.data.message import Message
# from datetime import datetime
# from data.data import Data
import json
import os
# from utilities.timer import Timer
class DataFile():
def __init__(
self,
base_path='./envdata/',
save_interval=60,
file_interval='day',
config=None,
):
self.base_path = base_path
# unless specified, flush file every 60 sec
self.save_interval = save_interval
# allow for cases where we want hour files
# options: 'day', 'hour'
self.file_interval = file_interval
if config:
self.setup(config)
if self.base_path[-1] != '/':
self.base_path += '/'
self.save_now = True
# if save_interval == 0:
# self.save_now = True
self.current_file_name = ''
self.data_buffer = asyncio.Queue()
self.task_list = []
self.loop = asyncio.get_event_loop()
self.file = None
self.format = {
'SOURCE': 'envDataSystem',
'VERSION': '1.0',
'FORMAT': 'jsonl',
}
def setup(self, config):
if 'base_path' in config:
self.base_path = config['base_path']
if 'save_interval' in config:
self.save_interval = config['save_interval']
if 'file_interval' in config:
self.file_interval = config['file_interval']
async def write_message(self, msg):
# print(f'{msg.to_json()}')
if msg.subject == 'DATA':
await self.write(msg.body)
# if 'body' in msg and 'DATA' in msg['body']:
# await self.write(msg['body']['DATA'])
async def write(self, data):
# add message to queue and return
data['FILE_META'] = self.format
# print(f'write: {data}')
await self.data_buffer.put(data)
# record = dict()
# record['FILE_META'] = self.format
# record['RECORD'] = data
async def __write(self):
while True:
data = await self.data_buffer.get()
# print(f'datafile.__write: {data}')
if data and ('DATA' in data):
d_and_t = data['DATA']['DATETIME'].split('T')
ymd = d_and_t[0]
hour = d_and_t[1].split(':')[0]
self.__open(ymd, hour=hour)
if not self.file:
return
json.dump(data, self.file)
self.file.write('\n')
if self.save_now:
self.file.flush()
if self.save_interval > 0:
self.save_now = False
def __open(self, ymd, hour=None):
fname = ymd
if self.file_interval == 'hour':
fname += '_' + hour
fname += '.jsonl'
if (
self.file is not None and
not self.file.closed and
os.path.basename(self.file.name) == fname
):
return
# TODO: change to raise error so __write can catch it
try:
if not os.path.exists(self.base_path):
os.makedirs(self.base_path, exist_ok=True)
except OSError as e:
print(f'OSError: {e}')
self.file = None
return
self.file = open(
self.base_path+fname,
mode='a',
)
def open(self):
self.task_list.append(
asyncio.ensure_future(self.save_file_loop())
)
self.task_list.append(
asyncio.ensure_future(self.__write())
)
def close(self):
for t in self.task_list:
t.cancel()
if self.file:
try:
self.file.flush()
self.file.close()
self.file = None
except ValueError:
print('file already closed')
async def save_file_loop(self):
while True:
if self.save_interval > 0:
await asyncio.sleep(
time_to_next(self.save_interval)
)
self.save_now = True
else:
self.save_now = True
await asyncio.sleep(1)
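# For reference, each record lands on its own line of the .jsonl file roughly like
# this (field values are made up for illustration):
# {"DATA": {"DATETIME": "2021-01-01T00:00:00Z", "T": 25.0}, "FILE_META": {"SOURCE": "envDataSystem", "VERSION": "1.0", "FORMAT": "jsonl"}}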
# async def add_data(df):
# while True:
# entry = {
# 'DateTime': datetime.utcnow().isoformat(timespec='seconds'),
# 'Data': {'T': 25.0, 'RH': 60.0},
# }
# df.append(entry)
# # print(df)
# await asyncio.sleep(1)
# def shutdown():
# df.close()
# tasks = asyncio.Task.all_tasks()
# for t in tasks:
# t.cancel()
# print("Tasks canceled")
# asyncio.get_event_loop().stop()
# if __name__ == "__main__":
# event_loop = asyncio.get_event_loop()
# config = {
# 'base_path': '/home/horton/derek/tmp',
# 'instrument_class': 'env_sensor',
# 'instrument_name': 'THSMPS',
# 'write_freq': 2,
# }
# df = DataFile(config)
# task = asyncio.ensure_future(add_data(df))
# task_list = asyncio.Task.all_tasks()
# #
# try:
# event_loop.run_until_complete(asyncio.wait(task_list))
# # event_loop.run_forever()
# except KeyboardInterrupt:
# print('closing client')
# # client.close()
# # event_loop.run_until_complete(client.wait_closed())
# shutdown()
# # for task in task_list:
# # print("cancel task")
# # task.cancel()
# # server.close()
# event_loop.run_forever()
# # event_loop.run_until_complete(asyncio.wait(asyncio.ensure_future(shutdown)))
# finally:
# print('closing event loop')
# event_loop.close()
| 25.737778
| 88
| 0.520808
|
8f6d7e6d2b48ebc4efe1939ee9cc5634eb0be37b
| 84
|
py
|
Python
|
data/__init__.py
|
saralajew/robustNPCs
|
41f2ce5618003a10db11601cded71ec3ba538bcc
|
[
"BSD-3-Clause"
] | 1
|
2020-11-02T09:05:44.000Z
|
2020-11-02T09:05:44.000Z
|
data/__init__.py
|
saralajew/robust_NPCs
|
41f2ce5618003a10db11601cded71ec3ba538bcc
|
[
"BSD-3-Clause"
] | null | null | null |
data/__init__.py
|
saralajew/robust_NPCs
|
41f2ce5618003a10db11601cded71ec3ba538bcc
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from .data import *
| 16.8
| 38
| 0.702381
|
be5818e03c31a5c6888bf8eb6d2a46b1ebf33cc5
| 130
|
py
|
Python
|
collection/exceptions/__init__.py
|
Chansazm/Project_20_Money_Transfer_App
|
f215c9f6fa4004ec44bc9009d717adaede4d98a9
|
[
"MIT"
] | null | null | null |
collection/exceptions/__init__.py
|
Chansazm/Project_20_Money_Transfer_App
|
f215c9f6fa4004ec44bc9009d717adaede4d98a9
|
[
"MIT"
] | null | null | null |
collection/exceptions/__init__.py
|
Chansazm/Project_20_Money_Transfer_App
|
f215c9f6fa4004ec44bc9009d717adaede4d98a9
|
[
"MIT"
] | null | null | null |
__all__ = [
'api_exception',
'token_post_401_application_json_response_exception',
'error_reason_error_exception',
]
| 26
| 58
| 0.753846
|
0450d502fea094ae410d1f006696b90e63e2f0bd
| 5,930
|
py
|
Python
|
src/sos/targets_python.py
|
pgcudahy/sos
|
ee902841003c7630db501101038f370650955ef9
|
[
"BSD-3-Clause"
] | 90
|
2019-06-25T15:33:22.000Z
|
2022-03-31T03:50:58.000Z
|
src/sos/targets_python.py
|
pgcudahy/sos
|
ee902841003c7630db501101038f370650955ef9
|
[
"BSD-3-Clause"
] | 223
|
2019-06-24T13:47:38.000Z
|
2022-03-29T21:37:10.000Z
|
src/sos/targets_python.py
|
pgcudahy/sos
|
ee902841003c7630db501101038f370650955ef9
|
[
"BSD-3-Clause"
] | 26
|
2019-06-27T19:24:23.000Z
|
2022-03-24T03:41:37.000Z
|
#!/usr/bin/env python3
#
# Copyright (c) Bo Peng and the University of Texas MD Anderson Cancer Center
# Distributed under the terms of the 3-clause BSD License.
from .targets import BaseTarget
from .utils import env, textMD5
class Py_Module(BaseTarget):
'''A target for a Python module.'''
LIB_STATUS_CACHE = {}
def __init__(self, module, version=None, autoinstall=False):
super(Py_Module, self).__init__()
if not isinstance(module, str):
raise ValueError('A string is expected for module name.')
self._module = module.strip()
self._version = version.strip() if isinstance(version, str) else version
for opt in ('==', '>=', '>', '<=', '<', '!='):
if opt in self._module:
if self._version is not None:
raise ValueError(
f"Specifying 'version=' option in addition to '{module}' is not allowed"
)
self._module, self._version = [
x.strip() for x in self._module.split(opt, 1)
]
if ',' in self._version:
raise ValueError(
f'SoS does not yet support multiple version comparisons. {self._module} provided'
)
self._version = opt + self._version
break
self._autoinstall = autoinstall
def _install(self, name, autoinstall):
'''Check existence of Python module and install it using command
pip install if necessary.'''
import importlib
import pkg_resources
spam_spec = importlib.util.find_spec(name)
reinstall = False
if spam_spec is not None:
if self._version:
mod = importlib.__import__(name)
if hasattr(mod, '__version__'):
ver = mod.__version__
else:
try:
ver = pkg_resources.get_distribution(name).version
except Exception as e:
env.logger.debug(
f'Failed to get version of {name}: {e}')
env.logger.debug(
f'Comparing existing version {ver} against requested version {self._version}'
)
if self._version.startswith(
'==') and pkg_resources.parse_version(
ver) == pkg_resources.parse_version(
self._version[2:]):
pass
elif self._version.startswith(
'<=') and pkg_resources.parse_version(
ver) <= pkg_resources.parse_version(
self._version[2:]):
pass
elif self._version.startswith(
'<') and not self._version.startswith(
'<=') and pkg_resources.parse_version(
ver) < pkg_resources.parse_version(
self._version[1:]):
pass
elif self._version.startswith(
'>=') and pkg_resources.parse_version(
ver) >= pkg_resources.parse_version(
self._version[2:]):
pass
# the case of >
elif self._version.startswith(
'>') and not self._version.startswith(
'>=') and pkg_resources.parse_version(
ver) > pkg_resources.parse_version(
self._version[1:]):
pass
elif self._version.startswith(
'!=') and pkg_resources.parse_version(
ver) != pkg_resources.parse_version(
self._version[2:]):
pass
elif self._version[0] not in (
'=', '>', '<', '!') and pkg_resources.parse_version(
ver) == pkg_resources.parse_version(self._version):
pass
else:
env.logger.warning(
f'Version {ver} of installed {name} does not match specified version {self._version}.'
)
reinstall = True
if spam_spec and not reinstall:
return True
if not autoinstall:
return False
# try to install it?
import subprocess
cmd = ['pip', 'install'] + ([] if self._version else ['-U']) + [
self._module + (self._version if self._version else '')
if self._autoinstall is True else self._autoinstall
]
env.logger.info(
f'Installing python module {name} with command {" ".join(cmd)}')
ret = subprocess.call(cmd)
if reinstall:
import sys
importlib.reload(sys.modules[name])
# try to check version
return ret == 0 and self._install(name, False)
def target_exists(self, mode='any'):
if (self._module, self._version) in self.LIB_STATUS_CACHE:
return self.LIB_STATUS_CACHE[(self._module, self._version)]
else:
ret = self._install(self._module, self._autoinstall)
self.LIB_STATUS_CACHE = {
x: y
for x, y in self.LIB_STATUS_CACHE.items()
if x[0] != self._module
}
self.LIB_STATUS_CACHE[(self._module, self._version)] = ret
return ret
def target_name(self):
return self._module
def target_signature(self, mode='any'):
# we are supposed to get signature of the module, but we cannot
return textMD5('Python module ' + self._module)
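# Hedged usage sketch (package name and version specifier are arbitrary examples):
# in an SoS workflow a dependency can be declared so the module is imported,
# version-checked against the requested specifier, and pip-installed on demand:
#
#   depends: Py_Module('numpy>=1.18', autoinstall=True)
#
# target_exists() drives the check; results are memoised per (module, version) in
# LIB_STATUS_CACHE so repeated checks within one run stay cheap.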
| 42.661871
| 110
| 0.498145
|
7ae411aaa716d9eac6096c34418f5393cbfa7480
| 2,421
|
py
|
Python
|
tests/test_config.py
|
shuohan/singleton-config
|
e7a38006dbc50f10afadeea57274826ca4233e82
|
[
"MIT"
] | null | null | null |
tests/test_config.py
|
shuohan/singleton-config
|
e7a38006dbc50f10afadeea57274826ca4233e82
|
[
"MIT"
] | null | null | null |
tests/test_config.py
|
shuohan/singleton-config
|
e7a38006dbc50f10afadeea57274826ca4233e82
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from enum import IntEnum
from singleton_config import Config as Config_
class Mode(IntEnum):
M1 = 1
M2 = 2
class Config(Config_):
def __init__(self):
super().__init__()
self.add_config('a', 1)
self.add_config('b', '2')
self.add_config('cc', Mode.M1, True)
def _save_a(self):
return str(self.a)
def _load_a(self, value):
self.a = int(value)
def _set_cc(self, mode):
if isinstance(mode, Mode):
self._cc = mode
elif isinstance(mode, int):
self._cc = Mode(mode)
elif isinstance(mode, str):
self._cc = Mode[mode]
class Config2(Config_):
def __init__(self):
super().__init__()
self.add_config('cc', Mode.M1, True)
def test_config():
config1 = Config()
assert config1.a == 1
assert config1.b == '2'
assert config1.cc is Mode.M1
assert config1.has_config('cc')
try:
config1.d = 10
assert False
except RuntimeError:
assert True
config1.a = 100
config2 = Config()
assert config2.a == 100
assert id(config1) == id(config2)
print(config2)
heading = '%s.Config:' % __name__
message = '\n'.join([heading,
' a: 100',
' b: 2',
' cc: Mode.M1'])
assert config1.__str__() == message
filename = 'test.json'
config2.cc = Mode.M2
config2.save_json(filename)
config2.a = 1
config3 = Config()
assert id(config2) == id(config3)
config3.add_config('d', 'hello world')
tmp = {'a': '1', 'b': '2', 'cc': Mode.M2, 'd': 'hello world'}
assert config2.save_dict() == tmp
with open(filename) as jfile:
lines = jfile.readlines()
tmp = ['{\n', ' "a": "100",\n',' "b": "2",\n', ' "cc": 2\n', '}']
assert lines == tmp
assert config1.a == 1
config1.load_json(filename)
assert config1.a == 100
assert config1.b == '2'
assert config1.cc is Mode.M2
assert config1.d == 'hello world'
os.remove(filename)
config = Config2()
assert config.cc is Mode.M1
try:
config.cc = Mode.M2
assert False
except AttributeError as e:
assert str(e) == 'can\'t set attribute'
print('all successful')
if __name__ == '__main__':
test_config()
| 23.057143
| 78
| 0.551838
|
ddc7c707439dbda5e3eb4832f231e3480716f2f1
| 2,672
|
py
|
Python
|
class-1/lsa.py
|
Xiaoyu-Xing/learning-nlp
|
804a32b99aa45a59ac852ea7e1d3d5366f638256
|
[
"MIT"
] | null | null | null |
class-1/lsa.py
|
Xiaoyu-Xing/learning-nlp
|
804a32b99aa45a59ac852ea7e1d3d5366f638256
|
[
"MIT"
] | null | null | null |
class-1/lsa.py
|
Xiaoyu-Xing/learning-nlp
|
804a32b99aa45a59ac852ea7e1d3d5366f638256
|
[
"MIT"
] | null | null | null |
import nltk
import numpy as np
import matplotlib.pyplot as plt
from nltk.stem import WordNetLemmatizer
from sklearn.decomposition import TruncatedSVD
wordnet_lemmatizer = WordNetLemmatizer()
titles = [line.rstrip() for line in open('../dataset/books/all_book_titles.txt')]
stopwords = set(w.rstrip() for w in open('../dataset/books/stopwords.txt'))
stopwords = stopwords.union({
'introduction', 'edition', 'series', 'application',
'approach', 'card', 'access', 'package', 'plus', 'etext',
'brief', 'vol', 'fundamental', 'guide', 'essential', 'printed',
'third', 'second', 'fourth', })
def my_tokenizer(s):
s = s.lower() # downcase
tokens = nltk.tokenize.word_tokenize(s) # split string into words (tokens)
tokens = [t for t in tokens if len(t) > 2] # remove short words, they're probably not useful
tokens = [wordnet_lemmatizer.lemmatize(t) for t in tokens] # put words into base form
tokens = [t for t in tokens if t not in stopwords] # remove stopwords
tokens = [t for t in tokens if not any(c.isdigit() for c in t)] # remove any digits, i.e. "3rd edition"
return tokens
word_index_map = {}
current_index = 0
all_tokens = []
all_titles = []
index_word_map = []
error_count = 0
for title in titles:
try:
title = title.encode('ascii', 'ignore').decode('utf-8') # this will throw exception if bad characters
all_titles.append(title)
tokens = my_tokenizer(title)
all_tokens.append(tokens)
for token in tokens:
if token not in word_index_map:
word_index_map[token] = current_index
current_index += 1
index_word_map.append(token)
except Exception as e:
print(e)
print(title)
error_count += 1
print("Number of errors parsing file:", error_count, "number of lines in file:", len(titles))
if error_count == len(titles):
print("There is no data to do anything with! Quitting...")
exit()
# now let's create our input matrices - just indicator variables for this example - works better than proportions
def tokens_to_vector(tokens):
x = np.zeros(len(word_index_map))
for t in tokens:
i = word_index_map[t]
x[i] = 1
return x
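# Illustration (hypothetical tokens): for ['seo', 'seo', 'marketing'] the vector
# gets a 1 at the 'seo' and 'marketing' positions regardless of repeats, i.e. a
# binary indicator per term rather than a count or proportion.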
N = len(all_tokens)
D = len(word_index_map)
X = np.zeros((D, N)) # terms will go along rows, documents along columns
i = 0
for tokens in all_tokens:
X[:,i] = tokens_to_vector(tokens)
i += 1
def main():
svd = TruncatedSVD()
Z = svd.fit_transform(X)
plt.scatter(Z[:,0], Z[:,1])
for i in range(D):
plt.annotate(s=index_word_map[i], xy=(Z[i,0], Z[i,1]))
plt.show()
if __name__ == '__main__':
main()
| 32.987654
| 113
| 0.654192
|
d38a0dc031315fb8b6ba2328ec22f75db1003afa
| 1,552
|
py
|
Python
|
mpld3/tests/test_figure.py
|
odidev/mpld3
|
7bb37b9a9e6d1c90dd9f83075d1987541bb087d5
|
[
"BSD-3-Clause"
] | 1,101
|
2016-04-20T18:35:09.000Z
|
2022-03-31T10:52:22.000Z
|
mpld3/tests/test_figure.py
|
odidev/mpld3
|
7bb37b9a9e6d1c90dd9f83075d1987541bb087d5
|
[
"BSD-3-Clause"
] | 172
|
2016-04-20T18:19:21.000Z
|
2022-03-16T13:44:08.000Z
|
mpld3/tests/test_figure.py
|
odidev/mpld3
|
7bb37b9a9e6d1c90dd9f83075d1987541bb087d5
|
[
"BSD-3-Clause"
] | 195
|
2016-04-21T17:39:12.000Z
|
2022-03-16T05:10:09.000Z
|
"""
Test creation of a figure
"""
import matplotlib.pyplot as plt
from .. import fig_to_dict
from numpy.testing import assert_equal
def test_basic_figure():
size = (8, 6)
dpi = 80
fig = plt.figure(figsize=size, dpi=dpi)
rep = fig_to_dict(fig)
plt.close(fig)
assert_equal(list(sorted(rep.keys())),
['axes', 'data', 'height', 'id', 'plugins', 'width'])
assert_equal(rep['width'], size[0] * dpi)
assert_equal(rep['height'], size[1] * dpi)
assert_equal(rep['data'], {})
assert_equal(rep['axes'], [])
def test_axes():
bbox = [0.1, 0.1, 0.8, 0.8]
xlim = [-10, 10]
ylim = [-20, 20]
fig = plt.figure()
ax = fig.add_axes(bbox)
ax.set_xlim(xlim)
ax.set_ylim(ylim)
rep = fig_to_dict(fig)
axrep = rep['axes'][0]
assert_equal(list(sorted(axrep.keys())),
['axes', 'axesbg', 'axesbgalpha', 'bbox', 'collections',
'id', 'images', 'lines', 'markers', 'paths', 'sharex',
'sharey', 'texts', 'xdomain', 'xlim', 'xscale', 'ydomain',
'ylim', 'yscale', 'zoomable'])
for key in ['collections', 'images', 'lines', 'markers', 'paths', 'texts']:
assert_equal(axrep[key], [])
for key in ['xlim', 'xdomain']:
assert_equal(axrep[key], xlim)
for key in ['ylim', 'ydomain']:
assert_equal(axrep[key], ylim)
for key in ['xscale', 'yscale']:
assert_equal(axrep[key], 'linear')
assert_equal(axrep['zoomable'], True)
assert_equal(axrep['bbox'], bbox)
| 26.758621
| 79
| 0.563144
|
544343605fb507db05d07f76bbdd04435946066a
| 939
|
py
|
Python
|
APICanalLuciernaga/programacion/models.py
|
YarlingB/canal-luciernaga
|
a90ff4115bfd4c050c9b9b507c8a6df18a4dad5e
|
[
"MIT"
] | null | null | null |
APICanalLuciernaga/programacion/models.py
|
YarlingB/canal-luciernaga
|
a90ff4115bfd4c050c9b9b507c8a6df18a4dad5e
|
[
"MIT"
] | 12
|
2020-02-12T01:04:09.000Z
|
2022-02-10T12:22:14.000Z
|
APICanalLuciernaga/programacion/models.py
|
YarlingB/canal-luciernaga
|
a90ff4115bfd4c050c9b9b507c8a6df18a4dad5e
|
[
"MIT"
] | 3
|
2019-07-04T19:51:17.000Z
|
2019-07-30T17:23:13.000Z
|
from django.db import models
from noticias.models import Categoria
# Create your models here.
class Programacion(models.Model):
titulo = models.CharField(max_length = 225)
link = models.URLField(null=True,blank=True)
fecha = models.DateField()
def __str__(self):
return self.titulo
class Meta:
verbose_name = "Programación"
verbose_name_plural = "Programaciones"
class HoraProgramacion(models.Model):
programacion = models.ForeignKey(Programacion, on_delete = models.CASCADE)
titulo = models.CharField(max_length = 225)
categoria_p = models.ForeignKey(Categoria,on_delete=models.DO_NOTHING,null=True,verbose_name='Categoría')
descripcion = models.CharField("Descripción", max_length=200, blank=True, null=True)
hora_inicio = models.TimeField()
hora_fin = models.TimeField()
class Meta:
verbose_name ="Hora Programación"
verbose_name_plural ="Horas de programacion"
def __str__(self):
return self.titulo
| 29.34375
| 106
| 0.776358
|
709a0647a6550b75c591cc4264598a87951bd5aa
| 834
|
py
|
Python
|
mockbcrypt/plugin.py
|
boneyao/mock-bcrypt
|
669bf7257974b85404fcccf40b5bd865607f1e7f
|
[
"MIT"
] | null | null | null |
mockbcrypt/plugin.py
|
boneyao/mock-bcrypt
|
669bf7257974b85404fcccf40b5bd865607f1e7f
|
[
"MIT"
] | null | null | null |
mockbcrypt/plugin.py
|
boneyao/mock-bcrypt
|
669bf7257974b85404fcccf40b5bd865607f1e7f
|
[
"MIT"
] | null | null | null |
import logging
import os
from mock import patch
from nose.plugins import Plugin
log = logging.getLogger('nose.plugins.mockbcrypt')
class MockBcryptPlugin(Plugin):
name = 'mockbcrypt'
def options(self, parser, env=os.environ):
super(MockBcryptPlugin, self).options(parser, env=env)
def configure(self, options, conf):
super(MockBcryptPlugin, self).configure(options, conf)
if not self.enabled:
return
encrypt_patcher = patch('passlib.hash.bcrypt.encrypt')
encrypt = encrypt_patcher.start()
encrypt.side_effect = lambda x: x
verify_patcher = patch('passlib.hash.bcrypt.verify')
verify = verify_patcher.start()
verify.side_effect = lambda x, y: x == y
def finalize(self, result):
log.info('MockBcrypt pluginized world!')
| 27.8
| 62
| 0.673861
|
d2990af46ea36fca13048e3ba55dcdb3ed9129d7
| 1,153
|
py
|
Python
|
November/Week3/Range Sum of BST.py
|
vinaykumar7686/Leetcode-August_Challenge
|
fe1928d8b10a63d7aa561118a70eeaec2f3a2f36
|
[
"MIT"
] | 1
|
2020-08-02T13:41:38.000Z
|
2020-08-02T13:41:38.000Z
|
November/Week3/Range Sum of BST.py
|
vinaykumar7686/Leetcode-August_Challenge
|
fe1928d8b10a63d7aa561118a70eeaec2f3a2f36
|
[
"MIT"
] | null | null | null |
November/Week3/Range Sum of BST.py
|
vinaykumar7686/Leetcode-August_Challenge
|
fe1928d8b10a63d7aa561118a70eeaec2f3a2f36
|
[
"MIT"
] | null | null | null |
# Range Sum of BST
'''
Given the root node of a binary search tree, return the sum of values of all nodes with a value in the range [low, high].
Example 1:
Input: root = [10,5,15,3,7,null,18], low = 7, high = 15
Output: 32
Example 2:
Input: root = [10,5,15,3,7,13,18,1,null,6], low = 6, high = 10
Output: 23
Constraints:
The number of nodes in the tree is in the range [1, 2 * 104].
1 <= Node.val <= 105
1 <= low <= high <= 105
All Node.val are unique.
'''
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
def rangeSumBST(self, root: TreeNode, low: int, high: int) -> int:
if not root:
return 0
if root.val>=low and root.val<=high:
return root.val+self.rangeSumBST(root.left, low, high)+self.rangeSumBST(root.right, low, high)
elif root.val>=low:
return self.rangeSumBST(root.left, low, high)
elif root.val<=high:
return self.rangeSumBST(root.right, low, high)
else:
return 0
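# Quick local sanity check (illustrative addition, not part of the original submission).
# It assumes the commented-out TreeNode stub above is uncommented -- on LeetCode the judge
# supplies TreeNode -- and rebuilds the tree from Example 1, expecting a range sum of 32.
if __name__ == '__main__':
    root = TreeNode(10,
                    TreeNode(5, TreeNode(3), TreeNode(7)),
                    TreeNode(15, None, TreeNode(18)))
    assert Solution().rangeSumBST(root, low=7, high=15) == 32
    print('Example 1 range sum:', Solution().rangeSumBST(root, 7, 15))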
| 25.065217
| 121
| 0.607112
|
b7edfb5ae46312138af4155c4a27207e0bc6b53a
| 3,801
|
py
|
Python
|
tests/core/test_walltime.py
|
intdata-bsc/idact
|
54cb65a711c145351e205970c27c83e6393cccf5
|
[
"MIT"
] | 5
|
2018-12-06T15:40:34.000Z
|
2019-06-19T11:22:58.000Z
|
tests/core/test_walltime.py
|
garstka/idact
|
b9c8405c94db362c4a51d6bfdf418b14f06f0da1
|
[
"MIT"
] | 9
|
2018-12-06T16:35:26.000Z
|
2019-04-28T19:01:40.000Z
|
tests/core/test_walltime.py
|
intdata-bsc/idact
|
54cb65a711c145351e205970c27c83e6393cccf5
|
[
"MIT"
] | 2
|
2019-04-28T19:18:58.000Z
|
2019-06-17T06:56:28.000Z
|
"""Tests for :class:`.Walltime`."""
import pytest
from idact.core.walltime import Walltime
def test_walltime_values():
"""Basic walltime values."""
walltime = Walltime(days=1, hours=2, minutes=3, seconds=4)
assert walltime.days == 1
assert walltime.hours == 2
assert walltime.minutes == 3
assert walltime.seconds == 4
def test_walltime_must_not_be_zero():
"""Zero walltime is not allowed."""
with pytest.raises(ValueError):
Walltime()
def test_walltime_days():
"""Values for days parameter."""
with pytest.raises(ValueError):
Walltime(days=-1)
assert Walltime(days=10).days == 10
def test_walltime_hours():
"""Values for hours parameter."""
with pytest.raises(ValueError):
Walltime(hours=-1)
assert Walltime(hours=10).hours == 10
with pytest.raises(ValueError):
Walltime(hours=24)
def test_walltime_minutes():
"""Values for minutes parameter."""
with pytest.raises(ValueError):
Walltime(minutes=-1)
assert Walltime(minutes=10).minutes == 10
with pytest.raises(ValueError):
Walltime(minutes=60)
def test_walltime_seconds():
"""Values for seconds parameter."""
with pytest.raises(ValueError):
Walltime(seconds=-1)
assert Walltime(seconds=10).seconds == 10
with pytest.raises(ValueError):
Walltime(seconds=60)
def test_walltime_from_string():
"""Values for walltime conversion from string."""
walltime_1 = Walltime.from_string('1:02:03')
assert walltime_1.days == 0
assert walltime_1.hours == 1
assert walltime_1.minutes == 2
assert walltime_1.seconds == 3
walltime_2 = Walltime.from_string('4-1:02:03')
assert walltime_2.days == 4
assert walltime_2.hours == 1
assert walltime_2.minutes == 2
assert walltime_2.seconds == 3
walltime_3 = Walltime.from_string('4-05:02:03')
assert walltime_3.days == 4
assert walltime_3.hours == 5
assert walltime_3.minutes == 2
assert walltime_3.seconds == 3
walltime_4 = Walltime.from_string('400-12:30:59')
assert walltime_4.days == 400
assert walltime_4.hours == 12
assert walltime_4.minutes == 30
assert walltime_4.seconds == 59
with pytest.raises(ValueError):
Walltime.from_string('0:00:00')
with pytest.raises(ValueError):
Walltime.from_string('-1:02:03')
with pytest.raises(ValueError):
Walltime.from_string('1:02: 03')
with pytest.raises(ValueError):
Walltime.from_string('02:03')
with pytest.raises(ValueError):
Walltime.from_string('3')
with pytest.raises(ValueError):
Walltime.from_string('3-4')
with pytest.raises(ValueError):
Walltime.from_string('3-4:05')
with pytest.raises(ValueError):
Walltime.from_string('1:2:3')
with pytest.raises(ValueError):
Walltime.from_string('abc')
def test_walltime_to_string():
assert str(Walltime(days=0,
hours=1,
minutes=2,
seconds=3)) == '01:02:03'
assert str(Walltime(days=1,
hours=2,
minutes=3,
seconds=4)) == '1-02:03:04'
assert str(Walltime(days=123,
hours=4,
minutes=5,
seconds=6)) == '123-04:05:06'
assert str(Walltime(days=123,
hours=10,
minutes=20,
seconds=30)) == '123-10:20:30'
assert str(Walltime(days=123,
hours=0,
minutes=0,
seconds=0)) == '123-00:00:00'
walltime = Walltime(days=1, hours=2, minutes=3, seconds=4)
assert str(walltime) == repr(walltime)
| 29.929134
| 62
| 0.60563
|
a5863760c4d946d644ace5882684c6bcb8f4b3ee
| 2,283
|
py
|
Python
|
demo/messaging/demo_ping_pong.py
|
keszybz/pelita
|
081329340b61e73436fb860f66882ffa47d0009e
|
[
"BSD-2-Clause"
] | null | null | null |
demo/messaging/demo_ping_pong.py
|
keszybz/pelita
|
081329340b61e73436fb860f66882ffa47d0009e
|
[
"BSD-2-Clause"
] | null | null | null |
demo/messaging/demo_ping_pong.py
|
keszybz/pelita
|
081329340b61e73436fb860f66882ffa47d0009e
|
[
"BSD-2-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from datetime import datetime
from pelita.messaging import DispatchingActor, expose, actor_of, actor_registry, StopProcessing, RemoteConnection
class Ping(DispatchingActor):
def on_start(self):
self.pong = None
self.pings_left = 2000
@expose
def connect(self, actor_uuid):
if self.ref.remote:
self.pong = self.ref.remote.create_proxy(actor_uuid)
else:
self.pong = actor_registry.get_by_uuid(actor_uuid)
@expose
def Start(self):
print "Ping: Starting"
self.pong.notify("Ping", channel=self.ref)
self.pings_left -= 1
@expose
def SendPing(self):
self.pong.notify("Ping", channel=self.ref)
self.pings_left -= 1
@expose
def Pong(self):
if self.pings_left % 100 == 0:
print "Ping: pong from: " + str(self.ref.channel)
if self.pings_left > 0:
self.ref.notify("SendPing")
else:
print "Ping: Stop."
self.pong.notify("Stop", channel=self.ref)
self.ref.put(StopProcessing)
class Pong(DispatchingActor):
def on_start(self):
self.pong_count = 0
self.old_time = datetime.now()
@expose
def Ping(self):
if self.pong_count % 100 == 0:
delta = datetime.now() - self.old_time
self.old_time = datetime.now()
print "Pong: ping " + str(self.pong_count) + " from " + str(self.ref.channel) + \
str(delta.seconds) + "." + str(delta.microseconds // 1000)
self.ref.channel.notify("Pong", channel=self.ref)
self.pong_count += 1
@expose
def Stop(self):
print "Pong: Stop."
self.ref.put(StopProcessing)
import logging
#logging.basicConfig()
remote = True
if remote:
remote = RemoteConnection().start_listener("localhost", 0)
remote.register("ping", actor_of(Ping))
remote.start_all()
port = remote.listener.socket.port
ping = RemoteConnection().actor_for("ping", "localhost", port)
else:
ping = actor_of(Ping)
ping.start()
pong = actor_of(Pong)
pong.start()
ping.notify("connect", [pong.uuid])
ping.notify("Start")
pong.join()
if remote:
remote.stop()
else:
ping.stop()
pong.stop()
| 24.031579
| 113
| 0.60841
|
a8533c91c633fe80e7c776d7e08dfa0f0180de27
| 2,105
|
py
|
Python
|
python/cm/api_context.py
|
arenadata/adcm
|
a499caa30adc2a53e7b3f46c96a865f9e4079e4e
|
[
"Apache-2.0"
] | 16
|
2019-11-28T18:05:21.000Z
|
2021-12-08T18:09:18.000Z
|
python/cm/api_context.py
|
arenadata/adcm
|
a499caa30adc2a53e7b3f46c96a865f9e4079e4e
|
[
"Apache-2.0"
] | 1,127
|
2019-11-29T08:57:25.000Z
|
2022-03-31T20:21:32.000Z
|
python/cm/api_context.py
|
arenadata/adcm
|
a499caa30adc2a53e7b3f46c96a865f9e4079e4e
|
[
"Apache-2.0"
] | 10
|
2019-11-28T18:05:06.000Z
|
2022-01-13T06:16:40.000Z
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
API methods can be called from the web and from Ansible,
so some things have to be done in different ways. It is also painful to fetch or compute the same
common data in every API method and pass it down the call stack, so it is better to gather it once
and keep it here for the duration of a WSGI call or an Ansible run.
Implemented as a singleton module: just `from cm.api_context import ctx` and use `ctx.*` when needed
"""
import os
from pathlib import Path
from typing import Optional
from cm import models
from cm.logger import log
from cm.status_api import Event
class _Context:
"""Common context for API methods calls"""
def __init__(self):
self.event = Event()
self.task: Optional[models.TaskLog] = None
self.job: Optional[models.JobLog] = None
self.lock: Optional[models.ConcernItem] = None
self.get_job_data()
def get_job_data(self):
env = os.environ
ansible_config = env.get('ANSIBLE_CONFIG')
if not ansible_config:
return
job_id = Path(ansible_config).parent.name
try:
self.job = models.JobLog.objects.select_related('task', 'task__lock').get(
id=int(job_id)
)
except (ValueError, models.ObjectDoesNotExist):
return
self.task = getattr(self.job, 'task', None)
self.lock = getattr(self.task, 'lock', None)
msg = f'API context was initialized with {self.job}, {self.task}, {self.lock}'
log.debug(msg)
# initialized on first import
ctx = None
if not ctx:
ctx = _Context()
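# Illustrative usage sketch (not part of the original module): a hypothetical API method
# simply imports the singleton described in the docstring above and reads its attributes.
#
#     from cm.api_context import ctx
#
#     def some_api_method(obj):
#         if ctx.job is not None:
#             # running from an Ansible plugin; job/task/lock were resolved in get_job_data()
#             log.debug('called within job %s', ctx.job)
#         ...  # do the actual work, reusing ctx.event / ctx.lock as needed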
| 32.384615
| 98
| 0.686461
|
ab2fdcd30fc354431069de20650ba9e262ca6a1a
| 2,571
|
py
|
Python
|
UserModel/pcauth_views.py
|
CryptoCompetition2019-RNG/AuthServer
|
c22e2b13af2cc51f62fdc55e3f682eb344d4fbcb
|
[
"Apache-2.0"
] | null | null | null |
UserModel/pcauth_views.py
|
CryptoCompetition2019-RNG/AuthServer
|
c22e2b13af2cc51f62fdc55e3f682eb344d4fbcb
|
[
"Apache-2.0"
] | 10
|
2020-06-05T23:28:04.000Z
|
2022-03-12T00:02:52.000Z
|
UserModel/pcauth_views.py
|
CryptoCompetition2019-RNG/AuthServer
|
c22e2b13af2cc51f62fdc55e3f682eb344d4fbcb
|
[
"Apache-2.0"
] | null | null | null |
from django.views.decorators.http import require_POST
from django.views.decorators.csrf import csrf_exempt
from AuthServer.method import json_response_zh, get_json_ret, decrypt_ecb, encrypt_ecb
from Crypto.Util.number import long_to_bytes
from .models import UserModel
@csrf_exempt
@require_POST
def pcauth_api1(request):
"""
    Step 1 of PC-client password verification.
    :param request: a valid request should carry POST data of the form:
{"data": sm4_{DH_key}( id.ljust(64, '\x00') ) + sm4_{salt}( hex(r1) ) }
"""
data = long_to_bytes(int(request.data, 16))
if len(data) != 64 * 2:
return json_response_zh(get_json_ret(41))
user_name = decrypt_ecb(request.DH_key, data[:64]).decode()
user = UserModel.objects.filter(user_name=user_name).first()
if user is None:
return json_response_zh(get_json_ret(41))
user.random_value1 = decrypt_ecb(user.get_salt_sm4_key(), data[64:]).decode()
user.save()
return json_response_zh(get_json_ret(0))
@csrf_exempt
@require_POST
def pcauth_api2(request):
"""
    Step 2 of PC-client password verification: the PC client polls the server repeatedly to check whether step 1 has completed.
    :param request: a normal request should carry POST data of the form:
{"data": sm4_{DH_key}( id.ljust(64, '\x00') )}
:return:
"""
data = long_to_bytes(int(request.data, 16))
if len(data) != 64:
return json_response_zh(get_json_ret(41))
user_name = decrypt_ecb(request.DH_key, data).decode()
user = UserModel.objects.filter(user_name=user_name).first()
if user is None:
return json_response_zh(get_json_ret(41))
if user.random_value1 is None:
return json_response_zh(get_json_ret(42))
request.session['user_name'] = user_name
ret_data = encrypt_ecb(request.DH_key, (user.random_value1 + user.A_pwd).encode())
return json_response_zh(get_json_ret(0, data=ret_data.hex()))
@csrf_exempt
@require_POST
def pcauth_api3(request):
"""
    Step 3 of PC-client password verification.
    :param request: a valid request should carry POST data of the form
{"data": sm4_{DH_key}( hex(r1) + B_pwd* )}
    :return: whether B_pwd* equals B_pwd
"""
data = long_to_bytes(int(request.data, 16))
if len(data) != 64 * 2:
return json_response_zh(get_json_ret(41))
plain = decrypt_ecb(request.DH_key, data).decode()
if plain[:64] != request.user.random_value1:
        return json_response_zh(get_json_ret(50, msg="随机数错误"))  # msg: "random value mismatch"
assert isinstance(request.user, UserModel)
if plain[64:] != request.user.B_pwd:
return json_response_zh(get_json_ret(50))
request.user.random_value1 = None
request.user.login_status = True
request.user.save()
return json_response_zh(get_json_ret(0))
| 32.961538
| 86
| 0.694282
|
f5b100b6564b122297d403e7152dcf1debc77fbe
| 2,058
|
py
|
Python
|
pylearn2/scripts/datasets/make_cifar10_whitened.py
|
yingzha/pylearn2
|
9132584e906d4af0f0255a9d7e6bc160fbe11e65
|
[
"BSD-3-Clause"
] | null | null | null |
pylearn2/scripts/datasets/make_cifar10_whitened.py
|
yingzha/pylearn2
|
9132584e906d4af0f0255a9d7e6bc160fbe11e65
|
[
"BSD-3-Clause"
] | null | null | null |
pylearn2/scripts/datasets/make_cifar10_whitened.py
|
yingzha/pylearn2
|
9132584e906d4af0f0255a9d7e6bc160fbe11e65
|
[
"BSD-3-Clause"
] | null | null | null |
"""
This script makes a dataset of 32x32 approximately whitened CIFAR-10 images.
"""
from __future__ import print_function
from pylearn2.utils import serial
from pylearn2.datasets import preprocessing
from pylearn2.utils import string_utils
import numpy as np
from pylearn2.datasets.cifar10 import CIFAR10
data_dir = string_utils.preprocess('${PYLEARN2_DATA_PATH}/cifar10')
print('Loading CIFAR-10 train dataset...')
train = CIFAR10(which_set = 'train')
print("Preparing output directory...")
output_dir = data_dir + '/pylearn2_whitened'
serial.mkdir( output_dir )
README = open(output_dir + '/README','w')
README.write("""
The .pkl files in this directory may be opened in python using
cPickle, pickle, or pylearn2.serial.load.
train.pkl, and test.pkl each contain
a pylearn2 Dataset object defining a labeled
dataset of a 32x32 approximately whitened version of the CIFAR-10
dataset. train.pkl contains labeled train examples. test.pkl
contains labeled test examples.
preprocessor.pkl contains a pylearn2 ZCA object that was used
to approximately whiten the images. You may want to use this
object later to preprocess other images.
They were created with the pylearn2 script make_cifar10_whitened.py.
All other files in this directory, including this README, were
created by the same script and are necessary for the other files
to function correctly.
""")
README.close()
print("Learning the preprocessor and preprocessing the unsupervised train data...")
preprocessor = preprocessing.ZCA()
train.apply_preprocessor(preprocessor = preprocessor, can_fit = True)
print('Saving the unsupervised data')
train.use_design_loc(output_dir+'/train.npy')
serial.save(output_dir + '/train.pkl', train)
print("Loading the test data")
test = CIFAR10(which_set = 'test')
print("Preprocessing the test data")
test.apply_preprocessor(preprocessor = preprocessor, can_fit = False)
print("Saving the test data")
test.use_design_loc(output_dir+'/test.npy')
serial.save(output_dir+'/test.pkl', test)
serial.save(output_dir + '/preprocessor.pkl',preprocessor)
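# Sketch (not in the original script): the saved ZCA preprocessor can later be reloaded and
# applied to other images, as the README above suggests, e.g.:
#
#     preprocessor = serial.load(output_dir + '/preprocessor.pkl')
#     other_dataset.apply_preprocessor(preprocessor=preprocessor, can_fit=False)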
| 30.716418
| 83
| 0.788144
|
85304c4699f5d0a89452771f343bb448e8a6a109
| 6,364
|
py
|
Python
|
Model.py
|
IshfaqMalik/ImageClassifier
|
672e2f0ae97bdb96720b62fb6ed013cbfc6e39fa
|
[
"MIT"
] | null | null | null |
Model.py
|
IshfaqMalik/ImageClassifier
|
672e2f0ae97bdb96720b62fb6ed013cbfc6e39fa
|
[
"MIT"
] | null | null | null |
Model.py
|
IshfaqMalik/ImageClassifier
|
672e2f0ae97bdb96720b62fb6ed013cbfc6e39fa
|
[
"MIT"
] | null | null | null |
import torch
from torch import nn, optim
from torchvision import models
from collections import OrderedDict
import torch.nn.functional as F
import time
import utils
import numpy as np
def network ( output_units, hidden_units, drop_p):
classifier = nn.Sequential(OrderedDict([
('fc1', nn.Linear(25088, hidden_units)),
('relu', nn.ReLU()),
('dropout', nn.Dropout(p = drop_p)),
('fc2', nn.Linear(hidden_units, output_units)),
('output', nn.LogSoftmax(dim=1))]))
return classifier
def train_network(model, trainloader, validloader,optimizer,criterion, epochs, gpu):
if gpu:
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
else:
device = "cpu"
model.to(device)
model.train()
# validation_step = True
print_every = 30
steps = 0
for epoch in range(epochs):
print("Epoch")
training_loss = 0
for ii, (inputs,labels) in enumerate(trainloader):
print("Step")
steps += 1
inputs, labels = inputs.to(device), labels.to(device)
optimizer.zero_grad()
logps = model.forward(inputs)
loss = criterion(logps, labels)
loss.backward()
optimizer.step()
training_loss = training_loss + loss.item()
#checking validation
if steps % print_every == 0:
valid_loss = 0
valid_accuracy = 0
model.eval()
with torch.no_grad():
for inputs, labels in validloader:
inputs, labels = inputs.to(device), labels.to(device)
logps = model.forward(inputs)
batch_loss = criterion(logps,labels)
valid_loss = valid_loss + batch_loss.item()
#checking accuracy
probs = torch.exp(logps)
top_p, top_class = probs.topk(1, dim = 1)
equals = top_class == labels.view(*top_class.shape)
valid_accuracy = valid_accuracy + torch.mean(equals.type(torch.FloatTensor)).item()
print('\nEpoch: {}/{} '.format(epoch + 1, epochs),
'\n Training:\n Loss: {:.4f} '.format(training_loss / print_every))
print("\n Validation:\n Loss: {:.4f} ".format(valid_loss / len(validloader)),
"Accuracy: {:.4f}".format(valid_accuracy / len(validloader)))
training_loss = 0
model.train()
def save_checkpoint(model,train_data,optimizer, save_dir,arch):
model_checkpoint = {'arch':arch,
'learning_rate': 0.003,
'batch_size': 64,
'classifier' : model.classifier,
'optimizer': optimizer.state_dict(),
'state_dict': model.state_dict(),
'class_to_idx': train_data.class_to_idx,}
if(save_dir == None): torch.save(model_checkpoint, 'checkpoint.pth')
else:torch.save(model_checkpoint, save_dir+'checkpoint.pth')
def load_model (file_path, gpu):
if gpu:
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
else:
device = "cpu"
checkpoint = torch.load(file_path)
learning_rate = checkpoint['learning_rate']
model = getattr(models,checkpoint['arch'])(pretrained = True)
model.classifier = checkpoint['classifier']
model.optimizer = checkpoint['optimizer']
model.load_state_dict(checkpoint['state_dict'])
model.class_to_idx = checkpoint['class_to_idx']
return model
def predict(image_path, model, topk):
cuda = torch.cuda.is_available()
if cuda:
# Move model parameters to the GPU
model.cuda()
print("Number of GPUs:", torch.cuda.device_count())
print("Device name:", torch.cuda.get_device_name(torch.cuda.device_count()-1))
else:
model.cpu()
print("We go for CPU")
# turn off dropout
model.eval()
    # transfer to tensor
image = torch.from_numpy(np.array([image_path])).float()
# The image becomes the input
if cuda:
image = image.cuda()
output = model.forward(image)
probabilities = torch.exp(output).data
    # getting the top-k probabilities and indices
# 0 -> probabilities
# 1 -> index
prob = torch.topk(probabilities, topk)[0].tolist()[0] # probabilities
index = torch.topk(probabilities, topk)[1].tolist()[0] # index
ind = []
for i in range(len(model.class_to_idx.items())):
ind.append(list(model.class_to_idx.items())[i][0])
# transfer index to label
classes = []
    for i in range(topk):  # honor the requested top-k instead of a hardcoded 5
classes.append(ind[index[i]])
return prob, classes
#Testing
def test_network (model,loader, criterion,gpu):
if gpu:
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
else:
device = "cpu"
test_loss = 0
test_accuracy = 0
model.eval()
for inputs, labels in loader:
inputs, labels = inputs.to(device), labels.to(device)
logps = model.forward(inputs)
loss = criterion(logps,labels)
test_loss = test_loss + loss.item()
probabilities = torch.exp(logps)
top_p, top_class = probabilities.topk(1, dim =1)
equals = top_class == labels.view(*top_class.shape)
test_accuracy = test_accuracy + torch.mean(equals.type(torch.FloatTensor)).item()
return test_loss/len(loader), test_accuracy/len(loader)
| 28.410714
| 107
| 0.520742
|
049277f880c2175ed3f968394651b503972b28b7
| 4,564
|
py
|
Python
|
py/ate/atet_fixture.py
|
aevri/phabricator-tools
|
ef7501bcaee83e98d168d16f64b3f73e744d3336
|
[
"Apache-2.0"
] | 150
|
2015-01-21T15:52:22.000Z
|
2021-11-09T05:53:36.000Z
|
py/ate/atet_fixture.py
|
aevri/phabricator-tools
|
ef7501bcaee83e98d168d16f64b3f73e744d3336
|
[
"Apache-2.0"
] | 72
|
2015-05-08T04:33:08.000Z
|
2017-01-27T09:37:36.000Z
|
py/ate/atet_fixture.py
|
aevri/phabricator-tools
|
ef7501bcaee83e98d168d16f64b3f73e744d3336
|
[
"Apache-2.0"
] | 38
|
2015-01-30T10:33:47.000Z
|
2021-11-09T05:53:30.000Z
|
"""Creates a fixture with Arcyd setup to perform tests."""
# =============================================================================
# CONTENTS
# -----------------------------------------------------------------------------
# atet_fixture
#
# Public Classes:
# Fixture
# .setup_arcyds
# .close
# .launch_debug_shell
# .repos
# .arcyds
# .repo_root_dir
#
# -----------------------------------------------------------------------------
# (this contents block is generated, edits will be lost)
# =============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import shutil
import subprocess
import tempfile
import phldef_conduit
import phlsys_fs
import atet_arcyd_instance
import atet_sharedrepo
class Fixture(object):
def __init__(
self,
arcyd_command,
barc_command,
arcyon_command,
phab_uri,
repo_count,
arcyd_count,
alice,
bob):
if repo_count < 1:
raise(Exception("repo_count must be 1 or more, got {}".format(
repo_count)))
if arcyd_count < 1:
raise(Exception("arcyd_count must be 1 or more, got {}".format(
arcyd_count)))
self._root_dir = tempfile.mkdtemp()
self._repo_root_dir = os.path.join(self._root_dir, 'repos')
os.makedirs(self._repo_root_dir)
self._repos = []
for i in xrange(repo_count):
repo_path = os.path.join(self._repo_root_dir, 'repo-{}'.format(i))
os.makedirs(repo_path)
self._repos.append(
atet_sharedrepo.SharedRepo(
repo_path,
barc_command,
arcyon_command,
phab_uri,
alice,
bob))
self._arcyd_root_dir = os.path.join(self._root_dir, 'arcyds')
os.makedirs(self._arcyd_root_dir)
self._arcyds = []
for i in xrange(arcyd_count):
arcyd_path = os.path.join(
self._arcyd_root_dir, 'arcyd-{}'.format(i))
os.makedirs(arcyd_path)
self._arcyds.append(atet_arcyd_instance.ArcydInstance(
arcyd_path, arcyd_command))
def setup_arcyds(self, arcyd_user, arcyd_email, arcyd_cert, phab_uri):
for arcyd in self.arcyds:
arcyd(
'init',
'--arcyd-email',
arcyd_email,
'--max-workers',
'2',
'--sleep-secs',
'1')
arcyd(
'add-phabricator',
'--name', 'localphab',
'--instance-uri', phab_uri,
'--review-url-format', phldef_conduit.REVIEW_URL_FORMAT,
'--admin-emails', 'local-phab-admin@localhost',
'--arcyd-user', arcyd_user,
'--arcyd-cert', arcyd_cert)
repo_url_format = '{}/{{}}/central'.format(self.repo_root_dir)
arcyd(
'add-repohost',
'--name', 'localdir',
'--repo-url-format', repo_url_format)
def close(self):
for r in self._repos:
r.close()
shutil.rmtree(self._root_dir)
def launch_debug_shell(self):
with phlsys_fs.chdir_context(self._root_dir):
print("Launching debug shell, exit the shell to continue ...")
subprocess.call('bash')
@property
def repos(self):
return self._repos
@property
def arcyds(self):
return self._arcyds
@property
def repo_root_dir(self):
return self._repo_root_dir
# -----------------------------------------------------------------------------
# Copyright (C) 2015 Bloomberg Finance L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------ END-OF-FILE ----------------------------------
| 31.260274
| 79
| 0.521253
|
db40e3b10c3cf63c5f1a5934fa721fd71b8fe458
| 30,631
|
py
|
Python
|
BaseTools/Source/Python/Workspace/BuildClassObject.py
|
BenjaminYou/edk2
|
fd30b0070773ac4ac5f49abca8f5b3afbeece158
|
[
"Python-2.0",
"Zlib",
"BSD-2-Clause",
"MIT",
"BSD-2-Clause-Patent",
"BSD-3-Clause"
] | 3,861
|
2019-09-04T10:10:11.000Z
|
2022-03-31T15:46:28.000Z
|
BaseTools/Source/Python/Workspace/BuildClassObject.py
|
BenjaminYou/edk2
|
fd30b0070773ac4ac5f49abca8f5b3afbeece158
|
[
"Python-2.0",
"Zlib",
"BSD-2-Clause",
"MIT",
"BSD-2-Clause-Patent",
"BSD-3-Clause"
] | 494
|
2018-09-18T19:31:55.000Z
|
2022-03-30T16:52:52.000Z
|
BaseTools/Source/Python/Workspace/BuildClassObject.py
|
BenjaminYou/edk2
|
fd30b0070773ac4ac5f49abca8f5b3afbeece158
|
[
"Python-2.0",
"Zlib",
"BSD-2-Clause",
"MIT",
"BSD-2-Clause-Patent",
"BSD-3-Clause"
] | 654
|
2019-09-05T11:42:37.000Z
|
2022-03-30T02:42:32.000Z
|
## @file
# This file is used to define each component of the build database
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
from collections import OrderedDict, namedtuple
from Common.DataType import *
import collections
import re
from collections import OrderedDict
from Common.Misc import CopyDict,ArrayIndex
import copy
import Common.EdkLogger as EdkLogger
from Common.BuildToolError import OPTION_VALUE_INVALID
from Common.caching import cached_property
StructPattern = re.compile(r'[_a-zA-Z][0-9A-Za-z_\[\]]*$')
## PcdClassObject
#
# This Class is used for PcdObject
#
# @param object: Inherited from object class
# @param Name: Input value for Name of Pcd, default is None
# @param Guid: Input value for Guid of Pcd, default is None
# @param Type: Input value for Type of Pcd, default is None
# @param DatumType: Input value for DatumType of Pcd, default is None
# @param Value: Input value for Value of Pcd, default is None
# @param Token: Input value for Token of Pcd, default is None
# @param MaxDatumSize: Input value for MaxDatumSize of Pcd, default is None
# @param SkuInfoList: Input value for SkuInfoList of Pcd, default is {}
# @param IsOverrided: Input value for IsOverrided of Pcd, default is False
# @param GuidValue: Input value for TokenSpaceGuidValue of Pcd, default is None
#
# @var TokenCName: To store value for TokenCName
# @var TokenSpaceGuidCName: To store value for TokenSpaceGuidCName
# @var Type: To store value for Type
# @var DatumType: To store value for DatumType
# @var TokenValue: To store value for TokenValue
# @var MaxDatumSize: To store value for MaxDatumSize
# @var SkuInfoList: To store value for SkuInfoList
# @var IsOverrided: To store value for IsOverrided
# @var Phase: To store value for Phase, default is "DXE"
#
class PcdClassObject(object):
def __init__(self, Name = None, Guid = None, Type = None, DatumType = None, Value = None, Token = None, MaxDatumSize = None, SkuInfoList = None, IsOverrided = False, GuidValue = None, validateranges = None, validlists = None, expressions = None, IsDsc = False, UserDefinedDefaultStoresFlag = False):
self.TokenCName = Name
self.TokenSpaceGuidCName = Guid
self.TokenSpaceGuidValue = GuidValue
self.Type = Type
self._DatumType = DatumType
self.DefaultValue = Value
self.TokenValue = Token
self.MaxDatumSize = MaxDatumSize
self.MaxSizeUserSet = None
self.SkuInfoList = SkuInfoList if SkuInfoList is not None else OrderedDict()
self.Phase = "DXE"
self.Pending = False
self.IsOverrided = IsOverrided
self.IsFromBinaryInf = False
self.IsFromDsc = False
self.validateranges = validateranges if validateranges is not None else []
self.validlists = validlists if validlists is not None else []
self.expressions = expressions if expressions is not None else []
self.DscDefaultValue = None
self.DscRawValue = {}
self.DscRawValueInfo = {}
if IsDsc:
self.DscDefaultValue = Value
self.PcdValueFromComm = ""
self.PcdValueFromFdf = ""
self.CustomAttribute = {}
self.UserDefinedDefaultStoresFlag = UserDefinedDefaultStoresFlag
self._Capacity = None
@property
def Capacity(self):
if self._Capacity is None:
self._Capacity = []
dimension = ArrayIndex.findall(self._DatumType)
for item in dimension:
maxsize = item.lstrip("[").rstrip("]").strip()
if not maxsize:
maxsize = "-1"
maxsize = str(int(maxsize,16)) if maxsize.startswith(("0x","0X")) else maxsize
self._Capacity.append(maxsize)
if hasattr(self, "SkuOverrideValues"):
for sku in self.SkuOverrideValues:
for defaultstore in self.SkuOverrideValues[sku]:
fields = self.SkuOverrideValues[sku][defaultstore]
for demesionattr in fields:
fieldinfo = fields[demesionattr]
deme = ArrayIndex.findall(demesionattr)
for i in range(len(deme)):
if int(deme[i].lstrip("[").rstrip("]").strip()) >= int(self._Capacity[i]):
if self._Capacity[i] != "-1":
firstfieldinfo = list(fieldinfo.values())[0]
EdkLogger.error('Build', OPTION_VALUE_INVALID, "For Pcd %s, Array Index exceed the Array size. From %s Line %s \n " %
(".".join((self.TokenSpaceGuidCName, self.TokenCName)), firstfieldinfo[1],firstfieldinfo[2] ))
if hasattr(self,"DefaultValues"):
for demesionattr in self.DefaultValues:
fieldinfo = self.DefaultValues[demesionattr]
deme = ArrayIndex.findall(demesionattr)
for i in range(len(deme)):
if int(deme[i].lstrip("[").rstrip("]").strip()) >= int(self._Capacity[i]):
if self._Capacity[i] != "-1":
firstfieldinfo = list(fieldinfo.values())[0]
EdkLogger.error('Build', OPTION_VALUE_INVALID, "For Pcd %s, Array Index exceed the Array size. From %s Line %s \n " %
(".".join((self.TokenSpaceGuidCName, self.TokenCName)), firstfieldinfo[1],firstfieldinfo[2] ))
return self._Capacity
def PcdArraySize(self):
if self.Capacity[-1] == "-1":
return -1
size = 1
for de in self.Capacity:
size = size * int(de)
return size
@property
def DatumType(self):
return self._DatumType
@DatumType.setter
def DatumType(self,DataType):
self._DatumType = DataType
self._Capacity = None
@property
def BaseDatumType(self):
if self.IsArray():
return self._DatumType[:self._DatumType.index("[")]
else:
return self._DatumType
def IsArray(self):
return True if len(self.Capacity) else False
def IsAggregateDatumType(self):
if self.DatumType in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:
return False
if self.IsArray() or StructPattern.match(self.DatumType):
return True
return False
def IsSimpleTypeArray(self):
if self.IsArray() and self.BaseDatumType in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, "BOOLEAN"]:
return True
return False
@staticmethod
def GetPcdMaxSizeWorker(PcdString, MaxSize):
if PcdString.startswith("{") and PcdString.endswith("}"):
return max([len(PcdString.split(",")),MaxSize])
if PcdString.startswith("\"") or PcdString.startswith("\'"):
return max([len(PcdString)-2+1,MaxSize])
if PcdString.startswith("L\""):
return max([2*(len(PcdString)-3+1),MaxSize])
return max([len(PcdString),MaxSize])
## Get the maximum number of bytes
def GetPcdMaxSize(self):
if self.DatumType in TAB_PCD_NUMERIC_TYPES:
return MAX_SIZE_TYPE[self.DatumType]
MaxSize = int(self.MaxDatumSize, 10) if self.MaxDatumSize else 0
if self.PcdValueFromFdf:
MaxSize = self.GetPcdMaxSizeWorker(self.PcdValueFromFdf,MaxSize)
if self.PcdValueFromComm:
MaxSize = self.GetPcdMaxSizeWorker(self.PcdValueFromComm,MaxSize)
if hasattr(self, "DefaultValueFromDec"):
MaxSize = self.GetPcdMaxSizeWorker(self.DefaultValueFromDec,MaxSize)
return MaxSize
## Get the number of bytes
def GetPcdSize(self):
if self.DatumType in TAB_PCD_NUMERIC_TYPES:
return MAX_SIZE_TYPE[self.DatumType]
if not self.DefaultValue:
return 1
elif self.DefaultValue[0] == 'L':
return (len(self.DefaultValue) - 2) * 2
elif self.DefaultValue[0] == '{':
return len(self.DefaultValue.split(','))
else:
return len(self.DefaultValue) - 1
## Convert the class to a string
#
# Convert each member of the class to string
# Organize to a single line format string
#
# @retval Rtn Formatted String
#
def __str__(self):
Rtn = '\tTokenCName=' + str(self.TokenCName) + ', ' + \
'TokenSpaceGuidCName=' + str(self.TokenSpaceGuidCName) + ', ' + \
'Type=' + str(self.Type) + ', ' + \
'DatumType=' + str(self.DatumType) + ', ' + \
'DefaultValue=' + str(self.DefaultValue) + ', ' + \
'TokenValue=' + str(self.TokenValue) + ', ' + \
'MaxDatumSize=' + str(self.MaxDatumSize) + ', '
for Item in self.SkuInfoList.values():
Rtn = Rtn + 'SkuId=' + Item.SkuId + ', ' + 'SkuIdName=' + Item.SkuIdName
Rtn = Rtn + ', IsOverrided=' + str(self.IsOverrided)
return Rtn
## Override __eq__ function
#
# Check whether pcds are the same
#
# @retval False The two pcds are different
# @retval True The two pcds are the same
#
def __eq__(self, Other):
return Other and self.TokenCName == Other.TokenCName and self.TokenSpaceGuidCName == Other.TokenSpaceGuidCName
## Override __hash__ function
#
# Use (TokenCName, TokenSpaceGuidCName) as key in hash table
#
# @retval truple() Key for hash table
#
def __hash__(self):
return hash((self.TokenCName, self.TokenSpaceGuidCName))
@cached_property
def _fullname(self):
return ".".join((self.TokenSpaceGuidCName,self.TokenCName))
def __lt__(self,pcd):
return self._fullname < pcd._fullname
def __gt__(self,pcd):
return self._fullname > pcd._fullname
def sharedcopy(self,new_pcd):
new_pcd.TokenCName = self.TokenCName
new_pcd.TokenSpaceGuidCName = self.TokenSpaceGuidCName
new_pcd.TokenSpaceGuidValue = self.TokenSpaceGuidValue
new_pcd.Type = self.Type
new_pcd.DatumType = self.DatumType
new_pcd.DefaultValue = self.DefaultValue
new_pcd.TokenValue = self.TokenValue
new_pcd.MaxDatumSize = self.MaxDatumSize
new_pcd.MaxSizeUserSet = self.MaxSizeUserSet
new_pcd.Phase = self.Phase
new_pcd.Pending = self.Pending
new_pcd.IsOverrided = self.IsOverrided
new_pcd.IsFromBinaryInf = self.IsFromBinaryInf
new_pcd.IsFromDsc = self.IsFromDsc
new_pcd.PcdValueFromComm = self.PcdValueFromComm
new_pcd.PcdValueFromFdf = self.PcdValueFromFdf
new_pcd.UserDefinedDefaultStoresFlag = self.UserDefinedDefaultStoresFlag
new_pcd.DscRawValue = self.DscRawValue
new_pcd.DscRawValueInfo = self.DscRawValueInfo
new_pcd.CustomAttribute = self.CustomAttribute
new_pcd.validateranges = [item for item in self.validateranges]
new_pcd.validlists = [item for item in self.validlists]
new_pcd.expressions = [item for item in self.expressions]
new_pcd.SkuInfoList = {key: copy.deepcopy(skuobj) for key,skuobj in self.SkuInfoList.items()}
return new_pcd
def __deepcopy__(self,memo):
new_pcd = PcdClassObject()
self.sharedcopy(new_pcd)
return new_pcd
class StructurePcd(PcdClassObject):
def __init__(self, StructuredPcdIncludeFile=None, Packages=None, Name=None, Guid=None, Type=None, DatumType=None, Value=None, Token=None, MaxDatumSize=None, SkuInfoList=None, IsOverrided=False, GuidValue=None, validateranges=None, validlists=None, expressions=None,default_store = TAB_DEFAULT_STORES_DEFAULT):
if SkuInfoList is None:
SkuInfoList = {}
if validateranges is None:
validateranges = []
if validlists is None:
validlists = []
if expressions is None:
expressions = []
if Packages is None:
Packages = []
super(StructurePcd, self).__init__(Name, Guid, Type, DatumType, Value, Token, MaxDatumSize, SkuInfoList, IsOverrided, GuidValue, validateranges, validlists, expressions)
self.StructuredPcdIncludeFile = [] if StructuredPcdIncludeFile is None else StructuredPcdIncludeFile
self.PackageDecs = Packages
self.DefaultStoreName = [default_store]
self.DefaultValues = OrderedDict()
self.PcdMode = None
self.SkuOverrideValues = OrderedDict()
self.StructName = None
self.PcdDefineLineNo = 0
self.PkgPath = ""
self.DefaultValueFromDec = ""
self.DefaultValueFromDecInfo = None
self.ValueChain = set()
self.PcdFieldValueFromComm = OrderedDict()
self.PcdFieldValueFromFdf = OrderedDict()
self.DefaultFromDSC=None
def __repr__(self):
return self.TypeName
def AddDefaultValue (self, FieldName, Value, FileName="", LineNo=0,DimensionAttr ="-1"):
if DimensionAttr not in self.DefaultValues:
self.DefaultValues[DimensionAttr] = collections.OrderedDict()
if FieldName in self.DefaultValues[DimensionAttr]:
del self.DefaultValues[DimensionAttr][FieldName]
self.DefaultValues[DimensionAttr][FieldName] = [Value.strip(), FileName, LineNo]
return self.DefaultValues[DimensionAttr][FieldName]
def SetDecDefaultValue(self, DefaultValue,decpath=None,lineno=None):
self.DefaultValueFromDec = DefaultValue
self.DefaultValueFromDecInfo = (decpath,lineno)
def AddOverrideValue (self, FieldName, Value, SkuName, DefaultStoreName, FileName="", LineNo=0, DimensionAttr = '-1'):
if SkuName not in self.SkuOverrideValues:
self.SkuOverrideValues[SkuName] = OrderedDict()
if DefaultStoreName not in self.SkuOverrideValues[SkuName]:
self.SkuOverrideValues[SkuName][DefaultStoreName] = OrderedDict()
if DimensionAttr not in self.SkuOverrideValues[SkuName][DefaultStoreName]:
self.SkuOverrideValues[SkuName][DefaultStoreName][DimensionAttr] = collections.OrderedDict()
if FieldName in self.SkuOverrideValues[SkuName][DefaultStoreName][DimensionAttr]:
del self.SkuOverrideValues[SkuName][DefaultStoreName][DimensionAttr][FieldName]
self.SkuOverrideValues[SkuName][DefaultStoreName][DimensionAttr][FieldName] = [Value.strip(), FileName, LineNo]
return self.SkuOverrideValues[SkuName][DefaultStoreName][DimensionAttr][FieldName]
def SetPcdMode (self, PcdMode):
self.PcdMode = PcdMode
def copy(self, PcdObject):
self.TokenCName = PcdObject.TokenCName if PcdObject.TokenCName else self.TokenCName
self.TokenSpaceGuidCName = PcdObject.TokenSpaceGuidCName if PcdObject.TokenSpaceGuidCName else PcdObject.TokenSpaceGuidCName
self.TokenSpaceGuidValue = PcdObject.TokenSpaceGuidValue if PcdObject.TokenSpaceGuidValue else self.TokenSpaceGuidValue
self.Type = PcdObject.Type if PcdObject.Type else self.Type
self._DatumType = PcdObject.DatumType if PcdObject.DatumType else self.DatumType
self.DefaultValue = PcdObject.DefaultValue if PcdObject.DefaultValue else self.DefaultValue
self.TokenValue = PcdObject.TokenValue if PcdObject.TokenValue else self.TokenValue
self.MaxDatumSize = PcdObject.MaxDatumSize if PcdObject.MaxDatumSize else self.MaxDatumSize
self.SkuInfoList = PcdObject.SkuInfoList if PcdObject.SkuInfoList else self.SkuInfoList
self.Phase = PcdObject.Phase if PcdObject.Phase else self.Phase
self.Pending = PcdObject.Pending if PcdObject.Pending else self.Pending
self.IsOverrided = PcdObject.IsOverrided if PcdObject.IsOverrided else self.IsOverrided
self.IsFromBinaryInf = PcdObject.IsFromBinaryInf if PcdObject.IsFromBinaryInf else self.IsFromBinaryInf
self.IsFromDsc = PcdObject.IsFromDsc if PcdObject.IsFromDsc else self.IsFromDsc
self.validateranges = PcdObject.validateranges if PcdObject.validateranges else self.validateranges
self.validlists = PcdObject.validlists if PcdObject.validlists else self.validlists
self.expressions = PcdObject.expressions if PcdObject.expressions else self.expressions
self.DscRawValue = PcdObject.DscRawValue if PcdObject.DscRawValue else self.DscRawValue
self.DscRawValueInfo = PcdObject.DscRawValueInfo if PcdObject.DscRawValueInfo else self.DscRawValueInfo
self.PcdValueFromComm = PcdObject.PcdValueFromComm if PcdObject.PcdValueFromComm else self.PcdValueFromComm
self.PcdValueFromFdf = PcdObject.PcdValueFromFdf if PcdObject.PcdValueFromFdf else self.PcdValueFromFdf
self.CustomAttribute = PcdObject.CustomAttribute if PcdObject.CustomAttribute else self.CustomAttribute
self.UserDefinedDefaultStoresFlag = PcdObject.UserDefinedDefaultStoresFlag if PcdObject.UserDefinedDefaultStoresFlag else self.UserDefinedDefaultStoresFlag
if isinstance(PcdObject, StructurePcd):
self.StructuredPcdIncludeFile = PcdObject.StructuredPcdIncludeFile if PcdObject.StructuredPcdIncludeFile else self.StructuredPcdIncludeFile
self.PackageDecs = PcdObject.PackageDecs if PcdObject.PackageDecs else self.PackageDecs
self.DefaultValues = PcdObject.DefaultValues if PcdObject.DefaultValues else self.DefaultValues
self.PcdMode = PcdObject.PcdMode if PcdObject.PcdMode else self.PcdMode
self.DefaultValueFromDec = PcdObject.DefaultValueFromDec if PcdObject.DefaultValueFromDec else self.DefaultValueFromDec
self.DefaultValueFromDecInfo = PcdObject.DefaultValueFromDecInfo if PcdObject.DefaultValueFromDecInfo else self.DefaultValueFromDecInfo
self.SkuOverrideValues = PcdObject.SkuOverrideValues if PcdObject.SkuOverrideValues else self.SkuOverrideValues
self.StructName = PcdObject.DatumType if PcdObject.DatumType else self.StructName
self.PcdDefineLineNo = PcdObject.PcdDefineLineNo if PcdObject.PcdDefineLineNo else self.PcdDefineLineNo
self.PkgPath = PcdObject.PkgPath if PcdObject.PkgPath else self.PkgPath
self.ValueChain = PcdObject.ValueChain if PcdObject.ValueChain else self.ValueChain
self.PcdFieldValueFromComm = PcdObject.PcdFieldValueFromComm if PcdObject.PcdFieldValueFromComm else self.PcdFieldValueFromComm
self.PcdFieldValueFromFdf = PcdObject.PcdFieldValueFromFdf if PcdObject.PcdFieldValueFromFdf else self.PcdFieldValueFromFdf
def __deepcopy__(self,memo):
new_pcd = StructurePcd()
self.sharedcopy(new_pcd)
new_pcd.DefaultValueFromDec = self.DefaultValueFromDec
new_pcd.DefaultValueFromDecInfo = self.DefaultValueFromDecInfo
new_pcd.PcdMode = self.PcdMode
new_pcd.StructName = self.DatumType
new_pcd.PcdDefineLineNo = self.PcdDefineLineNo
new_pcd.PkgPath = self.PkgPath
new_pcd.StructuredPcdIncludeFile = [item for item in self.StructuredPcdIncludeFile]
new_pcd.PackageDecs = [item for item in self.PackageDecs]
new_pcd.DefaultValues = CopyDict(self.DefaultValues)
new_pcd.DefaultFromDSC=CopyDict(self.DefaultFromDSC)
new_pcd.SkuOverrideValues = CopyDict(self.SkuOverrideValues)
new_pcd.PcdFieldValueFromComm = CopyDict(self.PcdFieldValueFromComm)
new_pcd.PcdFieldValueFromFdf = CopyDict(self.PcdFieldValueFromFdf)
new_pcd.ValueChain = {item for item in self.ValueChain}
return new_pcd
LibraryClassObject = namedtuple('LibraryClassObject', ['LibraryClass','SupModList'])
## ModuleBuildClassObject
#
# This Class defines ModuleBuildClass
#
# @param object: Inherited from object class
#
# @var MetaFile: To store value for module meta file path
# @var BaseName: To store value for BaseName
# @var ModuleType: To store value for ModuleType
# @var Guid: To store value for Guid
# @var Version: To store value for Version
# @var PcdIsDriver: To store value for PcdIsDriver
# @var BinaryModule: To store value for BinaryModule
# @var CustomMakefile: To store value for CustomMakefile
# @var Specification: To store value for Specification
# @var Shadow To store value for Shadow
# @var LibraryClass: To store value for LibraryClass, it is a list structure as
# [ LibraryClassObject, ...]
# @var ModuleEntryPointList: To store value for ModuleEntryPointList
# @var ModuleUnloadImageList: To store value for ModuleUnloadImageList
# @var ConstructorList: To store value for ConstructorList
# @var DestructorList: To store value for DestructorList
# @var Binaries: To store value for Binaries, it is a list structure as
# [ ModuleBinaryClassObject, ...]
# @var Sources: To store value for Sources, it is a list structure as
# [ ModuleSourceFilesClassObject, ... ]
# @var LibraryClasses: To store value for LibraryClasses, it is a set structure as
# { [LibraryClassName, ModuleType] : LibraryClassInfFile }
# @var Protocols: To store value for Protocols, it is a list structure as
# [ ProtocolName, ... ]
# @var Ppis: To store value for Ppis, it is a list structure as
# [ PpiName, ... ]
# @var Guids: To store value for Guids, it is a list structure as
# [ GuidName, ... ]
# @var Includes: To store value for Includes, it is a list structure as
# [ IncludePath, ... ]
# @var Packages: To store value for Packages, it is a list structure as
# [ DecFileName, ... ]
# @var Pcds: To store value for Pcds, it is a set structure as
# { [(PcdCName, PcdGuidCName)] : PcdClassObject}
# @var BuildOptions: To store value for BuildOptions, it is a set structure as
# { [BuildOptionKey] : BuildOptionValue}
# @var Depex: To store value for Depex
#
class ModuleBuildClassObject(object):
def __init__(self):
self.AutoGenVersion = 0
self.MetaFile = ''
self.BaseName = ''
self.ModuleType = ''
self.Guid = ''
self.Version = ''
self.PcdIsDriver = ''
self.BinaryModule = ''
self.Shadow = ''
self.CustomMakefile = {}
self.Specification = {}
self.LibraryClass = []
self.ModuleEntryPointList = []
self.ModuleUnloadImageList = []
self.ConstructorList = []
self.DestructorList = []
self.Binaries = []
self.Sources = []
self.LibraryClasses = OrderedDict()
self.Libraries = []
self.Protocols = []
self.Ppis = []
self.Guids = []
self.Includes = []
self.Packages = []
self.Pcds = {}
self.BuildOptions = {}
self.Depex = {}
## Convert the class to a string
#
# Convert member MetaFile of the class to a string
#
# @retval string Formatted String
#
def __str__(self):
return str(self.MetaFile)
## Override __eq__ function
#
# Check whether ModuleBuildClassObjects are the same
#
# @retval False The two ModuleBuildClassObjects are different
# @retval True The two ModuleBuildClassObjects are the same
#
def __eq__(self, Other):
return self.MetaFile == Other
## Override __hash__ function
#
# Use MetaFile as key in hash table
#
# @retval string Key for hash table
#
def __hash__(self):
return hash(self.MetaFile)
## PackageBuildClassObject
#
# This Class defines PackageBuildClass
#
# @param object: Inherited from object class
#
# @var MetaFile: To store value for package meta file path
# @var PackageName: To store value for PackageName
# @var Guid: To store value for Guid
# @var Version: To store value for Version
# @var Protocols: To store value for Protocols, it is a set structure as
# { [ProtocolName] : Protocol Guid, ... }
# @var Ppis: To store value for Ppis, it is a set structure as
# { [PpiName] : Ppi Guid, ... }
# @var Guids: To store value for Guids, it is a set structure as
# { [GuidName] : Guid, ... }
# @var Includes: To store value for Includes, it is a list structure as
# [ IncludePath, ... ]
# @var LibraryClasses: To store value for LibraryClasses, it is a set structure as
# { [LibraryClassName] : LibraryClassInfFile }
# @var Pcds: To store value for Pcds, it is a set structure as
# { [(PcdCName, PcdGuidCName)] : PcdClassObject}
#
class PackageBuildClassObject(object):
def __init__(self):
self.MetaFile = ''
self.PackageName = ''
self.Guid = ''
self.Version = ''
self.Protocols = {}
self.Ppis = {}
self.Guids = {}
self.Includes = []
self.LibraryClasses = {}
self.Pcds = {}
## Convert the class to a string
#
# Convert member MetaFile of the class to a string
#
# @retval string Formatted String
#
def __str__(self):
return str(self.MetaFile)
## Override __eq__ function
#
# Check whether PackageBuildClassObjects are the same
#
# @retval False The two PackageBuildClassObjects are different
# @retval True The two PackageBuildClassObjects are the same
#
def __eq__(self, Other):
return self.MetaFile == Other
## Override __hash__ function
#
# Use MetaFile as key in hash table
#
# @retval string Key for hash table
#
def __hash__(self):
return hash(self.MetaFile)
## PlatformBuildClassObject
#
# This Class defines PlatformBuildClass
#
# @param object: Inherited from object class
#
# @var MetaFile: To store value for platform meta-file path
# @var PlatformName: To store value for PlatformName
# @var Guid: To store value for Guid
# @var Version: To store value for Version
# @var DscSpecification: To store value for DscSpecification
# @var OutputDirectory: To store value for OutputDirectory
# @var FlashDefinition: To store value for FlashDefinition
# @var BuildNumber: To store value for BuildNumber
# @var MakefileName: To store value for MakefileName
# @var SkuIds: To store value for SkuIds, it is a set structure as
# { 'SkuName' : SkuId, '!include' : includefilename, ...}
# @var Modules: To store value for Modules, it is a list structure as
# [ InfFileName, ... ]
# @var Libraries: To store value for Libraries, it is a list structure as
# [ InfFileName, ... ]
# @var LibraryClasses: To store value for LibraryClasses, it is a set structure as
# { (LibraryClassName, ModuleType) : LibraryClassInfFile }
# @var Pcds: To store value for Pcds, it is a set structure as
# { [(PcdCName, PcdGuidCName)] : PcdClassObject }
# @var BuildOptions: To store value for BuildOptions, it is a set structure as
# { [BuildOptionKey] : BuildOptionValue }
#
class PlatformBuildClassObject(object):
def __init__(self):
self.MetaFile = ''
self.PlatformName = ''
self.Guid = ''
self.Version = ''
self.DscSpecification = ''
self.OutputDirectory = ''
self.FlashDefinition = ''
self.BuildNumber = ''
self.MakefileName = ''
self.SkuIds = {}
self.Modules = []
self.LibraryInstances = []
self.LibraryClasses = {}
self.Libraries = {}
self.Pcds = {}
self.BuildOptions = {}
## Convert the class to a string
#
# Convert member MetaFile of the class to a string
#
# @retval string Formatted String
#
def __str__(self):
return str(self.MetaFile)
## Override __eq__ function
#
# Check whether PlatformBuildClassObjects are the same
#
# @retval False The two PlatformBuildClassObjects are different
# @retval True The two PlatformBuildClassObjects are the same
#
def __eq__(self, Other):
return self.MetaFile == Other
## Override __hash__ function
#
# Use MetaFile as key in hash table
#
# @retval string Key for hash table
#
def __hash__(self):
return hash(self.MetaFile)
| 48.237795
| 314
| 0.617512
|
e1aa462135298655b2a2d2afc1e759fd214cae73
| 5,907
|
py
|
Python
|
release/stubs.min/System/ComponentModel/__init___parts/CustomTypeDescriptor.py
|
tranconbv/ironpython-stubs
|
a601759e6c6819beff8e6b639d18a24b7e351851
|
[
"MIT"
] | null | null | null |
release/stubs.min/System/ComponentModel/__init___parts/CustomTypeDescriptor.py
|
tranconbv/ironpython-stubs
|
a601759e6c6819beff8e6b639d18a24b7e351851
|
[
"MIT"
] | null | null | null |
release/stubs.min/System/ComponentModel/__init___parts/CustomTypeDescriptor.py
|
tranconbv/ironpython-stubs
|
a601759e6c6819beff8e6b639d18a24b7e351851
|
[
"MIT"
] | null | null | null |
class CustomTypeDescriptor(object):
""" Provides a simple default implementation of the System.ComponentModel.ICustomTypeDescriptor interface. """
def ZZZ(self):
"""hardcoded/mock instance of the class"""
return CustomTypeDescriptor()
instance=ZZZ()
"""hardcoded/returns an instance of the class"""
def GetAttributes(self):
"""
GetAttributes(self: CustomTypeDescriptor) -> AttributeCollection
Returns a collection of custom attributes for the type represented by this type descriptor.
Returns: An System.ComponentModel.AttributeCollection containing the attributes for the type. The default is System.ComponentModel.AttributeCollection.Empty.
"""
pass
def GetClassName(self):
"""
GetClassName(self: CustomTypeDescriptor) -> str
Returns the fully qualified name of the class represented by this type descriptor.
Returns: A System.String containing the fully qualified class name of the type this type descriptor is describing. The default is null.
"""
pass
def GetComponentName(self):
"""
GetComponentName(self: CustomTypeDescriptor) -> str
Returns the name of the class represented by this type descriptor.
Returns: A System.String containing the name of the component instance this type descriptor is describing. The default is null.
"""
pass
def GetConverter(self):
"""
GetConverter(self: CustomTypeDescriptor) -> TypeConverter
Returns a type converter for the type represented by this type descriptor.
Returns: A System.ComponentModel.TypeConverter for the type represented by this type descriptor. The default is a newly created System.ComponentModel.TypeConverter.
"""
pass
def GetDefaultEvent(self):
"""
GetDefaultEvent(self: CustomTypeDescriptor) -> EventDescriptor
Returns the event descriptor for the default event of the object represented by this type descriptor.
Returns: The System.ComponentModel.EventDescriptor for the default event on the object represented by this type descriptor. The default is null.
"""
pass
def GetDefaultProperty(self):
"""
GetDefaultProperty(self: CustomTypeDescriptor) -> PropertyDescriptor
Returns the property descriptor for the default property of the object represented by this type descriptor.
Returns: A System.ComponentModel.PropertyDescriptor for the default property on the object represented by this type descriptor. The default is null.
"""
pass
def GetEditor(self,editorBaseType):
"""
GetEditor(self: CustomTypeDescriptor,editorBaseType: Type) -> object
Returns an editor of the specified type that is to be associated with the class represented by this type descriptor.
editorBaseType: The base type of the editor to retrieve.
Returns: An editor of the given type that is to be associated with the class represented by this type descriptor. The default is null.
"""
pass
def GetEvents(self,attributes=None):
"""
GetEvents(self: CustomTypeDescriptor) -> EventDescriptorCollection
Returns a collection of event descriptors for the object represented by this type descriptor.
Returns: An System.ComponentModel.EventDescriptorCollection containing the event descriptors for the object represented by this type descriptor. The default is
System.ComponentModel.EventDescriptorCollection.Empty.
GetEvents(self: CustomTypeDescriptor,attributes: Array[Attribute]) -> EventDescriptorCollection
Returns a filtered collection of event descriptors for the object represented by this type descriptor.
attributes: An array of attributes to use as a filter. This can be null.
Returns: An System.ComponentModel.EventDescriptorCollection containing the event descriptions for the object represented by this type descriptor. The default is
System.ComponentModel.EventDescriptorCollection.Empty.
"""
pass
def GetProperties(self,attributes=None):
"""
GetProperties(self: CustomTypeDescriptor) -> PropertyDescriptorCollection
Returns a collection of property descriptors for the object represented by this type descriptor.
Returns: A System.ComponentModel.PropertyDescriptorCollection containing the property descriptions for the object represented by this type descriptor. The default is
System.ComponentModel.PropertyDescriptorCollection.Empty.
GetProperties(self: CustomTypeDescriptor,attributes: Array[Attribute]) -> PropertyDescriptorCollection
Returns a filtered collection of property descriptors for the object represented by this type descriptor.
attributes: An array of attributes to use as a filter. This can be null.
Returns: A System.ComponentModel.PropertyDescriptorCollection containing the property descriptions for the object represented by this type descriptor. The default is
System.ComponentModel.PropertyDescriptorCollection.Empty.
"""
pass
def GetPropertyOwner(self,pd):
"""
GetPropertyOwner(self: CustomTypeDescriptor,pd: PropertyDescriptor) -> object
Returns an object that contains the property described by the specified property descriptor.
pd: The property descriptor for which to retrieve the owning object.
Returns: An System.Object that owns the given property specified by the type descriptor. The default is null.
"""
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
@staticmethod
def __new__(self,*args): #cannot find CLR constructor
"""
__new__(cls: type)
__new__(cls: type,parent: ICustomTypeDescriptor)
"""
pass
def __repr__(self,*args):
""" __repr__(self: object) -> str """
pass
| 48.02439
| 215
| 0.753851
|
4f454830864ff998ab86e6e4881796a310a53cba
| 2,859
|
py
|
Python
|
103_edge_filters.py
|
Data-Laboratory/WorkExamples
|
27e58207e664da7813673e6792c0c30c0a5bf74c
|
[
"MIT"
] | 1
|
2021-12-15T22:27:27.000Z
|
2021-12-15T22:27:27.000Z
|
103_edge_filters.py
|
Data-Laboratory/WorkExamples
|
27e58207e664da7813673e6792c0c30c0a5bf74c
|
[
"MIT"
] | null | null | null |
103_edge_filters.py
|
Data-Laboratory/WorkExamples
|
27e58207e664da7813673e6792c0c30c0a5bf74c
|
[
"MIT"
] | null | null | null |
#Ref: Sreenivas Sarwar Anik
"""
Roberts
The idea behind the Roberts cross operator is to approximate the gradient of an
image through discrete differentiation which is achieved by computing the sum of the squares of the
differences between diagonally adjacent pixels. It highlights regions of high spatial gradient which often
correspond to edges.
Sobel:
Similar to Roberts - calculates gradient of the image.
The operator uses two 3×3 kernels which are convolved with the original image to calculate
approximations of the derivatives – one for horizontal changes, and one for vertical.
Scharr:
Typically used to identify gradients along the x-axis (dx = 1, dy = 0) and y-axis (dx = 0,
dy = 1) independently. Performance is quite similar to Sobel filter.
Prewitt:
The Prewitt operator is based on convolving
the image with a small, separable, and integer valued filter in horizontal and vertical directions and is
therefore relatively inexpensive in terms of computation, like the Sobel operator.
Farid:
Farid and Simoncelli propose to use a pair of kernels, one for interpolation and another for
differentiation (similar to Sobel). These kernels, of fixed sizes 5 x 5 and 7 x 7, are optimized so
that the Fourier transform approximates their correct derivative relationship.
Canny:
The process of the Canny edge detection algorithm can be broken down into 5 steps:
1. Apply Gaussian filter to smooth the image in order to remove the noise
2. Find the intensity gradients of the image
3. Apply non-maximum suppression to get rid of spurious response to edge detection
4. Apply double threshold to determine potential edges (supplied by the user)
5. Track edge by hysteresis: Finalize the detection of edges by suppressing all the other edges that
are weak and not connected to strong edges.
"""
from skimage import io, filters, feature
import matplotlib.pyplot as plt
from skimage.color import rgb2gray
import cv2
import numpy as np
img = cv2.imread('images/BSE.jpg', 0)
"""
#Edge detection
from skimage.filters import roberts, sobel, scharr, prewitt, farid
roberts_img = roberts(img)
sobel_img = sobel(img)
scharr_img = scharr(img)
prewitt_img = prewitt(img)
farid_img = farid(img)
cv2.imshow("Roberts", roberts_img)
cv2.imshow("Sobel", sobel_img)
cv2.imshow("Scharr", scharr_img)
cv2.imshow("Prewitt", prewitt_img)
cv2.imshow("Farid", farid_img)
cv2.waitKey(0)
cv2.destroyAllWindows()
"""
#Canny
canny_edge = cv2.Canny(img,50,80)
#Autocanny
sigma = 0.3
median = np.median(img)
# apply automatic Canny edge detection using the computed median
lower = int(max(0, (1.0 - sigma) * median))
upper = int(min(255, (1.0 + sigma) * median))
auto_canny = cv2.Canny(img, lower, upper)
cv2.imshow("Canny", canny_edge)
cv2.imshow("Auto Canny", auto_canny)
cv2.waitKey(0)
cv2.destroyAllWindows()
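# A minimal sketch (not part of the original script) that wraps the automatic
# Canny thresholding above into a reusable helper; the function name and the
# default sigma value are illustrative assumptions only.
def auto_canny_edges(image, sigma=0.3):
    """Return cv2.Canny edges with thresholds derived from the image median."""
    median = np.median(image)
    lower = int(max(0, (1.0 - sigma) * median))
    upper = int(min(255, (1.0 + sigma) * median))
    return cv2.Canny(image, lower, upper)
# Usage: edges = auto_canny_edges(img)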
| 31.766667
| 106
| 0.765652
|
445d5729b04195dd7f9af832eda61dfc6ee3255d
| 458
|
py
|
Python
|
data/scripts/templates/object/tangible/dungeon/shared_terminal_free_s1.py
|
obi-two/GameServer
|
7d37024e2291a97d49522610cd8f1dbe5666afc2
|
[
"MIT"
] | 20
|
2015-02-23T15:11:56.000Z
|
2022-03-18T20:56:48.000Z
|
data/scripts/templates/object/tangible/dungeon/shared_terminal_free_s1.py
|
apathyboy/swganh
|
665128efe9154611dec4cb5efc61d246dd095984
|
[
"MIT"
] | null | null | null |
data/scripts/templates/object/tangible/dungeon/shared_terminal_free_s1.py
|
apathyboy/swganh
|
665128efe9154611dec4cb5efc61d246dd095984
|
[
"MIT"
] | 20
|
2015-04-04T16:35:59.000Z
|
2022-03-24T14:54:37.000Z
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/dungeon/shared_terminal_free_s1.iff"
result.attribute_template_id = -1
result.stfName("terminal_name","terminal_free_s1")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
| 26.941176
| 72
| 0.733624
|
3ad073c9ff62a482c353a6b06528f1efbe4aa173
| 18,119
|
py
|
Python
|
trt_pose/coco.py
|
tucachmo2202/trt_pose
|
b847fc197c32219dc2d719c2b42906603da0988a
|
[
"MIT"
] | null | null | null |
trt_pose/coco.py
|
tucachmo2202/trt_pose
|
b847fc197c32219dc2d719c2b42906603da0988a
|
[
"MIT"
] | null | null | null |
trt_pose/coco.py
|
tucachmo2202/trt_pose
|
b847fc197c32219dc2d719c2b42906603da0988a
|
[
"MIT"
] | null | null | null |
import torch
import torch.utils.data
import torch.nn
import os
import PIL.Image
import json
import tqdm
import trt_pose
import trt_pose.plugins
import glob
import torchvision.transforms.functional as FT
import numpy as np
from trt_pose.parse_objects import ParseObjects
import pycocotools
import pycocotools.coco
import pycocotools.cocoeval
import torchvision
''' skeleton: [[16, 14], [14, 12], [17, 15], [15, 13], [12, 13], [6, 8], [7, 9], [8, 10], [9, 11], [2, 3], [1, 2], [1, 3], [2, 4], [3, 5], [4, 6], [5, 7], [18, 1], [18, 6], [18, 7], [18, 12], [18, 13]]
topology: tensor([[ 0, 1, 15, 13],
[ 2, 3, 13, 11],
[ 4, 5, 16, 14],
[ 6, 7, 14, 12],
[ 8, 9, 11, 12],
[10, 11, 5, 7],
[12, 13, 6, 8],
[14, 15, 7, 9],
[16, 17, 8, 10],
[18, 19, 1, 2],
[20, 21, 0, 1],
[22, 23, 0, 2],
[24, 25, 1, 3],
[26, 27, 2, 4],
[28, 29, 3, 5],
[30, 31, 4, 6],
[32, 33, 17, 0],
[34, 35, 17, 5],
[36, 37, 17, 6],
[38, 39, 17, 11],
[40, 41, 17, 12]], dtype=torch.int32)
In the topology tensor, each entry [k_i, k_j, c_a, c_b] gives the two PAF channels for the i and j directions,
and c_a and c_b are the part types of the limb.
connections - int - NxKx2xM, the connection graph.
For a connection of limb type 1 with source part index 5, the destination part index (3) is stored at connections[0][1][0][5],
and for a connection of limb type 1 with destination part index 3, the source part index (5) is stored at connections[0][1][1][3].
The graph representation is redundant; both directions are stored so that lookups take constant time. If
no connection exists for a source or destination part, the entry is -1.
'''
def coco_category_to_topology(coco_category):
"""Gets topology tensor from a COCO category
"""
skeleton = coco_category['skeleton']
K = len(skeleton)
topology = torch.zeros((K, 4)).int()
for k in range(K):
topology[k][0] = 2 * k
topology[k][1] = 2 * k + 1
topology[k][2] = skeleton[k][0] - 1
topology[k][3] = skeleton[k][1] - 1
return topology
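# Illustration (hypothetical category, not from the original file): for a tiny
# skeleton with two limbs, coco_category_to_topology assigns each limb a pair of
# PAF channels (2*k, 2*k + 1) and converts the 1-based keypoint ids to 0-based:
#
#     cat = {'skeleton': [[1, 2], [2, 3]]}
#     coco_category_to_topology(cat)
#     # tensor([[0, 1, 0, 1],
#     #         [2, 3, 1, 2]], dtype=torch.int32)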
# Returns the names of the keypoints
def coco_category_to_parts(coco_category):
"""Gets list of parts name from a COCO category
"""
return coco_category['keypoints']
def coco_annotations_to_tensors(coco_annotations,
image_shape,
parts,
topology,
max_count=100):
"""Gets tensors corresponding to peak counts, peak coordinates, and peak to peak connections
"""
annotations = coco_annotations
C = len(parts) # C = 17
K = topology.shape[0]
M = max_count
    IH = image_shape[0]  # image_shape is the shape of the original image
IW = image_shape[1]
counts = torch.zeros((C)).int()
peaks = torch.zeros((C, M, 2)).float()
visibles = torch.zeros((len(annotations), C)).int()
# connections = -torch.ones((K, 2, M)).int()
for ann_idx, ann in enumerate(annotations):
kps = ann['keypoints']
# add visible peaks
for c in range(C):
x = kps[c * 3]
y = kps[c * 3 + 1]
visible = kps[c * 3 + 2]
if visible:
peaks[c][counts[c]][0] = (float(y) + 0.5) / (IH + 1.0)
peaks[c][counts[c]][1] = (float(x) + 0.5) / (IW + 1.0)
counts[c] = counts[c] + 1
visibles[ann_idx][c] = 1
# for k in range(K):
# c_a = topology[k][2]
# c_b = topology[k][3]
# if visibles[ann_idx][c_a] and visibles[ann_idx][c_b]:
# connections[k][0][counts[c_a] - 1] = counts[c_b] - 1
# connections[k][1][counts[c_b] - 1] = counts[c_a] - 1
return counts, peaks
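# Note: for an image of height IH and width IW, a visible keypoint at pixel
# (x, y) is stored as the normalized (row, col) pair
# ((y + 0.5) / (IH + 1.0), (x + 0.5) / (IW + 1.0)), so peaks[c][m] holds up to
# max_count normalized locations for part type c and counts[c] how many are set.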
def coco_annotations_to_mask_bbox(coco_annotations, image_shape):
    mask = np.ones(image_shape, dtype=np.uint8)  # mask is initialized as an array of ones
    # COCO bboxes have the format [top-left x, top-left y, width, height]
    for ann in coco_annotations:
        if 'num_keypoints' not in ann or ann['num_keypoints'] == 0:  # if the annotation has no keypoints, zero out the mask inside its bbox
bbox = ann['bbox']
x0 = round(bbox[0])
y0 = round(bbox[1])
x1 = round(x0 + bbox[2])
y1 = round(y0 + bbox[3])
mask[y0:y1, x0:x1] = 0
return mask
def convert_dir_to_bmp(output_dir, input_dir):
files = glob.glob(os.path.join(input_dir, '*.jpg'))
for f in files:
new_path = os.path.join(
output_dir,
os.path.splitext(os.path.basename(f))[0] + '.bmp')
img = PIL.Image.open(f)
img.save(new_path)
def get_quad(angle, translation, scale, aspect_ratio=1.0):
if aspect_ratio > 1.0:
# width > height =>
# increase height region
quad = np.array([
[0.0, 0.5 - 0.5 * aspect_ratio],
[0.0, 0.5 + 0.5 * aspect_ratio],
[1.0, 0.5 + 0.5 * aspect_ratio],
[1.0, 0.5 - 0.5 * aspect_ratio],
])
elif aspect_ratio < 1.0:
# width < height
quad = np.array([
[0.5 - 0.5 / aspect_ratio, 0.0],
[0.5 - 0.5 / aspect_ratio, 1.0],
[0.5 + 0.5 / aspect_ratio, 1.0],
[0.5 + 0.5 / aspect_ratio, 0.0],
])
else:
quad = np.array([
[0.0, 0.0],
[0.0, 1.0],
[1.0, 1.0],
[1.0, 0.0],
])
quad -= 0.5
R = np.array([
[np.cos(angle), -np.sin(angle)],
[np.sin(angle), np.cos(angle)]
])
quad = np.dot(quad, R)
quad -= np.array(translation)
quad /= scale
quad += 0.5
return quad
def transform_image(image, size, quad):
new_quad = np.zeros_like(quad)
new_quad[:, 0] = quad[:, 0] * image.size[0]
new_quad[:, 1] = quad[:, 1] * image.size[1]
new_quad = (new_quad[0][0], new_quad[0][1],
new_quad[1][0], new_quad[1][1],
new_quad[2][0], new_quad[2][1],
new_quad[3][0], new_quad[3][1])
return image.transform(size, PIL.Image.QUAD, new_quad)
def transform_points_xy(points, quad):
p00 = quad[0]
p01 = quad[1] - p00
p10 = quad[3] - p00
p01 /= np.sum(p01**2)
p10 /= np.sum(p10**2)
A = np.array([
p10,
p01,
]).transpose()
return np.dot(points - p00, A)
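# Note: transform_points_xy expresses points in the local frame spanned by the
# quad's edges, so a point at quad[0] maps to (0, 0) and a point at the opposite
# corner quad[2] maps to (1, 1) when the quad is an axis-aligned unit square.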
def transform_peaks(counts, peaks, quad):
newpeaks = peaks.clone().numpy()
C = counts.shape[0]
for c in range(C):
count = int(counts[c])
newpeaks[c][0:count] = transform_points_xy(newpeaks[c][0:count][:, ::-1], quad)[:, ::-1]
return torch.from_numpy(newpeaks)
class CocoDataset(torch.utils.data.Dataset):
def __init__(self,
images_dir,
annotations_file,
category_name,
image_shape,
target_shape,
is_bmp=False,
stdev=0.02,
use_crowd=True,
min_area=0.0,
max_area=1.0,
max_part_count=100,
random_angle=(0.0, 0.0),
random_scale=(1.0, 1.0),
random_translate=(0.0, 0.0),
transforms=None,
                 keep_aspect_ratio=False):  # the keep_aspect_ratio parameter indicates that the image will be resized with padding(?)
self.keep_aspect_ratio = keep_aspect_ratio
self.transforms=transforms
self.is_bmp = is_bmp
self.images_dir = images_dir
self.image_shape = image_shape
self.target_shape = target_shape
self.stdev = stdev
self.random_angle = random_angle
self.random_scale = random_scale
self.random_translate = random_translate
tensor_cache_file = annotations_file + '.cache'
if tensor_cache_file is not None and os.path.exists(tensor_cache_file):
print('Cachefile found. Loading from cache file...')
cache = torch.load(tensor_cache_file)
self.counts = cache['counts']
self.peaks = cache['peaks']
# self.connections = cache['connections']
self.topology = cache['topology']
self.parts = cache['parts']
self.filenames = cache['filenames']
self.samples = cache['samples']
return
with open(annotations_file, 'r') as f:
data = json.load(f)
cat = [c for c in data['categories'] if c['name'] == category_name][0]
cat_id = cat['id']
img_map = {}
for img in data['images']:
img_map[img['id']] = img
'''
        The "images" field contains entries like the example below:
"images": [
{
"id": 0,
"license": 1,
"file_name": "0001.jpg",
"height": 275,
"width": 490,
"date_captured": "2020-07-20T19:39:26+00:00"
}
],
'''
samples = {}
for ann in data['annotations']:
# filter by category
            new_ann = dict(ann)  # shallow copy of the annotation
            new_ann['image'] = img_map[ann['image_id']]  # attach the image metadata
if ann['category_id'] != cat_id:
continue
# filter by crowd
if not use_crowd and ann['iscrowd']:
continue
            img_id = ann['image_id']
            img = img_map[img_id]
            height = img['height']
            width = img['width']
area = ann['area']
# filter by object area
normalized_area = float(area) / float(height * width)
if normalized_area < min_area or normalized_area > max_area:
continue
# add metadata
if img_id not in samples:
sample = {}
sample['img'] = img
sample['anns'] = [ann]
samples[img_id] = sample
else:
samples[img_id]['anns'] += [ann]
# generate tensors
self.topology = coco_category_to_topology(cat)
self.parts = coco_category_to_parts(cat)
N = len(samples)
C = len(self.parts)
K = self.topology.shape[0]
M = max_part_count
print('Generating intermediate tensors...')
self.counts = torch.zeros((N, C), dtype=torch.int32)
self.peaks = torch.zeros((N, C, M, 2), dtype=torch.float32)
# self.connections = torch.zeros((N, K, 2, M), dtype=torch.int32)
self.filenames = []
self.samples = []
for i, sample in tqdm.tqdm(enumerate(samples.values())):
filename = sample['img']['file_name']
self.filenames.append(filename)
image_shape = (sample['img']['height'], sample['img']['width'])
counts_i, peaks_i= coco_annotations_to_tensors(
sample['anns'], image_shape, self.parts, self.topology)
self.counts[i] = counts_i
self.peaks[i] = peaks_i
# self.connections[i] = connections_i
self.samples += [sample]
if tensor_cache_file is not None:
print('Saving to intermediate tensors to cache file...')
torch.save({
'counts': self.counts,
'peaks': self.peaks,
# 'connections': self.connections,
'topology': self.topology,
'parts': self.parts,
'filenames': self.filenames,
'samples': self.samples
}, tensor_cache_file)
def __len__(self):
return len(self.filenames)
def __getitem__(self, idx):
if self.is_bmp:
filename = os.path.splitext(self.filenames[idx])[0] + '.bmp'
else:
filename = os.path.splitext(self.filenames[idx])[0] + '.jpg'
image = PIL.Image.open(os.path.join(self.images_dir, filename))
im = self.samples[idx]['img']
mask = coco_annotations_to_mask_bbox(self.samples[idx]['anns'], (im['height'], im['width']))
mask = PIL.Image.fromarray(mask)
counts = self.counts[idx]
peaks = self.peaks[idx]
# affine transformation
shiftx = float(torch.rand(1)) * (self.random_translate[1] - self.random_translate[0]) + self.random_translate[0]
shifty = float(torch.rand(1)) * (self.random_translate[1] - self.random_translate[0]) + self.random_translate[0]
scale = float(torch.rand(1)) * (self.random_scale[1] - self.random_scale[0]) + self.random_scale[0]
angle = float(torch.rand(1)) * (self.random_angle[1] - self.random_angle[0]) + self.random_angle[0]
#
if self.keep_aspect_ratio:
ar = float(image.width) / float(image.height)
quad = get_quad(angle, (shiftx, shifty), scale, aspect_ratio=ar)
else:
quad = get_quad(angle, (shiftx, shifty), scale, aspect_ratio=1.0)
image = transform_image(image, (self.image_shape[1], self.image_shape[0]), quad)
mask = transform_image(mask, (self.target_shape[1], self.target_shape[0]), quad)
peaks = transform_peaks(counts, peaks, quad)
counts = counts[None, ...]
peaks = peaks[None, ...]
stdev = float(self.stdev * self.target_shape[0])
cmap = trt_pose.plugins.generate_cmap(counts, peaks,
self.target_shape[0], self.target_shape[1], stdev, int(stdev * 5))
# paf = trt_pose.plugins.generate_paf(
# self.connections[idx][None, ...], self.topology,
# counts, peaks,
# self.target_shape[0], self.target_shape[1], stdev)
image = image.convert('RGB')
if self.transforms is not None:
image = self.transforms(image)
return image, cmap[0], torch.from_numpy(np.array(mask))[None, ...]
def get_part_type_counts(self):
return torch.sum(self.counts, dim=0)
# def get_paf_type_counts(self):
# c = torch.sum(self.connections[:, :, 0, :] >= 0, dim=-1) # sum over parts
# c = torch.sum(c, dim=0) # sum over batch
# return c
class CocoHumanPoseEval(object):
def __init__(self, images_dir, annotation_file, image_shape, keep_aspect_ratio=False):
self.images_dir = images_dir
self.annotation_file = annotation_file
self.image_shape = tuple(image_shape)
self.keep_aspect_ratio = keep_aspect_ratio
self.cocoGt = pycocotools.coco.COCO('annotations/person_keypoints_val2017.json')
self.catIds = self.cocoGt.getCatIds('person')
self.imgIds = self.cocoGt.getImgIds(catIds=self.catIds)
self.transform = torchvision.transforms.Compose([
torchvision.transforms.ToTensor(),
torchvision.transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
])
def evaluate(self, model, topology):
self.parse_objects = ParseObjects(topology, cmap_threshold=0.1, link_threshold=0.1, cmap_window=5, line_integral_samples=7, max_num_parts=100, max_num_objects=100)
results = []
for n, imgId in enumerate(self.imgIds[1:]):
# read image
img = self.cocoGt.imgs[imgId]
img_path = os.path.join(self.images_dir, img['file_name'])
image = PIL.Image.open(img_path).convert('RGB')#.resize(IMAGE_SHAPE)
if self.keep_aspect_ratio:
ar = float(image.width) / float(image.height)
else:
ar = 1.0
quad = get_quad(0.0, (0, 0), 1.0, aspect_ratio=ar)
image = transform_image(image, self.image_shape, quad)
data = self.transform(image).cuda()[None, ...]
cmap, paf = model(data)
cmap, paf = cmap.cpu(), paf.cpu()
# object_counts, objects, peaks, int_peaks = postprocess(cmap, paf, cmap_threshold=0.05, link_threshold=0.01, window=5)
# object_counts, objects, peaks = int(object_counts[0]), objects[0], peaks[0]
object_counts, objects, peaks = self.parse_objects(cmap, paf)
object_counts, objects, peaks = int(object_counts[0]), objects[0], peaks[0]
for i in range(object_counts):
object = objects[i]
score = 0.0
kps = [0]*(17*3)
x_mean = 0
y_mean = 0
cnt = 0
for j in range(17):
k = object[j]
if k >= 0:
peak = peaks[j][k]
if ar > 1.0: # w > h w/h
x = peak[1]
y = (peak[0] - 0.5) * ar + 0.5
else:
x = (peak[1] - 0.5) / ar + 0.5
y = peak[0]
x = round(float(img['width'] * x))
y = round(float(img['height'] * y))
score += 1.0
kps[j * 3 + 0] = x
kps[j * 3 + 1] = y
kps[j * 3 + 2] = 2
x_mean += x
y_mean += y
cnt += 1
ann = {
'image_id': imgId,
'category_id': 1,
'keypoints': kps,
'score': score / 17.0
}
results.append(ann)
if n % 100 == 0:
print('%d / %d' % (n, len(self.imgIds)))
if len(results) == 0:
return
with open('trt_pose_results.json', 'w') as f:
json.dump(results, f)
cocoDt = self.cocoGt.loadRes('trt_pose_results.json')
cocoEval = pycocotools.cocoeval.COCOeval(self.cocoGt, cocoDt, 'keypoints')
cocoEval.params.imgIds = self.imgIds
cocoEval.params.catIds = [1]
cocoEval.evaluate()
cocoEval.accumulate()
cocoEval.summarize()
| 34.381404
| 201
| 0.52111
|
e5ec58cc659539fb11eed47288489b71f1e0b8c2
| 553
|
py
|
Python
|
modelbased-rl/BMPO/static/hoppernt.py
|
TJU-DRL-LAB/ai-optimizer
|
f558cc524c66460913989519779873b371bf78bc
|
[
"MIT"
] | 19
|
2020-09-02T05:58:09.000Z
|
2021-08-23T11:03:00.000Z
|
modelbased-rl/BMPO/static/hoppernt.py
|
TJU-DRL-LAB/ai-optimizer
|
f558cc524c66460913989519779873b371bf78bc
|
[
"MIT"
] | 4
|
2020-07-19T13:57:56.000Z
|
2021-11-10T19:42:58.000Z
|
static/hoppernt.py
|
apexrl/bmpo
|
d065cf195b942c3e534d4c789a0982e0ec09957c
|
[
"MIT"
] | 3
|
2020-09-28T11:21:24.000Z
|
2020-12-28T23:27:25.000Z
|
import numpy as np
class StaticFns:
@staticmethod
def termination_fn(obs, act, next_obs):
assert len(obs.shape) == len(next_obs.shape) == len(act.shape) == 2
height = next_obs[:, 0]
angle = next_obs[:, 1]
        not_done = np.isfinite(next_obs).all(axis=-1) \
                   * (np.abs(next_obs[:, 1:]) < 100).all(axis=-1) \
                   * (height > .7) \
                   * (np.abs(angle) < .2)
        done = ~not_done
        # "nt" (no-termination) variant: episodes are never terminated early
        done = np.zeros_like(done)
done = done[:,None]
return done
| 27.65
| 75
| 0.500904
|
fca9df063f53254a7f545528459b38ec718cd45d
| 1,734
|
py
|
Python
|
Learning PyTorch/WHAT IS TORCH.NN REALLY/Using torch.nn.functional.py
|
LbsIrving/PyTorch
|
314dbe9efc9e0116a7342d4ae3ab168c1c3afa32
|
[
"MIT"
] | null | null | null |
Learning PyTorch/WHAT IS TORCH.NN REALLY/Using torch.nn.functional.py
|
LbsIrving/PyTorch
|
314dbe9efc9e0116a7342d4ae3ab168c1c3afa32
|
[
"MIT"
] | null | null | null |
Learning PyTorch/WHAT IS TORCH.NN REALLY/Using torch.nn.functional.py
|
LbsIrving/PyTorch
|
314dbe9efc9e0116a7342d4ae3ab168c1c3afa32
|
[
"MIT"
] | null | null | null |
from pathlib import Path
import requests
DATA_PATH = Path("data")
PATH = DATA_PATH / "mnist"
PATH.mkdir(parents=True, exist_ok=True)
URL = "https://github.com/pytorch/tutorials/raw/master/_static/"
FILENAME = "mnist.pkl.gz"
if not (PATH / FILENAME).exists():
content = requests.get(URL + FILENAME).content
(PATH / FILENAME).open("wb").write(content)
import pickle
import gzip
with gzip.open((PATH / FILENAME).as_posix(), "rb") as f:
((x_train, y_train), (x_valid, y_valid), _) = pickle.load(f, encoding="latin-1")
import torch
x_train, y_train, x_valid, y_valid = map(
torch.tensor, (x_train, y_train, x_valid, y_valid)
)
n, c = x_train.shape
import math
weights = torch.randn(784, 10) / math.sqrt(784)
weights.requires_grad_()
bias = torch.zeros(10, requires_grad=True)
bs = 64 # batch size
def accuracy(out, yb):
preds = torch.argmax(out, dim=1)
return (preds == yb).float().mean()
import torch.nn.functional as F
loss_func = F.cross_entropy
def model(xb):
return xb @ weights + bias
lr = 0.5 # learning rate
epochs = 2 # how many epochs to train for
for epoch in range(epochs):
for i in range((n - 1) // bs + 1):
# set_trace()
start_i = i * bs
end_i = start_i + bs
xb = x_train[start_i:end_i]
yb = y_train[start_i:end_i]
pred = model(xb)
loss = loss_func(pred, yb)
loss.backward()
with torch.no_grad():
weights -= weights.grad * lr
bias -= bias.grad * lr
weights.grad.zero_()
bias.grad.zero_()
print(loss_func(model(x_train), y_train), accuracy(model(x_train), y_train))
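# A minimal follow-up sketch (not part of the original tutorial excerpt):
# evaluate the trained weights on the held-out validation split without
# tracking gradients.
with torch.no_grad():
    valid_loss = loss_func(model(x_valid), y_valid)
    valid_acc = accuracy(model(x_valid), y_valid)
print(valid_loss, valid_acc)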
| 23.432432
| 85
| 0.607843
|
82f06048e7fe5e645c48b3b0842d4c4acc622068
| 4,604
|
py
|
Python
|
volatility/volatility/plugins/linux/process_hollow.py
|
williamclot/MemoryVisualizer
|
2ff9f30f07519d6578bc36c12f8d08acc9cb4383
|
[
"MIT"
] | 2
|
2018-07-16T13:30:40.000Z
|
2018-07-17T12:02:05.000Z
|
volatility/volatility/plugins/linux/process_hollow.py
|
williamclot/MemoryVisualizer
|
2ff9f30f07519d6578bc36c12f8d08acc9cb4383
|
[
"MIT"
] | null | null | null |
volatility/volatility/plugins/linux/process_hollow.py
|
williamclot/MemoryVisualizer
|
2ff9f30f07519d6578bc36c12f8d08acc9cb4383
|
[
"MIT"
] | null | null | null |
# Volatility
# Copyright (C) 2007-2013 Volatility Foundation
#
# This file is part of Volatility.
#
# Volatility is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License Version 2 as
# published by the Free Software Foundation. You may not use, modify or
# distribute this program under any other version of the GNU General
# Public License.
#
# Volatility is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Volatility. If not, see <http://www.gnu.org/licenses/>.
#
"""
@author: Andrew Case
@license: GNU General Public License 2.0
@contact: atcuno@gmail.com
@organization:
"""
import volatility.obj as obj
import volatility.debug as debug
import volatility.addrspace as addrspace
import volatility.plugins.linux.common as linux_common
import volatility.plugins.linux.pslist as linux_pslist
class linux_process_hollow(linux_pslist.linux_pslist):
"""Checks for signs of process hollowing"""
def __init__(self, config, *args, **kwargs):
linux_pslist.linux_pslist.__init__(self, config, *args, **kwargs)
self._config.add_option('BASE', short_option = 'b', default = None, help = 'The address of the ELF file in memory', action = 'store', type='long' )
self._config.add_option('PATH', short_option = 'P', default = None, help = 'The path of the known good file', action = 'store', type='str')
# TODO:
# make aware of if application or library
# check the class, then do offset + base based on that
def calculate(self):
linux_common.set_plugin_members(self)
if not self._config.BASE:
debug.error("No base address specified.")
if not self._config.PATH:
debug.error("No known-good path specified.")
fd = open(self._config.PATH, "rb")
known_good = fd.read()
fd.close()
bufferas = addrspace.BufferAddressSpace(self._config, data = known_good)
elf_hdr = obj.Object("elf_hdr", offset = 0, vm = bufferas)
tasks = linux_pslist.linux_pslist.calculate(self)
for task in tasks:
proc_as = task.get_process_address_space()
for vma in task.get_proc_maps():
if self._config.BASE != vma.vm_start:
continue
for sym in elf_hdr.symbols():
if sym.st_value == 0 or (sym.st_info & 0xf) != 2:
continue
symname = elf_hdr.symbol_name(sym)
sym_offset = sym.st_value
# in the same vma
if vma.vm_start < sym.st_value < vma.vm_end:
vm_start = vma.vm_start
sym_offset = sym_offset - vm_start
full_address = sym.st_value
else:
next_vma = vma.vm_next
if next_vma.vm_start < sym.st_value < next_vma.vm_end:
vm_start = next_vma.vm_start
sym_offset = sym.st_value - vm_start
full_address = sym.st_value
else:
full_address = vma.vm_start + sym.st_value
mem_buffer = proc_as.read(vm_start + sym_offset, sym.st_size)
if sym.st_value > vma.vm_start:
disk_off = sym.st_value - vm_start
else:
disk_off = sym.st_value
disk_buffer = bufferas.read(disk_off, sym.st_size)
# bad
if mem_buffer != None and disk_buffer != mem_buffer:
yield task, symname, full_address
elif mem_buffer == None:
print "Function %s paged out in memory" % symname
def render_text(self, outfd, data):
self.table_header(outfd, [("Task", "16"),
("PID", "6"),
("Symbol Name", "32"),
("Symbol Address", "[addrpad]"),
])
for (task, symname, address) in data:
self.table_row(outfd, str(task.comm), task.pid, symname, address)
| 37.129032
| 155
| 0.568636
|
b434c5f64f1c3b41161fd432907d9d84d416e328
| 51,042
|
py
|
Python
|
frappe/__init__.py
|
DigiThinkIT/frappe
|
de30abb5e32051de8b87bcb390e18b778a2032ad
|
[
"MIT"
] | 3
|
2021-03-01T08:52:59.000Z
|
2021-12-15T10:17:59.000Z
|
frappe/__init__.py
|
DigiThinkIT/frappe
|
de30abb5e32051de8b87bcb390e18b778a2032ad
|
[
"MIT"
] | 212
|
2017-08-16T13:03:18.000Z
|
2020-10-06T12:26:21.000Z
|
frappe/__init__.py
|
Bloomstack/frappe
|
de30abb5e32051de8b87bcb390e18b778a2032ad
|
[
"MIT"
] | 14
|
2020-11-04T11:22:44.000Z
|
2022-02-01T20:59:37.000Z
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
"""
globals attached to frappe module
+ some utility functions that should probably be moved
"""
from __future__ import unicode_literals, print_function
from six import iteritems, binary_type, text_type, string_types
from werkzeug.local import Local, release_local
import os, sys, importlib, inspect, json
from past.builtins import cmp
from faker import Faker
# public
from .exceptions import *
from .utils.jinja import (get_jenv, get_template, render_template, get_email_from_template, get_jloader)
# Harmless for Python 3
# For Python 2 set default encoding to utf-8
if sys.version[0] == '2':
reload(sys)
sys.setdefaultencoding("utf-8")
__frappe_version__ = '12.8.1'
__version__ = '2.8.0'
__title__ = "Frappe Framework"
local = Local()
class _dict(dict):
"""dict like object that exposes keys as attributes"""
def __getattr__(self, key):
ret = self.get(key)
if not ret and key.startswith("__"):
raise AttributeError()
return ret
def __setattr__(self, key, value):
self[key] = value
def __getstate__(self):
return self
def __setstate__(self, d):
self.update(d)
def update(self, d):
"""update and return self -- the missing dict feature in python"""
super(_dict, self).update(d)
return self
def copy(self):
return _dict(dict(self).copy())
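# Illustration (hypothetical values, not part of frappe itself) of how _dict
# exposes keys as attributes:
#
#     d = _dict({"doctype": "ToDo"})
#     d.doctype            # "ToDo"
#     d.owner = "Administrator"
#     d["owner"]           # "Administrator"
#     d.missing            # None instead of raising AttributeError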
def _(msg, lang=None):
"""Returns translated string in current lang, if exists."""
from frappe.translate import get_full_dict
from frappe.utils import strip_html_tags, is_html
if not hasattr(local, 'lang'):
local.lang = lang or 'en'
if not lang:
lang = local.lang
non_translated_msg = msg
if is_html(msg):
msg = strip_html_tags(msg)
# msg should always be unicode
msg = as_unicode(msg).strip()
# return lang_full_dict according to lang passed parameter
return get_full_dict(lang).get(msg) or non_translated_msg
def as_unicode(text, encoding='utf-8'):
'''Convert to unicode if required'''
if isinstance(text, text_type):
return text
elif text==None:
return ''
elif isinstance(text, binary_type):
return text_type(text, encoding)
else:
return text_type(text)
def get_lang_dict(fortype, name=None):
"""Returns the translated language dict for the given type and name.
:param fortype: must be one of `doctype`, `page`, `report`, `include`, `jsfile`, `boot`
:param name: name of the document for which assets are to be returned."""
from frappe.translate import get_dict
return get_dict(fortype, name)
def set_user_lang(user, user_language=None):
"""Guess and set user language for the session. `frappe.local.lang`"""
from frappe.translate import get_user_lang
local.lang = get_user_lang(user)
# local-globals
db = local("db")
conf = local("conf")
form = form_dict = local("form_dict")
request = local("request")
response = local("response")
session = local("session")
user = local("user")
flags = local("flags")
error_log = local("error_log")
debug_log = local("debug_log")
message_log = local("message_log")
lang = local("lang")
def init(site, sites_path=None, new_site=False):
"""Initialize frappe for the current site. Reset thread locals `frappe.local`"""
if getattr(local, "initialised", None):
return
if not sites_path:
sites_path = '.'
local.error_log = []
local.message_log = []
local.debug_log = []
local.realtime_log = []
local.flags = _dict({
"ran_schedulers": [],
"currently_saving": [],
"redirect_location": "",
"in_install_db": False,
"in_install_app": False,
"in_import": False,
"in_test": False,
"mute_messages": False,
"ignore_links": False,
"mute_emails": False,
"has_dataurl": False,
"new_site": new_site
})
local.rollback_observers = []
local.test_objects = {}
local.site = site
local.sites_path = sites_path
local.site_path = os.path.join(sites_path, site)
local.request_ip = None
local.response = _dict({"docs":[]})
local.task_id = None
local.conf = _dict(get_site_config())
local.lang = local.conf.lang or "en"
local.lang_full_dict = None
local.module_app = None
local.app_modules = None
local.system_settings = _dict()
local.user = None
local.user_perms = None
local.session = None
local.role_permissions = {}
local.valid_columns = {}
local.new_doc_templates = {}
local.link_count = {}
local.jenv = None
	local.jloader = None
local.cache = {}
local.document_cache = {}
local.meta_cache = {}
local.form_dict = _dict()
local.session = _dict()
setup_module_map()
local.initialised = True
def connect(site=None, db_name=None, set_admin_as_user=True):
"""Connect to site database instance.
:param site: If site is given, calls `frappe.init`.
:param db_name: Optional. Will use from `site_config.json`.
:param set_admin_as_user: Set Administrator as current user.
"""
from frappe.database import get_db
if site:
init(site)
local.db = get_db(user=db_name or local.conf.db_name)
if set_admin_as_user:
set_user("Administrator")
def connect_replica():
from frappe.database import get_db
user = local.conf.db_name
password = local.conf.db_password
port = local.conf.replica_db_port
if local.conf.different_credentials_for_replica:
user = local.conf.replica_db_name
password = local.conf.replica_db_password
local.replica_db = get_db(host=local.conf.replica_host, user=user, password=password, port=port)
# swap db connections
local.primary_db = local.db
local.db = local.replica_db
def get_site_config(sites_path=None, site_path=None):
"""Returns `site_config.json` combined with `sites/common_site_config.json`.
`site_config` is a set of site wide settings like database name, password, email etc."""
config = {}
sites_path = sites_path or getattr(local, "sites_path", None)
site_path = site_path or getattr(local, "site_path", None)
if sites_path:
common_site_config = os.path.join(sites_path, "common_site_config.json")
if os.path.exists(common_site_config):
config.update(get_file_json(common_site_config))
if site_path:
site_config = os.path.join(site_path, "site_config.json")
if os.path.exists(site_config):
config.update(get_file_json(site_config))
elif local.site and not local.flags.new_site:
print("Site {0} does not exist".format(local.site))
sys.exit(1)
return _dict(config)
def get_conf(site=None):
if hasattr(local, 'conf'):
return local.conf
else:
# if no site, get from common_site_config.json
with init_site(site):
return local.conf
class init_site:
def __init__(self, site=None):
'''If site==None, initialize it for empty site ('') to load common_site_config.json'''
self.site = site or ''
def __enter__(self):
init(self.site)
return local
def __exit__(self, type, value, traceback):
destroy()
def destroy():
"""Closes connection and releases werkzeug local."""
if db:
db.close()
release_local(local)
# memcache
redis_server = None
def cache():
"""Returns memcache connection."""
global redis_server
if not redis_server:
from frappe.utils.redis_wrapper import RedisWrapper
redis_server = RedisWrapper.from_url(conf.get('redis_cache')
or "redis://localhost:11311")
return redis_server
def get_traceback():
"""Returns error traceback."""
from frappe.utils import get_traceback
return get_traceback()
def errprint(msg):
"""Log error. This is sent back as `exc` in response.
:param msg: Message."""
msg = as_unicode(msg)
if not request or (not "cmd" in local.form_dict) or conf.developer_mode:
print(msg)
error_log.append({"exc": msg})
def log(msg):
"""Add to `debug_log`.
:param msg: Message."""
if not request:
if conf.get("logging") or False:
print(repr(msg))
debug_log.append(as_unicode(msg))
def msgprint(msg, title=None, raise_exception=0, as_table=False, indicator=None, alert=False, primary_action=None, is_minimizable=None):
"""Print a message to the user (via HTTP response).
Messages are sent in the `__server_messages` property in the
response JSON and shown in a pop-up / modal.
:param msg: Message.
:param title: [optional] Message title.
:param raise_exception: [optional] Raise given exception and show message.
:param as_table: [optional] If `msg` is a list of lists, render as HTML table.
:param primary_action: [optional] Bind a primary server/client side action.
"""
from frappe.utils import encode
msg = safe_decode(msg)
out = _dict(message=msg)
def _raise_exception():
if raise_exception:
if flags.rollback_on_exception:
db.rollback()
import inspect
if inspect.isclass(raise_exception) and issubclass(raise_exception, Exception):
raise raise_exception(msg)
else:
raise ValidationError(msg)
if flags.mute_messages:
_raise_exception()
return
if as_table and type(msg) in (list, tuple):
out.msg = '<table border="1px" style="border-collapse: collapse" cellpadding="2px">' + ''.join(['<tr>'+''.join(['<td>%s</td>' % c for c in r])+'</tr>' for r in msg]) + '</table>'
if flags.print_messages and out.msg:
print("Message: " + repr(out.msg).encode("utf-8"))
if title:
out.title = title
if not indicator and raise_exception:
indicator = 'red'
if indicator:
out.indicator = indicator
if is_minimizable:
out.is_minimizable = is_minimizable
if alert:
out.alert = 1
if raise_exception:
out.raise_exception = 1
if primary_action:
out.primary_action = primary_action
message_log.append(json.dumps(out))
if raise_exception and hasattr(raise_exception, '__name__'):
local.response['exc_type'] = raise_exception.__name__
_raise_exception()
def clear_messages():
local.message_log = []
def get_message_log():
log = []
for msg_out in local.message_log:
log.append(json.loads(msg_out))
return log
def clear_last_message():
if len(local.message_log) > 0:
local.message_log = local.message_log[:-1]
def throw(msg, exc=ValidationError, title=None, is_minimizable=None):
"""Throw execption and show message (`msgprint`).
:param msg: Message.
:param exc: Exception class. Default `frappe.ValidationError`"""
msgprint(msg, raise_exception=exc, title=title, indicator='red', is_minimizable=is_minimizable)
def emit_js(js, user=False, **kwargs):
from frappe.realtime import publish_realtime
if user == False:
user = session.user
publish_realtime('eval_js', js, user=user, **kwargs)
def create_folder(path, with_init=False):
"""Create a folder in the given path and add an `__init__.py` file (optional).
:param path: Folder path.
:param with_init: Create `__init__.py` in the new folder."""
from frappe.utils import touch_file
if not os.path.exists(path):
os.makedirs(path)
if with_init:
touch_file(os.path.join(path, "__init__.py"))
def set_user(username):
"""Set current user.
:param username: **User** name to set as current user."""
local.session.user = username
local.session.sid = username
local.cache = {}
local.form_dict = _dict()
local.jenv = None
local.session.data = _dict()
local.role_permissions = {}
local.new_doc_templates = {}
local.user_perms = None
def get_user():
from frappe.utils.user import UserPermissions
if not local.user_perms:
local.user_perms = UserPermissions(local.session.user)
return local.user_perms
def get_roles(username=None):
"""Returns roles of current user."""
if not local.session:
return ["Guest"]
if username:
import frappe.permissions
return frappe.permissions.get_roles(username)
else:
return get_user().get_roles()
def get_request_header(key, default=None):
"""Return HTTP request header.
:param key: HTTP header key.
:param default: Default value."""
return request.headers.get(key, default)
def sendmail(recipients=[], sender="", subject="No Subject", message="No Message",
as_markdown=False, delayed=True, reference_doctype=None, reference_name=None,
unsubscribe_method=None, unsubscribe_params=None, unsubscribe_message=None,
attachments=None, content=None, doctype=None, name=None, reply_to=None,
cc=[], bcc=[], message_id=None, in_reply_to=None, send_after=None, expose_recipients=None,
send_priority=1, communication=None, retry=1, now=None, read_receipt=None, is_notification=False,
inline_images=None, template=None, args=None, header=None, print_letterhead=False):
"""Send email using user's default **Email Account** or global default **Email Account**.
:param recipients: List of recipients.
:param sender: Email sender. Default is current user or default outgoing account.
:param subject: Email Subject.
:param message: (or `content`) Email Content.
:param as_markdown: Convert content markdown to HTML.
:param delayed: Send via scheduled email sender **Email Queue**. Don't send immediately. Default is true
:param send_priority: Priority for Email Queue, default 1.
:param reference_doctype: (or `doctype`) Append as communication to this DocType.
:param reference_name: (or `name`) Append as communication to this document name.
:param unsubscribe_method: Unsubscribe url with options email, doctype, name. e.g. `/api/method/unsubscribe`
	:param unsubscribe_params: Unsubscribe parameters to be loaded on the unsubscribe_method [optional] (dict).
:param attachments: List of attachments.
:param reply_to: Reply-To Email Address.
:param message_id: Used for threading. If a reply is received to this email, Message-Id is sent back as In-Reply-To in received email.
:param in_reply_to: Used to send the Message-Id of a received email back as In-Reply-To.
:param send_after: Send after the given datetime.
:param expose_recipients: Display all recipients in the footer message - "This email was sent to"
:param communication: Communication link to be set in Email Queue record
:param inline_images: List of inline images as {"filename", "filecontent"}. All src properties will be replaced with random Content-Id
:param template: Name of html template from templates/emails folder
:param args: Arguments for rendering the template
:param header: Append header in email
"""
text_content = None
if template:
message, text_content = get_email_from_template(template, args)
message = content or message
if as_markdown:
message = frappe.utils.md_to_html(message)
if not delayed:
now = True
from frappe.email import queue
queue.send(recipients=recipients, sender=sender,
subject=subject, message=message, text_content=text_content,
reference_doctype = doctype or reference_doctype, reference_name = name or reference_name,
unsubscribe_method=unsubscribe_method, unsubscribe_params=unsubscribe_params, unsubscribe_message=unsubscribe_message,
attachments=attachments, reply_to=reply_to, cc=cc, bcc=bcc, message_id=message_id, in_reply_to=in_reply_to,
send_after=send_after, expose_recipients=expose_recipients, send_priority=send_priority,
communication=communication, now=now, read_receipt=read_receipt, is_notification=is_notification,
inline_images=inline_images, header=header, print_letterhead=print_letterhead)
whitelisted = []
guest_methods = []
xss_safe_methods = []
def whitelist(allow_guest=False, xss_safe=False):
"""
Decorator for whitelisting a function and making it accessible via HTTP.
Standard request will be `/api/method/[path.to.method]`
:param allow_guest: Allow non logged-in user to access this method.
Use as:
@frappe.whitelist()
def myfunc(param1, param2):
pass
"""
def innerfn(fn):
global whitelisted, guest_methods, xss_safe_methods
whitelisted.append(fn)
if allow_guest:
guest_methods.append(fn)
if xss_safe:
xss_safe_methods.append(fn)
return fn
return innerfn
def read_only():
def innfn(fn):
def wrapper_fn(*args, **kwargs):
if conf.read_from_replica:
connect_replica()
try:
retval = fn(*args, **get_newargs(fn, kwargs))
except:
raise
finally:
if local and hasattr(local, 'primary_db'):
local.db.close()
local.db = local.primary_db
return retval
return wrapper_fn
return innfn
def only_for(roles, message=False):
"""Raise `frappe.PermissionError` if the user does not have any of the given **Roles**.
:param roles: List of roles to check."""
if local.flags.in_test:
return
if not isinstance(roles, (tuple, list)):
roles = (roles,)
roles = set(roles)
myroles = set(get_roles())
if not roles.intersection(myroles):
if message:
msgprint(_('Only for {}'.format(', '.join(roles))))
raise PermissionError
def get_domain_data(module):
try:
domain_data = get_hooks('domains')
if module in domain_data:
return _dict(get_attr(get_hooks('domains')[module][0] + '.data'))
else:
return _dict()
except ImportError:
if local.flags.in_test:
return _dict()
else:
raise
def clear_cache(user=None, doctype=None):
"""Clear **User**, **DocType** or global cache.
:param user: If user is given, only user cache is cleared.
:param doctype: If doctype is given, only DocType cache is cleared."""
import frappe.cache_manager
if doctype:
frappe.cache_manager.clear_doctype_cache(doctype)
reset_metadata_version()
elif user:
frappe.cache_manager.clear_user_cache(user)
else: # everything
from frappe import translate
frappe.cache_manager.clear_user_cache()
frappe.cache_manager.clear_domain_cache()
translate.clear_cache()
reset_metadata_version()
local.cache = {}
local.new_doc_templates = {}
for fn in get_hooks("clear_cache"):
get_attr(fn)()
local.role_permissions = {}
def has_permission(doctype=None, ptype="read", doc=None, user=None, verbose=False, throw=False):
"""Raises `frappe.PermissionError` if not permitted.
:param doctype: DocType for which permission is to be check.
:param ptype: Permission type (`read`, `write`, `create`, `submit`, `cancel`, `amend`). Default: `read`.
:param doc: [optional] Checks User permissions for given doc.
:param user: [optional] Check for given user. Default: current user."""
if not doctype and doc:
doctype = doc.doctype
import frappe.permissions
out = frappe.permissions.has_permission(doctype, ptype, doc=doc, verbose=verbose, user=user)
if throw and not out:
if doc:
frappe.throw(_("No permission for {0}").format(doc.doctype + " " + doc.name))
else:
frappe.throw(_("No permission for {0}").format(doctype))
return out
def has_website_permission(doc=None, ptype='read', user=None, verbose=False, doctype=None):
"""Raises `frappe.PermissionError` if not permitted.
:param doctype: DocType for which permission is to be check.
:param ptype: Permission type (`read`, `write`, `create`, `submit`, `cancel`, `amend`). Default: `read`.
:param doc: Checks User permissions for given doc.
:param user: [optional] Check for given user. Default: current user."""
if not user:
user = session.user
if doc:
if isinstance(doc, string_types):
doc = get_doc(doctype, doc)
doctype = doc.doctype
if doc.flags.ignore_permissions:
return True
# check permission in controller
if hasattr(doc, 'has_website_permission'):
return doc.has_website_permission(ptype, user, verbose=verbose)
hooks = (get_hooks("has_website_permission") or {}).get(doctype, [])
if hooks:
for method in hooks:
result = call(method, doc=doc, ptype=ptype, user=user, verbose=verbose)
# if even a single permission check is Falsy
if not result:
return False
# else it is Truthy
return True
else:
return False
def is_table(doctype):
"""Returns True if `istable` property (indicating child Table) is set for given DocType."""
def get_tables():
return db.sql_list("select name from tabDocType where istable=1")
tables = cache().get_value("is_table", get_tables)
return doctype in tables
def get_precision(doctype, fieldname, currency=None, doc=None):
"""Get precision for a given field"""
from frappe.model.meta import get_field_precision
return get_field_precision(get_meta(doctype).get_field(fieldname), doc, currency)
def generate_hash(txt=None, length=None):
"""Generates random hash for given text + current timestamp + random string."""
import hashlib, time
from .utils import random_string
digest = hashlib.sha224(((txt or "") + repr(time.time()) + repr(random_string(8))).encode()).hexdigest()
if length:
digest = digest[:length]
return digest
def reset_metadata_version():
"""Reset `metadata_version` (Client (Javascript) build ID) hash."""
v = generate_hash()
cache().set_value("metadata_version", v)
return v
def new_doc(doctype, parent_doc=None, parentfield=None, as_dict=False):
"""Returns a new document of the given DocType with defaults set.
:param doctype: DocType of the new document.
:param parent_doc: [optional] add to parent document.
:param parentfield: [optional] add against this `parentfield`."""
from frappe.model.create_new import get_new_doc
return get_new_doc(doctype, parent_doc, parentfield, as_dict=as_dict)
def set_value(doctype, docname, fieldname, value=None):
"""Set document value. Calls `frappe.client.set_value`"""
import frappe.client
return frappe.client.set_value(doctype, docname, fieldname, value)
def get_cached_doc(*args, **kwargs):
if args and len(args) > 1 and isinstance(args[1], text_type):
key = get_document_cache_key(args[0], args[1])
# local cache
doc = local.document_cache.get(key)
if doc:
return doc
# redis cache
doc = cache().hget('document_cache', key)
if doc:
doc = get_doc(doc)
local.document_cache[key] = doc
return doc
# database
doc = get_doc(*args, **kwargs)
return doc
def get_document_cache_key(doctype, name):
return '{0}::{1}'.format(doctype, name)
def clear_document_cache(doctype, name):
cache().hdel("last_modified", doctype)
key = get_document_cache_key(doctype, name)
if key in local.document_cache:
del local.document_cache[key]
cache().hdel('document_cache', key)
def get_cached_value(doctype, name, fieldname, as_dict=False):
doc = get_cached_doc(doctype, name)
if isinstance(fieldname, string_types):
if as_dict:
throw('Cannot make dict for single fieldname')
return doc.get(fieldname)
values = [doc.get(f) for f in fieldname]
if as_dict:
return _dict(zip(fieldname, values))
return values
def get_doc(*args, **kwargs):
"""Return a `frappe.model.document.Document` object of the given type and name.
:param arg1: DocType name as string **or** document JSON.
:param arg2: [optional] Document name as string.
Examples:
# insert a new document
todo = frappe.get_doc({"doctype":"ToDo", "description": "test"})
		todo.insert()
# open an existing document
todo = frappe.get_doc("ToDo", "TD0001")
"""
import frappe.model.document
doc = frappe.model.document.get_doc(*args, **kwargs)
# set in cache
if args and len(args) > 1:
key = get_document_cache_key(args[0], args[1])
local.document_cache[key] = doc
cache().hset('document_cache', key, doc.as_dict())
return doc
def get_last_doc(doctype):
"""Get last created document of this type."""
d = get_all(doctype, ["name"], order_by="creation desc", limit_page_length=1)
if d:
return get_doc(doctype, d[0].name)
else:
raise DoesNotExistError
def get_single(doctype):
"""Return a `frappe.model.document.Document` object of the given Single doctype."""
return get_doc(doctype, doctype)
def get_meta(doctype, cached=True):
"""Get `frappe.model.meta.Meta` instance of given doctype name."""
import frappe.model.meta
return frappe.model.meta.get_meta(doctype, cached=cached)
def get_meta_module(doctype):
import frappe.modules
return frappe.modules.load_doctype_module(doctype)
def delete_doc(doctype=None, name=None, force=0, ignore_doctypes=None, for_reload=False,
ignore_permissions=False, flags=None, ignore_on_trash=False, ignore_missing=True):
"""Delete a document. Calls `frappe.model.delete_doc.delete_doc`.
:param doctype: DocType of document to be delete.
:param name: Name of document to be delete.
:param force: Allow even if document is linked. Warning: This may lead to data integrity errors.
:param ignore_doctypes: Ignore if child table is one of these.
:param for_reload: Call `before_reload` trigger before deleting.
:param ignore_permissions: Ignore user permissions."""
import frappe.model.delete_doc
frappe.model.delete_doc.delete_doc(doctype, name, force, ignore_doctypes, for_reload,
ignore_permissions, flags, ignore_on_trash, ignore_missing)
def delete_doc_if_exists(doctype, name, force=0):
"""Delete document if exists."""
if db.exists(doctype, name):
delete_doc(doctype, name, force=force)
def reload_doctype(doctype, force=False, reset_permissions=False):
"""Reload DocType from model (`[module]/[doctype]/[name]/[name].json`) files."""
reload_doc(scrub(db.get_value("DocType", doctype, "module")), "doctype", scrub(doctype),
force=force, reset_permissions=reset_permissions)
def reload_doc(module, dt=None, dn=None, force=False, reset_permissions=False):
"""Reload Document from model (`[module]/[doctype]/[name]/[name].json`) files.
:param module: Module name.
:param dt: DocType name.
:param dn: Document name.
:param force: Reload even if `modified` timestamp matches.
"""
import frappe.modules
return frappe.modules.reload_doc(module, dt, dn, force=force, reset_permissions=reset_permissions)
def rename_doc(*args, **kwargs):
"""Rename a document. Calls `frappe.model.rename_doc.rename_doc`"""
from frappe.model.rename_doc import rename_doc
return rename_doc(*args, **kwargs)
def get_module(modulename):
"""Returns a module object for given Python module name using `importlib.import_module`."""
return importlib.import_module(modulename)
def scrub(txt):
"""Returns sluggified string. e.g. `Sales Order` becomes `sales_order`."""
return txt.replace(' ','_').replace('-', '_').lower()
def unscrub(txt):
"""Returns titlified string. e.g. `sales_order` becomes `Sales Order`."""
return txt.replace('_',' ').replace('-', ' ').title()
def get_module_path(module, *joins):
"""Get the path of the given module name.
:param module: Module name.
:param *joins: Join additional path elements using `os.path.join`."""
module = scrub(module)
return get_pymodule_path(local.module_app[module] + "." + module, *joins)
def get_app_path(app_name, *joins):
"""Return path of given app.
:param app: App name.
:param *joins: Join additional path elements using `os.path.join`."""
return get_pymodule_path(app_name, *joins)
def get_site_path(*joins):
"""Return path of current site.
:param *joins: Join additional path elements using `os.path.join`."""
return os.path.join(local.site_path, *joins)
def get_pymodule_path(modulename, *joins):
"""Return path of given Python module name.
:param modulename: Python module name.
:param *joins: Join additional path elements using `os.path.join`."""
if not "public" in joins:
joins = [scrub(part) for part in joins]
return os.path.join(os.path.dirname(get_module(scrub(modulename)).__file__), *joins)
def get_module_list(app_name):
"""Get list of modules for given all via `app/modules.txt`."""
return get_file_items(os.path.join(os.path.dirname(get_module(app_name).__file__), "modules.txt"))
def get_all_apps(with_internal_apps=True, sites_path=None):
"""Get list of all apps via `sites/apps.txt`."""
if not sites_path:
sites_path = local.sites_path
apps = get_file_items(os.path.join(sites_path, "apps.txt"), raise_not_found=True)
if with_internal_apps:
for app in get_file_items(os.path.join(local.site_path, "apps.txt")):
if app not in apps:
apps.append(app)
if "frappe" in apps:
apps.remove("frappe")
apps.insert(0, 'frappe')
return apps
def get_installed_apps(sort=False, frappe_last=False):
"""Get list of installed apps in current site."""
if getattr(flags, "in_install_db", True):
return []
if not db:
connect()
installed = json.loads(db.get_global("installed_apps") or "[]")
if sort:
installed = [app for app in get_all_apps(True) if app in installed]
if frappe_last:
if 'frappe' in installed:
installed.remove('frappe')
installed.append('frappe')
return installed
def get_doc_hooks():
'''Returns hooked methods for given doc. It will expand the dict tuple if required.'''
if not hasattr(local, 'doc_events_hooks'):
hooks = get_hooks('doc_events', {})
out = {}
for key, value in iteritems(hooks):
if isinstance(key, tuple):
for doctype in key:
append_hook(out, doctype, value)
else:
append_hook(out, key, value)
local.doc_events_hooks = out
return local.doc_events_hooks
def get_hooks(hook=None, default=None, app_name=None):
"""Get hooks via `app/hooks.py`
:param hook: Name of the hook. Will gather all hooks for this name and return as a list.
:param default: Default if no hook found.
:param app_name: Filter by app."""
def load_app_hooks(app_name=None):
hooks = {}
for app in [app_name] if app_name else get_installed_apps(sort=True):
app = "frappe" if app=="webnotes" else app
try:
app_hooks = get_module(app + ".hooks")
except ImportError:
if local.flags.in_install_app:
# if app is not installed while restoring
# ignore it
pass
print('Could not find app "{0}"'.format(app_name))
if not request:
sys.exit(1)
raise
for key in dir(app_hooks):
if not key.startswith("_"):
append_hook(hooks, key, getattr(app_hooks, key))
return hooks
no_cache = conf.developer_mode or False
if app_name:
hooks = _dict(load_app_hooks(app_name))
else:
if no_cache:
hooks = _dict(load_app_hooks())
else:
hooks = _dict(cache().get_value("app_hooks", load_app_hooks))
if hook:
return hooks.get(hook) or (default if default is not None else [])
else:
return hooks
def append_hook(target, key, value):
	'''Appends a hook to the target dict.
	If the hook key does not already exist, it is created.
If the hook value is a dict, like doc_events, it will
listify the values against the key.
'''
if isinstance(value, dict):
# dict? make a list of values against each key
target.setdefault(key, {})
for inkey in value:
append_hook(target[key], inkey, value[inkey])
else:
# make a list
target.setdefault(key, [])
if not isinstance(value, list):
value = [value]
target[key].extend(value)
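# Minimal usage sketch (hypothetical helper, hook names are illustrative):
# shows how append_hook listifies scalar values and merges dict values
# (like doc_events) recursively. Runs standalone.
def _example_append_hook():
	hooks = {}
	append_hook(hooks, "app_include_js", "/assets/app.js")
	append_hook(hooks, "app_include_js", ["/assets/extra.js"])
	append_hook(hooks, "doc_events", {"ToDo": {"on_update": "app.events.todo_updated"}})
	# hooks is now:
	# {"app_include_js": ["/assets/app.js", "/assets/extra.js"],
	#  "doc_events": {"ToDo": {"on_update": ["app.events.todo_updated"]}}}
	return hooks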
def setup_module_map():
"""Rebuild map of all modules (internal)."""
_cache = cache()
if conf.db_name:
local.app_modules = _cache.get_value("app_modules")
local.module_app = _cache.get_value("module_app")
if not (local.app_modules and local.module_app):
local.module_app, local.app_modules = {}, {}
for app in get_all_apps(True):
if app=="webnotes": app="frappe"
local.app_modules.setdefault(app, [])
for module in get_module_list(app):
module = scrub(module)
local.module_app[module] = app
local.app_modules[app].append(module)
if conf.db_name:
_cache.set_value("app_modules", local.app_modules)
_cache.set_value("module_app", local.module_app)
def get_file_items(path, raise_not_found=False, ignore_empty_lines=True):
"""Returns items from text file as a list. Ignores empty lines."""
import frappe.utils
content = read_file(path, raise_not_found=raise_not_found)
if content:
content = frappe.utils.strip(content)
return [p.strip() for p in content.splitlines() if (not ignore_empty_lines) or (p.strip() and not p.startswith("#"))]
else:
return []
def get_file_json(path):
"""Read a file and return parsed JSON object."""
with open(path, 'r') as f:
return json.load(f)
def read_file(path, raise_not_found=False):
"""Open a file and return its content as Unicode."""
if isinstance(path, text_type):
path = path.encode("utf-8")
if os.path.exists(path):
with open(path, "r") as f:
return as_unicode(f.read())
elif raise_not_found:
raise IOError("{} Not Found".format(path))
else:
return None
def get_attr(method_string):
"""Get python method object from its name."""
app_name = method_string.split(".")[0]
if not local.flags.in_install and app_name not in get_installed_apps():
throw(_("App {0} is not installed").format(app_name), AppNotInstalledError)
modulename = '.'.join(method_string.split('.')[:-1])
methodname = method_string.split('.')[-1]
return getattr(get_module(modulename), methodname)
def call(fn, *args, **kwargs):
"""Call a function and match arguments."""
if isinstance(fn, string_types):
fn = get_attr(fn)
newargs = get_newargs(fn, kwargs)
return fn(*args, **newargs)
def get_newargs(fn, kwargs):
if hasattr(fn, 'fnargs'):
fnargs = fn.fnargs
else:
try:
fnargs, varargs, varkw, defaults = inspect.getargspec(fn)
except ValueError:
fnargs = inspect.getfullargspec(fn).args
varargs = inspect.getfullargspec(fn).varargs
varkw = inspect.getfullargspec(fn).varkw
defaults = inspect.getfullargspec(fn).defaults
newargs = {}
for a in kwargs:
if (a in fnargs) or varkw:
newargs[a] = kwargs.get(a)
if "flags" in newargs:
del newargs["flags"]
return newargs
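# Minimal usage sketch (hypothetical helper): call/get_newargs match keyword
# arguments against the target function's signature, so unknown keys are
# silently dropped instead of raising TypeError.
def _example_call_matching():
	def greet(name, salutation="Hello"):
		return "{0}, {1}!".format(salutation, name)
	# "ignored" is not in greet's signature and greet has no **kwargs,
	# so get_newargs drops it; this returns "Hello, World!".
	return call(greet, name="World", ignored=1)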
def make_property_setter(args, ignore_validate=False, validate_fields_for_doctype=True):
"""Create a new **Property Setter** (for overriding DocType and DocField properties).
If doctype is not specified, it will create a property setter for all fields with the
given fieldname"""
args = _dict(args)
if not args.doctype_or_field:
args.doctype_or_field = 'DocField'
if not args.property_type:
args.property_type = db.get_value('DocField',
{'parent': 'DocField', 'fieldname': args.property}, 'fieldtype') or 'Data'
if not args.doctype:
doctype_list = db.sql_list('select distinct parent from tabDocField where fieldname=%s', args.fieldname)
else:
doctype_list = [args.doctype]
for doctype in doctype_list:
if not args.property_type:
args.property_type = db.get_value('DocField',
{'parent': doctype, 'fieldname': args.fieldname}, 'fieldtype') or 'Data'
ps = get_doc({
'doctype': "Property Setter",
'doctype_or_field': args.doctype_or_field,
'doc_type': doctype,
'field_name': args.fieldname,
'property': args.property,
'value': args.value,
'property_type': args.property_type or "Data",
'__islocal': 1
})
ps.flags.ignore_validate = ignore_validate
ps.flags.validate_fields_for_doctype = validate_fields_for_doctype
ps.validate_fieldtype_change()
ps.insert()
def import_doc(path, ignore_links=False, ignore_insert=False, insert=False):
"""Import a file using Data Import."""
from frappe.core.doctype.data_import.data_import import import_doc
import_doc(path, ignore_links=ignore_links, ignore_insert=ignore_insert, insert=insert)
def copy_doc(doc, ignore_no_copy=True):
""" No_copy fields also get copied."""
import copy
def remove_no_copy_fields(d):
for df in d.meta.get("fields", {"no_copy": 1}):
if hasattr(d, df.fieldname):
d.set(df.fieldname, None)
fields_to_clear = ['name', 'owner', 'creation', 'modified', 'modified_by']
if not local.flags.in_test:
fields_to_clear.append("docstatus")
if not isinstance(doc, dict):
d = doc.as_dict()
else:
d = doc
newdoc = get_doc(copy.deepcopy(d))
newdoc.set("__islocal", 1)
for fieldname in (fields_to_clear + ['amended_from', 'amendment_date']):
newdoc.set(fieldname, None)
if not ignore_no_copy:
remove_no_copy_fields(newdoc)
for i, d in enumerate(newdoc.get_all_children()):
d.set("__islocal", 1)
for fieldname in fields_to_clear:
d.set(fieldname, None)
if not ignore_no_copy:
remove_no_copy_fields(d)
return newdoc
def compare(val1, condition, val2):
"""Compare two values using `frappe.utils.compare`
`condition` could be:
- "^"
- "in"
- "not in"
- "="
- "!="
- ">"
- "<"
- ">="
- "<="
- "not None"
- "None"
"""
import frappe.utils
return frappe.utils.compare(val1, condition, val2)
def respond_as_web_page(title, html, success=None, http_status_code=None,
context=None, indicator_color=None, primary_action='/', primary_label = None, fullpage=False,
width=None, template='message'):
"""Send response as a web page with a message rather than JSON. Used to show permission errors etc.
:param title: Page title and heading.
:param html: Message (HTML body) to be shown.
:param success: Alert message.
:param http_status_code: HTTP status code
:param context: web template context
:param indicator_color: color of indicator in title
:param primary_action: route on primary button (default is `/`)
:param primary_label: label on primary button (default is "Home")
:param fullpage: hide header / footer
:param width: Width of message in pixels
:param template: Optionally pass view template
"""
local.message_title = title
local.message = html
local.response['type'] = 'page'
local.response['route'] = template
local.no_cache = 1
if http_status_code:
local.response['http_status_code'] = http_status_code
if not context:
context = {}
if not indicator_color:
if success:
indicator_color = 'green'
elif http_status_code and http_status_code > 300:
indicator_color = 'red'
else:
indicator_color = 'blue'
context['indicator_color'] = indicator_color
context['primary_label'] = primary_label
context['primary_action'] = primary_action
context['error_code'] = http_status_code
context['fullpage'] = fullpage
if width:
context['card_width'] = width
local.response['context'] = context
def redirect_to_message(title, html, http_status_code=None, context=None, indicator_color=None):
"""Redirects to /message?id=random
Similar to respond_as_web_page, but used to 'redirect' and show message pages like success, failure, etc. with a detailed message
:param title: Page title and heading.
:param message: Message to be shown.
:param http_status_code: HTTP status code.
Example Usage:
frappe.redirect_to_message(_('Thank you'), "<div><p>You will receive an email at test@example.com</p></div>")
"""
message_id = generate_hash(length=8)
message = {
'context': context or {},
'http_status_code': http_status_code or 200
}
message['context'].update({
'header': title,
'title': title,
'message': html
})
if indicator_color:
message['context'].update({
"indicator_color": indicator_color
})
cache().set_value("message_id:{0}".format(message_id), message, expires_in_sec=60)
location = '/message?id={0}'.format(message_id)
if not getattr(local, 'is_ajax', False):
local.response["type"] = "redirect"
local.response["location"] = location
else:
return location
def build_match_conditions(doctype, as_condition=True):
"""Return match (User permissions) for given doctype as list or SQL."""
import frappe.desk.reportview
return frappe.desk.reportview.build_match_conditions(doctype, as_condition=as_condition)
def get_list(doctype, *args, **kwargs):
"""List database query via `frappe.model.db_query`. Will also check for permissions.
:param doctype: DocType on which query is to be made.
:param fields: List of fields or `*`.
:param filters: List of filters (see example).
:param order_by: Order By e.g. `modified desc`.
:param limit_start: Start results at record #. Default 0.
:param limit_page_length: No of records in the page. Default 20.
Example usage:
# simple dict filter
frappe.get_list("ToDo", fields=["name", "description"], filters = {"owner":"test@example.com"})
# filter as a list of lists
frappe.get_list("ToDo", fields="*", filters = [["modified", ">", "2014-01-01"]])
# filter as a list of dicts
frappe.get_list("ToDo", fields="*", filters = {"description": ("like", "test%")})
"""
import frappe.model.db_query
return frappe.model.db_query.DatabaseQuery(doctype).execute(None, *args, **kwargs)
def get_all(doctype, *args, **kwargs):
"""List database query via `frappe.model.db_query`. Will **not** check for permissions.
Parameters are same as `frappe.get_list`
:param doctype: DocType on which query is to be made.
:param fields: List of fields or `*`. Default is: `["name"]`.
:param filters: List of filters (see example).
:param order_by: Order By e.g. `modified desc`.
:param limit_start: Start results at record #. Default 0.
:param limit_page_length: No of records in the page. Default 20.
Example usage:
# simple dict filter
frappe.get_all("ToDo", fields=["name", "description"], filters = {"owner":"test@example.com"})
# filter as a list of lists
frappe.get_all("ToDo", fields=["*"], filters = [["modified", ">", "2014-01-01"]])
# filter as a list of dicts
frappe.get_all("ToDo", fields=["*"], filters = {"description": ("like", "test%")})
"""
kwargs["ignore_permissions"] = True
if not "limit_page_length" in kwargs:
kwargs["limit_page_length"] = 0
return get_list(doctype, *args, **kwargs)
def get_value(*args, **kwargs):
"""Returns a document property or list of properties.
Alias for `frappe.db.get_value`
:param doctype: DocType name.
:param filters: Filters like `{"x":"y"}` or name of the document. `None` if Single DocType.
:param fieldname: Column name.
:param ignore: Don't raise exception if table, column is missing.
:param as_dict: Return values as dict.
:param debug: Print query in error log.
"""
return db.get_value(*args, **kwargs)
def as_json(obj, indent=1):
from frappe.utils.response import json_handler
return json.dumps(obj, indent=indent, sort_keys=True, default=json_handler, separators=(',', ': '))
def are_emails_muted():
from frappe.utils import cint
return flags.mute_emails or cint(conf.get("mute_emails") or 0) or False
def get_test_records(doctype):
"""Returns list of objects from `test_records.json` in the given doctype's folder."""
from frappe.modules import get_doctype_module, get_module_path
path = os.path.join(get_module_path(get_doctype_module(doctype)), "doctype", scrub(doctype), "test_records.json")
if os.path.exists(path):
with open(path, "r") as f:
return json.loads(f.read())
else:
return []
def format_value(*args, **kwargs):
"""Format value with given field properties.
:param value: Value to be formatted.
:param df: (Optional) DocField object with properties `fieldtype`, `options` etc."""
import frappe.utils.formatters
return frappe.utils.formatters.format_value(*args, **kwargs)
def format(*args, **kwargs):
"""Format value with given field properties.
:param value: Value to be formatted.
:param df: (Optional) DocField object with properties `fieldtype`, `options` etc."""
import frappe.utils.formatters
return frappe.utils.formatters.format_value(*args, **kwargs)
def get_print(doctype=None, name=None, print_format=None, style=None, html=None, as_pdf=False, doc=None, output = None, no_letterhead = 0, password=None):
"""Get Print Format for given document.
:param doctype: DocType of document.
:param name: Name of document.
:param print_format: Print Format name. Default 'Standard',
:param style: Print Format style.
:param as_pdf: Return as PDF. Default False.
:param password: Password to encrypt the pdf with. Default None"""
from frappe.website.render import build_page
from frappe.utils.pdf import get_pdf
local.form_dict.doctype = doctype
local.form_dict.name = name
local.form_dict.format = print_format
local.form_dict.style = style
local.form_dict.doc = doc
local.form_dict.no_letterhead = no_letterhead
options = None
if password:
options = {'password': password}
if not html:
html = build_page("printview")
if as_pdf:
return get_pdf(html, output = output, options = options)
else:
return html
def attach_print(doctype, name, file_name=None, print_format=None, style=None, html=None, doc=None, lang=None, print_letterhead=True, password=None):
from frappe.utils import scrub_urls
if not file_name: file_name = name
file_name = file_name.replace(' ','').replace('/','-')
print_settings = db.get_singles_dict("Print Settings")
_lang = local.lang
#set lang as specified in print format attachment
if lang: local.lang = lang
local.flags.ignore_print_permissions = True
no_letterhead = not print_letterhead
if int(print_settings.send_print_as_pdf or 0):
out = {
"fname": file_name + ".pdf",
"fcontent": get_print(doctype, name, print_format=print_format, style=style, html=html, as_pdf=True, doc=doc, no_letterhead=no_letterhead, password=password)
}
else:
out = {
"fname": file_name + ".html",
"fcontent": scrub_urls(get_print(doctype, name, print_format=print_format, style=style, html=html, doc=doc, no_letterhead=no_letterhead, password=password)).encode("utf-8")
}
local.flags.ignore_print_permissions = False
#reset lang to original local lang
local.lang = _lang
return out
def publish_progress(*args, **kwargs):
"""Show the user progress for a long request
:param percent: Percent progress
:param title: Title
:param doctype: Optional, for document type
:param docname: Optional, for document name
:param description: Optional description
"""
import frappe.realtime
return frappe.realtime.publish_progress(*args, **kwargs)
def publish_realtime(*args, **kwargs):
"""Publish real-time updates
:param event: Event name, like `task_progress` etc.
:param message: JSON message object. For async must contain `task_id`
:param room: Room in which to publish update (default entire site)
:param user: Transmit to user
:param doctype: Transmit to doctype, docname
:param docname: Transmit to doctype, docname
:param after_commit: (default False) will emit after current transaction is committed
"""
import frappe.realtime
return frappe.realtime.publish_realtime(*args, **kwargs)
def local_cache(namespace, key, generator, regenerate_if_none=False):
"""A key value store for caching within a request
:param namespace: frappe.local.cache[namespace]
:param key: frappe.local.cache[namespace][key] used to retrieve value
:param generator: method to generate a value if not found in store
"""
if namespace not in local.cache:
local.cache[namespace] = {}
if key not in local.cache[namespace]:
local.cache[namespace][key] = generator()
elif local.cache[namespace][key] is None and regenerate_if_none:
# if key exists but the previous result was None
local.cache[namespace][key] = generator()
return local.cache[namespace][key]
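# Minimal usage sketch (hypothetical helper, assumes an initialised request
# context where frappe.local.cache and the database connection exist):
# local_cache memoises the generator result for the lifetime of the request.
def _example_local_cache():
	def expensive_lookup():
		return db.get_single_value("System Settings", "time_zone")
	# The generator runs once per request; later calls with the same
	# namespace/key return the cached value.
	return local_cache("examples", "time_zone", generator=expensive_lookup)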
def enqueue(*args, **kwargs):
'''
Enqueue method to be executed using a background worker
:param method: method string or method object
:param queue: (optional) should be either long, default or short
:param timeout: (optional) should be set according to the functions
:param event: this is passed to enable clearing of jobs from queues
:param is_async: (optional) if is_async=False, the method is executed immediately, else via a worker
:param job_name: (optional) can be used to name an enqueue call, which can be used to prevent duplicate calls
:param kwargs: keyword arguments to be passed to the method
'''
import frappe.utils.background_jobs
return frappe.utils.background_jobs.enqueue(*args, **kwargs)
def enqueue_doc(*args, **kwargs):
'''
Enqueue method to be executed using a background worker
:param doctype: DocType of the document on which you want to run the event
:param name: Name of the document on which you want to run the event
:param method: method string or method object
:param queue: (optional) should be either long, default or short
:param timeout: (optional) should be set according to the functions
:param kwargs: keyword arguments to be passed to the method
'''
import frappe.utils.background_jobs
return frappe.utils.background_jobs.enqueue_doc(*args, **kwargs)
def get_doctype_app(doctype):
def _get_doctype_app():
doctype_module = local.db.get_value("DocType", doctype, "module")
return local.module_app[scrub(doctype_module)]
return local_cache("doctype_app", doctype, generator=_get_doctype_app)
loggers = {}
log_level = None
def logger(module=None, with_more_info=True):
'''Returns a python logger that uses StreamHandler'''
from frappe.utils.logger import get_logger
return get_logger(module or 'default', with_more_info=with_more_info)
def log_error(message=None, title=None):
'''Log error to Error Log'''
return get_doc(dict(doctype='Error Log', error=as_unicode(message or get_traceback()),
method=title)).insert(ignore_permissions=True)
def get_desk_link(doctype, name):
return '<a href="#Form/{0}/{1}" style="font-weight: bold;">{2} {1}</a>'.format(doctype, name, _(doctype))
def bold(text):
return '<b>{0}</b>'.format(text)
def safe_eval(code, eval_globals=None, eval_locals=None):
'''A safer `eval`'''
whitelisted_globals = {
"int": int,
"float": float,
"long": int,
"round": round
}
if '__' in code:
throw('Illegal rule {0}. Cannot use "__"'.format(bold(code)))
if not eval_globals:
eval_globals = {}
eval_globals['__builtins__'] = {}
eval_globals.update(whitelisted_globals)
return eval(code, eval_globals, eval_locals)
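# Minimal usage sketch (hypothetical helper): only the whitelisted builtins
# above are available to safe_eval, and any expression containing "__" is
# rejected (via throw) before evaluation.
def _example_safe_eval():
	assert safe_eval("int(total) > 10", None, {"total": "42"}) is True
	try:
		safe_eval("().__class__")  # rejected: contains "__"
	except Exception:
		pass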
def get_system_settings(key):
if key not in local.system_settings:
local.system_settings.update({key: db.get_single_value('System Settings', key)})
return local.system_settings.get(key)
def get_active_domains():
from frappe.core.doctype.domain_settings.domain_settings import get_active_domains
return get_active_domains()
def get_version(doctype, name, limit = None, head = False, raise_err = True):
'''
Returns a list of version information of a given DocType (Applicable only if DocType has changes tracked).
Example
>>> frappe.get_version('User', 'foobar@gmail.com')
>>>
[
{
"version": [version.data], # Refer Version DocType get_diff method and data attribute
"user": "admin@gmail.com" # User that created this version
"creation": <datetime.datetime> # Creation timestamp of that object.
}
]
'''
meta = get_meta(doctype)
if meta.track_changes:
names = db.sql("""
SELECT name from tabVersion
WHERE ref_doctype = '{doctype}' AND docname = '{name}'
{order_by}
{limit}
""".format(
doctype = doctype,
name = name,
order_by = 'ORDER BY creation' if head else '',
limit = 'LIMIT {limit}'.format(limit = limit) if limit else ''
))
from frappe.chat.util import squashify, dictify, safe_json_loads
versions = [ ]
for name in names:
name = squashify(name)
doc = get_doc('Version', name)
data = doc.data
data = safe_json_loads(data)
data = dictify(dict(
version = data,
user = doc.owner,
creation = doc.creation
))
versions.append(data)
return versions
else:
if raise_err:
raise ValueError('{doctype} has no versions tracked.'.format(
doctype = doctype
))
@whitelist(allow_guest = True)
def ping():
return "pong"
def safe_encode(param, encoding = 'utf-8'):
try:
param = param.encode(encoding)
except Exception:
pass
return param
def safe_decode(param, encoding = 'utf-8'):
try:
param = param.decode(encoding)
except Exception:
pass
return param
def parse_json(val):
from frappe.utils import parse_json
return parse_json(val)
def mock(type, size = 1, locale = 'en'):
results = [ ]
faker = Faker(locale)
if type not in dir(faker):
raise ValueError('Not a valid mock type.')
else:
for i in range(size):
data = getattr(faker, type)()
results.append(data)
from frappe.chat.util import squashify
results = squashify(results)
return results
def validate_and_sanitize_search_inputs(fn):
from frappe.desk.search import validate_and_sanitize_search_inputs as func
return func(fn)
| 30.711191 | 180 | 0.730634 |
| bc5ecafb0621cf814426c024118e7091f41f1084 | 1,436 | py | Python | src/423. Reconstruct Original Digits from English.py | xiaonanln/myleetcode-python | 95d282f21a257f937cd22ef20c3590a69919e307 | ["Apache-2.0"] | null | null | null | src/423. Reconstruct Original Digits from English.py | xiaonanln/myleetcode-python | 95d282f21a257f937cd22ef20c3590a69919e307 | ["Apache-2.0"] | null | null | null | src/423. Reconstruct Original Digits from English.py | xiaonanln/myleetcode-python | 95d282f21a257f937cd22ef20c3590a69919e307 | ["Apache-2.0"] | null | null | null |
from collections import Counter
class Solution(object):
def originalDigits(self, s):
"""
:type s: str
:rtype: str
"""
C = Counter(s)
C2 = [0] * 10
if C['x']: # get six
n = C['x']
C2[6] = n
C['s'] -= n
C['i'] -= n
C['x'] -= n
if C['z']: # get zero
n = C['z']
C2[0] = n
C['z'] -= n
C['e'] -= n
C['r'] -= n
C['o'] -= n
if C['w']: # get two
n = C['w']
C2[2] = n
C['t'] -= n
C['w'] -= n
C['o'] -= n
if C['u']: # get 4
n = C['u']
C2[4] = n
C['f'] -= n
C['o'] -= n
C['u'] -= n
C['r'] -= n
if C['f']: # get 5
n = C['f']
C2[5] = n
C['f'] -= n
C['i'] -= n
C['v'] -= n
C['e'] -= n
if C['v']: # get 7
n = C['v']
C2[7] = n
C['s'] -= n
C['e'] -= n
C['v'] -= n
C['e'] -= n
C['n'] -= n
if C['g']: # get 8
n = C['g']
C2[8] = n
C['e'] -= n
C['i'] -= n
C['g'] -= n
C['h'] -= n
C['t'] -= n
if C['i']: # get 9
n = C['i']
C2[9] = n
C['n'] -= n
C['i'] -= n
C['n'] -= n
C['e'] -= n
if C['h']: # get three
n = C['h']
C2[3] = n
C['t'] -= n
C['h'] -= n
C['r'] -= n
C['e'] -= n
C['e'] -= n
if C['o']: # get one
n = C['o']
C2[1] = n
C['o'] -= n
C['n'] -= n
C['e'] -= n
# print C2
return ''.join(str(n) for n, c in enumerate(C2) for _ in xrange(c))
print Solution().originalDigits("otw")
# print Solution().originalDigits("fviefuro")
| 15.115789 | 69 | 0.344708 |
| 11165f4cff5da0aa0da70cdf8c4228d97c0af667 | 23,304 | py | Python | lib.py | silkyanteater/cue | 3987d5cd08d1e0795df657ad5a81e48bc2aca782 | ["MIT"] | null | null | null | lib.py | silkyanteater/cue | 3987d5cd08d1e0795df657ad5a81e48bc2aca782 | ["MIT"] | null | null | null | lib.py | silkyanteater/cue | 3987d5cd08d1e0795df657ad5a81e48bc2aca782 | ["MIT"] | null | null | null |
import json
import requests
import urllib.parse
import yaml
import toml
import dateutil.parser
import re
import os
import sys
import subprocess
import platform
from const import *
req = requests.models.PreparedRequest()
jira_instance_url = None
jira_request_headers = None
queries_definition_file = None
result_files_dir = None
alert_sound_file = None
all_issues_file = None
all_issues_cache = None
class JiraIssue(object):
def __init__(self, issue_obj):
if 'fields' in issue_obj:
self.raw_data = issue_obj
fields = issue_obj['fields']
sprints = list()
for sprint in (fields['customfield_10104'] or tuple()):
hit = sprint_re.search(sprint)
if hit is not None:
sprints.append(hit.group())
assert fields['project']['key'] == 'UI', f"Can't process a non-UI ticket, found '{fields['project']['key']}'"
self.data = {
'key': issue_obj['key'],
'url': urllib.parse.urljoin(jira_instance_url, f"/browse/{issue_obj['key']}"),
'title': (fields['summary'] or '').strip(),
'type': (fields['issuetype']['name'] or '').strip(),
'assignee': (fields['assignee']['name'] or '').strip() if fields['assignee'] is not None else '',
'status': (fields['status']['name'] or '').strip(),
'resolution': (fields['resolution']['name'] or '').strip() if fields['resolution'] is not None else '',
'target_version': (fields['customfield_13621'] or '').strip(),
'git_branches': re.sub(r'\s+', ' ', fields['customfield_11207'] or '').strip(),
'creator': (fields['creator']['name'] or '').strip(),
'created': dateutil.parser.parse(fields['created']),
'created_str': dateutil.parser.parse(fields['created']).strftime('%m-%b-%Y'),
'updated': dateutil.parser.parse(fields['updated']),
'updated_str': dateutil.parser.parse(fields['updated']).strftime('%m-%b-%Y'),
'labels': sorted(fields['labels']),
'labels_str': f"{', '.join(label.strip() for label in fields['labels'])}",
'description': re.sub(r'\s+', ' ', (fields['description'] or '').strip()),
'time_spent': fields['timespent'],
'time_spent_str': f"{fields['timespent'] // 3600}:{(fields['timespent'] % 3600) // 60:02}" if fields['timespent'] is not None else "",
'estimate': fields['timeestimate'],
'estimate_str': f"{fields['timeestimate'] // 3600}:{(fields['timeestimate'] % 3600) // 60:02}" if fields['timeestimate'] is not None else "",
'original_estimate': fields['timeoriginalestimate'],
'original_estimate_str': f"{fields['timeoriginalestimate'] // 3600}:{(fields['timeoriginalestimate'] % 3600) // 60:02}" if fields['timeoriginalestimate'] is not None else "",
'progress': fields['progress'],
'project': (fields['customfield_13613'] or '').strip(),
'fr': (fields['customfield_13611'] or '').strip(),
'epic': (fields['customfield_10100'] or '').strip(),
'story_points': str(fields['customfield_10106'] or ''),
'sprints': sprints,
'last_sprint': sprints[-1] if len(sprints) > 0 else '',
'sprints_str': ', '.join(sprints),
'parent': fields.get('parent', dict()).get('key', ''),
}
# TODO: add progress status, attachments
# TODO: add a field as how old the data is
# TODO: process non-UI tickets
# TODO: parent issue doesn't come up for sub-tasks
self.core_data = {key: self.data[key] for key, value in issue_display_keys}
self.core_data['key'] = self.data['key']
else:
self.raw_data = self.core_data = dict(issue_obj)
data = dict(issue_obj)
data['sprints'] = [sprint.strip() for sprint in data['sprints_str'].split(',')]
data['last_sprint'] = data['sprints'][-1] if len(data['sprints']) > 0 else ''
self.data = data
def __getattr__(self, attr):
if attr in self.data:
return self.data[attr]
elif attr in self.core_data:
return self.core_data[attr]
else:
raise KeyError(attr)
def __str__(self):
return f"{self.key} - {self.title}"
def _get_formatted_fields(self, fields_definition, *, add_colors = True, add_empty = True, centered = True, expand_links = True, align_field_separator = False):
def render_field(color, field_str, width, *, add_colors = True, centered = True):
if centered:
if add_colors and color is not None:
return f"{color}{field_str:^{width}}{CLR.reset}" if width > 0 else ''
else:
return f"{field_str:^{width}}" if width > 0 else ''
else:
if add_colors and color is not None:
return f"{color}{field_str:{width}}{CLR.reset}" if width > 0 else ''
else:
return f"{field_str:{width}}" if width > 0 else ''
formatted_fields = list()
for fields in fields_definition:
if len(fields) == 4:
key, width, default, color = fields
insert_field_name = False
elif len(fields) == 5:
key, width, default, color, field_name_color = fields
insert_field_name = True
else:
raise AssertionError(f"Field definition item is invalid: {fields}")
attr = getattr(self, key)
if not add_empty and not attr:
continue
attr = expand_issue_link(attr) if expand_links is True else attr
_width = width if width > 0 else len(attr)
field_str = ellipsis(attr, _width) if attr else default
if add_colors is True and isinstance(color, (tuple, list)):
default_color, color_switchers = color
formatted_field = render_field(default_color, field_str, _width, add_colors=add_colors, centered=centered)
for after_this, color_to_switch in color_switchers.items():
switch_position = formatted_field.find(after_this)
if switch_position >= 0:
insert_at_index = switch_position + len(after_this)
formatted_field = formatted_field[:insert_at_index] + color_to_switch + formatted_field[insert_at_index:]
else:
formatted_field = render_field(color, field_str, _width, add_colors=add_colors, centered=centered)
if insert_field_name:
field_name = [item[1] for item in issue_display_keys if item[0] == key][0]
if align_field_separator is True:
if add_colors is False or field_name_color is None:
formatted_field = f"{field_name:>{display_key_len}}: {formatted_field}"
else:
formatted_field = f"{field_name_color}{field_name:>{display_key_len}}{CLR.reset}: {formatted_field}"
else:
if add_colors is False or field_name_color is None:
formatted_field = f"{field_name}: {formatted_field}"
else:
formatted_field = f"{field_name_color}{field_name}{CLR.reset}: {formatted_field}"
formatted_fields.append(formatted_field)
return formatted_fields
def format_oneline(self, add_colors = True):
return f"{issue_fields_oneline_separator.join(self._get_formatted_fields(issue_fields_oneline, add_colors=add_colors, centered=False))}"
def format_compact(self, add_colors = True):
# TODO: show epic parent links
format_str = f"{issue_fields_compact_head_separator.join(self._get_formatted_fields(issue_fields_compact_head, add_colors=add_colors, centered=False))}"
for conditional_row in self._get_formatted_fields(issue_fields_compact_conditional_rows, add_colors=add_colors, add_empty=False, centered=False):
format_str += f"\n {conditional_row}"
body_str = issue_fields_compact_body_separator.join(self._get_formatted_fields(issue_fields_compact_body, add_colors=add_colors))
body_str = f"{issue_fields_compact_body_separator}{body_str}{issue_fields_compact_body_separator}".strip()
format_str += f"\n {body_str}"
return format_str
def format_long(self, *, add_colors = True, expand_links = True, align_field_separator = False):
used_indentation = '' if align_field_separator is True else ' '*issue_fields_long_indent
if add_colors is True:
format_str = f"{issue_fields_long_key_color}{self.key}{CLR.reset}\n{used_indentation}"
else:
format_str = f"{self.key}\n{used_indentation}"
format_str += ('\n' + used_indentation).join(self._get_formatted_fields(issue_fields_long, add_colors=add_colors, expand_links=expand_links, centered=False, align_field_separator=align_field_separator))
return format_str
def format(self, *, variant = None, add_colors = True, expand_links = True, align_field_separator = False):
if variant == 'oneline':
return self.format_oneline(add_colors=add_colors)
elif variant == 'long':
return self.format_long(add_colors=add_colors, expand_links=expand_links, align_field_separator=align_field_separator)
else:
return self.format_compact(add_colors=add_colors)
class JiraIssues(dict):
def __init__(self, issues_data = None):
super().__init__()
if issues_data is None:
return
assert isinstance(issues_data, (list, dict)), f"Unrecognised issues data: {issues_data}"
if isinstance(issues_data, list):
for issue_obj in issues_data:
issue = JiraIssue(issue_obj)
self[issue.key] = issue
else:
for issue_key, issue_dict in issues_data.items():
full_issue_dict = dict(issue_dict)
full_issue_dict.update({'key': issue_key})
issue = JiraIssue(full_issue_dict)
self[issue.key] = issue
def __str__(self):
return '\n'.join(str(issue) for issue in self.values())
def to_list(self):
return sorted(self.values(), key=lambda issue: int(issue.key.split('-')[1]))
def format(self, *, variant = None, add_colors = True, expand_links = True, add_separator_to_multiline = True, align_field_separator = False):
if variant == 'oneline':
return '\n'.join(issue.format(variant=variant, add_colors=add_colors, expand_links=expand_links) for issue in self.to_list())
else:
formatted_issues = tuple(issue.format(variant=variant, add_colors=add_colors, expand_links=expand_links, align_field_separator=align_field_separator) for issue in self.to_list())
if add_separator_to_multiline is True and issue_fields_vertical_separator:
if add_colors is True and issue_fields_vertical_separator_color is not None:
separator = f"{issue_fields_vertical_separator_color}{issue_fields_vertical_separator}{CLR.reset}"
else:
separator = issue_fields_vertical_separator
return separator + '\n' + f"\n{separator}\n".join(formatted_issues) + '\n' + separator
else:
return '\n'.join(formatted_issues)
def filter(self, issue_refs):
keys_to_remove = list()
for key in self.keys():
if key not in issue_refs:
keys_to_remove.append(key)
for key in keys_to_remove:
del self[key]
return self
def update(self, issues):
assert isinstance(issues, type(self)), f"JiraIssues type expected"
for key, value in issues.items():
self[key] = value
return self
def init_lib():
get_user_config()
def get_jira_data(url, *, query_params = None, headers = None):
query_params = dict(query_params)
query_params.update({'fields': ','.join(requested_issue_fields)})
req.prepare_url(url, query_params)
try:
print(f"Sending request: {req.url}")
resp = requests.get(url, params=query_params, headers=headers, allow_redirects=True, timeout=request_timeout_seconds)
except Exception as e:
raise AssertionError(str(e))
except KeyboardInterrupt:
raise AssertionError()
if resp.status_code != 200:
if resp.headers.get('X-Seraph-LoginReason') == 'AUTHENTICATED_FAILED':
raise AssertionError(f"HTTP Error: {resp.status_code} - {requests.status_codes._codes.get(resp.status_code, ['N/A'])[0]} > check login credentials")
elif resp.headers.get('X-Seraph-LoginReason') == 'AUTHENTICATION_DENIED':
raise AssertionError(f"HTTP Error: {resp.status_code} - {requests.status_codes._codes.get(resp.status_code, ['N/A'])[0]} > try a browser login, captcha authentication may have been triggered")
else:
raise AssertionError(f"HTTP Error: {resp.status_code} - {requests.status_codes._codes.get(resp.status_code, ['N/A'])[0]}")
try:
response_json = json.loads(resp.content)
except Exception as e:
raise AssertionError(f"Error while loading json: {e}")
if 'errorMessages' in response_json:
raise AssertionError(f"Error: {', '.join(response_json.get('errorMessages', ('unspecified', )))}")
return response_json
def search_issues(jql, *, maxResults = -1, startAt = None):
# TODO: limit maxResults
url = urllib.parse.urljoin(jira_instance_url, '/rest/api/2/search')
query_params = {
'jql': jql,
'maxResults': maxResults,
}
if startAt is not None:
query_params['startAt'] = startAt
issues = JiraIssues(get_jira_data(url, query_params=query_params, headers=jira_request_headers)['issues'])
update_all_issues_cache(issues)
return issues
def get_jira_issues(jira_issue_refs):
return search_issues(f"key in ({', '.join(jira_issue_refs)})")
def get_user_config():
global jira_instance_url, jira_request_headers, queries_definition_file, result_files_dir, alert_sound_file, all_issues_file
assert os.path.isfile(config_toml_file_name), f"Config file '{config_toml_file_name}' not found"
user_config = toml.loads(open(config_toml_file_name).read())
for required_key in required_config_keys:
assert required_key in user_config, f"Key missing from {config_toml_file_name}: {required_key}"
jira_instance_url = user_config["jira_instance_url"]
key_file_path = user_config["jira_key_file"]
assert os.path.isfile(key_file_path), f"Key file '{key_file_path}' not found"
jira_request_headers = {
'Authorization': f'Basic {open(key_file_path).read().strip()}',
'Content-Type': 'application/json',
}
queries_definition_file = user_config["queries_definition_file"]
result_files_dir = user_config["result_files_dir"]
alert_sound_file = user_config["alert_sound_file"]
all_issues_file = user_config["all_issues_file"]
if not os.path.isdir(result_files_dir):
os.mkdir(result_files_dir)
return user_config
def get_queries():
get_user_config()
queries = yaml.safe_load(open(queries_definition_file).read())
return queries
def get_query(query_name):
queries = [(key, value) for key, value in get_queries().items() if value['name'] == query_name]
assert len(queries) > 0, f"Command '{query_name}' not found in '{queries_definition_file}'"
assert len(queries) <= 1, f"Duplicate command '{query_name}' found in '{queries_definition_file}'"
return queries[0][0], queries[0][1]['jql']
def get_all_query_names():
return tuple(value['name'] for value in get_queries().values())
def get_active_query_names():
return tuple(value['name'] for value in get_queries().values() if value.get('passive', False) is not True)
def write_issues(filename, issues):
with open(os.path.join(result_files_dir, f"{filename}.txt"), 'w+') as txtfile:
txtfile.write(issues.format(variant='long', add_colors=False, expand_links=False, add_separator_to_multiline = False))
def import_core_data_of_issue(issue_text):
core_data = dict()
lines = [line.strip() for line in issue_text.split('\n')]
core_data['key'] = lines[0]
for line in lines[1:]:
if len(line.strip()) == 0:
continue
display_key, value = [item.strip() for item in line.split(':', 1)]
keys = [item[0] for item in issue_display_keys if item[1] == display_key]
assert len(keys) > 0, f"Field '{display_key}' not recognised for issue {core_data['key']}"
core_data[keys[0]] = value
return core_data
def import_core_data_sets(text):
if text.strip() == '':
return dict()
core_data_sets = dict()
for issue_text in re.split(r'\n(?=\S)', text):
core_data = import_core_data_of_issue(issue_text)
core_data_sets[core_data['key']] = core_data
return core_data_sets
def get_stored_issues_for_query(query_name):
query_title, jql = get_query(query_name)
query_file_path = os.path.join(result_files_dir, f"{query_title}.txt")
text = ''
if os.path.isfile(query_file_path):
text = open(query_file_path).read()
return JiraIssues(import_core_data_sets(text))
def load_all_issues_cache():
global all_issues_cache
if all_issues_cache is None:
all_issues_file_path = os.path.join(result_files_dir, all_issues_file)
text = ''
if os.path.isfile(all_issues_file_path):
text = open(all_issues_file_path).read()
all_issues_cache = JiraIssues(import_core_data_sets(text))
return all_issues_cache
def update_all_issues_cache(issues):
all_issues_cache = load_all_issues_cache().update(issues)
with open(os.path.join(result_files_dir, all_issues_file), 'w+') as txtfile:
txtfile.write(all_issues_cache.format(variant='long', add_colors=False, expand_links=False, add_separator_to_multiline = False))
def get_updated_issues(issues, stored_issues):
updated_issues = dict()
for key, issue in issues.items():
if key not in stored_issues or stored_issues[key].core_data != issue.core_data:
updated_issues[key] = issue
return updated_issues
def write_queue(queue_lines, *, append = False):
file_mode = 'a+' if append is True else 'w+'
if file_mode == 'w+' or len(queue_lines) > 0:
with open(os.path.join(result_files_dir, queue_file_name), file_mode) as queuefile:
if len(queue_lines) > 0:
queuefile.write('\n'.join(queue_lines) + '\n')
else:
queuefile.write('')
def update_queue(query_title, updated_issues):
queue_items = [f"{query_title} -- {str(issue)}" for key, issue in updated_issues.items()]
write_queue(queue_items, append=True)
def load_queue():
queue_file_path = os.path.join(result_files_dir, queue_file_name)
queue_content = ''
if os.path.isfile(queue_file_path):
queue_content = open(queue_file_path).read().strip()
return queue_content
def step_through_queue():
queue_lines = [line.strip() for line in load_queue().split('\n') if len(line.strip()) > 0]
if len(queue_lines) > 0:
remaining_lines = list()
resp = None
for line in queue_lines:
if resp != 'all skipped':
resp = None
while resp not in ('', 's', 'd', 'q', 'all skipped'):
resp = input(f"{line} | S(kip) d(one) q(uit) > ").lower()
if resp in ('', 's', 'q', 'all skipped'):
remaining_lines.append(line)
if resp == 'q':
resp = 'all skipped'
elif resp == 'd':
pass
write_queue(remaining_lines)
else:
print("Queue is empty")
def print_queue():
queue_content = load_queue()
if len(queue_content) > 0:
print(queue_content)
else:
print("Queue is empty")
def convert_to_issue_ref(ref):
issue_ref = re.sub(r"([a-zA-Z])(?=\d)", r"\1-", str(ref)).upper()
if digits_re.match(issue_ref):
issue_ref = "UI-" + issue_ref
return issue_ref
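# Minimal usage sketch (hypothetical helper, assuming digits_re from const
# matches all-digit strings): a dash is inserted between the project prefix
# and the number, bare numbers default to the UI project, and the result is
# upper-cased.
def _example_convert_to_issue_ref():
    assert convert_to_issue_ref("ui123") == "UI-123"
    assert convert_to_issue_ref("123") == "UI-123"
    assert convert_to_issue_ref("UI-123") == "UI-123"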
def show_help():
sys.stdout.write(help_text)
def get_format_option(quickparse):
format = default_listing_format
for option in quickparse.options:
if option == '--oneline':
format = 'oneline'
elif option == '--compact':
format = 'compact'
elif option == '--long':
format = 'long'
return format
def play_sound(filename):
if 'linux' in platform.system().lower():
p = subprocess.Popen(['aplay', os.path.join(os.getcwd(), filename)],stdout=subprocess.PIPE,stderr=subprocess.PIPE)
else:
p = subprocess.Popen(['afplay', os.path.join(os.getcwd(), filename)],stdout=subprocess.PIPE,stderr=subprocess.PIPE)
output, err = p.communicate()
def show_system_notification(message):
os.system(f"notify-send '{message}'")
def alert_if_queue_not_empty():
queue_file_path = os.path.join(result_files_dir, queue_file_name)
if os.path.isfile(queue_file_path):
queue_items = [line for line in open(queue_file_path).read().strip().split('\n') if len(line.strip()) > 0]
if len(queue_items) > 0:
if QUEUE_ALERT_PLAY_SOUND:
if os.path.isfile(alert_sound_file):
play_sound(alert_sound_file)
elif alert_sound_file:
print(f"Alert sound file not found: {alert_sound_file}")
if QUEUE_ALERT_SHOW_SYSTEM_NOTIFICATION:
show_system_notification('\n'.join(queue_items))
print(f"Queue length: {len(queue_items)}")
else:
print("Queue is empty")
else:
print("Queue is empty")
def ellipsis(string, length):
if len(string) <= length or length <= 0:
return string
else:
return f"{string[:length-1]}…"
def expand_issue_link(field):
if issue_ref_re.match(field) is not None:
issues_cache = load_all_issues_cache()
if field in issues_cache:
return f"{field} - {issues_cache[field].title}"
return field
def add_extra_params(jql, quickparse):
required_param_count = jql.count('%s')
if required_param_count > 0:
assert '--extra' in quickparse.options, f"Expected {required_param_count} extra parameters to substitute"
extra_params = quickparse.options['--extra'].split(',')
assert required_param_count == len(extra_params), f"Expected {required_param_count} extra parameters to substitute and got {len(extra_params)}"
for extra_param in extra_params:
jql = jql.replace('%s', convert_to_issue_ref(extra_param), 1)
return jql
else:
assert '--extra' not in quickparse.options, f"No extra parameters required, got: {quickparse.options['--extra']}"
return jql
| 46.983871 | 210 | 0.6365 |
| 1ce21d190c851537db2da78148fce090dea3564e | 15,238 | py | Python | synapse/storage/events_bg_updates.py | whitemike889/synapse | 97bf3077550915161765fdd1cf9290d8039a55f9 | ["Apache-2.0"] | null | null | null | synapse/storage/events_bg_updates.py | whitemike889/synapse | 97bf3077550915161765fdd1cf9290d8039a55f9 | ["Apache-2.0"] | null | null | null | synapse/storage/events_bg_updates.py | whitemike889/synapse | 97bf3077550915161765fdd1cf9290d8039a55f9 | ["Apache-2.0"] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from six import text_type
from canonicaljson import json
from twisted.internet import defer
from synapse.storage.background_updates import BackgroundUpdateStore
logger = logging.getLogger(__name__)
class EventsBackgroundUpdatesStore(BackgroundUpdateStore):
EVENT_ORIGIN_SERVER_TS_NAME = "event_origin_server_ts"
EVENT_FIELDS_SENDER_URL_UPDATE_NAME = "event_fields_sender_url"
DELETE_SOFT_FAILED_EXTREMITIES = "delete_soft_failed_extremities"
def __init__(self, db_conn, hs):
super(EventsBackgroundUpdatesStore, self).__init__(db_conn, hs)
self.register_background_update_handler(
self.EVENT_ORIGIN_SERVER_TS_NAME, self._background_reindex_origin_server_ts
)
self.register_background_update_handler(
self.EVENT_FIELDS_SENDER_URL_UPDATE_NAME,
self._background_reindex_fields_sender,
)
self.register_background_index_update(
"event_contains_url_index",
index_name="event_contains_url_index",
table="events",
columns=["room_id", "topological_ordering", "stream_ordering"],
where_clause="contains_url = true AND outlier = false",
)
# an event_id index on event_search is useful for the purge_history
# api. Plus it means we get to enforce some integrity with a UNIQUE
# clause
self.register_background_index_update(
"event_search_event_id_idx",
index_name="event_search_event_id_idx",
table="event_search",
columns=["event_id"],
unique=True,
psql_only=True,
)
self.register_background_update_handler(
self.DELETE_SOFT_FAILED_EXTREMITIES, self._cleanup_extremities_bg_update
)
@defer.inlineCallbacks
def _background_reindex_fields_sender(self, progress, batch_size):
target_min_stream_id = progress["target_min_stream_id_inclusive"]
max_stream_id = progress["max_stream_id_exclusive"]
rows_inserted = progress.get("rows_inserted", 0)
INSERT_CLUMP_SIZE = 1000
def reindex_txn(txn):
sql = (
"SELECT stream_ordering, event_id, json FROM events"
" INNER JOIN event_json USING (event_id)"
" WHERE ? <= stream_ordering AND stream_ordering < ?"
" ORDER BY stream_ordering DESC"
" LIMIT ?"
)
txn.execute(sql, (target_min_stream_id, max_stream_id, batch_size))
rows = txn.fetchall()
if not rows:
return 0
min_stream_id = rows[-1][0]
update_rows = []
for row in rows:
try:
event_id = row[1]
event_json = json.loads(row[2])
sender = event_json["sender"]
content = event_json["content"]
contains_url = "url" in content
if contains_url:
contains_url &= isinstance(content["url"], text_type)
except (KeyError, AttributeError):
# If the event is missing a necessary field then
# skip over it.
continue
update_rows.append((sender, contains_url, event_id))
sql = "UPDATE events SET sender = ?, contains_url = ? WHERE event_id = ?"
for index in range(0, len(update_rows), INSERT_CLUMP_SIZE):
clump = update_rows[index : index + INSERT_CLUMP_SIZE]
txn.executemany(sql, clump)
progress = {
"target_min_stream_id_inclusive": target_min_stream_id,
"max_stream_id_exclusive": min_stream_id,
"rows_inserted": rows_inserted + len(rows),
}
self._background_update_progress_txn(
txn, self.EVENT_FIELDS_SENDER_URL_UPDATE_NAME, progress
)
return len(rows)
result = yield self.runInteraction(
self.EVENT_FIELDS_SENDER_URL_UPDATE_NAME, reindex_txn
)
if not result:
yield self._end_background_update(self.EVENT_FIELDS_SENDER_URL_UPDATE_NAME)
defer.returnValue(result)
@defer.inlineCallbacks
def _background_reindex_origin_server_ts(self, progress, batch_size):
target_min_stream_id = progress["target_min_stream_id_inclusive"]
max_stream_id = progress["max_stream_id_exclusive"]
rows_inserted = progress.get("rows_inserted", 0)
INSERT_CLUMP_SIZE = 1000
def reindex_search_txn(txn):
sql = (
"SELECT stream_ordering, event_id FROM events"
" WHERE ? <= stream_ordering AND stream_ordering < ?"
" ORDER BY stream_ordering DESC"
" LIMIT ?"
)
txn.execute(sql, (target_min_stream_id, max_stream_id, batch_size))
rows = txn.fetchall()
if not rows:
return 0
min_stream_id = rows[-1][0]
event_ids = [row[1] for row in rows]
rows_to_update = []
chunks = [event_ids[i : i + 100] for i in range(0, len(event_ids), 100)]
for chunk in chunks:
ev_rows = self._simple_select_many_txn(
txn,
table="event_json",
column="event_id",
iterable=chunk,
retcols=["event_id", "json"],
keyvalues={},
)
for row in ev_rows:
event_id = row["event_id"]
event_json = json.loads(row["json"])
try:
origin_server_ts = event_json["origin_server_ts"]
except (KeyError, AttributeError):
# If the event is missing a necessary field then
# skip over it.
continue
rows_to_update.append((origin_server_ts, event_id))
sql = "UPDATE events SET origin_server_ts = ? WHERE event_id = ?"
for index in range(0, len(rows_to_update), INSERT_CLUMP_SIZE):
clump = rows_to_update[index : index + INSERT_CLUMP_SIZE]
txn.executemany(sql, clump)
progress = {
"target_min_stream_id_inclusive": target_min_stream_id,
"max_stream_id_exclusive": min_stream_id,
"rows_inserted": rows_inserted + len(rows_to_update),
}
self._background_update_progress_txn(
txn, self.EVENT_ORIGIN_SERVER_TS_NAME, progress
)
return len(rows_to_update)
result = yield self.runInteraction(
self.EVENT_ORIGIN_SERVER_TS_NAME, reindex_search_txn
)
if not result:
yield self._end_background_update(self.EVENT_ORIGIN_SERVER_TS_NAME)
defer.returnValue(result)
@defer.inlineCallbacks
def _cleanup_extremities_bg_update(self, progress, batch_size):
"""Background update to clean out extremities that should have been
deleted previously.
Mainly used to deal with the aftermath of #5269.
"""
# This works by first copying all existing forward extremities into the
# `_extremities_to_check` table at start up, and then checking each
# event in that table whether we have any descendants that are not
# soft-failed/rejected. If that is the case then we delete that event
# from the forward extremities table.
#
# For efficiency, we do this in batches by recursively pulling out all
# descendants of a batch until we find the non soft-failed/rejected
# events, i.e. the set of descendants whose chain of prev events back
# to the batch of extremities are all soft-failed or rejected.
# Typically, we won't find any such events as extremities will rarely
# have any descendants, but if they do then we should delete those
# extremities.
def _cleanup_extremities_bg_update_txn(txn):
# The set of extremity event IDs that we're checking this round
original_set = set()
# A dict[str, set[str]] of event ID to their prev events.
graph = {}
# The set of descendants of the original set that are not rejected
# nor soft-failed. Ancestors of these events should be removed
# from the forward extremities table.
non_rejected_leaves = set()
# Set of event IDs that have been soft failed, and for which we
# should check if they have descendants which haven't been soft
# failed.
soft_failed_events_to_lookup = set()
# First, we get `batch_size` events from the table, pulling out
# their successor events, if any, and the successor events'
# rejection status.
txn.execute(
"""SELECT prev_event_id, event_id, internal_metadata,
rejections.event_id IS NOT NULL, events.outlier
FROM (
SELECT event_id AS prev_event_id
FROM _extremities_to_check
LIMIT ?
) AS f
LEFT JOIN event_edges USING (prev_event_id)
LEFT JOIN events USING (event_id)
LEFT JOIN event_json USING (event_id)
LEFT JOIN rejections USING (event_id)
""",
(batch_size,),
)
for prev_event_id, event_id, metadata, rejected, outlier in txn:
original_set.add(prev_event_id)
if not event_id or outlier:
# Common case where the forward extremity doesn't have any
# descendants.
continue
graph.setdefault(event_id, set()).add(prev_event_id)
soft_failed = False
if metadata:
soft_failed = json.loads(metadata).get("soft_failed")
if soft_failed or rejected:
soft_failed_events_to_lookup.add(event_id)
else:
non_rejected_leaves.add(event_id)
# Now we recursively check all the soft-failed descendants we
# found above in the same way, until we have nothing left to
# check.
while soft_failed_events_to_lookup:
# We only want to do 100 at a time, so we split given list
# into two.
batch = list(soft_failed_events_to_lookup)
to_check, to_defer = batch[:100], batch[100:]
soft_failed_events_to_lookup = set(to_defer)
sql = """SELECT prev_event_id, event_id, internal_metadata,
rejections.event_id IS NOT NULL
FROM event_edges
INNER JOIN events USING (event_id)
INNER JOIN event_json USING (event_id)
LEFT JOIN rejections USING (event_id)
WHERE
prev_event_id IN (%s)
AND NOT events.outlier
""" % (
",".join("?" for _ in to_check),
)
txn.execute(sql, to_check)
for prev_event_id, event_id, metadata, rejected in txn:
if event_id in graph:
# Already handled this event previously, but we still
# want to record the edge.
graph[event_id].add(prev_event_id)
continue
graph[event_id] = {prev_event_id}
soft_failed = json.loads(metadata).get("soft_failed")
if soft_failed or rejected:
soft_failed_events_to_lookup.add(event_id)
else:
non_rejected_leaves.add(event_id)
# We have a set of non-soft-failed descendants, so we recurse up
# the graph to find all ancestors and add them to the set of event
# IDs that we can delete from forward extremities table.
to_delete = set()
while non_rejected_leaves:
event_id = non_rejected_leaves.pop()
prev_event_ids = graph.get(event_id, set())
non_rejected_leaves.update(prev_event_ids)
to_delete.update(prev_event_ids)
to_delete.intersection_update(original_set)
deleted = self._simple_delete_many_txn(
txn=txn,
table="event_forward_extremities",
column="event_id",
iterable=to_delete,
keyvalues={},
)
logger.info(
"Deleted %d forward extremities of %d checked, to clean up #5269",
deleted,
len(original_set),
)
if deleted:
# We now need to invalidate the caches of these rooms
rows = self._simple_select_many_txn(
txn,
table="events",
column="event_id",
iterable=to_delete,
keyvalues={},
retcols=("room_id",),
)
room_ids = set(row["room_id"] for row in rows)
for room_id in room_ids:
txn.call_after(
self.get_latest_event_ids_in_room.invalidate, (room_id,)
)
self._simple_delete_many_txn(
txn=txn,
table="_extremities_to_check",
column="event_id",
iterable=original_set,
keyvalues={},
)
return len(original_set)
num_handled = yield self.runInteraction(
"_cleanup_extremities_bg_update", _cleanup_extremities_bg_update_txn
)
if not num_handled:
yield self._end_background_update(self.DELETE_SOFT_FAILED_EXTREMITIES)
def _drop_table_txn(txn):
txn.execute("DROP TABLE _extremities_to_check")
yield self.runInteraction(
"_cleanup_extremities_bg_update_drop_table", _drop_table_txn
)
defer.returnValue(num_handled)
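# Standalone sketch (hypothetical helper, no database access) of the pruning
# idea used in _cleanup_extremities_bg_update_txn: starting from every
# non-rejected / non-soft-failed descendant, walk up the prev-event graph and
# mark any of the original extremities reached that way for deletion.
def _example_prune_extremities(original_set, graph, non_rejected_leaves):
    """graph maps event_id -> set of prev_event_ids; returns extremities to delete."""
    to_delete = set()
    leaves = set(non_rejected_leaves)
    while leaves:
        event_id = leaves.pop()
        prev_event_ids = graph.get(event_id, set())
        leaves.update(prev_event_ids)
        to_delete.update(prev_event_ids)
    return to_delete & set(original_set)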
| 38.095 | 87 | 0.575272 |
| 824fbe99d6b76f245b03d8996c1f15c1b38cfeee | 10,457 | py | Python | audioled/colors.py | Utrisoft/modular-led-controller-workstation | c6f42e63b51c52a9832b78855c67bff5358a22ee | ["MIT"] | 20 | 2019-04-22T07:34:18.000Z | 2021-12-30T07:01:20.000Z | audioled/colors.py | Utrisoft/modular-led-controller-workstation | c6f42e63b51c52a9832b78855c67bff5358a22ee | ["MIT"] | 90 | 2019-04-19T18:14:11.000Z | 2022-03-12T00:14:23.000Z | audioled/colors.py | Utrisoft/modular-led-controller-workstation | c6f42e63b51c52a9832b78855c67bff5358a22ee | ["MIT"] | 7 | 2019-04-20T07:12:15.000Z | 2021-11-13T13:38:42.000Z |
from __future__ import (absolute_import, division, print_function, unicode_literals)
import colorsys
import math
from collections import OrderedDict
import numpy as np
from PIL import Image
from audioled.effect import Effect
import logging
logger = logging.getLogger(__name__)
blend_modes = ['lightenOnly', 'darkenOnly', 'addition', 'multiply', 'screen', 'overlay', 'softLight']
blend_mode_default = 'lightenOnly'
def blend(pixel_a, pixel_b, blend_mode):
if pixel_a is None and pixel_b is None:
return None
elif pixel_a is not None and pixel_b is None:
return pixel_a
elif pixel_a is None and pixel_b is not None:
return pixel_b
if blend_mode == 'lightenOnly':
return np.maximum(pixel_a, pixel_b)
elif blend_mode == 'darkenOnly':
return np.minimum(pixel_a, pixel_b)
elif blend_mode == 'addition':
return pixel_a + pixel_b
elif blend_mode == 'multiply':
pA = pixel_a / 255.0
pB = pixel_b / 255.0
return 255.0 * pA * pB
elif blend_mode == 'screen':
pA = pixel_a / 255.0
pB = pixel_b / 255.0
return 255.0 * (1 - (1 - pA) * (1 - pB))
elif blend_mode == 'overlay':
pA = pixel_a / 255.0
pB = pixel_b / 255.0
mask = pA >= 0.5
blended = np.zeros(np.shape(pA))
blended[~mask] = (2 * pA * pB)[~mask]
blended[mask] = (1 - 2 * (1 - pA) * (1 - pB))[mask]
return 255.0 * blended
elif blend_mode == 'softLight':
# pegtop
pA = pixel_a / 255.0
pB = pixel_b / 255.0
blended = (1 - 2 * pB) * pA * pA + 2 * pB * pA
return 255.0 * blended
return pixel_a
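# Minimal usage sketch (hypothetical helper): pixel buffers are numpy arrays
# shaped (3, num_pixels) with 0-255 channel values; 'lightenOnly' keeps the
# per-channel maximum of the two inputs.
def _example_blend():
    a = np.array([[10.0], [200.0], [30.0]])
    b = np.array([[100.0], [50.0], [30.0]])
    return blend(a, b, 'lightenOnly')  # -> [[100.], [200.], [30.]]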
def hsv_to_rgb(hsv):
a = np.expand_dims(hsv, axis=1).T.astype(np.uint8)
pImg = Image.fromarray(a, mode='HSV')
pImg = pImg.convert('RGB')
out = np.asarray(pImg, dtype=np.uint8)
out = out.reshape(-1, out.shape[-1]).T
return out
def rgb_to_hsv(rgb):
a = np.expand_dims(rgb, axis=1).T.astype(np.uint8)
pImg = Image.fromarray(a, mode='RGB')
pImg = pImg.convert('HSV')
out = np.asarray(pImg, dtype=np.uint8)
out = out.reshape(-1, out.shape[-1]).T
return out
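# Minimal usage sketch (hypothetical helper): both colour-space helpers accept
# and return (3, num_pixels) arrays and round-trip through PIL's HSV
# conversion (up to 8-bit rounding).
def _example_hsv_roundtrip():
    rgb = np.array([[255, 0], [128, 0], [0, 255]], dtype=np.uint8)  # two pixels
    hsv = rgb_to_hsv(rgb)
    return hsv_to_rgb(hsv)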
# New Filtergraph Style effects
class StaticRGBColor(Effect):
@staticmethod
def getEffectDescription():
return \
"StaticRGBColor outputs a static color."
def __init__(self, r=255.0, g=255.0, b=255.0):
self.r = r
self.g = g
self.b = b
self.__initstate__()
def __initstate__(self):
# state
self._color = None
super(StaticRGBColor, self).__initstate__()
def numInputChannels(self):
return 0
def numOutputChannels(self):
return 1
@staticmethod
def getParameterDefinition():
definition = {
"parameters":
OrderedDict([
# default, min, max, stepsize
("r", [255.0, 0.0, 255.0, 1.0]),
("g", [255.0, 0.0, 255.0, 1.0]),
("b", [255.0, 0.0, 255.0, 1.0]),
])
}
return definition
@staticmethod
def getParameterHelp():
help = {
"parameters": {
"r": "Amount of red.",
"g": "Amount of green.",
"b": "Amount of blue.",
}
}
return help
def setInputBuffer(self, buffer):
self._inputBuffer = buffer
def setOutputBuffer(self, buffer):
self._outputBuffer = buffer
async def update(self, dt):
await super(StaticRGBColor, self).update(dt)
self._color = np.ones(self._num_pixels) * np.array([[self.r], [self.g], [self.b]])
def process(self):
if self._outputBuffer is not None:
self._outputBuffer[0] = self._color
class ColorWheel(Effect):
""" Generates colors
"""
@staticmethod
def getEffectDescription():
return \
"The ColorWheel moves through the HSV color space and outputs the color."
def __init__(self, cycle_time=30.0, offset=0.0, luminocity=0.5, saturation=1.0, wiggle_amplitude=0.0, wiggle_time=0.0):
self.cycle_time = cycle_time
self.offset = offset
self.wiggle_amplitude = wiggle_amplitude
self.wiggle_time = wiggle_time
self.luminocity = luminocity
self.saturation = saturation
self.__initstate__()
def __initstate__(self):
# state
self._color = None
super(ColorWheel, self).__initstate__()
def numInputChannels(self):
return 0
def numOutputChannels(self):
return 1
@staticmethod
def getParameterDefinition():
definition = {
"parameters":
OrderedDict([
# default, min, max, stepsize
("cycle_time", [30.0, 0, 100, 0.1]),
("offset", [0.0, 0, 1, 0.01]),
("luminocity", [0.5, 0, 1, 0.01]),
("saturation", [1.0, 0, 1, 0.01]),
("wiggle_time", [0.0, 0, 10, 0.1]),
("wiggle_amplitude", [0.0, 0, 1, 0.01]),
])
}
return definition
@staticmethod
def getParameterHelp():
help = {
"parameters": {
"cycle_time":
"Amount of time the Color Wheel needs to cycle through the hue values of the color space.",
"offset":
"Offset of the Color Wheel.",
"luminocity":
"Luminocity of the color space.",
"saturation":
"Color saturation.",
"wiggle_time":
"The Color Wheel can wiggle back and forth while moving through the hue values of the color space. "
"This parameter controls the frequency of the wiggle.",
"wiggle_amplitude":
"The Color Wheel can wiggle back and forth while moving through the hue values of the color space. "
"This parameter controls the amplitude of the wiggle.",
}
}
return help
async def update(self, dt):
await super(ColorWheel, self).update(dt)
self._color = self.get_color_array(self._t, self._num_pixels)
def process(self):
if self._outputBuffer is not None:
self._outputBuffer[0] = self._color
def get_color(self, t, pixel):
h = 0.0
# move through wheel
if self.cycle_time > 0:
h = (t + self.offset % self.cycle_time) / self.cycle_time
else:
h = self.offset
# and wiggle
if self.wiggle_time > 0:
h = h + math.sin(2 * math.pi / self.wiggle_time * t) * self.wiggle_amplitude
r, g, b = colorsys.hls_to_rgb(h, self.luminocity, self.saturation)
return np.array([[r * 255.0], [g * 255.0], [b * 255.0]])
def get_color_array(self, t, num_pix):
return np.ones(num_pix) * self.get_color(t, -1)
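# Added note: get_color(t, pixel) returns a single RGB value as a (3, 1) column, so
# get_color_array(t, num_pix) broadcasts it against np.ones(num_pix) into a (3, num_pix)
# array in which every pixel carries the same colour for that instant.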
class InterpolateRGB(Effect):
@staticmethod
def getEffectDescription():
return \
"RGB interpolation between two color inputs."
def __init__(self):
self.__initstate__()
def numInputChannels(self):
return 2
def numOutputChannels(self):
return 1
def process(self):
if self._inputBuffer is not None and self._outputBuffer is not None:
a = self._inputBuffer[0]
b = self._inputBuffer[1]
if a is not None and b is not None:
fact = np.linspace(0., 1., self._num_pixels)
self._outputBuffer[0] = a + np.multiply((b - a), fact)
elif a is not None:
self._outputBuffer[0] = a
elif b is not None:
self._outputBuffer[0] = b
class InterpolateHSV(Effect):
@staticmethod
def getEffectDescription():
return \
"HSV interpolation between two color inputs."
def __init__(self):
self.__initstate__()
def numInputChannels(self):
return 2
def numOutputChannels(self):
return 1
def process(self):
if self._inputBuffer is not None and self._outputBuffer is not None:
a = self._inputBuffer[0]
b = self._inputBuffer[1]
if a is not None and b is not None:
rgb_a = 1. / 255. * a[0:3, 0]
rgb_b = 1. / 255. * b[0:3, 0]
h_a, s_a, v_a = colorsys.rgb_to_hsv(rgb_a[0], rgb_a[1], rgb_a[2])
h_b, s_b, v_b = colorsys.rgb_to_hsv(rgb_b[0], rgb_b[1], rgb_b[2])
interp_v = np.linspace(v_a, v_b, self._num_pixels)
interp_s = np.linspace(s_a, s_b, self._num_pixels)
interp_h = np.linspace(h_a, h_b, self._num_pixels)
hsv = np.array([interp_h, interp_s, interp_v]) * 255
rgb = hsv_to_rgb(hsv)
self._outputBuffer[0] = rgb
class RGBToHSV(Effect):
@staticmethod
def getEffectDescription():
return \
"Switches encoding from RGB to HSV encoding"
def __init__(self):
self.__initstate__()
def numInputChannels(self):
return 1
def numOutputChannels(self):
return 1
def process(self):
if self._inputBuffer is None or self._outputBuffer is None:
return
a = self._inputBuffer[0]
if a is None:
return
a = np.expand_dims(a, axis=1).T.astype(np.uint8)
pImg = Image.fromarray(a, mode='RGB')
pImg = pImg.convert('HSV')
out = np.asarray(pImg, dtype=np.uint8)
out = out.reshape(-1, out.shape[-1]).T
self._outputBuffer[0] = out
class HSVToRGB(Effect):
@staticmethod
def getEffectDescription():
return \
"Switches encoding from HSV to RGB encoding"
def __init__(self):
self.__initstate__()
def numInputChannels(self):
return 1
def numOutputChannels(self):
return 1
def process(self):
if self._inputBuffer is None or self._outputBuffer is None:
return
a = self._inputBuffer[0]
if a is None:
return
a = np.expand_dims(a, axis=1).T.astype(np.uint8)
pImg = Image.fromarray(a, mode='HSV')
pImg = pImg.convert('RGB')
out = np.asarray(pImg, dtype=np.uint8)
out = out.reshape(-1, out.shape[-1]).T
self._outputBuffer[0] = out
| 29.456338
| 123
| 0.563642
|
e8d7cee092ea4797a19753a075532d7886413e2c
| 1,033
|
py
|
Python
|
tests/kubernetes/checks/test_ApiServerAuditLogMaxBackup.py
|
niradler/checkov
|
2628c6f28a5604efe3877d6eacc3044d2b66b7b1
|
[
"Apache-2.0"
] | 4,013
|
2019-12-09T13:16:54.000Z
|
2022-03-31T14:31:01.000Z
|
tests/kubernetes/checks/test_ApiServerAuditLogMaxBackup.py
|
niradler/checkov
|
2628c6f28a5604efe3877d6eacc3044d2b66b7b1
|
[
"Apache-2.0"
] | 1,258
|
2019-12-17T09:55:51.000Z
|
2022-03-31T19:17:17.000Z
|
tests/kubernetes/checks/test_ApiServerAuditLogMaxBackup.py
|
niradler/checkov
|
2628c6f28a5604efe3877d6eacc3044d2b66b7b1
|
[
"Apache-2.0"
] | 638
|
2019-12-19T08:57:38.000Z
|
2022-03-30T21:38:37.000Z
|
import os
import unittest
from checkov.kubernetes.checks.ApiServerAuditLogMaxBackup import check
from checkov.kubernetes.runner import Runner
from checkov.runner_filter import RunnerFilter
class TestApiServerProfiling(unittest.TestCase):
def test_summary(self):
runner = Runner()
current_dir = os.path.dirname(os.path.realpath(__file__))
test_files_dir = current_dir + "/example_ApiServerAuditLogMaxBackup"
        report = runner.run(root_folder=test_files_dir, runner_filter=RunnerFilter(checks=[check.id]))
summary = report.get_summary()
self.assertEqual(summary['passed'], 1)
self.assertEqual(summary['failed'], 2)
self.assertEqual(summary['skipped'], 0)
self.assertEqual(summary['parsing_errors'], 0)
for failed in report.failed_checks:
self.assertIn("should-fail", failed.resource)
for passed in report.passed_checks:
self.assertIn("should-pass", passed.resource)
if __name__ == '__main__':
unittest.main()
| 32.28125
| 101
| 0.71152
|
cc6ca31b200e48bc2ed3f5a89fc17ba43200e23a
| 679
|
py
|
Python
|
napari_image_quality_analyzer/_tests/test_dock_widget.py
|
haesleinhuepf/napari-image-quality-analyzer
|
9dbf6162df1d93fff3b9e7b4a67a1a173db58ea2
|
[
"BSD-3-Clause"
] | null | null | null |
napari_image_quality_analyzer/_tests/test_dock_widget.py
|
haesleinhuepf/napari-image-quality-analyzer
|
9dbf6162df1d93fff3b9e7b4a67a1a173db58ea2
|
[
"BSD-3-Clause"
] | null | null | null |
napari_image_quality_analyzer/_tests/test_dock_widget.py
|
haesleinhuepf/napari-image-quality-analyzer
|
9dbf6162df1d93fff3b9e7b4a67a1a173db58ea2
|
[
"BSD-3-Clause"
] | null | null | null |
from napari_image_quality_analyzer import napari_experimental_provide_dock_widget
import pytest
# this is your plugin name declared in your napari.plugins entry point
MY_PLUGIN_NAME = "napari-image-quality-analyzer"
# the name of your widget(s)
MY_WIDGET_NAMES = ["Example Q Widget", "example_magic_widget"]
@pytest.mark.parametrize("widget_name", MY_WIDGET_NAMES)
def test_something_with_viewer(widget_name, make_napari_viewer):
viewer = make_napari_viewer()
num_dw = len(viewer.window._dock_widgets)
viewer.window.add_plugin_dock_widget(
plugin_name=MY_PLUGIN_NAME, widget_name=widget_name
)
assert len(viewer.window._dock_widgets) == num_dw + 1
| 37.722222
| 81
| 0.79676
|
ebe6ed7478dbc8af6f2fa63c6c9276e5888eac96
| 224
|
py
|
Python
|
meiduo_mall/meiduo_mall/apps/oauth/urls.py
|
00wsmart00/meiduo_project_all
|
43af3685aa847705154d0a1982a9ed8a1432fc43
|
[
"MIT"
] | 1
|
2019-06-08T15:56:14.000Z
|
2019-06-08T15:56:14.000Z
|
meiduo_mall/meiduo_mall/apps/oauth/urls.py
|
00wsmart00/meiduo_project
|
d0bb785c4f882eb41721130d7e54f0f5c473cb10
|
[
"MIT"
] | 9
|
2020-05-11T20:24:01.000Z
|
2022-02-26T15:05:53.000Z
|
meiduo_mall/meiduo_mall/apps/oauth/urls.py
|
00wsmart00/meiduo_project_all
|
43af3685aa847705154d0a1982a9ed8a1432fc43
|
[
"MIT"
] | null | null | null |
from django.conf.urls import url
from . import views
urlpatterns = [
    # Get the QQ QR-code login URL
url(r'^qq/authorization/$', views.QQURLView.as_view()),
    # QQ user endpoint (OAuth callback):
url(r'^oauth_callback/$', views.QQUserView.as_view()),
]
| 22.4
| 59
| 0.674107
|
21ee0a5aafac4029cdfcbb130e86db063d434ba1
| 1,835
|
py
|
Python
|
test.py
|
DaviDarkFire/cumb_yolo
|
70a846593a5c45235f2aba20d5dd73c1e0d96271
|
[
"MIT"
] | null | null | null |
test.py
|
DaviDarkFire/cumb_yolo
|
70a846593a5c45235f2aba20d5dd73c1e0d96271
|
[
"MIT"
] | null | null | null |
test.py
|
DaviDarkFire/cumb_yolo
|
70a846593a5c45235f2aba20d5dd73c1e0d96271
|
[
"MIT"
] | null | null | null |
import sys
import argparse
from yolo import YOLO, detect_video
from PIL import Image
import os
def detect_img(annotation_path_val, yolo):
with open(annotation_path_val) as f:
lines_val = f.readlines()
n = len(lines_val)
all_time = []
for i in range(n):
#img = '/home/lavicom/datasets/dataset_Pantanal/anta/Visual/DC_5030.jpg'
img = lines_val[i].split(',')[0]
try:
image = Image.open(img)
except:
print('Open Error! Try again!')
continue
else:
r_image, exec_time = yolo.detect_image(image, txtpath=os.path.basename(img)[:-3] + 'txt')
#r_image.show()
all_time.append(exec_time)
print(all_time)
r_image.save(os.path.basename(img))
print(exec_time)
yolo.close_session()
FLAGS = None
if __name__ == '__main__':
# class YOLO defines the default value, so suppress any default here
parser = argparse.ArgumentParser(argument_default=argparse.SUPPRESS)
'''
Command line options
'''
parser.add_argument(
'--path', type=str,
help='path to txt test file'
)
parser.add_argument(
'--model', type=str,
help='path to model weight file, default ' + YOLO.get_defaults("model_path")
)
parser.add_argument(
'--anchors', type=str,
help='path to anchor definitions, default ' + YOLO.get_defaults("anchors_path")
)
parser.add_argument(
'--classes', type=str,
help='path to class definitions, default ' + YOLO.get_defaults("classes_path")
)
parser.add_argument(
'--gpu_num', type=int,
help='Number of GPU to use, default ' + str(YOLO.get_defaults("gpu_num"))
)
FLAGS = parser.parse_args()
detect_img(FLAGS.path, YOLO(**vars(FLAGS)))
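# Example invocation (added; all file paths are placeholders):
#   python test.py --path val_annotations.txt --model model_data/yolo.h5 \
#       --anchors model_data/yolo_anchors.txt --classes model_data/classes.txt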
| 27.80303
| 101
| 0.614714
|
1afeb0643e188ac29cf48faf94fd9dd5156d8422
| 5,015
|
py
|
Python
|
tests/helpers/fake_backend.py
|
dr-strangecode/signalfx-agent
|
e4bfc33bb22f848501311e7e99c188346a4075bc
|
[
"Apache-2.0"
] | null | null | null |
tests/helpers/fake_backend.py
|
dr-strangecode/signalfx-agent
|
e4bfc33bb22f848501311e7e99c188346a4075bc
|
[
"Apache-2.0"
] | null | null | null |
tests/helpers/fake_backend.py
|
dr-strangecode/signalfx-agent
|
e4bfc33bb22f848501311e7e99c188346a4075bc
|
[
"Apache-2.0"
] | null | null | null |
import asyncio
import gzip
import socket
import threading
from collections import defaultdict
from contextlib import contextmanager
from queue import Queue
from google.protobuf import json_format
from sanic import Sanic, response
from signalfx.generated_protocol_buffers import signal_fx_protocol_buffers_pb2 as sf_pbuf
# This module collects metrics from the agent and can echo them back out for
# making assertions on the collected metrics.
STOP = type("STOP", (), {})
def free_tcp_socket(host="127.0.0.1"):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.bind((host, 0))
return (sock, sock.getsockname()[1])
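# Added note: e.g. sock, port = free_tcp_socket() binds an ephemeral TCP port on 127.0.0.1
# and returns the still-open socket together with the port number the OS assigned.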
# Fake the /v2/datapoint endpoint and just stick all of the metrics in a
# list
# pylint: disable=unused-variable
def _make_fake_ingest(datapoint_queue, events, spans):
app = Sanic()
@app.middleware("request")
async def compress_request(request):
if "Content-Encoding" in request.headers:
if "gzip" in request.headers["Content-Encoding"]:
request.body = gzip.decompress(request.body)
@app.post("/v2/datapoint")
async def handle_datapoints(request):
is_json = "application/json" in request.headers.get("content-type")
dp_upload = sf_pbuf.DataPointUploadMessage()
if is_json:
json_format.Parse(request.body, dp_upload)
else:
dp_upload.ParseFromString(request.body)
datapoint_queue.put(dp_upload)
return response.json("OK")
@app.post("/v2/event")
async def handle_event(request):
is_json = "application/json" in request.headers.get("content-type")
event_upload = sf_pbuf.EventUploadMessage()
if is_json:
json_format.Parse(request.body, event_upload)
else:
event_upload.ParseFromString(request.body)
events.extend(event_upload.events) # pylint: disable=no-member
return response.json("OK")
@app.post("/v1/trace")
async def handle_trace(request):
spans.extend(request.json)
return response.json([])
return app
# Fake the dimension PUT method to capture dimension property/tag updates.
# pylint: disable=unused-variable
def _make_fake_api(dims):
app = Sanic()
@app.put("/v2/dimension/<key>/<value>")
async def put_dim(request, key, value):
content = request.json
dims[key][value] = content
return response.json({})
return app
# Starts up a new set of backend services that will run on a random port. The
# returned object will have properties on it for datapoints, events, and dims.
# The fake servers will be stopped once the context manager block is exited.
# pylint: disable=too-many-locals
@contextmanager
def start(ip_addr="127.0.0.1"):
# Data structures are thread-safe due to the GIL
_dp_upload_queue = Queue()
_datapoints = []
_datapoints_by_metric = defaultdict(list)
_datapoints_by_dim = defaultdict(list)
_events = []
_spans = []
_dims = defaultdict(defaultdict)
ingest_app = _make_fake_ingest(_dp_upload_queue, _events, _spans)
api_app = _make_fake_api(_dims)
[ingest_sock, _ingest_port] = free_tcp_socket(ip_addr)
[api_sock, _api_port] = free_tcp_socket(ip_addr)
loop = asyncio.new_event_loop()
async def start_servers():
ingest_server = ingest_app.create_server(sock=ingest_sock, access_log=False)
api_server = api_app.create_server(sock=api_sock, access_log=False)
loop.create_task(ingest_server)
loop.create_task(api_server)
loop.create_task(start_servers())
threading.Thread(target=loop.run_forever).start()
def add_datapoints():
while True:
dp_upload = _dp_upload_queue.get()
if dp_upload is STOP:
return
_datapoints.extend(dp_upload.datapoints) # pylint: disable=no-member
for dp in dp_upload.datapoints: # pylint: disable=no-member
_datapoints_by_metric[dp.metric].append(dp)
for dim in dp.dimensions:
_datapoints_by_dim[f"{dim.key}:{dim.value}"].append(dp)
threading.Thread(target=add_datapoints).start()
class FakeBackend: # pylint: disable=too-few-public-methods
ingest_host = ip_addr
ingest_port = _ingest_port
ingest_url = f"http://{ingest_host}:{ingest_port}"
api_host = ip_addr
api_port = _api_port
api_url = f"http://{api_host}:{api_port}"
datapoints = _datapoints
datapoints_by_metric = _datapoints_by_metric
datapoints_by_dim = _datapoints_by_dim
events = _events
spans = _spans
dims = _dims
def reset_datapoints(self):
self.datapoints.clear()
self.datapoints_by_metric.clear()
self.datapoints_by_dim.clear()
try:
yield FakeBackend()
finally:
ingest_sock.close()
api_sock.close()
loop.stop()
_dp_upload_queue.put(STOP)
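# Illustrative usage of the fake backend (added sketch, not part of the original module):
#   with start() as backend:
#       configure_agent(ingest_url=backend.ingest_url, api_url=backend.api_url)  # hypothetical helper
#       ...
#       assert backend.datapoints_by_metric["cpu.utilization"]  # metric name is only an example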
| 31.740506
| 89
| 0.675573
|
077e236bcd843b09444484fb2a869e0bf03037d7
| 677
|
py
|
Python
|
makahiki/apps/widgets/participation/management/commands/award_participation.py
|
justinslee/Wai-Not-Makahiki
|
4b7dd685012ec64758affe0ecee3103596d16aa7
|
[
"MIT"
] | 1
|
2015-07-22T11:31:20.000Z
|
2015-07-22T11:31:20.000Z
|
makahiki/apps/widgets/participation/management/commands/award_participation.py
|
justinslee/Wai-Not-Makahiki
|
4b7dd685012ec64758affe0ecee3103596d16aa7
|
[
"MIT"
] | null | null | null |
makahiki/apps/widgets/participation/management/commands/award_participation.py
|
justinslee/Wai-Not-Makahiki
|
4b7dd685012ec64758affe0ecee3103596d16aa7
|
[
"MIT"
] | null | null | null |
"""Invocation: python manage.py award_participation
Checks each team's participation rate and awards points to team members if it meets
certain criteria."""
import datetime
from apps.managers.challenge_mgr.challenge_mgr import MakahikiBaseCommand
from apps.widgets.participation import participation
class Command(MakahikiBaseCommand):
"""command"""
help = 'Award points for meeting the active participation rate.'
def handle(self, *args, **options):
"""check the energy goal for all teams"""
print '****** Processing award participation for %s *******\n' % datetime.datetime.today()
participation.award_participation()
| 32.238095
| 98
| 0.735598
|
f42cbc2f13ecd5fc133ca0e70cc4f8477165017a
| 7,110
|
py
|
Python
|
hashid_field/lookups.py
|
nshafer/django-hashids-field
|
053a02992f48267f407b63ef9ffa617210932a46
|
[
"MIT"
] | 310
|
2016-10-04T20:26:01.000Z
|
2022-03-30T05:10:20.000Z
|
hashid_field/lookups.py
|
nshafer/django-hashids-field
|
053a02992f48267f407b63ef9ffa617210932a46
|
[
"MIT"
] | 64
|
2016-12-27T18:29:01.000Z
|
2022-02-12T01:37:20.000Z
|
hashid_field/lookups.py
|
nshafer/django-hashids-field
|
053a02992f48267f407b63ef9ffa617210932a46
|
[
"MIT"
] | 35
|
2017-06-05T14:48:31.000Z
|
2022-03-17T14:34:00.000Z
|
import itertools
from django.db.models.lookups import Lookup, GreaterThan, GreaterThanOrEqual, LessThan, LessThanOrEqual
from django.utils.datastructures import OrderedSet
from django.core.exceptions import EmptyResultSet
from .hashid import Hashid
from .conf import settings
def _is_int_representation(number):
"""Returns whether a value is an integer or a string representation of an integer."""
try:
int(number)
except ValueError:
return False
else:
return True
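# For example (added for clarity): _is_int_representation("42") is True, while
# _is_int_representation("a4Z") and _is_int_representation("4.2") are both False.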
def get_id_for_hashid_field(field, value):
if isinstance(value, Hashid):
return value.id
try:
hashid = field.get_hashid(value)
except ValueError:
raise ValueError(field.error_messages['invalid'] % {'value': value})
if isinstance(value, int) and not field.allow_int_lookup:
raise ValueError(field.error_messages['invalid_hashid'] % {'value': value})
if isinstance(value, str) and not field.allow_int_lookup:
# Make sure int lookups are not allowed, even if prefixed, unless the
# given value is actually a hashid made up entirely of numbers.
without_prefix = value[len(field.prefix):]
if _is_int_representation(without_prefix) and without_prefix != hashid.hashid:
raise ValueError(field.error_messages['invalid_hashid'] % {'value': value})
return hashid.id
# Most of this code is derived or copied from Django. (django/db/models/lookups.py)
# It has been included here to increase compatibility of this module with Django versions 1.11, 2.2 and 3.0.
# Django is Copyright (c) Django Software Foundation and individual contributors.
# Please see https://github.com/django/django/blob/master/LICENSE
class HashidFieldGetDbPrepValueMixin:
get_db_prep_lookup_value_is_iterable = False
def get_db_prep_lookup(self, value, connection):
# There are two modes this method can be called in... a single value or an iterable of values (usually a set)
# For a single value, just try to process it, then return the value, or else throw EmptyResultSet
# For multiple values, process each one in turn. If any of them are invalid, throw it away. If all are invalid,
# throw EmptyResultSet
# For relational fields, use the 'field' attribute of the output_field
field = getattr(self.lhs.output_field, 'field', self.lhs.output_field)
if self.get_db_prep_lookup_value_is_iterable:
lookup_ids = []
for val in value:
try:
lookup_id = get_id_for_hashid_field(field, val)
except ValueError:
if settings.HASHID_FIELD_LOOKUP_EXCEPTION:
raise
# Ignore this value
pass
else:
lookup_ids.append(lookup_id)
if len(lookup_ids) == 0:
raise EmptyResultSet
return '%s', lookup_ids
else:
try:
lookup_id = get_id_for_hashid_field(field, value)
except ValueError:
if settings.HASHID_FIELD_LOOKUP_EXCEPTION:
raise
raise EmptyResultSet
return '%s', [lookup_id]
class HashidExactLookup(HashidFieldGetDbPrepValueMixin, Lookup):
prepare_rhs = False
def as_sql(self, compiler, connection):
lhs_sql, params = self.process_lhs(compiler, connection)
rhs_sql, rhs_params = self.process_rhs(compiler, connection)
params.extend(rhs_params)
rhs_sql = self.get_rhs_op(connection, rhs_sql)
return '%s %s' % (lhs_sql, rhs_sql), params
def get_rhs_op(self, connection, rhs):
return "= %s" % rhs
class HashidIterableLookup(HashidExactLookup):
# This is an amalgamation of Django's FieldGetDbPrepValueIterableMixin and In lookup to allow support of both
# iterables (lists, tuples) and subqueries.
get_db_prep_lookup_value_is_iterable = True
def get_prep_lookup(self):
if hasattr(self.rhs, 'resolve_expression'):
return self.rhs
prepared_values = []
if hasattr(self.rhs, '_prepare'):
# A subquery is like an iterable but its items shouldn't be
# prepared independently.
return self.rhs._prepare(self.lhs.output_field)
for rhs_value in self.rhs:
if hasattr(rhs_value, 'resolve_expression'):
# An expression will be handled by the database but can coexist
# alongside real values.
pass
prepared_values.append(rhs_value)
return prepared_values
def process_rhs(self, compiler, connection):
db_rhs = getattr(self.rhs, '_db', None)
if db_rhs is not None and db_rhs != connection.alias:
raise ValueError(
"Subqueries aren't allowed across different databases. Force "
"the inner query to be evaluated using `list(inner_query)`."
)
if self.rhs_is_direct_value():
try:
rhs = OrderedSet(self.rhs)
except TypeError: # Unhashable items in self.rhs
rhs = self.rhs
if not rhs:
raise EmptyResultSet
# rhs should be an iterable; use batch_process_rhs() to
# prepare/transform those values.
sqls, sqls_params = self.batch_process_rhs(compiler, connection, rhs)
placeholder = '(' + ', '.join(sqls) + ')'
return (placeholder, sqls_params)
else:
return super().process_rhs(compiler, connection)
def resolve_expression_parameter(self, compiler, connection, sql, param):
params = [param]
if hasattr(param, 'resolve_expression'):
param = param.resolve_expression(compiler.query)
if hasattr(param, 'as_sql'):
sql, params = param.as_sql(compiler, connection)
return sql, params
def batch_process_rhs(self, compiler, connection, rhs=None):
pre_processed = super().batch_process_rhs(compiler, connection, rhs)
# The params list may contain expressions which compile to a
# sql/param pair. Zip them to get sql and param pairs that refer to the
# same argument and attempt to replace them with the result of
# compiling the param step.
sql, params = zip(*(
self.resolve_expression_parameter(compiler, connection, sql, param)
for sql, param in zip(*pre_processed)
))
params = itertools.chain.from_iterable(params)
return sql, tuple(params)
def get_rhs_op(self, connection, rhs):
return 'IN %s' % rhs
class HashidGreaterThan(HashidFieldGetDbPrepValueMixin, GreaterThan):
prepare_rhs = False
class HashidGreaterThanOrEqual(HashidFieldGetDbPrepValueMixin, GreaterThanOrEqual):
prepare_rhs = False
class HashidLessThan(HashidFieldGetDbPrepValueMixin, LessThan):
prepare_rhs = False
class HashidLessThanOrEqual(HashidFieldGetDbPrepValueMixin, LessThanOrEqual):
prepare_rhs = False
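# Added note: these Lookup classes are registered onto the hashid field elsewhere in the
# package, e.g. via Django's standard RegisterLookupMixin API:
#   HashidField.register_lookup(HashidExactLookup, lookup_name='exact')  # illustrative only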
| 39.94382
| 119
| 0.656681
|
6518fe40e6059c4a3be4fb7eb4633c656e73e661
| 810
|
py
|
Python
|
SIGNUS/app/models/mongodb/category.py
|
837477/SIGNUS
|
cd395dfd45d2c36d09ec9a8069e6e52e19f058e8
|
[
"MIT"
] | null | null | null |
SIGNUS/app/models/mongodb/category.py
|
837477/SIGNUS
|
cd395dfd45d2c36d09ec9a8069e6e52e19f058e8
|
[
"MIT"
] | null | null | null |
SIGNUS/app/models/mongodb/category.py
|
837477/SIGNUS
|
cd395dfd45d2c36d09ec9a8069e6e52e19f058e8
|
[
"MIT"
] | null | null | null |
'''
MongoDB Category Collection Model
'''
from flask import current_app
class Category:
"""SIGNUS DB category Model"""
def __init__(self, client):
self.col = client[current_app.config['MONGODB_DB_NAME']]['category']
def find_one(self, category_name, projection=None):
        ''' Return a single matching document '''
return self.col.find_one(
{"category_name": category_name},
projection
)
def find_many(self, category_list, projection=None):
        ''' Return multiple documents '''
return list(self.col.find(
{"category_name": {"$in": category_list}},
projection
))
def find_all(self, projection=None):
        ''' Return all documents '''
return list(self.col.find(
{},
projection
))
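# Illustrative usage (added sketch; assumes a configured pymongo client and a Flask app
# context providing MONGODB_DB_NAME):
#   categories = Category(client)
#   one = categories.find_one("notice")             # "notice" and "job" are example names
#   some = categories.find_many(["notice", "job"])
#   everything = categories.find_all()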
| 25.3125
| 76
| 0.569136
|
405d5c527d1e4caf7cb9ac404e4d5fb8e12ea8e6
| 3,182
|
py
|
Python
|
airflow/example_dags/example_xcom.py
|
takuti/airflow
|
0ac3b8c3dd749c59e60cf0169580b9e7c5049d9e
|
[
"Apache-2.0"
] | 15,947
|
2019-01-05T13:51:02.000Z
|
2022-03-31T23:33:16.000Z
|
airflow/example_dags/example_xcom.py
|
takuti/airflow
|
0ac3b8c3dd749c59e60cf0169580b9e7c5049d9e
|
[
"Apache-2.0"
] | 14,603
|
2019-01-05T09:43:19.000Z
|
2022-03-31T23:11:59.000Z
|
airflow/example_dags/example_xcom.py
|
takuti/airflow
|
0ac3b8c3dd749c59e60cf0169580b9e7c5049d9e
|
[
"Apache-2.0"
] | 8,429
|
2019-01-05T19:45:47.000Z
|
2022-03-31T22:13:01.000Z
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Example DAG demonstrating the usage of XComs."""
from datetime import datetime
from airflow import DAG
from airflow.decorators import task
from airflow.operators.bash import BashOperator
value_1 = [1, 2, 3]
value_2 = {'a': 'b'}
@task
def push(ti=None):
"""Pushes an XCom without a specific target"""
ti.xcom_push(key='value from pusher 1', value=value_1)
@task
def push_by_returning():
"""Pushes an XCom without a specific target, just by returning it"""
return value_2
def _compare_values(pulled_value, check_value):
if pulled_value != check_value:
raise ValueError(f'The two values differ {pulled_value} and {check_value}')
@task
def puller(pulled_value_2, ti=None):
"""Pull all previously pushed XComs and check if the pushed values match the pulled values."""
pulled_value_1 = ti.xcom_pull(task_ids="push", key="value from pusher 1")
_compare_values(pulled_value_1, value_1)
_compare_values(pulled_value_2, value_2)
@task
def pull_value_from_bash_push(ti=None):
bash_pushed_via_return_value = ti.xcom_pull(key="return_value", task_ids='bash_push')
bash_manually_pushed_value = ti.xcom_pull(key="manually_pushed_value", task_ids='bash_push')
print(f"The xcom value pushed by task push via return value is {bash_pushed_via_return_value}")
print(f"The xcom value pushed by task push manually is {bash_manually_pushed_value}")
with DAG(
'example_xcom',
schedule_interval="@once",
start_date=datetime(2021, 1, 1),
catchup=False,
tags=['example'],
) as dag:
bash_push = BashOperator(
task_id='bash_push',
bash_command='echo "bash_push demo" && '
'echo "Manually set xcom value '
'{{ ti.xcom_push(key="manually_pushed_value", value="manually_pushed_value") }}" && '
'echo "value_by_return"',
)
bash_pull = BashOperator(
task_id='bash_pull',
bash_command='echo "bash pull demo" && '
f'echo "The xcom pushed manually is {bash_push.output["manually_pushed_value"]}" && '
f'echo "The returned_value xcom is {bash_push.output}" && '
'echo "finished"',
do_xcom_push=False,
)
python_pull_from_bash = pull_value_from_bash_push()
[bash_pull, python_pull_from_bash] << bash_push
puller(push_by_returning()) << push()
# Task dependencies created via `XComArgs`:
# pull << push2
| 33.145833
| 99
| 0.715588
|
1769d3020ff39e6ecf86b7d75f74eb266af0a37e
| 5,970
|
py
|
Python
|
pyspedas/mms/fpi/fpi.py
|
shihikoo/pyspedas
|
5251c52d074c4f2c3d91f2f8a457d36f3ad2ddb4
|
[
"MIT"
] | null | null | null |
pyspedas/mms/fpi/fpi.py
|
shihikoo/pyspedas
|
5251c52d074c4f2c3d91f2f8a457d36f3ad2ddb4
|
[
"MIT"
] | null | null | null |
pyspedas/mms/fpi/fpi.py
|
shihikoo/pyspedas
|
5251c52d074c4f2c3d91f2f8a457d36f3ad2ddb4
|
[
"MIT"
] | null | null | null |
from pyspedas.mms.mms_load_data import mms_load_data
from pyspedas.mms.fpi.mms_fpi_set_metadata import mms_fpi_set_metadata
from pyspedas.mms.fpi.mms_load_fpi_calc_pad import mms_load_fpi_calc_pad
from pyspedas.mms.print_vars import print_vars
from pyspedas.mms.mms_config import CONFIG
@print_vars
def mms_load_fpi(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='fast',
level='l2', datatype='*', varformat=None, varnames=[], suffix='',
get_support_data=False, time_clip=False, no_update=False, center_measurement=False,
available=False, notplot=False, latest_version=False, major_version=False,
min_version=None, cdf_version=None, spdf=False, always_prompt=False):
"""
This function loads FPI data into tplot variables
Parameters:
trange : list of str
time range of interest [starttime, endtime] with the format
            ['YYYY-MM-DD','YYYY-MM-DD'] or, to specify more or less than a day,
            ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']
probe : str or list of str
list of probes, valid values for MMS probes are ['1','2','3','4'].
data_rate : str or list of str
            instrument data rates for FPI include 'brst', 'fast'. The
            default is 'fast'.
level : str
indicates level of data processing. the default if no level is specified is 'l2'
datatype : str or list of str
Valid datatypes for FPI are:
'des-moms', 'dis-moms' (default)
'des-dist', 'dis-dist'
get_support_data: bool
Data with an attribute "VAR_TYPE" with a value of "support_data"
will be loaded into tplot. By default, only loads in data with a
"VAR_TYPE" attribute of "data".
time_clip: bool
Data will be clipped to the exact trange specified by the trange keyword.
varformat: str
The file variable formats to load into tplot. Wildcard character
"*" is accepted. By default, all variables are loaded in.
varnames: list of str
List of variable names to load (if not specified,
all data variables are loaded)
suffix: str
The tplot variable names will be given this suffix. By default,
no suffix is added.
center_measurement: bool
If True, the CDF epoch variables are time-shifted to the middle
of the accumulation interval by their DELTA_PLUS_VAR and
DELTA_MINUS_VAR variable attributes
notplot: bool
If True, then data are returned in a hash table instead of
being stored in tplot variables (useful for debugging, and
access to multi-dimensional data products)
available: bool
If True, simply return the available data files (without downloading)
            for the requested parameters
no_update: bool
            Set this flag to preserve the original data. If not set and newer
            data is found, the existing data will be overwritten
cdf_version: str
Specify a specific CDF version # to load (e.g., cdf_version='4.3.0')
min_version: str
Specify a minimum CDF version # to load
latest_version: bool
Only grab the latest CDF version in the requested time interval
major_version: bool
Only open the latest major CDF version (e.g., X in vX.Y.Z) in the requested time interval
always_prompt: bool
Set this keyword to always prompt for the user's username and password;
            useful if you accidentally save an incorrect password, or if your SDC password has changed
spdf: bool
If True, download the data from the SPDF instead of the SDC
Returns:
List of tplot variables created.
"""
# different datatypes for burst mode files
if data_rate.lower() == 'brst':
if isinstance(datatype, str):
if (datatype == '*' or datatype == '') and level.lower() != 'ql':
datatype = ['des-dist', 'dis-dist', 'dis-moms', 'des-moms']
else:
if isinstance(datatype, str):
if (datatype == '*' or datatype == '') and level.lower() == 'ql':
datatype = ['des', 'dis']
if (datatype == '*' or datatype == '') and level.lower() != 'ql':
datatype = ['des-dist', 'dis-dist', 'dis-moms', 'des-moms']
# kludge for level = 'sitl' -> datatype shouldn't be defined for sitl data.
if level.lower() == 'sitl' or level.lower() == 'trig':
datatype = ''
tvars = mms_load_data(trange=trange, probe=probe, data_rate=data_rate, level=level, instrument='fpi',
datatype=datatype, varformat=varformat, varnames=varnames, suffix=suffix, get_support_data=get_support_data,
time_clip=time_clip, no_update=no_update, center_measurement=center_measurement, available=available,
notplot=notplot, latest_version=latest_version, major_version=major_version, min_version=min_version,
cdf_version=cdf_version, spdf=spdf, always_prompt=always_prompt)
if tvars is None or available or notplot or CONFIG['download_only']:
return tvars
mms_fpi_set_metadata(probe, data_rate, datatype, level, suffix=suffix)
if not isinstance(probe, list): probe = [probe]
if not isinstance(data_rate, list): data_rate = [data_rate]
if not isinstance(datatype, list): datatype = [datatype]
if not isinstance(level, list): level = [level]
for prb in probe:
for drate in data_rate:
for dtype in datatype:
for lvl in level:
out_var = mms_load_fpi_calc_pad(probe=prb, level=lvl, datatype=dtype, data_rate=drate, suffix=suffix, autoscale=True)
if out_var:
tvars.extend(out_var)
return tvars
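# Illustrative call (added; argument values mirror the defaults and datatypes documented above):
#   tvars = mms_load_fpi(trange=['2015-10-16', '2015-10-17'], probe='1',
#                        data_rate='fast', datatype='dis-moms')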
| 42.340426
| 137
| 0.635176
|
6f2958843bc73d8afc81973f1eb13f75b637c4e1
| 46,634
|
py
|
Python
|
src/utils/parser/COOL_parser.py
|
BeginnerCompilers/cool-compiler-2021
|
fe8c4dbbbaff9dc855322c2df2892dcfc5d449f9
|
[
"MIT"
] | null | null | null |
src/utils/parser/COOL_parser.py
|
BeginnerCompilers/cool-compiler-2021
|
fe8c4dbbbaff9dc855322c2df2892dcfc5d449f9
|
[
"MIT"
] | null | null | null |
src/utils/parser/COOL_parser.py
|
BeginnerCompilers/cool-compiler-2021
|
fe8c4dbbbaff9dc855322c2df2892dcfc5d449f9
|
[
"MIT"
] | null | null | null |
from utils.parser.shift_reduce_parser import ShiftReduceParser
'''
The parser's LALR(1) action and goto tables were precompiled because computing them takes a long time.
The algorithm used to copy them was the following:
###code###
parser = LALR1_Parser(G)
with open('action.txt','w') as action:
    for key,value in parser.action.items():
        action.write(f'{key} : {value},\n')
with open('goto.txt','w') as goto:
    for key,value in parser.goto.items():
        goto.write(f'{key} : {value},\n')
#### ### ### ##
'''
class COOL_Parser(ShiftReduceParser):
def __init__(self, G):
self.action = {
(0, 'class') : ('SHIFT', 1),
(1, 'type') : ('SHIFT', 2),
(2, '{') : ('SHIFT', 3),
(2, 'inherits') : ('SHIFT', 133),
(3, 'id') : ('SHIFT', 4),
(3, '}') : ('REDUCE', 7),
(4, '(') : ('SHIFT', 110),
(4, ':') : ('SHIFT', 5),
(5, 'type') : ('SHIFT', 6),
(6, ';') : ('REDUCE', 8),
(6, '<-') : ('SHIFT', 7),
(7, 'id') : ('SHIFT', 18),
(7, 'isvoid') : ('SHIFT', 27),
(7, 'false') : ('SHIFT', 29),
(7, 'if') : ('SHIFT', 24),
(7, '~') : ('SHIFT', 15),
(7, 'string') : ('SHIFT', 30),
(7, '{') : ('SHIFT', 14),
(7, 'case') : ('SHIFT', 25),
(7, 'new') : ('SHIFT', 22),
(7, 'true') : ('SHIFT', 28),
(7, 'int') : ('SHIFT', 21),
(7, '(') : ('SHIFT', 13),
(7, 'not') : ('SHIFT', 26),
(7, 'let') : ('SHIFT', 8),
(7, 'while') : ('SHIFT', 20),
(8, 'id') : ('SHIFT', 9),
(9, ':') : ('SHIFT', 10),
(10, 'type') : ('SHIFT', 11),
(11, '<-') : ('SHIFT', 12),
(11, ',') : ('SHIFT', 104),
(11, 'in') : ('REDUCE', 22),
(12, 'id') : ('SHIFT', 18),
(12, '{') : ('SHIFT', 14),
(12, 'false') : ('SHIFT', 29),
(12, 'case') : ('SHIFT', 25),
(12, 'if') : ('SHIFT', 24),
(12, 'string') : ('SHIFT', 30),
(12, 'isvoid') : ('SHIFT', 27),
(12, 'new') : ('SHIFT', 22),
(12, 'not') : ('SHIFT', 26),
(12, 'let') : ('SHIFT', 8),
(12, '~') : ('SHIFT', 15),
(12, 'true') : ('SHIFT', 28),
(12, 'while') : ('SHIFT', 20),
(12, 'int') : ('SHIFT', 21),
(12, '(') : ('SHIFT', 13),
(13, 'id') : ('SHIFT', 18),
(13, 'let') : ('SHIFT', 8),
(13, 'not') : ('SHIFT', 26),
(13, 'false') : ('SHIFT', 29),
(13, 'isvoid') : ('SHIFT', 27),
(13, 'if') : ('SHIFT', 24),
(13, 'while') : ('SHIFT', 20),
(13, '~') : ('SHIFT', 15),
(13, 'string') : ('SHIFT', 30),
(13, 'new') : ('SHIFT', 22),
(13, 'true') : ('SHIFT', 28),
(13, 'int') : ('SHIFT', 21),
(13, '(') : ('SHIFT', 13),
(13, '{') : ('SHIFT', 14),
(13, 'case') : ('SHIFT', 25),
(14, 'id') : ('SHIFT', 18),
(14, 'isvoid') : ('SHIFT', 27),
(14, 'false') : ('SHIFT', 29),
(14, 'if') : ('SHIFT', 24),
(14, '~') : ('SHIFT', 15),
(14, 'string') : ('SHIFT', 30),
(14, '{') : ('SHIFT', 14),
(14, 'case') : ('SHIFT', 25),
(14, 'new') : ('SHIFT', 22),
(14, 'true') : ('SHIFT', 28),
(14, 'int') : ('SHIFT', 21),
(14, '(') : ('SHIFT', 13),
(14, 'not') : ('SHIFT', 26),
(14, 'let') : ('SHIFT', 8),
(14, 'while') : ('SHIFT', 20),
(15, 'string') : ('SHIFT', 30),
(15, 'id') : ('SHIFT', 16),
(15, 'new') : ('SHIFT', 22),
(15, 'isvoid') : ('SHIFT', 27),
(15, 'true') : ('SHIFT', 28),
(15, '~') : ('SHIFT', 15),
(15, 'int') : ('SHIFT', 21),
(15, 'false') : ('SHIFT', 29),
(15, '(') : ('SHIFT', 13),
(15, 'if') : ('SHIFT', 24),
(16, '(') : ('SHIFT', 17),
(16, 'in') : ('REDUCE', 48),
(16, ';') : ('REDUCE', 48),
(16, 'loop') : ('REDUCE', 48),
(16, 'pool') : ('REDUCE', 48),
(16, ',') : ('REDUCE', 48),
(16, '.') : ('REDUCE', 48),
(16, ')') : ('REDUCE', 48),
(16, 'then') : ('REDUCE', 48),
(16, 'else') : ('REDUCE', 48),
(16, '}') : ('REDUCE', 48),
(16, 'fi') : ('REDUCE', 48),
(16, '=') : ('REDUCE', 48),
(16, '+') : ('REDUCE', 48),
(16, 'of') : ('REDUCE', 48),
(16, '-') : ('REDUCE', 48),
(16, '*') : ('REDUCE', 48),
(16, '/') : ('REDUCE', 48),
(16, '<') : ('REDUCE', 48),
(16, '<=') : ('REDUCE', 48),
(16, '@') : ('REDUCE', 48),
(17, 'not') : ('SHIFT', 26),
(17, 'let') : ('SHIFT', 8),
(17, 'string') : ('SHIFT', 30),
(17, '~') : ('SHIFT', 15),
(17, 'while') : ('SHIFT', 20),
(17, ')') : ('REDUCE', 56),
(17, 'id') : ('SHIFT', 18),
(17, 'new') : ('SHIFT', 22),
(17, 'true') : ('SHIFT', 28),
(17, '(') : ('SHIFT', 13),
(17, 'int') : ('SHIFT', 21),
(17, '{') : ('SHIFT', 14),
(17, 'isvoid') : ('SHIFT', 27),
(17, 'case') : ('SHIFT', 25),
(17, 'false') : ('SHIFT', 29),
(17, 'if') : ('SHIFT', 24),
(18, '(') : ('SHIFT', 17),
(18, 'in') : ('REDUCE', 48),
(18, ';') : ('REDUCE', 48),
(18, 'loop') : ('REDUCE', 48),
(18, 'pool') : ('REDUCE', 48),
(18, ',') : ('REDUCE', 48),
(18, '.') : ('REDUCE', 48),
(18, ')') : ('REDUCE', 48),
(18, 'then') : ('REDUCE', 48),
(18, 'else') : ('REDUCE', 48),
(18, '}') : ('REDUCE', 48),
(18, 'fi') : ('REDUCE', 48),
(18, '=') : ('REDUCE', 48),
(18, '+') : ('REDUCE', 48),
(18, 'of') : ('REDUCE', 48),
(18, '-') : ('REDUCE', 48),
(18, '*') : ('REDUCE', 48),
(18, '/') : ('REDUCE', 48),
(18, '<') : ('REDUCE', 48),
(18, '<=') : ('REDUCE', 48),
(18, '@') : ('REDUCE', 48),
(18, '<-') : ('SHIFT', 19),
(19, 'string') : ('SHIFT', 30),
(19, 'not') : ('SHIFT', 26),
(19, 'let') : ('SHIFT', 8),
(19, 'while') : ('SHIFT', 20),
(19, 'new') : ('SHIFT', 22),
(19, 'id') : ('SHIFT', 18),
(19, 'true') : ('SHIFT', 28),
(19, 'int') : ('SHIFT', 21),
(19, '(') : ('SHIFT', 13),
(19, '{') : ('SHIFT', 14),
(19, 'isvoid') : ('SHIFT', 27),
(19, 'case') : ('SHIFT', 25),
(19, '~') : ('SHIFT', 15),
(19, 'false') : ('SHIFT', 29),
(19, 'if') : ('SHIFT', 24),
(20, 'int') : ('SHIFT', 21),
(20, '(') : ('SHIFT', 13),
(20, 'not') : ('SHIFT', 26),
(20, 'isvoid') : ('SHIFT', 27),
(20, 'let') : ('SHIFT', 8),
(20, 'while') : ('SHIFT', 20),
(20, '~') : ('SHIFT', 15),
(20, 'id') : ('SHIFT', 18),
(20, 'false') : ('SHIFT', 29),
(20, 'if') : ('SHIFT', 24),
(20, 'string') : ('SHIFT', 30),
(20, '{') : ('SHIFT', 14),
(20, 'new') : ('SHIFT', 22),
(20, 'case') : ('SHIFT', 25),
(20, 'true') : ('SHIFT', 28),
(21, 'in') : ('REDUCE', 47),
(21, ';') : ('REDUCE', 47),
(21, 'loop') : ('REDUCE', 47),
(21, 'pool') : ('REDUCE', 47),
(21, ',') : ('REDUCE', 47),
(21, '.') : ('REDUCE', 47),
(21, ')') : ('REDUCE', 47),
(21, 'then') : ('REDUCE', 47),
(21, 'else') : ('REDUCE', 47),
(21, '}') : ('REDUCE', 47),
(21, 'fi') : ('REDUCE', 47),
(21, '=') : ('REDUCE', 47),
(21, '+') : ('REDUCE', 47),
(21, 'of') : ('REDUCE', 47),
(21, '-') : ('REDUCE', 47),
(21, '*') : ('REDUCE', 47),
(21, '/') : ('REDUCE', 47),
(21, '<') : ('REDUCE', 47),
(21, '<=') : ('REDUCE', 47),
(21, '@') : ('REDUCE', 47),
(22, 'type') : ('SHIFT', 23),
(23, 'in') : ('REDUCE', 49),
(23, ';') : ('REDUCE', 49),
(23, 'loop') : ('REDUCE', 49),
(23, 'pool') : ('REDUCE', 49),
(23, ',') : ('REDUCE', 49),
(23, '.') : ('REDUCE', 49),
(23, ')') : ('REDUCE', 49),
(23, 'then') : ('REDUCE', 49),
(23, 'else') : ('REDUCE', 49),
(23, '}') : ('REDUCE', 49),
(23, 'fi') : ('REDUCE', 49),
(23, '=') : ('REDUCE', 49),
(23, '+') : ('REDUCE', 49),
(23, 'of') : ('REDUCE', 49),
(23, '-') : ('REDUCE', 49),
(23, '*') : ('REDUCE', 49),
(23, '/') : ('REDUCE', 49),
(23, '<') : ('REDUCE', 49),
(23, '<=') : ('REDUCE', 49),
(23, '@') : ('REDUCE', 49),
(24, 'true') : ('SHIFT', 28),
(24, '(') : ('SHIFT', 13),
(24, 'int') : ('SHIFT', 21),
(24, '{') : ('SHIFT', 14),
(24, 'id') : ('SHIFT', 18),
(24, 'case') : ('SHIFT', 25),
(24, 'false') : ('SHIFT', 29),
(24, 'if') : ('SHIFT', 24),
(24, 'string') : ('SHIFT', 30),
(24, 'not') : ('SHIFT', 26),
(24, 'let') : ('SHIFT', 8),
(24, 'isvoid') : ('SHIFT', 27),
(24, 'while') : ('SHIFT', 20),
(24, '~') : ('SHIFT', 15),
(24, 'new') : ('SHIFT', 22),
(25, '{') : ('SHIFT', 14),
(25, 'false') : ('SHIFT', 29),
(25, 'case') : ('SHIFT', 25),
(25, 'if') : ('SHIFT', 24),
(25, 'string') : ('SHIFT', 30),
(25, 'new') : ('SHIFT', 22),
(25, 'id') : ('SHIFT', 18),
(25, 'not') : ('SHIFT', 26),
(25, 'let') : ('SHIFT', 8),
(25, 'while') : ('SHIFT', 20),
(25, 'true') : ('SHIFT', 28),
(25, 'int') : ('SHIFT', 21),
(25, '(') : ('SHIFT', 13),
(25, 'isvoid') : ('SHIFT', 27),
(25, '~') : ('SHIFT', 15),
(26, 'string') : ('SHIFT', 30),
(26, 'not') : ('SHIFT', 26),
(26, 'let') : ('SHIFT', 8),
(26, 'while') : ('SHIFT', 20),
(26, 'new') : ('SHIFT', 22),
(26, 'id') : ('SHIFT', 18),
(26, 'true') : ('SHIFT', 28),
(26, 'int') : ('SHIFT', 21),
(26, '(') : ('SHIFT', 13),
(26, '{') : ('SHIFT', 14),
(26, 'isvoid') : ('SHIFT', 27),
(26, 'case') : ('SHIFT', 25),
(26, '~') : ('SHIFT', 15),
(26, 'false') : ('SHIFT', 29),
(26, 'if') : ('SHIFT', 24),
(27, 'string') : ('SHIFT', 30),
(27, 'id') : ('SHIFT', 16),
(27, 'new') : ('SHIFT', 22),
(27, 'isvoid') : ('SHIFT', 27),
(27, 'true') : ('SHIFT', 28),
(27, '~') : ('SHIFT', 15),
(27, 'int') : ('SHIFT', 21),
(27, 'false') : ('SHIFT', 29),
(27, '(') : ('SHIFT', 13),
(27, 'if') : ('SHIFT', 24),
(28, 'in') : ('REDUCE', 44),
(28, ';') : ('REDUCE', 44),
(28, 'loop') : ('REDUCE', 44),
(28, 'pool') : ('REDUCE', 44),
(28, ',') : ('REDUCE', 44),
(28, '.') : ('REDUCE', 44),
(28, ')') : ('REDUCE', 44),
(28, 'then') : ('REDUCE', 44),
(28, 'else') : ('REDUCE', 44),
(28, '}') : ('REDUCE', 44),
(28, 'fi') : ('REDUCE', 44),
(28, '=') : ('REDUCE', 44),
(28, '+') : ('REDUCE', 44),
(28, 'of') : ('REDUCE', 44),
(28, '-') : ('REDUCE', 44),
(28, '*') : ('REDUCE', 44),
(28, '/') : ('REDUCE', 44),
(28, '<') : ('REDUCE', 44),
(28, '<=') : ('REDUCE', 44),
(28, '@') : ('REDUCE', 44),
(29, 'in') : ('REDUCE', 45),
(29, ';') : ('REDUCE', 45),
(29, 'loop') : ('REDUCE', 45),
(29, 'pool') : ('REDUCE', 45),
(29, ',') : ('REDUCE', 45),
(29, '.') : ('REDUCE', 45),
(29, ')') : ('REDUCE', 45),
(29, 'then') : ('REDUCE', 45),
(29, 'else') : ('REDUCE', 45),
(29, '}') : ('REDUCE', 45),
(29, 'fi') : ('REDUCE', 45),
(29, '=') : ('REDUCE', 45),
(29, '+') : ('REDUCE', 45),
(29, 'of') : ('REDUCE', 45),
(29, '-') : ('REDUCE', 45),
(29, '*') : ('REDUCE', 45),
(29, '/') : ('REDUCE', 45),
(29, '<') : ('REDUCE', 45),
(29, '<=') : ('REDUCE', 45),
(29, '@') : ('REDUCE', 45),
(30, 'in') : ('REDUCE', 46),
(30, ';') : ('REDUCE', 46),
(30, 'loop') : ('REDUCE', 46),
(30, 'pool') : ('REDUCE', 46),
(30, ',') : ('REDUCE', 46),
(30, '.') : ('REDUCE', 46),
(30, ')') : ('REDUCE', 46),
(30, 'then') : ('REDUCE', 46),
(30, 'else') : ('REDUCE', 46),
(30, '}') : ('REDUCE', 46),
(30, 'fi') : ('REDUCE', 46),
(30, '=') : ('REDUCE', 46),
(30, '+') : ('REDUCE', 46),
(30, 'of') : ('REDUCE', 46),
(30, '-') : ('REDUCE', 46),
(30, '*') : ('REDUCE', 46),
(30, '/') : ('REDUCE', 46),
(30, '<') : ('REDUCE', 46),
(30, '<=') : ('REDUCE', 46),
(30, '@') : ('REDUCE', 46),
(31, 'fi') : ('REDUCE', 41),
(31, '/') : ('REDUCE', 41),
(31, '}') : ('REDUCE', 41),
(31, 'else') : ('REDUCE', 41),
(31, 'in') : ('REDUCE', 41),
(31, '<') : ('REDUCE', 41),
(31, '=') : ('REDUCE', 41),
(31, ';') : ('REDUCE', 41),
(31, '<=') : ('REDUCE', 41),
(31, '+') : ('REDUCE', 41),
(31, ')') : ('REDUCE', 41),
(31, 'of') : ('REDUCE', 41),
(31, '-') : ('REDUCE', 41),
(31, 'then') : ('REDUCE', 41),
(31, 'loop') : ('REDUCE', 41),
(31, '*') : ('REDUCE', 41),
(31, 'pool') : ('REDUCE', 41),
(31, ',') : ('REDUCE', 41),
(32, 'fi') : ('REDUCE', 43),
(32, '/') : ('REDUCE', 43),
(32, '}') : ('REDUCE', 43),
(32, 'else') : ('REDUCE', 43),
(32, 'in') : ('REDUCE', 43),
(32, '<') : ('REDUCE', 43),
(32, '=') : ('REDUCE', 43),
(32, ';') : ('REDUCE', 43),
(32, '<=') : ('REDUCE', 43),
(32, '+') : ('REDUCE', 43),
(32, ')') : ('REDUCE', 43),
(32, 'of') : ('REDUCE', 43),
(32, '-') : ('REDUCE', 43),
(32, 'then') : ('REDUCE', 43),
(32, 'loop') : ('REDUCE', 43),
(32, '*') : ('REDUCE', 43),
(32, 'pool') : ('REDUCE', 43),
(32, ',') : ('REDUCE', 43),
(32, '@') : ('SHIFT', 61),
(32, '.') : ('SHIFT', 33),
(33, 'id') : ('SHIFT', 34),
(34, '(') : ('SHIFT', 35),
(35, 'not') : ('SHIFT', 26),
(35, 'let') : ('SHIFT', 8),
(35, 'string') : ('SHIFT', 30),
(35, '~') : ('SHIFT', 15),
(35, 'while') : ('SHIFT', 20),
(35, ')') : ('REDUCE', 56),
(35, 'id') : ('SHIFT', 18),
(35, 'new') : ('SHIFT', 22),
(35, 'true') : ('SHIFT', 28),
(35, '(') : ('SHIFT', 13),
(35, 'int') : ('SHIFT', 21),
(35, '{') : ('SHIFT', 14),
(35, 'isvoid') : ('SHIFT', 27),
(35, 'case') : ('SHIFT', 25),
(35, 'false') : ('SHIFT', 29),
(35, 'if') : ('SHIFT', 24),
(36, ')') : ('SHIFT', 37),
(37, 'in') : ('REDUCE', 53),
(37, ';') : ('REDUCE', 53),
(37, 'loop') : ('REDUCE', 53),
(37, 'pool') : ('REDUCE', 53),
(37, ',') : ('REDUCE', 53),
(37, '.') : ('REDUCE', 53),
(37, ')') : ('REDUCE', 53),
(37, 'then') : ('REDUCE', 53),
(37, 'else') : ('REDUCE', 53),
(37, '}') : ('REDUCE', 53),
(37, 'fi') : ('REDUCE', 53),
(37, '=') : ('REDUCE', 53),
(37, '+') : ('REDUCE', 53),
(37, 'of') : ('REDUCE', 53),
(37, '-') : ('REDUCE', 53),
(37, '*') : ('REDUCE', 53),
(37, '/') : ('REDUCE', 53),
(37, '<') : ('REDUCE', 53),
(37, '<=') : ('REDUCE', 53),
(37, '@') : ('REDUCE', 53),
(38, '<=') : ('SHIFT', 55),
(38, 'fi') : ('REDUCE', 31),
(38, '}') : ('REDUCE', 31),
(38, 'in') : ('REDUCE', 31),
(38, ';') : ('REDUCE', 31),
(38, ')') : ('REDUCE', 31),
(38, 'of') : ('REDUCE', 31),
(38, 'then') : ('REDUCE', 31),
(38, 'loop') : ('REDUCE', 31),
(38, 'else') : ('REDUCE', 31),
(38, 'pool') : ('REDUCE', 31),
(38, ',') : ('REDUCE', 31),
(38, '=') : ('SHIFT', 39),
(38, '<') : ('SHIFT', 53),
(39, 'string') : ('SHIFT', 30),
(39, 'not') : ('SHIFT', 26),
(39, 'let') : ('SHIFT', 8),
(39, 'while') : ('SHIFT', 20),
(39, 'new') : ('SHIFT', 22),
(39, 'id') : ('SHIFT', 18),
(39, 'true') : ('SHIFT', 28),
(39, 'int') : ('SHIFT', 21),
(39, '(') : ('SHIFT', 13),
(39, '{') : ('SHIFT', 14),
(39, 'isvoid') : ('SHIFT', 27),
(39, 'case') : ('SHIFT', 25),
(39, '~') : ('SHIFT', 15),
(39, 'false') : ('SHIFT', 29),
(39, 'if') : ('SHIFT', 24),
(40, 'fi') : ('REDUCE', 30),
(40, '}') : ('REDUCE', 30),
(40, 'in') : ('REDUCE', 30),
(40, ';') : ('REDUCE', 30),
(40, ')') : ('REDUCE', 30),
(40, 'of') : ('REDUCE', 30),
(40, 'then') : ('REDUCE', 30),
(40, 'loop') : ('REDUCE', 30),
(40, 'else') : ('REDUCE', 30),
(40, 'pool') : ('REDUCE', 30),
(40, ',') : ('REDUCE', 30),
(41, '+') : ('SHIFT', 42),
(41, '-') : ('SHIFT', 50),
(41, 'fi') : ('REDUCE', 34),
(41, '}') : ('REDUCE', 34),
(41, 'in') : ('REDUCE', 34),
(41, '<') : ('REDUCE', 34),
(41, '=') : ('REDUCE', 34),
(41, ';') : ('REDUCE', 34),
(41, '<=') : ('REDUCE', 34),
(41, ')') : ('REDUCE', 34),
(41, 'of') : ('REDUCE', 34),
(41, 'then') : ('REDUCE', 34),
(41, 'loop') : ('REDUCE', 34),
(41, 'else') : ('REDUCE', 34),
(41, 'pool') : ('REDUCE', 34),
(41, ',') : ('REDUCE', 34),
(42, 'string') : ('SHIFT', 30),
(42, 'id') : ('SHIFT', 16),
(42, 'new') : ('SHIFT', 22),
(42, 'isvoid') : ('SHIFT', 27),
(42, 'true') : ('SHIFT', 28),
(42, '~') : ('SHIFT', 15),
(42, 'int') : ('SHIFT', 21),
(42, 'false') : ('SHIFT', 29),
(42, '(') : ('SHIFT', 13),
(42, 'if') : ('SHIFT', 24),
(43, '*') : ('SHIFT', 44),
(43, '/') : ('SHIFT', 47),
(43, 'fi') : ('REDUCE', 35),
(43, '}') : ('REDUCE', 35),
(43, 'in') : ('REDUCE', 35),
(43, '<') : ('REDUCE', 35),
(43, '=') : ('REDUCE', 35),
(43, ';') : ('REDUCE', 35),
(43, '<=') : ('REDUCE', 35),
(43, '+') : ('REDUCE', 35),
(43, ')') : ('REDUCE', 35),
(43, 'of') : ('REDUCE', 35),
(43, '-') : ('REDUCE', 35),
(43, 'then') : ('REDUCE', 35),
(43, 'loop') : ('REDUCE', 35),
(43, 'else') : ('REDUCE', 35),
(43, 'pool') : ('REDUCE', 35),
(43, ',') : ('REDUCE', 35),
(44, 'string') : ('SHIFT', 30),
(44, 'id') : ('SHIFT', 16),
(44, 'new') : ('SHIFT', 22),
(44, 'isvoid') : ('SHIFT', 27),
(44, 'true') : ('SHIFT', 28),
(44, '~') : ('SHIFT', 15),
(44, 'int') : ('SHIFT', 21),
(44, 'false') : ('SHIFT', 29),
(44, '(') : ('SHIFT', 13),
(44, 'if') : ('SHIFT', 24),
(45, 'fi') : ('REDUCE', 38),
(45, '/') : ('REDUCE', 38),
(45, '}') : ('REDUCE', 38),
(45, 'else') : ('REDUCE', 38),
(45, 'in') : ('REDUCE', 38),
(45, '<') : ('REDUCE', 38),
(45, '=') : ('REDUCE', 38),
(45, ';') : ('REDUCE', 38),
(45, '<=') : ('REDUCE', 38),
(45, '+') : ('REDUCE', 38),
(45, ')') : ('REDUCE', 38),
(45, 'of') : ('REDUCE', 38),
(45, '-') : ('REDUCE', 38),
(45, 'then') : ('REDUCE', 38),
(45, 'loop') : ('REDUCE', 38),
(45, '*') : ('REDUCE', 38),
(45, 'pool') : ('REDUCE', 38),
(45, ',') : ('REDUCE', 38),
(46, 'in') : ('REDUCE', 52),
(46, ';') : ('REDUCE', 52),
(46, 'loop') : ('REDUCE', 52),
(46, 'pool') : ('REDUCE', 52),
(46, ',') : ('REDUCE', 52),
(46, '.') : ('REDUCE', 52),
(46, ')') : ('REDUCE', 52),
(46, 'then') : ('REDUCE', 52),
(46, 'else') : ('REDUCE', 52),
(46, '}') : ('REDUCE', 52),
(46, 'fi') : ('REDUCE', 52),
(46, '=') : ('REDUCE', 52),
(46, '+') : ('REDUCE', 52),
(46, 'of') : ('REDUCE', 52),
(46, '-') : ('REDUCE', 52),
(46, '*') : ('REDUCE', 52),
(46, '/') : ('REDUCE', 52),
(46, '<') : ('REDUCE', 52),
(46, '<=') : ('REDUCE', 52),
(46, '@') : ('REDUCE', 52),
(47, 'string') : ('SHIFT', 30),
(47, 'id') : ('SHIFT', 16),
(47, 'new') : ('SHIFT', 22),
(47, 'isvoid') : ('SHIFT', 27),
(47, 'true') : ('SHIFT', 28),
(47, '~') : ('SHIFT', 15),
(47, 'int') : ('SHIFT', 21),
(47, 'false') : ('SHIFT', 29),
(47, '(') : ('SHIFT', 13),
(47, 'if') : ('SHIFT', 24),
(48, 'fi') : ('REDUCE', 39),
(48, '/') : ('REDUCE', 39),
(48, '}') : ('REDUCE', 39),
(48, 'else') : ('REDUCE', 39),
(48, 'in') : ('REDUCE', 39),
(48, '<') : ('REDUCE', 39),
(48, '=') : ('REDUCE', 39),
(48, ';') : ('REDUCE', 39),
(48, '<=') : ('REDUCE', 39),
(48, '+') : ('REDUCE', 39),
(48, ')') : ('REDUCE', 39),
(48, 'of') : ('REDUCE', 39),
(48, '-') : ('REDUCE', 39),
(48, 'then') : ('REDUCE', 39),
(48, 'loop') : ('REDUCE', 39),
(48, '*') : ('REDUCE', 39),
(48, 'pool') : ('REDUCE', 39),
(48, ',') : ('REDUCE', 39),
(49, 'fi') : ('REDUCE', 40),
(49, '/') : ('REDUCE', 40),
(49, '}') : ('REDUCE', 40),
(49, 'else') : ('REDUCE', 40),
(49, 'in') : ('REDUCE', 40),
(49, '<') : ('REDUCE', 40),
(49, '=') : ('REDUCE', 40),
(49, ';') : ('REDUCE', 40),
(49, '<=') : ('REDUCE', 40),
(49, '+') : ('REDUCE', 40),
(49, ')') : ('REDUCE', 40),
(49, 'of') : ('REDUCE', 40),
(49, '-') : ('REDUCE', 40),
(49, 'then') : ('REDUCE', 40),
(49, 'loop') : ('REDUCE', 40),
(49, '*') : ('REDUCE', 40),
(49, 'pool') : ('REDUCE', 40),
(49, ',') : ('REDUCE', 40),
(50, 'string') : ('SHIFT', 30),
(50, 'id') : ('SHIFT', 16),
(50, 'new') : ('SHIFT', 22),
(50, 'isvoid') : ('SHIFT', 27),
(50, 'true') : ('SHIFT', 28),
(50, '~') : ('SHIFT', 15),
(50, 'int') : ('SHIFT', 21),
(50, 'false') : ('SHIFT', 29),
(50, '(') : ('SHIFT', 13),
(50, 'if') : ('SHIFT', 24),
(51, '*') : ('SHIFT', 44),
(51, 'fi') : ('REDUCE', 36),
(51, '}') : ('REDUCE', 36),
(51, 'in') : ('REDUCE', 36),
(51, '<') : ('REDUCE', 36),
(51, '=') : ('REDUCE', 36),
(51, ';') : ('REDUCE', 36),
(51, '<=') : ('REDUCE', 36),
(51, '+') : ('REDUCE', 36),
(51, ')') : ('REDUCE', 36),
(51, 'of') : ('REDUCE', 36),
(51, '-') : ('REDUCE', 36),
(51, 'then') : ('REDUCE', 36),
(51, 'loop') : ('REDUCE', 36),
(51, 'else') : ('REDUCE', 36),
(51, 'pool') : ('REDUCE', 36),
(51, ',') : ('REDUCE', 36),
(51, '/') : ('SHIFT', 47),
(52, '*') : ('SHIFT', 44),
(52, 'fi') : ('REDUCE', 37),
(52, '}') : ('REDUCE', 37),
(52, 'in') : ('REDUCE', 37),
(52, '<') : ('REDUCE', 37),
(52, '=') : ('REDUCE', 37),
(52, ';') : ('REDUCE', 37),
(52, '<=') : ('REDUCE', 37),
(52, '+') : ('REDUCE', 37),
(52, ')') : ('REDUCE', 37),
(52, 'of') : ('REDUCE', 37),
(52, '-') : ('REDUCE', 37),
(52, 'then') : ('REDUCE', 37),
(52, 'loop') : ('REDUCE', 37),
(52, 'else') : ('REDUCE', 37),
(52, 'pool') : ('REDUCE', 37),
(52, ',') : ('REDUCE', 37),
(52, '/') : ('SHIFT', 47),
(53, 'string') : ('SHIFT', 30),
(53, 'id') : ('SHIFT', 16),
(53, 'new') : ('SHIFT', 22),
(53, 'isvoid') : ('SHIFT', 27),
(53, 'true') : ('SHIFT', 28),
(53, '~') : ('SHIFT', 15),
(53, 'int') : ('SHIFT', 21),
(53, 'false') : ('SHIFT', 29),
(53, '(') : ('SHIFT', 13),
(53, 'if') : ('SHIFT', 24),
(54, '+') : ('SHIFT', 42),
(54, '-') : ('SHIFT', 50),
(54, 'fi') : ('REDUCE', 32),
(54, '}') : ('REDUCE', 32),
(54, 'in') : ('REDUCE', 32),
(54, '<') : ('REDUCE', 32),
(54, '=') : ('REDUCE', 32),
(54, ';') : ('REDUCE', 32),
(54, '<=') : ('REDUCE', 32),
(54, ')') : ('REDUCE', 32),
(54, 'of') : ('REDUCE', 32),
(54, 'then') : ('REDUCE', 32),
(54, 'loop') : ('REDUCE', 32),
(54, 'else') : ('REDUCE', 32),
(54, 'pool') : ('REDUCE', 32),
(54, ',') : ('REDUCE', 32),
(55, 'string') : ('SHIFT', 30),
(55, 'id') : ('SHIFT', 16),
(55, 'new') : ('SHIFT', 22),
(55, 'isvoid') : ('SHIFT', 27),
(55, 'true') : ('SHIFT', 28),
(55, '~') : ('SHIFT', 15),
(55, 'int') : ('SHIFT', 21),
(55, 'false') : ('SHIFT', 29),
(55, '(') : ('SHIFT', 13),
(55, 'if') : ('SHIFT', 24),
(56, '+') : ('SHIFT', 42),
(56, '-') : ('SHIFT', 50),
(56, 'fi') : ('REDUCE', 33),
(56, '}') : ('REDUCE', 33),
(56, 'in') : ('REDUCE', 33),
(56, '<') : ('REDUCE', 33),
(56, '=') : ('REDUCE', 33),
(56, ';') : ('REDUCE', 33),
(56, '<=') : ('REDUCE', 33),
(56, ')') : ('REDUCE', 33),
(56, 'of') : ('REDUCE', 33),
(56, 'then') : ('REDUCE', 33),
(56, 'loop') : ('REDUCE', 33),
(56, 'else') : ('REDUCE', 33),
(56, 'pool') : ('REDUCE', 33),
(56, ',') : ('REDUCE', 33),
(57, ',') : ('SHIFT', 58),
(57, ')') : ('REDUCE', 58),
(58, 'not') : ('SHIFT', 26),
(58, 'let') : ('SHIFT', 8),
(58, 'string') : ('SHIFT', 30),
(58, '~') : ('SHIFT', 15),
(58, 'while') : ('SHIFT', 20),
(58, 'new') : ('SHIFT', 22),
(58, 'id') : ('SHIFT', 18),
(58, 'true') : ('SHIFT', 28),
(58, '(') : ('SHIFT', 13),
(58, 'int') : ('SHIFT', 21),
(58, '{') : ('SHIFT', 14),
(58, 'case') : ('SHIFT', 25),
(58, 'false') : ('SHIFT', 29),
(58, 'isvoid') : ('SHIFT', 27),
(58, 'if') : ('SHIFT', 24),
(59, ')') : ('REDUCE', 59),
(60, ')') : ('REDUCE', 57),
(61, 'type') : ('SHIFT', 62),
(62, '.') : ('SHIFT', 63),
(63, 'id') : ('SHIFT', 64),
(64, '(') : ('SHIFT', 65),
(65, 'not') : ('SHIFT', 26),
(65, 'let') : ('SHIFT', 8),
(65, 'string') : ('SHIFT', 30),
(65, '~') : ('SHIFT', 15),
(65, 'while') : ('SHIFT', 20),
(65, ')') : ('REDUCE', 56),
(65, 'id') : ('SHIFT', 18),
(65, 'new') : ('SHIFT', 22),
(65, 'true') : ('SHIFT', 28),
(65, '(') : ('SHIFT', 13),
(65, 'int') : ('SHIFT', 21),
(65, '{') : ('SHIFT', 14),
(65, 'isvoid') : ('SHIFT', 27),
(65, 'case') : ('SHIFT', 25),
(65, 'false') : ('SHIFT', 29),
(65, 'if') : ('SHIFT', 24),
(66, ')') : ('SHIFT', 67),
(67, 'in') : ('REDUCE', 55),
(67, ';') : ('REDUCE', 55),
(67, 'loop') : ('REDUCE', 55),
(67, 'pool') : ('REDUCE', 55),
(67, ',') : ('REDUCE', 55),
(67, '.') : ('REDUCE', 55),
(67, ')') : ('REDUCE', 55),
(67, 'then') : ('REDUCE', 55),
(67, 'else') : ('REDUCE', 55),
(67, '}') : ('REDUCE', 55),
(67, 'fi') : ('REDUCE', 55),
(67, '=') : ('REDUCE', 55),
(67, '+') : ('REDUCE', 55),
(67, 'of') : ('REDUCE', 55),
(67, '-') : ('REDUCE', 55),
(67, '*') : ('REDUCE', 55),
(67, '/') : ('REDUCE', 55),
(67, '<') : ('REDUCE', 55),
(67, '<=') : ('REDUCE', 55),
(67, '@') : ('REDUCE', 55),
(68, 'fi') : ('REDUCE', 29),
(68, '}') : ('REDUCE', 29),
(68, 'in') : ('REDUCE', 29),
(68, ';') : ('REDUCE', 29),
(68, ')') : ('REDUCE', 29),
(68, 'of') : ('REDUCE', 29),
(68, 'then') : ('REDUCE', 29),
(68, 'loop') : ('REDUCE', 29),
(68, 'else') : ('REDUCE', 29),
(68, 'pool') : ('REDUCE', 29),
(68, ',') : ('REDUCE', 29),
(69, 'of') : ('SHIFT', 70),
(70, 'id') : ('SHIFT', 71),
(71, ':') : ('SHIFT', 72),
(72, 'type') : ('SHIFT', 73),
(73, '=>') : ('SHIFT', 74),
(74, 'id') : ('SHIFT', 18),
(74, 'isvoid') : ('SHIFT', 27),
(74, 'false') : ('SHIFT', 29),
(74, 'if') : ('SHIFT', 24),
(74, '~') : ('SHIFT', 15),
(74, 'string') : ('SHIFT', 30),
(74, '{') : ('SHIFT', 14),
(74, 'case') : ('SHIFT', 25),
(74, 'new') : ('SHIFT', 22),
(74, 'true') : ('SHIFT', 28),
(74, 'int') : ('SHIFT', 21),
(74, '(') : ('SHIFT', 13),
(74, 'not') : ('SHIFT', 26),
(74, 'let') : ('SHIFT', 8),
(74, 'while') : ('SHIFT', 20),
(75, ';') : ('SHIFT', 76),
(76, 'id') : ('SHIFT', 71),
(76, 'esac') : ('REDUCE', 27),
(77, 'esac') : ('REDUCE', 28),
(78, 'esac') : ('SHIFT', 79),
(79, 'fi') : ('REDUCE', 26),
(79, '}') : ('REDUCE', 26),
(79, 'in') : ('REDUCE', 26),
(79, ';') : ('REDUCE', 26),
(79, ')') : ('REDUCE', 26),
(79, 'of') : ('REDUCE', 26),
(79, 'then') : ('REDUCE', 26),
(79, 'loop') : ('REDUCE', 26),
(79, 'else') : ('REDUCE', 26),
(79, 'pool') : ('REDUCE', 26),
(79, ',') : ('REDUCE', 26),
(80, 'then') : ('SHIFT', 81),
(81, 'isvoid') : ('SHIFT', 27),
(81, 'false') : ('SHIFT', 29),
(81, 'case') : ('SHIFT', 25),
(81, 'if') : ('SHIFT', 24),
(81, '~') : ('SHIFT', 15),
(81, 'string') : ('SHIFT', 30),
(81, 'not') : ('SHIFT', 26),
(81, 'new') : ('SHIFT', 22),
(81, 'id') : ('SHIFT', 18),
(81, 'let') : ('SHIFT', 8),
(81, 'while') : ('SHIFT', 20),
(81, 'true') : ('SHIFT', 28),
(81, 'int') : ('SHIFT', 21),
(81, '(') : ('SHIFT', 13),
(81, '{') : ('SHIFT', 14),
(82, 'else') : ('SHIFT', 83),
(83, 'while') : ('SHIFT', 20),
(83, 'id') : ('SHIFT', 18),
(83, 'new') : ('SHIFT', 22),
(83, 'true') : ('SHIFT', 28),
(83, '(') : ('SHIFT', 13),
(83, 'int') : ('SHIFT', 21),
(83, '{') : ('SHIFT', 14),
(83, 'isvoid') : ('SHIFT', 27),
(83, 'case') : ('SHIFT', 25),
(83, '~') : ('SHIFT', 15),
(83, 'false') : ('SHIFT', 29),
(83, 'if') : ('SHIFT', 24),
(83, 'string') : ('SHIFT', 30),
(83, 'not') : ('SHIFT', 26),
(83, 'let') : ('SHIFT', 8),
(84, 'fi') : ('SHIFT', 85),
(85, 'in') : ('REDUCE', 50),
(85, ';') : ('REDUCE', 50),
(85, 'loop') : ('REDUCE', 50),
(85, 'pool') : ('REDUCE', 50),
(85, ',') : ('REDUCE', 50),
(85, '.') : ('REDUCE', 50),
(85, ')') : ('REDUCE', 50),
(85, 'then') : ('REDUCE', 50),
(85, 'else') : ('REDUCE', 50),
(85, '}') : ('REDUCE', 50),
(85, 'fi') : ('REDUCE', 50),
(85, '=') : ('REDUCE', 50),
(85, '+') : ('REDUCE', 50),
(85, 'of') : ('REDUCE', 50),
(85, '-') : ('REDUCE', 50),
(85, '*') : ('REDUCE', 50),
(85, '/') : ('REDUCE', 50),
(85, '<') : ('REDUCE', 50),
(85, '<=') : ('REDUCE', 50),
(85, '@') : ('REDUCE', 50),
(86, 'loop') : ('SHIFT', 87),
(87, 'true') : ('SHIFT', 28),
(87, 'int') : ('SHIFT', 21),
(87, '(') : ('SHIFT', 13),
(87, 'not') : ('SHIFT', 26),
(87, 'let') : ('SHIFT', 8),
(87, 'id') : ('SHIFT', 18),
(87, 'while') : ('SHIFT', 20),
(87, 'false') : ('SHIFT', 29),
(87, 'isvoid') : ('SHIFT', 27),
(87, 'if') : ('SHIFT', 24),
(87, '~') : ('SHIFT', 15),
(87, 'string') : ('SHIFT', 30),
(87, 'new') : ('SHIFT', 22),
(87, '{') : ('SHIFT', 14),
(87, 'case') : ('SHIFT', 25),
(88, 'pool') : ('SHIFT', 89),
(89, 'fi') : ('REDUCE', 17),
(89, '}') : ('REDUCE', 17),
(89, 'in') : ('REDUCE', 17),
(89, ';') : ('REDUCE', 17),
(89, ')') : ('REDUCE', 17),
(89, 'of') : ('REDUCE', 17),
(89, 'then') : ('REDUCE', 17),
(89, 'loop') : ('REDUCE', 17),
(89, 'else') : ('REDUCE', 17),
(89, 'pool') : ('REDUCE', 17),
(89, ',') : ('REDUCE', 17),
(90, 'fi') : ('REDUCE', 16),
(90, '}') : ('REDUCE', 16),
(90, 'in') : ('REDUCE', 16),
(90, ';') : ('REDUCE', 16),
(90, ')') : ('REDUCE', 16),
(90, 'of') : ('REDUCE', 16),
(90, 'then') : ('REDUCE', 16),
(90, 'loop') : ('REDUCE', 16),
(90, 'else') : ('REDUCE', 16),
(90, 'pool') : ('REDUCE', 16),
(90, ',') : ('REDUCE', 16),
(91, ')') : ('SHIFT', 92),
(92, 'in') : ('REDUCE', 54),
(92, ';') : ('REDUCE', 54),
(92, 'loop') : ('REDUCE', 54),
(92, 'pool') : ('REDUCE', 54),
(92, ',') : ('REDUCE', 54),
(92, '.') : ('REDUCE', 54),
(92, ')') : ('REDUCE', 54),
(92, 'then') : ('REDUCE', 54),
(92, 'else') : ('REDUCE', 54),
(92, '}') : ('REDUCE', 54),
(92, 'fi') : ('REDUCE', 54),
(92, '=') : ('REDUCE', 54),
(92, '+') : ('REDUCE', 54),
(92, 'of') : ('REDUCE', 54),
(92, '-') : ('REDUCE', 54),
(92, '*') : ('REDUCE', 54),
(92, '/') : ('REDUCE', 54),
(92, '<') : ('REDUCE', 54),
(92, '<=') : ('REDUCE', 54),
(92, '@') : ('REDUCE', 54),
(93, 'fi') : ('REDUCE', 42),
(93, '/') : ('REDUCE', 42),
(93, '}') : ('REDUCE', 42),
(93, 'else') : ('REDUCE', 42),
(93, 'in') : ('REDUCE', 42),
(93, '<') : ('REDUCE', 42),
(93, '=') : ('REDUCE', 42),
(93, ';') : ('REDUCE', 42),
(93, '<=') : ('REDUCE', 42),
(93, '+') : ('REDUCE', 42),
(93, ')') : ('REDUCE', 42),
(93, 'of') : ('REDUCE', 42),
(93, '-') : ('REDUCE', 42),
(93, 'then') : ('REDUCE', 42),
(93, 'loop') : ('REDUCE', 42),
(93, '*') : ('REDUCE', 42),
(93, 'pool') : ('REDUCE', 42),
(93, ',') : ('REDUCE', 42),
(94, '}') : ('SHIFT', 95),
(95, 'fi') : ('REDUCE', 18),
(95, '}') : ('REDUCE', 18),
(95, 'in') : ('REDUCE', 18),
(95, ';') : ('REDUCE', 18),
(95, ')') : ('REDUCE', 18),
(95, 'of') : ('REDUCE', 18),
(95, 'then') : ('REDUCE', 18),
(95, 'loop') : ('REDUCE', 18),
(95, 'else') : ('REDUCE', 18),
(95, 'pool') : ('REDUCE', 18),
(95, ',') : ('REDUCE', 18),
(96, ';') : ('SHIFT', 97),
(97, 'id') : ('SHIFT', 18),
(97, 'isvoid') : ('SHIFT', 27),
(97, 'false') : ('SHIFT', 29),
(97, 'if') : ('SHIFT', 24),
(97, '~') : ('SHIFT', 15),
(97, 'string') : ('SHIFT', 30),
(97, '{') : ('SHIFT', 14),
(97, 'case') : ('SHIFT', 25),
(97, 'new') : ('SHIFT', 22),
(97, '}') : ('REDUCE', 19),
(97, 'true') : ('SHIFT', 28),
(97, 'int') : ('SHIFT', 21),
(97, '(') : ('SHIFT', 13),
(97, 'not') : ('SHIFT', 26),
(97, 'let') : ('SHIFT', 8),
(97, 'while') : ('SHIFT', 20),
(98, '}') : ('REDUCE', 20),
(99, ')') : ('SHIFT', 100),
(100, 'in') : ('REDUCE', 51),
(100, ';') : ('REDUCE', 51),
(100, 'loop') : ('REDUCE', 51),
(100, 'pool') : ('REDUCE', 51),
(100, ',') : ('REDUCE', 51),
(100, '.') : ('REDUCE', 51),
(100, ')') : ('REDUCE', 51),
(100, 'then') : ('REDUCE', 51),
(100, 'else') : ('REDUCE', 51),
(100, '}') : ('REDUCE', 51),
(100, 'fi') : ('REDUCE', 51),
(100, '=') : ('REDUCE', 51),
(100, '+') : ('REDUCE', 51),
(100, 'of') : ('REDUCE', 51),
(100, '-') : ('REDUCE', 51),
(100, '*') : ('REDUCE', 51),
(100, '/') : ('REDUCE', 51),
(100, '<') : ('REDUCE', 51),
(100, '<=') : ('REDUCE', 51),
(100, '@') : ('REDUCE', 51),
(101, 'in') : ('REDUCE', 23),
(101, ',') : ('SHIFT', 102),
(102, 'id') : ('SHIFT', 9),
(103, 'in') : ('REDUCE', 25),
(104, 'id') : ('SHIFT', 9),
(105, 'in') : ('REDUCE', 24),
(106, 'in') : ('SHIFT', 107),
(107, 'string') : ('SHIFT', 30),
(107, 'not') : ('SHIFT', 26),
(107, 'let') : ('SHIFT', 8),
(107, 'while') : ('SHIFT', 20),
(107, 'new') : ('SHIFT', 22),
(107, 'id') : ('SHIFT', 18),
(107, 'true') : ('SHIFT', 28),
(107, 'int') : ('SHIFT', 21),
(107, '(') : ('SHIFT', 13),
(107, '{') : ('SHIFT', 14),
(107, 'isvoid') : ('SHIFT', 27),
(107, 'case') : ('SHIFT', 25),
(107, '~') : ('SHIFT', 15),
(107, 'false') : ('SHIFT', 29),
(107, 'if') : ('SHIFT', 24),
(108, 'fi') : ('REDUCE', 21),
(108, '}') : ('REDUCE', 21),
(108, 'in') : ('REDUCE', 21),
(108, ';') : ('REDUCE', 21),
(108, ')') : ('REDUCE', 21),
(108, 'of') : ('REDUCE', 21),
(108, 'then') : ('REDUCE', 21),
(108, 'loop') : ('REDUCE', 21),
(108, 'else') : ('REDUCE', 21),
(108, 'pool') : ('REDUCE', 21),
(108, ',') : ('REDUCE', 21),
(109, ';') : ('REDUCE', 9),
(110, ')') : ('REDUCE', 11),
(110, 'id') : ('SHIFT', 111),
(111, ':') : ('SHIFT', 112),
(112, 'type') : ('SHIFT', 113),
(113, ')') : ('REDUCE', 15),
(113, ',') : ('REDUCE', 15),
(114, ')') : ('SHIFT', 115),
(115, ':') : ('SHIFT', 116),
(116, 'type') : ('SHIFT', 117),
(117, '{') : ('SHIFT', 118),
(118, 'id') : ('SHIFT', 18),
(118, 'new') : ('SHIFT', 22),
(118, 'true') : ('SHIFT', 28),
(118, 'int') : ('SHIFT', 21),
(118, '(') : ('SHIFT', 13),
(118, 'isvoid') : ('SHIFT', 27),
(118, '~') : ('SHIFT', 15),
(118, '{') : ('SHIFT', 14),
(118, 'case') : ('SHIFT', 25),
(118, 'false') : ('SHIFT', 29),
(118, 'if') : ('SHIFT', 24),
(118, 'not') : ('SHIFT', 26),
(118, 'let') : ('SHIFT', 8),
(118, 'string') : ('SHIFT', 30),
(118, 'while') : ('SHIFT', 20),
(119, '}') : ('SHIFT', 120),
(120, ';') : ('REDUCE', 10),
(121, ',') : ('SHIFT', 122),
(121, ')') : ('REDUCE', 13),
(122, 'id') : ('SHIFT', 111),
(123, ')') : ('REDUCE', 14),
(124, ')') : ('REDUCE', 12),
(125, '}') : ('SHIFT', 126),
(126, ';') : ('REDUCE', 3),
(127, ';') : ('SHIFT', 128),
(128, 'id') : ('SHIFT', 4),
(128, '}') : ('REDUCE', 7),
(129, '}') : ('REDUCE', 5),
(130, ';') : ('SHIFT', 131),
(131, 'id') : ('SHIFT', 4),
(131, '}') : ('REDUCE', 7),
(132, '}') : ('REDUCE', 6),
(133, 'type') : ('SHIFT', 134),
(134, '{') : ('SHIFT', 135),
(135, 'id') : ('SHIFT', 4),
(135, '}') : ('REDUCE', 7),
(136, '}') : ('SHIFT', 137),
(137, ';') : ('REDUCE', 4),
(138, '$') : ('REDUCE', 0),
(139, ';') : ('SHIFT', 140),
(140, 'class') : ('SHIFT', 1),
(140, '$') : ('REDUCE', 1),
(141, '$') : ('REDUCE', 2),
(142, '$') : ('OK', None)
}
self.goto = {
(0, '<class-list>') : 138,
(0, '<program>') : 142,
(0, '<def-class>') : 139,
(3, '<def-meth>') : 130,
(3, '<def-attr>') : 127,
(3, '<feature-list>') : 125,
(7, '<comp>') : 38,
(7, '<factor>') : 49,
(7, '<term>') : 52,
(7, '<arith>') : 41,
(7, '<expr>') : 109,
(7, '<dispatch>') : 46,
(7, '<atom>') : 32,
(8, '<id-list>') : 106,
(12, '<comp>') : 38,
(12, '<expr>') : 101,
(12, '<atom>') : 32,
(12, '<arith>') : 41,
(12, '<dispatch>') : 46,
(12, '<factor>') : 49,
(12, '<term>') : 52,
(13, '<comp>') : 38,
(13, '<expr>') : 99,
(13, '<factor>') : 49,
(13, '<arith>') : 41,
(13, '<term>') : 52,
(13, '<dispatch>') : 46,
(13, '<atom>') : 32,
(14, '<comp>') : 38,
(14, '<factor>') : 49,
(14, '<expr-list>') : 94,
(14, '<term>') : 52,
(14, '<arith>') : 41,
(14, '<dispatch>') : 46,
(14, '<expr>') : 96,
(14, '<atom>') : 32,
(15, '<atom>') : 32,
(15, '<dispatch>') : 46,
(15, '<factor>') : 93,
(17, '<arith>') : 41,
(17, '<term>') : 52,
(17, '<dispatch>') : 46,
(17, '<comp>') : 38,
(17, '<expr>') : 57,
(17, '<nonEmpty-argList>') : 60,
(17, '<atom>') : 32,
(17, '<arg-list>') : 91,
(17, '<factor>') : 49,
(19, '<arith>') : 41,
(19, '<term>') : 52,
(19, '<dispatch>') : 46,
(19, '<comp>') : 38,
(19, '<atom>') : 32,
(19, '<expr>') : 90,
(19, '<factor>') : 49,
(20, '<comp>') : 38,
(20, '<atom>') : 32,
(20, '<factor>') : 49,
(20, '<arith>') : 41,
(20, '<term>') : 52,
(20, '<dispatch>') : 46,
(20, '<expr>') : 86,
(24, '<arith>') : 41,
(24, '<atom>') : 32,
(24, '<comp>') : 38,
(24, '<expr>') : 80,
(24, '<factor>') : 49,
(24, '<term>') : 52,
(24, '<dispatch>') : 46,
(25, '<term>') : 52,
(25, '<dispatch>') : 46,
(25, '<expr>') : 69,
(25, '<comp>') : 38,
(25, '<arith>') : 41,
(25, '<atom>') : 32,
(25, '<factor>') : 49,
(26, '<arith>') : 41,
(26, '<term>') : 52,
(26, '<dispatch>') : 46,
(26, '<comp>') : 38,
(26, '<atom>') : 32,
(26, '<expr>') : 68,
(26, '<factor>') : 49,
(27, '<atom>') : 32,
(27, '<dispatch>') : 46,
(27, '<factor>') : 31,
(35, '<arith>') : 41,
(35, '<term>') : 52,
(35, '<dispatch>') : 46,
(35, '<comp>') : 38,
(35, '<expr>') : 57,
(35, '<nonEmpty-argList>') : 60,
(35, '<atom>') : 32,
(35, '<arg-list>') : 36,
(35, '<factor>') : 49,
(39, '<arith>') : 41,
(39, '<term>') : 52,
(39, '<expr>') : 40,
(39, '<dispatch>') : 46,
(39, '<comp>') : 38,
(39, '<atom>') : 32,
(39, '<factor>') : 49,
(42, '<atom>') : 32,
(42, '<term>') : 43,
(42, '<dispatch>') : 46,
(42, '<factor>') : 49,
(44, '<atom>') : 32,
(44, '<factor>') : 45,
(44, '<dispatch>') : 46,
(47, '<atom>') : 32,
(47, '<factor>') : 48,
(47, '<dispatch>') : 46,
(50, '<atom>') : 32,
(50, '<term>') : 51,
(50, '<dispatch>') : 46,
(50, '<factor>') : 49,
(53, '<arith>') : 54,
(53, '<atom>') : 32,
(53, '<term>') : 52,
(53, '<dispatch>') : 46,
(53, '<factor>') : 49,
(55, '<arith>') : 56,
(55, '<atom>') : 32,
(55, '<term>') : 52,
(55, '<dispatch>') : 46,
(55, '<factor>') : 49,
(58, '<arith>') : 41,
(58, '<term>') : 52,
(58, '<dispatch>') : 46,
(58, '<comp>') : 38,
(58, '<expr>') : 57,
(58, '<atom>') : 32,
(58, '<nonEmpty-argList>') : 59,
(58, '<factor>') : 49,
(65, '<arith>') : 41,
(65, '<term>') : 52,
(65, '<dispatch>') : 46,
(65, '<comp>') : 38,
(65, '<expr>') : 57,
(65, '<nonEmpty-argList>') : 60,
(65, '<atom>') : 32,
(65, '<arg-list>') : 66,
(65, '<factor>') : 49,
(70, '<case-list>') : 78,
(74, '<comp>') : 38,
(74, '<expr>') : 75,
(74, '<factor>') : 49,
(74, '<term>') : 52,
(74, '<arith>') : 41,
(74, '<dispatch>') : 46,
(74, '<atom>') : 32,
(76, '<case-list>') : 77,
(81, '<term>') : 52,
(81, '<arith>') : 41,
(81, '<dispatch>') : 46,
(81, '<comp>') : 38,
(81, '<atom>') : 32,
(81, '<expr>') : 82,
(81, '<factor>') : 49,
(83, '<comp>') : 38,
(83, '<atom>') : 32,
(83, '<arith>') : 41,
(83, '<factor>') : 49,
(83, '<term>') : 52,
(83, '<expr>') : 84,
(83, '<dispatch>') : 46,
(87, '<arith>') : 41,
(87, '<expr>') : 88,
(87, '<atom>') : 32,
(87, '<comp>') : 38,
(87, '<term>') : 52,
(87, '<factor>') : 49,
(87, '<dispatch>') : 46,
(97, '<comp>') : 38,
(97, '<factor>') : 49,
(97, '<term>') : 52,
(97, '<arith>') : 41,
(97, '<dispatch>') : 46,
(97, '<expr>') : 96,
(97, '<expr-list>') : 98,
(97, '<atom>') : 32,
(102, '<id-list>') : 103,
(104, '<id-list>') : 105,
(107, '<arith>') : 41,
(107, '<term>') : 52,
(107, '<dispatch>') : 46,
(107, '<comp>') : 38,
(107, '<atom>') : 32,
(107, '<expr>') : 108,
(107, '<factor>') : 49,
(110, '<param>') : 121,
(110, '<param-list>') : 114,
(110, '<nonEmpty-paramList>') : 124,
(118, '<term>') : 52,
(118, '<comp>') : 38,
(118, '<factor>') : 49,
(118, '<atom>') : 32,
(118, '<arith>') : 41,
(118, '<expr>') : 119,
(118, '<dispatch>') : 46,
(122, '<param>') : 121,
(122, '<nonEmpty-paramList>') : 123,
(128, '<feature-list>') : 129,
(128, '<def-meth>') : 130,
(128, '<def-attr>') : 127,
(131, '<def-meth>') : 130,
(131, '<feature-list>') : 132,
(131, '<def-attr>') : 127,
(135, '<def-meth>') : 130,
(135, '<def-attr>') : 127,
(135, '<feature-list>') : 136,
(140, '<def-class>') : 139,
(140, '<class-list>') : 141
}
ShiftReduceParser.__init__(self,G)
| 35.734866
| 89
| 0.317772
|
d3cf1ff55a512295988cf6c1a8758e6d20b27b68
| 2,751
|
py
|
Python
|
PyInstaller/hooks/hook-gi.repository.Gio.py
|
hawkhai/pyinstaller
|
016a24479b34de161792c72dde455a81ad4c78ae
|
[
"Apache-2.0"
] | 9,267
|
2015-01-01T04:08:45.000Z
|
2022-03-31T11:42:38.000Z
|
PyInstaller/hooks/hook-gi.repository.Gio.py
|
hawkhai/pyinstaller
|
016a24479b34de161792c72dde455a81ad4c78ae
|
[
"Apache-2.0"
] | 5,150
|
2015-01-01T12:09:56.000Z
|
2022-03-31T18:06:12.000Z
|
PyInstaller/hooks/hook-gi.repository.Gio.py
|
hawkhai/pyinstaller
|
016a24479b34de161792c72dde455a81ad4c78ae
|
[
"Apache-2.0"
] | 2,101
|
2015-01-03T10:25:27.000Z
|
2022-03-30T11:04:42.000Z
|
#-----------------------------------------------------------------------------
# Copyright (c) 2005-2021, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License (version 2
# or later) with exception for distributing the bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
#
# SPDX-License-Identifier: (GPL-2.0-or-later WITH Bootloader-exception)
#-----------------------------------------------------------------------------
"""
Import hook for Gio https://developer.gnome.org/gio/stable from the GLib library https://wiki.gnome.org/Projects/GLib
introspected through PyGobject https://wiki.gnome.org/PyGObject via the GObject Introspection middleware layer
https://wiki.gnome.org/Projects/GObjectIntrospection
Tested with GLib 2.44.1, PyGObject 3.16.2, GObject Introspection 1.44.0 on Mac OS 10.10.5 and
GLib 2.42.2, PyGObject 3.14.0, and GObject Introspection 1.42 on Windows 7.
"""
import glob
import os
import PyInstaller.log as logging
from PyInstaller.compat import base_prefix, is_darwin, is_linux, is_win
from PyInstaller.utils.hooks.gi import get_gi_libdir, get_gi_typelibs
logger = logging.getLogger(__name__)
binaries, datas, hiddenimports = get_gi_typelibs('Gio', '2.0')
libdir = get_gi_libdir('Gio', '2.0')
pattern = None  # stays None on platforms not handled below, so the check further down is safe
if is_win:
pattern = os.path.join(libdir, 'gio', 'modules', '*.dll')
elif is_darwin or is_linux:
gio_libdir = os.path.join(libdir, 'gio', 'modules')
if not os.path.exists(gio_libdir):
# homebrew installs the files elsewhere..
gio_libdir = os.path.join(os.path.commonprefix([base_prefix, gio_libdir]), 'lib', 'gio', 'modules')
pattern = os.path.join(gio_libdir, '*.so')
if pattern:
for f in glob.glob(pattern):
binaries.append((f, 'gio_modules'))
else:
# To add a new platform add a new elif above with the proper is_<platform> and proper pattern for finding the Gio
# modules on your platform.
logger.warning('Bundling Gio modules is currently not supported on your platform.')
# Bundle the mime cache -- might not be needed on Windows
# -> this is used for content type detection (also used by GdkPixbuf)
# -> gio/xdgmime/xdgmime.c looks for mime/mime.cache in the users home directory, followed by XDG_DATA_DIRS if specified
# in the environment, otherwise it searches /usr/local/share/ and /usr/share/
if not is_win:
_mime_searchdirs = ['/usr/local/share', '/usr/share']
    if 'XDG_DATA_DIRS' in os.environ:
        # XDG_DATA_DIRS is a colon-separated list of directories, not a single path
        _mime_searchdirs = os.environ['XDG_DATA_DIRS'].split(os.pathsep) + _mime_searchdirs
for sd in _mime_searchdirs:
spath = os.path.join(sd, 'mime', 'mime.cache')
if os.path.exists(spath):
datas.append((spath, 'share/mime'))
break
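# --- Editor's note: illustrative sketch, not part of the original hook. ---
# PyInstaller reads the module-level `binaries`, `datas` and `hiddenimports`
# lists defined above; each entry is a (source_path, dest_dir) tuple. On a
# typical Linux system the collected entries might look like the following
# (the exact paths are hypothetical and depend on the local GLib install):
#   binaries -> [('/usr/lib/x86_64-linux-gnu/gio/modules/libgiognutls.so', 'gio_modules'), ...]
#   datas    -> [('/usr/share/mime/mime.cache', 'share/mime'), ...]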
| 41.681818
| 120
| 0.683751
|
65f4e4fac326e267d044a088997085e9a150dda2
| 5,375
|
py
|
Python
|
provider.py
|
FrankCAN/ShufflePointNet
|
db4856088c210895327f5bb2c2a8faf33108bd3a
|
[
"MIT"
] | 13
|
2019-09-26T03:20:06.000Z
|
2020-11-30T18:06:06.000Z
|
provider.py
|
jtpils/ShufflePointNet
|
30cb1ab1e43ef042b8fbe3d9b6f82312320dd967
|
[
"MIT"
] | 7
|
2019-10-09T02:12:07.000Z
|
2020-12-30T03:06:56.000Z
|
provider.py
|
jtpils/ShufflePointNet
|
30cb1ab1e43ef042b8fbe3d9b6f82312320dd967
|
[
"MIT"
] | 4
|
2019-11-12T15:28:58.000Z
|
2021-03-17T10:42:12.000Z
|
import os
import sys
import numpy as np
import h5py
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
sys.path.append(BASE_DIR)
# Download dataset for point cloud classification
DATA_DIR = os.path.join(BASE_DIR, 'data')
if not os.path.exists(DATA_DIR):
os.mkdir(DATA_DIR)
if not os.path.exists(os.path.join(DATA_DIR, 'modelnet40_ply_hdf5_2048')):
www = 'https://shapenet.cs.stanford.edu/media/modelnet40_ply_hdf5_2048.zip'
zipfile = os.path.basename(www)
os.system('wget %s; unzip %s' % (www, zipfile))
os.system('mv %s %s' % (zipfile[:-4], DATA_DIR))
os.system('rm %s' % (zipfile))
def shuffle_data(data, labels):
""" Shuffle data and labels.
Input:
data: B,N,... numpy array
label: B,... numpy array
Return:
shuffled data, label and shuffle indices
"""
idx = np.arange(len(labels))
np.random.shuffle(idx)
return data[idx, ...], labels[idx], idx
def rotate_point_cloud(batch_data):
""" Randomly rotate the point clouds to augument the dataset
rotation is per shape based along up direction
Input:
BxNx3 array, original batch of point clouds
Return:
BxNx3 array, rotated batch of point clouds
"""
rotated_data = np.zeros(batch_data.shape, dtype=np.float32)
for k in range(batch_data.shape[0]):
rotation_angle = np.random.uniform() * 2 * np.pi
cosval = np.cos(rotation_angle)
sinval = np.sin(rotation_angle)
rotation_matrix = np.array([[cosval, 0, sinval],
[0, 1, 0],
[-sinval, 0, cosval]])
shape_pc = batch_data[k, ...]
rotated_data[k, ...] = np.dot(shape_pc.reshape((-1, 3)), rotation_matrix)
return rotated_data
def rotate_point_cloud_by_angle(batch_data, rotation_angle):
""" Rotate the point cloud along up direction with certain angle.
Input:
BxNx3 array, original batch of point clouds
Return:
BxNx3 array, rotated batch of point clouds
"""
rotated_data = np.zeros(batch_data.shape, dtype=np.float32)
for k in range(batch_data.shape[0]):
#rotation_angle = np.random.uniform() * 2 * np.pi
cosval = np.cos(rotation_angle)
sinval = np.sin(rotation_angle)
rotation_matrix = np.array([[cosval, 0, sinval],
[0, 1, 0],
[-sinval, 0, cosval]])
shape_pc = batch_data[k, ...]
rotated_data[k, ...] = np.dot(shape_pc.reshape((-1, 3)), rotation_matrix)
return rotated_data
def rotate_perturbation_point_cloud(batch_data, angle_sigma=0.06, angle_clip=0.18):
""" Randomly perturb the point clouds by small rotations
Input:
BxNx3 array, original batch of point clouds
Return:
BxNx3 array, rotated batch of point clouds
"""
rotated_data = np.zeros(batch_data.shape, dtype=np.float32)
for k in range(batch_data.shape[0]):
angles = np.clip(angle_sigma*np.random.randn(3), -angle_clip, angle_clip)
Rx = np.array([[1,0,0],
[0,np.cos(angles[0]),-np.sin(angles[0])],
[0,np.sin(angles[0]),np.cos(angles[0])]])
Ry = np.array([[np.cos(angles[1]),0,np.sin(angles[1])],
[0,1,0],
[-np.sin(angles[1]),0,np.cos(angles[1])]])
Rz = np.array([[np.cos(angles[2]),-np.sin(angles[2]),0],
[np.sin(angles[2]),np.cos(angles[2]),0],
[0,0,1]])
R = np.dot(Rz, np.dot(Ry,Rx))
shape_pc = batch_data[k, ...]
rotated_data[k, ...] = np.dot(shape_pc.reshape((-1, 3)), R)
return rotated_data
def jitter_point_cloud(batch_data, sigma=0.01, clip=0.05):
""" Randomly jitter points. jittering is per point.
Input:
BxNx3 array, original batch of point clouds
Return:
BxNx3 array, jittered batch of point clouds
"""
B, N, C = batch_data.shape
assert(clip > 0)
jittered_data = np.clip(sigma * np.random.randn(B, N, C), -1*clip, clip)
jittered_data += batch_data
return jittered_data
def shift_point_cloud(batch_data, shift_range=0.1):
""" Randomly shift point cloud. Shift is per point cloud.
Input:
BxNx3 array, original batch of point clouds
Return:
BxNx3 array, shifted batch of point clouds
"""
B, N, C = batch_data.shape
shifts = np.random.uniform(-shift_range, shift_range, (B,3))
for batch_index in range(B):
batch_data[batch_index,:,:] += shifts[batch_index,:]
return batch_data
def random_scale_point_cloud(batch_data, scale_low=0.8, scale_high=1.25):
""" Randomly scale the point cloud. Scale is per point cloud.
Input:
BxNx3 array, original batch of point clouds
Return:
BxNx3 array, scaled batch of point clouds
"""
B, N, C = batch_data.shape
scales = np.random.uniform(scale_low, scale_high, B)
for batch_index in range(B):
batch_data[batch_index,:,:] *= scales[batch_index]
return batch_data
def getDataFiles(list_filename):
return [line.rstrip() for line in open(list_filename)]
def load_h5(h5_filename):
    f = h5py.File(h5_filename, 'r')
data = f['data'][:]
label = f['label'][:]
return (data, label)
def loadDataFile(filename):
return load_h5(filename)
def load_h5_data_label_seg(h5_filename):
    f = h5py.File(h5_filename, 'r')
data = f['data'][:] # (2048, 2048, 3)
label = f['label'][:] # (2048, 1)
seg = f['pid'][:] # (2048, 2048)
return (data, label, seg)
def write_h5(h5_filename, data, label):
    f = h5py.File(h5_filename + '_aug', 'w')
    # create_dataset expects (name, data=...); store both arrays under named datasets
    f.create_dataset('data', data=data)
    f.create_dataset('label', data=label)
    f.close()
    return (data, label)
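# --- Editor's note: illustrative usage sketch, not part of the original file. ---
# The helpers above are typically chained per training batch; the random batch
# below only demonstrates the expected BxNx3 shapes. Note that executing this
# module directly also runs the dataset-download block at the top of the file.
if __name__ == '__main__':
    demo_batch = np.random.rand(4, 1024, 3).astype(np.float32)   # B x N x 3 point clouds
    demo_labels = np.arange(4)
    demo_batch, demo_labels, _ = shuffle_data(demo_batch, demo_labels)
    demo_batch = rotate_point_cloud(demo_batch)               # random rotation about the up axis
    demo_batch = rotate_perturbation_point_cloud(demo_batch)  # small random 3-axis rotations
    demo_batch = jitter_point_cloud(demo_batch, sigma=0.01, clip=0.05)
    demo_batch = shift_point_cloud(demo_batch, shift_range=0.1)
    demo_batch = random_scale_point_cloud(demo_batch)
    print(demo_batch.shape, demo_labels.shape)                # (4, 1024, 3) (4,)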
| 31.617647
| 83
| 0.657488
|
55027e91b2abf028f4a6704c7f6152fe8315c339
| 6,207
|
py
|
Python
|
sdk/python/core/tests/test_on_demand.py
|
geordish/ydk-gen
|
eb95b2a86f61180a773b18d0c65b6b906ed542ee
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/core/tests/test_on_demand.py
|
geordish/ydk-gen
|
eb95b2a86f61180a773b18d0c65b6b906ed542ee
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/core/tests/test_on_demand.py
|
geordish/ydk-gen
|
eb95b2a86f61180a773b18d0c65b6b906ed542ee
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# ----------------------------------------------------------------
# Copyright 2017 Cisco Systems
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------
"""test_on_demand_downloading.py
sanity test for on demand module downloading:
- Parse and download new modules from xml payload
"""
from __future__ import absolute_import
import sys
import tempfile
import unittest
from ydk.path import Repository
from ydk.types import EncodingFormat
from ydk.providers import CodecServiceProvider
from ydk.providers import NetconfServiceProvider
from ydk.services import CRUDService
from ydk.services import CodecService
from ydk.models.augmentation import ietf_aug_base_1
from test_utils import ParametrizedTestCase
from test_utils import get_device_info
AUGMENTED_XML_PAYLOAD = """<cpython xmlns="http://cisco.com/ns/yang/ietf-aug-base-1">
<doc>
<aug-5-identityref xmlns:yaug-five="http://cisco.com/ns/yang/yaug-five">yaug-five:derived-aug-identity</aug-5-identityref>
<disutils>
<four-aug-list xmlns="http://cisco.com/ns/yang/yaug-four">
<enabled>true</enabled>
</four-aug-list>
</disutils>
<ydktest-aug-4 xmlns="http://cisco.com/ns/yang/yaug-four">
<aug-four>aug four</aug-four>
</ydktest-aug-4>
<ydktest-aug-1 xmlns="http://cisco.com/ns/yang/yaug-one">
<aug-one>aug one</aug-one>
</ydktest-aug-1>
<ydktest-aug-2 xmlns="http://cisco.com/ns/yang/yaug-two">
<aug-two>aug two</aug-two>
</ydktest-aug-2>
</doc>
<lib>
<ydktest-aug-4 xmlns="http://cisco.com/ns/yang/yaug-four">
<ydktest-aug-nested-4>
<aug-four>aug four</aug-four>
</ydktest-aug-nested-4>
</ydktest-aug-4>
<ydktest-aug-1 xmlns="http://cisco.com/ns/yang/yaug-one">
<ydktest-aug-nested-1>
<aug-one>aug one</aug-one>
</ydktest-aug-nested-1>
</ydktest-aug-1>
<ydktest-aug-2 xmlns="http://cisco.com/ns/yang/yaug-two">
<ydktest-aug-nested-2>
<aug-two>aug two</aug-two>
</ydktest-aug-nested-2>
</ydktest-aug-2>
</lib>
</cpython>"""
AUGMENTED_JSON_PAYLOAD = """{
"ietf-aug-base-1:cpython": {
"doc": {
"disutils": {
"ydktest-aug-ietf-4:four-aug-list": {
"enabled": true
}
},
"ydktest-aug-ietf-4:ydktest-aug-4": {
"aug-four": "aug four"
},
"ydktest-aug-ietf-1:ydktest-aug-1": {
"aug-one": "aug one"
},
"ydktest-aug-ietf-2:ydktest-aug-2": {
"aug-two": "aug two"
},
"ydktest-aug-ietf-5:aug-5-identityref": "ydktest-aug-ietf-5:derived-aug-identity"
},
"lib": {
"ydktest-aug-ietf-4:ydktest-aug-4": {
"ydktest-aug-nested-4": {
"aug-four": "aug four"
}
},
"ydktest-aug-ietf-1:ydktest-aug-1": {
"ydktest-aug-nested-1": {
"aug-one": "aug one"
}
},
"ydktest-aug-ietf-2:ydktest-aug-2": {
"ydktest-aug-nested-2": {
"aug-two": "aug two"
}
}
}
}
}"""
class SanityYang(unittest.TestCase):
@classmethod
def setUpClass(cls):
tmp_dir = tempfile.mkdtemp()
repo = Repository(tmp_dir)
cls.ncc_empty_repo = NetconfServiceProvider(
repo,
cls.hostname,
cls.username,
cls.password,
cls.port,
cls.protocol,
cls.on_demand,
cls.timeout)
cls.ncc = NetconfServiceProvider(
cls.hostname,
cls.username,
cls.password,
cls.port,
cls.protocol,
cls.on_demand,
cls.common_cache,
cls.timeout)
cls.crud = CRUDService()
cls.codec_provider = CodecServiceProvider()
cls.codec = CodecService()
def test_on_demand_downloading(self):
# create augmentation configuration
cpython = ietf_aug_base_1.Cpython()
cpython.doc.ydktest_aug_1.aug_one = 'aug one'
cpython.doc.ydktest_aug_2.aug_two = 'aug two'
cpython.doc.ydktest_aug_4.aug_four = 'aug four'
cpython.lib.ydktest_aug_1.ydktest_aug_nested_1.aug_one = 'aug one'
cpython.lib.ydktest_aug_2.ydktest_aug_nested_2.aug_two = 'aug two'
cpython.lib.ydktest_aug_4.ydktest_aug_nested_4.aug_four = 'aug four'
cpython.doc.disutils.four_aug_list.enabled = True
item1 = cpython.doc.disutils.four_aug_list.Ldata()
item2 = cpython.doc.disutils.four_aug_list.Ldata()
item1.name, item1.number = 'one', 1
        item2.name, item2.number = 'two', 2
self.crud.create(self.ncc, cpython)
self.crud.read(self.ncc_empty_repo, ietf_aug_base_1.Cpython())
def test_on_demand_loading_xml(self):
self.codec_provider.encoding = EncodingFormat.XML
entity1 = self.codec.decode(self.codec_provider, AUGMENTED_XML_PAYLOAD)
self.assertEqual(entity1.lib.ydktest_aug_4.ydktest_aug_nested_4.aug_four, "aug four")
def test_on_demand_loading_json(self):
self.codec_provider.encoding = EncodingFormat.JSON
entity1 = self.codec.decode(self.codec_provider, AUGMENTED_JSON_PAYLOAD)
self.assertEqual(entity1.doc.aug_5_identityref, "ydktest-aug-ietf-5:derived-aug-identity")
if __name__ == '__main__':
device, non_demand, common_cache, timeout = get_device_info()
suite = unittest.TestSuite()
suite.addTest(ParametrizedTestCase.parametrize(
SanityYang,
device=device,
non_demand=non_demand,
common_cache=common_cache,
timeout=timeout))
ret = not unittest.TextTestRunner(verbosity=2).run(suite).wasSuccessful()
sys.exit(ret)
| 32.160622
| 126
| 0.631867
|
ff38ea12e5311af84d28fea8091183a78ace56d7
| 375
|
py
|
Python
|
framework/test/utils/test_add_user.py
|
RuhuiCheng/cheetah
|
a113cdd2c48db51822f8c8eb3abe374383875202
|
[
"Apache-2.0"
] | null | null | null |
framework/test/utils/test_add_user.py
|
RuhuiCheng/cheetah
|
a113cdd2c48db51822f8c8eb3abe374383875202
|
[
"Apache-2.0"
] | null | null | null |
framework/test/utils/test_add_user.py
|
RuhuiCheng/cheetah
|
a113cdd2c48db51822f8c8eb3abe374383875202
|
[
"Apache-2.0"
] | null | null | null |
from airflow import models, settings
from airflow.contrib.auth.backends.password_auth import PasswordUser
def test_add_user():
user = PasswordUser(models.User())
user.username = 'admin'
user.email = 'admin@test.cn'
user.password = 'admin'
user.superuser = 1
session = settings.Session()
session.add(user)
session.commit()
session.close()
| 26.785714
| 68
| 0.701333
|
bef9371b044329703308f0108f8199397b26bb55
| 8,469
|
py
|
Python
|
test/functional/feature_assumevalid.py
|
opteron-coin/opteron
|
a88fca12413879ed70058d91e2d53a0faf4719c3
|
[
"MIT"
] | 1
|
2018-12-03T14:46:25.000Z
|
2018-12-03T14:46:25.000Z
|
test/functional/feature_assumevalid.py
|
opteron-coin/opteron
|
a88fca12413879ed70058d91e2d53a0faf4719c3
|
[
"MIT"
] | null | null | null |
test/functional/feature_assumevalid.py
|
opteron-coin/opteron
|
a88fca12413879ed70058d91e2d53a0faf4719c3
|
[
"MIT"
] | 4
|
2018-12-04T14:53:43.000Z
|
2020-08-05T04:31:00.000Z
|
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Copyright (c) 2017-2018 The Astral Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test logic for skipping signature validation on old blocks.
Test logic for skipping signature validation on blocks which we've assumed
valid (https://github.com/AstralProject/Astralcoin/pull/9484)
We build a chain that includes an invalid signature for one of the
transactions:
0: genesis block
1: block 1 with coinbase transaction output.
2-101: bury that block with 100 blocks so the coinbase transaction
output can be spent
102: a block containing a transaction spending the coinbase
transaction output. The transaction has an invalid signature.
103-2202: bury the bad block with just over two weeks' worth of blocks
(2100 blocks)
Start three nodes:
- node0 has no -assumevalid parameter. Try to sync to block 2202. It will
reject block 102 and only sync as far as block 101
- node1 has -assumevalid set to the hash of block 102. Try to sync to
block 2202. node1 will sync all the way to block 2202.
- node2 has -assumevalid set to the hash of block 102. Try to sync to
block 200. node2 will reject block 102 since it's assumed valid, but it
isn't buried by at least two weeks' work.
"""
import time
from test_framework.blocktools import (create_block, create_coinbase)
from test_framework.key import CECKey
from test_framework.mininode import (CBlockHeader,
COutPoint,
CTransaction,
CTxIn,
CTxOut,
NetworkThread,
NodeConn,
NodeConnCB,
msg_block,
msg_headers)
from test_framework.script import (CScript, OP_TRUE)
from test_framework.test_framework import AstralTestFramework
from test_framework.util import (p2p_port, assert_equal)
class BaseNode(NodeConnCB):
def send_header_for_blocks(self, new_blocks):
headers_message = msg_headers()
headers_message.headers = [CBlockHeader(b) for b in new_blocks]
self.send_message(headers_message)
class AssumeValidTest(AstralTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 3
def setup_network(self):
self.add_nodes(3)
# Start node0. We don't start the other nodes yet since
# we need to pre-mine a block with an invalid transaction
# signature so we can pass in the block hash as assumevalid.
self.start_node(0)
def send_blocks_until_disconnected(self, node):
"""Keep sending blocks to the node until we're disconnected."""
for i in range(len(self.blocks)):
if not node.connection:
break
try:
node.send_message(msg_block(self.blocks[i]))
except IOError as e:
assert str(e) == 'Not connected, no pushbuf'
break
def assert_blockchain_height(self, node, height):
"""Wait until the blockchain is no longer advancing and verify it's reached the expected height."""
last_height = node.getblock(node.getbestblockhash())['height']
timeout = 10
while True:
time.sleep(0.25)
current_height = node.getblock(node.getbestblockhash())['height']
if current_height != last_height:
last_height = current_height
if timeout < 0:
assert False, "blockchain too short after timeout: %d" % current_height
                timeout -= 0.25
continue
elif current_height > height:
assert False, "blockchain too long: %d" % current_height
elif current_height == height:
break
def run_test(self):
# Connect to node0
node0 = BaseNode()
connections = []
connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], node0))
node0.add_connection(connections[0])
NetworkThread().start() # Start up network handling in another thread
node0.wait_for_verack()
# Build the blockchain
self.tip = int(self.nodes[0].getbestblockhash(), 16)
self.block_time = self.nodes[0].getblock(self.nodes[0].getbestblockhash())['time'] + 1
self.blocks = []
# Get a pubkey for the coinbase TXO
coinbase_key = CECKey()
coinbase_key.set_secretbytes(b"horsebattery")
coinbase_pubkey = coinbase_key.get_pubkey()
# Create the first block with a coinbase output to our key
height = 1
block = create_block(self.tip, create_coinbase(height, coinbase_pubkey), self.block_time)
self.blocks.append(block)
self.block_time += 1
block.solve()
# Save the coinbase for later
self.block1 = block
self.tip = block.sha256
height += 1
# Bury the block 100 deep so the coinbase output is spendable
for i in range(100):
block = create_block(self.tip, create_coinbase(height), self.block_time)
block.solve()
self.blocks.append(block)
self.tip = block.sha256
self.block_time += 1
height += 1
# Create a transaction spending the coinbase output with an invalid (null) signature
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.block1.vtx[0].sha256, 0), scriptSig=b""))
tx.vout.append(CTxOut(49 * 100000000, CScript([OP_TRUE])))
tx.calc_sha256()
block102 = create_block(self.tip, create_coinbase(height), self.block_time)
self.block_time += 1
block102.vtx.extend([tx])
block102.hashMerkleRoot = block102.calc_merkle_root()
block102.rehash()
block102.solve()
self.blocks.append(block102)
self.tip = block102.sha256
self.block_time += 1
height += 1
# Bury the assumed valid block 2100 deep
for i in range(2100):
block = create_block(self.tip, create_coinbase(height), self.block_time)
block.nVersion = 4
block.solve()
self.blocks.append(block)
self.tip = block.sha256
self.block_time += 1
height += 1
# Start node1 and node2 with assumevalid so they accept a block with a bad signature.
self.start_node(1, extra_args=["-assumevalid=" + hex(block102.sha256)])
node1 = BaseNode() # connects to node1
connections.append(NodeConn('127.0.0.1', p2p_port(1), self.nodes[1], node1))
node1.add_connection(connections[1])
node1.wait_for_verack()
self.start_node(2, extra_args=["-assumevalid=" + hex(block102.sha256)])
node2 = BaseNode() # connects to node2
connections.append(NodeConn('127.0.0.1', p2p_port(2), self.nodes[2], node2))
node2.add_connection(connections[2])
node2.wait_for_verack()
# send header lists to all three nodes
node0.send_header_for_blocks(self.blocks[0:2000])
node0.send_header_for_blocks(self.blocks[2000:])
node1.send_header_for_blocks(self.blocks[0:2000])
node1.send_header_for_blocks(self.blocks[2000:])
node2.send_header_for_blocks(self.blocks[0:200])
# Send blocks to node0. Block 102 will be rejected.
self.send_blocks_until_disconnected(node0)
self.assert_blockchain_height(self.nodes[0], 101)
# Send all blocks to node1. All blocks will be accepted.
for i in range(2202):
node1.send_message(msg_block(self.blocks[i]))
# Syncing 2200 blocks can take a while on slow systems. Give it plenty of time to sync.
node1.sync_with_ping(120)
assert_equal(self.nodes[1].getblock(self.nodes[1].getbestblockhash())['height'], 2202)
# Send blocks to node2. Block 102 will be rejected.
self.send_blocks_until_disconnected(node2)
self.assert_blockchain_height(self.nodes[2], 101)
if __name__ == '__main__':
AssumeValidTest().main()
| 41.514706
| 107
| 0.629236
|
9bcdd546302fb7b8945c3e8d6eb15b6fe444e032
| 57
|
py
|
Python
|
cortex/server/__init__.py
|
chib0/asd-winter2019
|
c7d95305b1e8b99013fd40da1e7ebe01c2d0102a
|
[
"Apache-2.0"
] | null | null | null |
cortex/server/__init__.py
|
chib0/asd-winter2019
|
c7d95305b1e8b99013fd40da1e7ebe01c2d0102a
|
[
"Apache-2.0"
] | 4
|
2021-02-02T22:38:53.000Z
|
2022-01-13T02:32:33.000Z
|
cortex/server/__init__.py
|
chib0/asd-winter2019
|
c7d95305b1e8b99013fd40da1e7ebe01c2d0102a
|
[
"Apache-2.0"
] | null | null | null |
from .server import get_server, _run_server as run_server
| 57
| 57
| 0.859649
|
f20ed4b50d61cb02306993b0da254f5bc9a77436
| 3,300
|
py
|
Python
|
convolutional_neural_network.py
|
ascvorcov/z80-ddqn
|
407de1a4231dd298eb1e749aaa65f498dcdf6958
|
[
"MIT"
] | 3
|
2020-03-30T17:08:29.000Z
|
2020-04-29T13:35:07.000Z
|
convolutional_neural_network.py
|
ascvorcov/z80-ddqn
|
407de1a4231dd298eb1e749aaa65f498dcdf6958
|
[
"MIT"
] | 11
|
2020-01-28T22:19:44.000Z
|
2022-03-12T00:03:42.000Z
|
convolutional_neural_network.py
|
ascvorcov/z80-ddqn
|
407de1a4231dd298eb1e749aaa65f498dcdf6958
|
[
"MIT"
] | null | null | null |
from keras.models import Sequential
from keras.layers.convolutional import Conv2D
from keras.layers import Dense, Flatten, Input, Lambda, Add
from keras.optimizers import RMSprop
from keras.models import Model
from keras import backend as K
from keras.utils import plot_model
def huber_loss(q, y_pred):
error = K.abs(q - y_pred)
#clip_error = keras.backend.clip(error, 0.0, 1.0)
linear_error = error - .5
use_linear_flag = K.cast((error > 1), 'float32')
return K.mean((use_linear_flag * linear_error + .5*(1-use_linear_flag) * K.square(error)))
class ConvolutionalNeuralNetwork:
    @staticmethod
    def create_original(input_shape, action_space):
model = Sequential()
model.add(Conv2D(32,
8,
strides=(4, 4),
padding="valid",
activation="relu",
input_shape=input_shape,
data_format="channels_first"))
model.add(Conv2D(64,
4,
strides=(2, 2),
padding="valid",
activation="relu",
input_shape=input_shape,
data_format="channels_first"))
model.add(Conv2D(64,
3,
strides=(1, 1),
padding="valid",
activation="relu",
input_shape=input_shape,
data_format="channels_first"))
model.add(Flatten())
model.add(Dense(512, activation="relu"))
model.add(Dense(action_space))
model.compile(loss=huber_loss,
optimizer=RMSprop(lr=0.00025,
rho=0.95,
epsilon=0.01),
metrics=["accuracy"])
return model
    @staticmethod
    def create_dueling(input_shape, action_space):
inputs = Input(shape=input_shape)
shared = Conv2D(32, (8, 8), strides=(4, 4), padding="valid", activation="relu", data_format="channels_first")(inputs)
shared = Conv2D(64, (4, 4), strides=(2, 2), padding="valid", activation="relu", data_format="channels_first")(shared)
shared = Conv2D(64, (3, 3), strides=(1, 1), padding="valid", activation="relu", data_format="channels_first")(shared)
flatten = Flatten()(shared)
advantage_fc = Dense(512, activation="relu")(flatten)
advantage = Dense(action_space)(advantage_fc)
advantage = Lambda(lambda a: a[:, :] - K.mean(a[:, :], keepdims=True),
output_shape=(action_space,))(advantage)
value_fc = Dense(512, activation="relu")(flatten)
value = Dense(1)(value_fc)
value = Lambda(lambda s: K.expand_dims(s[:, 0], -1),
output_shape=(action_space,))(value)
q_value = Add()([value, advantage])
model = Model(inputs=inputs, outputs=q_value)
model.compile(loss=huber_loss,
optimizer=RMSprop(lr=0.00025, rho=0.95, epsilon=0.01),
metrics=["accuracy"])
return model
#plot_model(self.model, show_shapes=True, expand_nested=True, to_file='model.png')
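# --- Editor's note: illustrative usage sketch; the input shape and action count
# below are assumptions (e.g. four stacked 84x84 frames, 6 discrete actions). ---
if __name__ == '__main__':
    dqn = ConvolutionalNeuralNetwork.create_original((4, 84, 84), 6)
    dueling_dqn = ConvolutionalNeuralNetwork.create_dueling((4, 84, 84), 6)
    dqn.summary()
    dueling_dqn.summary()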
| 44
| 125
| 0.541818
|
547f4927656312817e381e8fdbd9611eec873fc1
| 730
|
py
|
Python
|
meetup-pyar-2018-cython/testing-cases/pure-python-numpy/difference.py
|
tzulberti/charlas
|
d73a1ad7b4f5f7fd6578a8dc79a5143629044e12
|
[
"MIT"
] | 1
|
2018-11-24T13:06:51.000Z
|
2018-11-24T13:06:51.000Z
|
meetup-pyar-2018-cython/testing-cases/pure-python-numpy/difference.py
|
tzulberti/charlas
|
d73a1ad7b4f5f7fd6578a8dc79a5143629044e12
|
[
"MIT"
] | 1
|
2018-11-28T01:43:22.000Z
|
2018-11-28T01:43:22.000Z
|
meetup-pyar-2018-cython/testing-cases/pure-python-numpy/difference.py
|
tzulberti/charlas
|
d73a1ad7b4f5f7fd6578a8dc79a5143629044e12
|
[
"MIT"
] | 2
|
2018-11-24T01:10:19.000Z
|
2022-02-14T02:21:03.000Z
|
# -*- coding: utf-8 -*-
import numpy
def levenshtein(seq1, seq2):
size_x = len(seq1) + 1
size_y = len(seq2) + 1
matrix = numpy.zeros((size_x, size_y))
for x in range(size_x):
matrix[x, 0] = x
for y in range(size_y):
matrix[0, y] = y
for x in range(1, size_x):
for y in range(1, size_y):
if seq1[x-1] == seq2[y-1]:
substitution_cost = 0
else:
substitution_cost = 1
matrix[x, y] = min(
matrix[x-1, y] + 1, # deletion
matrix[x, y-1] + 1, # insertion
matrix[x-1, y-1] + substitution_cost, #substitution
)
return matrix[size_x - 1, size_y - 1]
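# --- Editor's note: illustrative check, not part of the original file. ---
# "kitten" -> "sitting" needs three edits (k->s, e->i, append g), so the
# computed distance should be 3.
if __name__ == '__main__':
    assert levenshtein("kitten", "sitting") == 3
    print(levenshtein("kitten", "sitting"))  # 3.0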
| 24.333333
| 67
| 0.484932
|
7b66ec3e8081b04ef6986b8a18071d47bd4058c8
| 13,020
|
py
|
Python
|
jans-pycloudlib/jans/pycloudlib/persistence/sql.py
|
duttarnab/jans
|
b4ae02f9cb60433a44a2b889268525532d82a247
|
[
"Apache-2.0"
] | null | null | null |
jans-pycloudlib/jans/pycloudlib/persistence/sql.py
|
duttarnab/jans
|
b4ae02f9cb60433a44a2b889268525532d82a247
|
[
"Apache-2.0"
] | null | null | null |
jans-pycloudlib/jans/pycloudlib/persistence/sql.py
|
duttarnab/jans
|
b4ae02f9cb60433a44a2b889268525532d82a247
|
[
"Apache-2.0"
] | null | null | null |
"""
jans.pycloudlib.persistence.sql
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This module contains various helpers related to SQL persistence.
"""
import logging
import os
from collections import defaultdict
from sqlalchemy import create_engine
from sqlalchemy import MetaData
from sqlalchemy import func
from sqlalchemy import select
from jans.pycloudlib import get_manager
from jans.pycloudlib.utils import encode_text
logger = logging.getLogger(__name__)
def get_sql_password(manager) -> str:
"""Get password used for SQL database user.
Priority:
1. get from password file
2. get from secrets
:returns: Plaintext password.
"""
secret_name = "sql_password"
password_file = os.environ.get("CN_SQL_PASSWORD_FILE", "/etc/jans/conf/sql_password")
if os.path.isfile(password_file):
with open(password_file) as f:
password = f.read().strip()
manager.secret.set(secret_name, password)
logger.warning(
f"Loading password from {password_file} file is deprecated and will be removed in future releases. "
f"Note, the password has been saved to secrets with key {secret_name} for later usage."
)
else:
# get from secrets (if any)
password = manager.secret.get(secret_name)
return password
class BaseClient:
"""Base class for SQL client adapter.
"""
def __init__(self, manager):
self.manager = manager
self.engine = create_engine(
self.engine_url,
pool_pre_ping=True,
hide_parameters=True,
)
self._metadata = None
@property
def metadata(self):
"""Lazy init of metadata."""
if not self._metadata:
self._metadata = MetaData(bind=self.engine)
self._metadata.reflect()
return self._metadata
@property
def dialect(self):
"""Dialect name."""
raise NotImplementedError
@property
def connector(self):
"""Connector name."""
raise NotImplementedError
@property
def quote_char(self):
"""Character used for quoting identifier."""
raise NotImplementedError
@property
def engine_url(self):
"""Engine connection URL."""
raise NotImplementedError
def connected(self):
"""Check whether connection is alive by executing simple query.
"""
with self.engine.connect() as conn:
result = conn.execute("SELECT 1 AS is_alive")
return result.fetchone()[0] > 0
def get_table_mapping(self) -> dict:
"""Get mapping of column name and type from all tables.
"""
table_mapping = defaultdict(dict)
for table_name, table in self.metadata.tables.items():
for column in table.c:
# truncate COLLATION string
if getattr(column.type, "collation", None):
column.type.collation = None
table_mapping[table_name][column.name] = str(column.type)
return dict(table_mapping)
def row_exists(self, table_name, id_):
"""Check whether a row is exist."""
table = self.metadata.tables.get(table_name)
if table is None:
return False
query = select([func.count()]).select_from(table).where(
table.c.doc_id == id_
)
with self.engine.connect() as conn:
result = conn.execute(query)
return result.fetchone()[0] > 0
def quoted_id(self, identifier):
"""Get quoted identifier name."""
return f"{self.quote_char}{identifier}{self.quote_char}"
def create_table(self, table_name: str, column_mapping: dict, pk_column: str):
"""Create table with its columns."""
columns = []
for column_name, column_type in column_mapping.items():
column_def = f"{self.quoted_id(column_name)} {column_type}"
if column_name == pk_column:
column_def += " NOT NULL UNIQUE"
columns.append(column_def)
columns_fmt = ", ".join(columns)
pk_def = f"PRIMARY KEY ({self.quoted_id(pk_column)})"
query = f"CREATE TABLE {self.quoted_id(table_name)} ({columns_fmt}, {pk_def})"
with self.engine.connect() as conn:
try:
conn.execute(query)
# refresh metadata as we have newly created table
self.metadata.reflect()
except Exception as exc: # noqa: B902
self.on_create_table_error(exc)
def on_create_table_error(self, exc):
"""Callback called when error occured during table creation."""
raise NotImplementedError
def create_index(self, query):
"""Create index using raw query."""
with self.engine.connect() as conn:
try:
conn.execute(query)
except Exception as exc: # noqa: B902
self.on_create_index_error(exc)
def on_create_index_error(self, exc):
"""Callback called when error occured during index creation."""
raise NotImplementedError
def insert_into(self, table_name, column_mapping):
"""Insert a row into a table."""
table = self.metadata.tables.get(table_name)
for column in table.c:
unmapped = column.name not in column_mapping
is_json = column.type.__class__.__name__.lower() == "json"
if not all([unmapped, is_json]):
continue
column_mapping[column.name] = {"v": []}
query = table.insert().values(column_mapping)
with self.engine.connect() as conn:
try:
conn.execute(query)
except Exception as exc: # noqa: B902
self.on_insert_into_error(exc)
def on_insert_into_error(self, exc):
"""Callback called when error occured during row insertion."""
raise NotImplementedError
def get(self, table_name, id_, column_names=None) -> dict:
"""Get a row from a table with matching ID."""
table = self.metadata.tables.get(table_name)
attrs = column_names or []
if attrs:
cols = [table.c[attr] for attr in attrs]
else:
cols = [table]
query = select(cols).select_from(table).where(
table.c.doc_id == id_
)
with self.engine.connect() as conn:
result = conn.execute(query)
entry = result.fetchone()
if not entry:
return {}
return dict(entry)
def update(self, table_name, id_, column_mapping) -> bool:
"""Update a table row with matching ID."""
table = self.metadata.tables.get(table_name)
query = table.update().where(table.c.doc_id == id_).values(column_mapping)
with self.engine.connect() as conn:
result = conn.execute(query)
return bool(result.rowcount)
def search(self, table_name, column_names=None) -> dict:
"""Get all rows from a table."""
table = self.metadata.tables.get(table_name)
attrs = column_names or []
if attrs:
cols = [table.c[attr] for attr in attrs]
else:
cols = [table]
query = select(cols).select_from(table)
with self.engine.connect() as conn:
result = conn.execute(query)
for entry in result:
yield dict(entry)
@property
def server_version(self):
"""Display server version."""
raise NotImplementedError
class PostgresqlClient(BaseClient):
"""Class for PostgreSQL adapter.
"""
@property
def dialect(self):
return "pgsql"
@property
def connector(self):
"""Connector name."""
return "postgresql+psycopg2"
@property
def quote_char(self):
"""Character used for quoting identifier."""
return '"'
@property
def engine_url(self):
host = os.environ.get("CN_SQL_DB_HOST", "localhost")
port = os.environ.get("CN_SQL_DB_PORT", 5432)
database = os.environ.get("CN_SQL_DB_NAME", "jans")
user = os.environ.get("CN_SQL_DB_USER", "jans")
password = get_sql_password(self.manager)
return f"{self.connector}://{user}:{password}@{host}:{port}/{database}"
def on_create_table_error(self, exc):
if exc.orig.pgcode in ["42P07"]:
# error with following code will be suppressed
# - 42P07: relation exists
pass
else:
raise exc
def on_create_index_error(self, exc):
if exc.orig.pgcode in ["42P07"]:
# error with following code will be suppressed
# - 42P07: relation exists
pass
else:
raise exc
def on_insert_into_error(self, exc):
if exc.orig.pgcode in ["23505"]:
# error with following code will be suppressed
# - 23505: unique violation
pass
else:
raise exc
@property
def server_version(self):
"""Display server version."""
return self.engine.scalar("SHOW server_version")
class MysqlClient(BaseClient):
"""Class for MySQL adapter.
"""
@property
def dialect(self):
return "mysql"
@property
def connector(self):
"""Connector name."""
return "mysql+pymysql"
@property
def quote_char(self):
"""Character used for quoting identifier."""
return "`"
@property
def engine_url(self):
host = os.environ.get("CN_SQL_DB_HOST", "localhost")
port = os.environ.get("CN_SQL_DB_PORT", 3306)
database = os.environ.get("CN_SQL_DB_NAME", "jans")
user = os.environ.get("CN_SQL_DB_USER", "jans")
password = get_sql_password(self.manager)
return f"{self.connector}://{user}:{password}@{host}:{port}/{database}"
def on_create_table_error(self, exc):
if exc.orig.args[0] in [1050]:
# error with following code will be suppressed
# - 1050: table exists
pass
else:
raise exc
def on_create_index_error(self, exc):
if exc.orig.args[0] in [1061]:
# error with following code will be suppressed
# - 1061: duplicate key name (index)
pass
else:
raise exc
def on_insert_into_error(self, exc):
if exc.orig.args[0] in [1062]:
# error with following code will be suppressed
# - 1062: duplicate entry
pass
else:
raise exc
@property
def server_version(self):
"""Display server version."""
return self.engine.scalar("SELECT VERSION()")
class SQLClient:
"""This class interacts with SQL database.
"""
#: Methods from adapter
_allowed_adapter_methods = (
"connected",
"create_table",
"get_table_mapping",
"create_index",
"quoted_id",
"row_exists",
"insert_into",
"get",
"update",
"search",
"server_version",
)
def __init__(self, manager=None):
manager = manager or get_manager()
dialect = os.environ.get("CN_SQL_DB_DIALECT", "mysql")
if dialect in ("pgsql", "postgresql"):
self.adapter = PostgresqlClient(manager)
elif dialect == "mysql":
self.adapter = MysqlClient(manager)
self._adapter_methods = [
method for method in dir(self.adapter)
if method in self._allowed_adapter_methods
]
def __getattr__(self, name):
# call adapter method first (if any)
if name in self._adapter_methods:
return getattr(self.adapter, name)
raise AttributeError(
f"'{self.__class__.__name__}' object has no attribute '{name}'"
)
def render_sql_properties(manager, src: str, dest: str) -> None:
"""Render file contains properties to connect to SQL database server.
:params manager: An instance of :class:`~jans.pycloudlib.manager._Manager`.
:params src: Absolute path to the template.
:params dest: Absolute path where generated file is located.
"""
with open(src) as f:
txt = f.read()
with open(dest, "w") as f:
rendered_txt = txt % {
"rdbm_db": os.environ.get("CN_SQL_DB_NAME", "jans"),
"rdbm_type": os.environ.get("CN_SQL_DB_DIALECT", "mysql"),
"rdbm_host": os.environ.get("CN_SQL_DB_HOST", "localhost"),
"rdbm_port": os.environ.get("CN_SQL_DB_PORT", 3306),
"rdbm_user": os.environ.get("CN_SQL_DB_USER", "jans"),
"rdbm_password_enc": encode_text(
get_sql_password(manager),
manager.secret.get("encoded_salt"),
).decode(),
"server_time_zone": os.environ.get("CN_SQL_DB_TIMEZONE", "UTC"),
}
f.write(rendered_txt)
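if __name__ == '__main__':
    # Editor's note: illustrative sketch only. It assumes a reachable database,
    # a configured secrets manager for get_sql_password(), and the dialect set
    # through the environment; none of this is part of the original module.
    os.environ.setdefault("CN_SQL_DB_DIALECT", "mysql")
    client = SQLClient()
    # SQLClient proxies the whitelisted adapter members through __getattr__,
    # so the same calls work for both the MySQL and PostgreSQL adapters.
    print("server version:", client.server_version)
    print("tables:", list(client.get_table_mapping()))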
| 30.208817
| 116
| 0.59278
|
9e82fc55b1def371b6df4e8c5d123ad37b357a1d
| 1,942
|
py
|
Python
|
kda/graphql/meta/types.py
|
larrykamau/delivery-server
|
4155f16e1dec313eeddfbe10c268714fa9bbe783
|
[
"MIT"
] | null | null | null |
kda/graphql/meta/types.py
|
larrykamau/delivery-server
|
4155f16e1dec313eeddfbe10c268714fa9bbe783
|
[
"MIT"
] | null | null | null |
kda/graphql/meta/types.py
|
larrykamau/delivery-server
|
4155f16e1dec313eeddfbe10c268714fa9bbe783
|
[
"MIT"
] | null | null | null |
import graphene
from ...core.models import ModelWithMetadata
from .resolvers import (
resolve_metadata,
resolve_object_with_metadata_type,
resolve_private_metadata,
)
class MetadataItem(graphene.ObjectType):
key = graphene.String(required=True, description="Key of a metadata item.")
value = graphene.String(required=True, description="Value of a metadata item.")
class ObjectWithMetadata(graphene.Interface):
private_metadata = graphene.List(
MetadataItem,
required=True,
description=(
"List of private metadata items."
"Requires proper staff permissions to access."
),
)
metadata = graphene.List(
MetadataItem,
required=True,
description=(
"List of public metadata items. Can be accessed without permissions."
),
)
# Deprecated we should remove it in #5221
private_meta = graphene.List(
"kda.graphql.meta.deprecated.types.MetaStore",
required=True,
description="List of privately stored metadata namespaces.",
deprecation_reason=(
"Use the `privetaMetadata` field. This field will be removed after "
"2020-07-31."
),
)
meta = graphene.List(
"kda.graphql.meta.deprecated.types.MetaStore",
required=True,
description="List of publicly stored metadata namespaces.",
deprecation_reason=(
"Use the `metadata` field. This field will be removed after 2020-07-31."
),
)
@staticmethod
def resolve_metadata(root: ModelWithMetadata, _info):
return resolve_metadata(root.metadata)
@staticmethod
def resolve_private_metadata(root: ModelWithMetadata, info):
return resolve_private_metadata(root, info)
@classmethod
def resolve_type(cls, instance: ModelWithMetadata, _info):
return resolve_object_with_metadata_type(instance)
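# --- Editor's note: illustrative sketch. `ExampleThing` is a placeholder type,
# not part of this app; it only shows how a concrete ObjectType adopts the
# interface and inherits the metadata resolvers defined above. ---
class ExampleThing(graphene.ObjectType):
    class Meta:
        interfaces = (ObjectWithMetadata,)
        description = "Placeholder type used only to illustrate ObjectWithMetadata."

    name = graphene.String(description="Example scalar field.")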
| 30.825397
| 84
| 0.667868
|
e2f1d6d6c2117e500d92396d6e7e1fe54a1251e8
| 766
|
py
|
Python
|
SnookR/invites/urls.py
|
eSmelser/SnookR
|
39b841e9a3e9e6b5d94904e0ccf37faef0ec712d
|
[
"MIT"
] | null | null | null |
SnookR/invites/urls.py
|
eSmelser/SnookR
|
39b841e9a3e9e6b5d94904e0ccf37faef0ec712d
|
[
"MIT"
] | 12
|
2017-07-12T02:41:32.000Z
|
2017-12-30T23:08:00.000Z
|
SnookR/invites/urls.py
|
eSmelser/SnookR
|
39b841e9a3e9e6b5d94904e0ccf37faef0ec712d
|
[
"MIT"
] | 1
|
2017-07-17T14:56:16.000Z
|
2017-07-17T14:56:16.000Z
|
from django.conf.urls import include, url
from invites import views
session_event_patterns = ([
url(r'^session-event/step-1/$', views.SessionEventStartView.as_view(), name='start'),
url(r'^session-event/step-2/team/(?P<team_id>\d+)/$', views.SessionSelectView.as_view(), name='session-select'),
url(r'^session-event/step-3/team/(?P<team_id>\d+)/session/(?P<session_id>\d+)/$', views.SessionEventSelectView.as_view(), name='event-select'),
url(r'^session-event/step-4/confirm/$', views.SessionEventInviteConfirmView.as_view(), name='confirm'),
url(r'^session-event/step-5/create/$', views.SessionEventInviteCreateView.as_view(), name='create'),
], 'session-event')
urlpatterns = [
url(r'^session-event/', include(session_event_patterns)),
]
| 51.066667
| 147
| 0.715405
|
a14b100c3cfe1ccf97e0981732def29402bc49e9
| 8,741
|
py
|
Python
|
btu/btu_core/doctype/btu_task_schedule/btu_task_schedule.py
|
Govind-Jangid/btu
|
856cce48fbf2fd349c064af67b2fc2d85918c61c
|
[
"MIT"
] | 7
|
2021-08-30T16:55:01.000Z
|
2022-02-16T02:30:30.000Z
|
btu/btu_core/doctype/btu_task_schedule/btu_task_schedule.py
|
Govind-Jangid/btu
|
856cce48fbf2fd349c064af67b2fc2d85918c61c
|
[
"MIT"
] | null | null | null |
btu/btu_core/doctype/btu_task_schedule/btu_task_schedule.py
|
Govind-Jangid/btu
|
856cce48fbf2fd349c064af67b2fc2d85918c61c
|
[
"MIT"
] | 6
|
2021-11-04T13:25:48.000Z
|
2022-02-22T11:11:46.000Z
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Codrotech Inc. and contributors
#
# Copyright (c) 2021, Datahenge LLC and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import calendar
from calendar import monthrange, timegm
from datetime import datetime
from time import gmtime, localtime, mktime
# Third Party
import cron_descriptor
# Frappe
import frappe
from frappe import _
from frappe.model.document import Document
# BTU
from btu import ( validate_cron_string, Result)
from btu.btu_api.scheduler import SchedulerAPI
class BTUTaskSchedule(Document): # pylint: disable=too-many-instance-attributes
def on_trash(self):
"""
After deleting this Task Schedule, delete the corresponding Redis data.
"""
self.cancel_schedule()
# btu_core.redis_cancel_by_queue_job_id(self.redis_job_id)
def before_validate(self):
self.task_description = self.get_task_doc().desc_short
# Create a friendly, human-readable description based on the cron string:
self.schedule_description = cron_descriptor.get_description(self.cron_string)
# Clear fields that are not relevant for this schedule type.
if self.run_frequency == "Cron Style":
self.day_of_week = None
self.day_of_month = None
self.month = None
self.hour = None
self.minute = None
if self.run_frequency == "Hourly":
self.day_of_week = None
self.day_of_month = None
self.month = None
self.hour = None
if self.run_frequency == "Daily":
self.day_of_week = None
self.day_of_month = None
self.month = None
def validate(self):
if self.run_frequency == "Hourly":
check_minutes(self.minute)
self.cron_string = schedule_to_cron_string(self)
elif self.run_frequency == "Daily":
check_hours(self.hour)
check_minutes(self.minute)
self.cron_string = schedule_to_cron_string(self)
elif self.run_frequency == "Weekly":
check_day_of_week(self.day_of_week)
check_hours(self.hour)
check_minutes(self.minute)
self.cron_string = schedule_to_cron_string(self)
elif self.run_frequency == "Monthly":
check_day_of_month(self.run_frequency, self.day_of_month)
check_hours(self.hour)
check_minutes(self.minute)
self.cron_string = schedule_to_cron_string(self)
elif self.run_frequency == "Yearly":
check_day_of_month(self.run_frequency, self.day_of_month, self.month)
check_hours(self.hour)
check_minutes(self.minute)
self.cron_string = schedule_to_cron_string(self)
elif self.run_frequency == "Cron Style":
validate_cron_string(str(self.cron_string))
def before_save(self):
if bool(self.enabled) is True:
self.resubmit_task_schedule()
else:
doc_orig = self.get_doc_before_save()
if doc_orig and doc_orig.enabled != self.enabled:
# Request the BTU Scheduler to cancel (if status was not previously Disabled)
self.cancel_schedule()
# -----end of standard controller methods-----
def resubmit_task_schedule(self, autosave=False):
"""
Send a request to the BTU Scheduler background daemon to reload this Task Schedule in RQ.
"""
response = SchedulerAPI.reload_task_schedule(task_schedule_id=self.name)
if response.startswith('Exception while connecting'):
raise ConnectionError(response)
print(f"Response from BTU Scheduler: {response}")
frappe.msgprint(f"Response from BTU Scheduler daemon:<br>{response}")
if autosave:
self.save()
def cancel_schedule(self):
"""
Ask the BTU Scheduler daemon to cancel this Task Schedule in the Redis Queue.
"""
response = SchedulerAPI.cancel_task_schedule(task_schedule_id=self.name)
message = f"Request = Cancel Subscription.\nResponse from BTU Scheduler: {response}"
print(message)
frappe.msgprint(message)
self.redis_job_id = ""
def get_task_doc(self):
return frappe.get_doc("BTU Task", self.task)
@frappe.whitelist()
def get_last_execution_results(self):
# response = SchedulerAPI.reload_task_schedule(task_schedule_id=self.name)
import zlib
from frappe.utils.background_jobs import get_redis_conn
conn = get_redis_conn()
# conn.type('rq:job:TS000001')
# conn.hkeys('rq:job:TS000001')
try:
# job_data = conn.hgetall(f'rq:job:{self.redis_job_id}').decode('utf-8')
# frappe.msgprint(job_data)
job_status = conn.hget(f'rq:job:{self.redis_job_id}', 'status').decode('utf-8')
except Exception:
frappe.msgprint(f"No job information is available for Job {self.redis_job_id}")
return
if job_status == 'finished':
frappe.msgprint(f"Job {self.redis_job_id} completed successfully.")
return
frappe.msgprint(f"Job status = {job_status}")
compressed_data = conn.hget(f'rq:job:{self.redis_job_id}', 'exc_info')
if not compressed_data:
frappe.msgprint("No results available; job may not have been processed yet.")
else:
frappe.msgprint(zlib.decompress(compressed_data))
@frappe.whitelist()
def button_test_email_via_log(self):
"""
Write an entry to the BTU Task Log, which should trigger emails. Then delete the entry.
"""
from btu.btu_core.doctype.btu_task_log.btu_task_log import write_log_for_task # late import to avoid circular reference
if not self.email_recipients:
frappe.msgprint("Task Schedule does not have any Email Recipients; no emails can be tested.")
return
try:
result_obj = Result(success=True, message="This test demonstrates how Task Logs can trigger an email on completion.")
log_key = write_log_for_task(task_id=self.task,
result=result_obj,
schedule_id=self.name)
frappe.db.commit()
frappe.delete_doc("BTU Task Log", log_key)
frappe.msgprint("Log written; emails should arrive shortly.")
except Exception as ex:
frappe.msgprint(f"Errors while testing Task Emails: {ex}")
raise ex
# ----------------
# STATIC FUNCTIONS
# ----------------
def check_minutes(minute):
    if minute is None or not 0 <= int(minute) < 60:
        raise ValueError(_("Minute value must be between 0 and 59"))
def check_hours(hour):
if not hour or not hour.isdigit() or not 0 <= int(hour) < 24:
raise ValueError(_("Hour value must be between 0 and 23"))
def check_day_of_week(day_of_week):
    if not day_of_week:
        raise ValueError(_("Please select a day of the week"))
def check_day_of_month(run_frequency, day, month=None):
if run_frequency == "Monthly" and not day:
raise ValueError(_("Please select a day of the month"))
if run_frequency == "Yearly":
if day and month:
m = {value: key for key, value in enumerate(calendar.month_abbr)}
last = monthrange(datetime.now().year,
m.get(str(month).title()))[1]
if int(day) > last:
raise ValueError(
_("Day value for {0} must be between 1 and {1}").format(month, last))
else:
raise ValueError(_("Please select a day of the week and a month"))
def schedule_to_cron_string(doc_schedule):
"""
Purpose of this function is to convert individual SQL columns (Hour, Day, Minute, etc.)
into a valid Unix cron string.
Input: A BTU Task Schedule document class.
Output: A Unix cron string.
"""
def get_utc_time_diff():
current_time = localtime()
return (timegm(current_time) - timegm(gmtime(mktime(current_time)))) / 3600
if not isinstance(doc_schedule, BTUTaskSchedule):
raise ValueError("Function argument 'doc_schedule' should be a BTU Task Schedule document.")
if doc_schedule.run_frequency == 'Cron Style':
return doc_schedule.cron_string
cron = [None] * 5
cron[0] = "*" if doc_schedule.minute is None else str(doc_schedule.minute)
cron[1] = "*" if doc_schedule.hour is None else str(
int(doc_schedule.hour) - get_utc_time_diff())
cron[2] = "*" if doc_schedule.day_of_month is None else str(doc_schedule.day_of_month)
cron[3] = "*" if doc_schedule.month is None else doc_schedule.month
cron[4] = "*" if doc_schedule.day_of_week is None else doc_schedule.day_of_week[:3]
result = " ".join(cron)
validate_cron_string(result, error_on_invalid=True)
return result
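# Illustrative sketch (not part of the original module): a minimal, standalone
# version of the same field-to-cron mapping, assuming plain keyword arguments
# instead of a BTU Task Schedule document and ignoring the UTC offset adjustment
# applied above.
def _sketch_fields_to_cron(minute=None, hour=None, day_of_month=None, month=None, day_of_week=None):
    parts = [
        "*" if minute is None else str(minute),
        "*" if hour is None else str(hour),
        "*" if day_of_month is None else str(day_of_month),
        "*" if month is None else str(month),
        "*" if day_of_week is None else str(day_of_week)[:3],
    ]
    return " ".join(parts)
# For example, a "Weekly" schedule at minute 30 of hour 8 on Monday:
# _sketch_fields_to_cron(minute=30, hour=8, day_of_week="Monday") -> "30 8 * * Mon"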
@frappe.whitelist()
def resubmit_all_task_schedules():
"""
Purpose: Loop through all enabled Task Schedules, and ask the BTU Scheduler daemon to resubmit them for scheduling.
NOTE: This does -not- immediately execute an RQ Job; it only schedules it.
"""
filters = { "enabled": True }
task_schedule_ids = frappe.db.get_all("BTU Task Schedule", filters=filters, pluck='name')
for task_schedule_id in task_schedule_ids:
try:
doc_schedule = frappe.get_doc("BTU Task Schedule", task_schedule_id)
doc_schedule.validate()
doc_schedule.resubmit_task_schedule()
except Exception as ex:
message = f"Error from BTU Scheduler while submitting Task {doc_schedule.name} : {ex}"
frappe.msgprint(message)
print(message)
doc_schedule.enabled = False
doc_schedule.save()
| 33.109848
| 122
| 0.7363
|
c5aaa8ec556cfc07a7ab789e9586e4ec49ca75fd
| 16,844
|
py
|
Python
|
sdk/python/pulumi_gcp/storage/bucket_acl.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 121
|
2018-06-18T19:16:42.000Z
|
2022-03-31T06:06:48.000Z
|
sdk/python/pulumi_gcp/storage/bucket_acl.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 492
|
2018-06-22T19:41:03.000Z
|
2022-03-31T15:33:53.000Z
|
sdk/python/pulumi_gcp/storage/bucket_acl.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 43
|
2018-06-19T01:43:13.000Z
|
2022-03-23T22:43:37.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['BucketACLArgs', 'BucketACL']
@pulumi.input_type
class BucketACLArgs:
def __init__(__self__, *,
bucket: pulumi.Input[str],
default_acl: Optional[pulumi.Input[str]] = None,
predefined_acl: Optional[pulumi.Input[str]] = None,
role_entities: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
"""
The set of arguments for constructing a BucketACL resource.
:param pulumi.Input[str] bucket: The name of the bucket it applies to.
:param pulumi.Input[str] default_acl: Configure this ACL to be the default ACL.
:param pulumi.Input[str] predefined_acl: The [canned GCS ACL](https://cloud.google.com/storage/docs/access-control/lists#predefined-acl) to apply. Must be set if `role_entity` is not.
:param pulumi.Input[Sequence[pulumi.Input[str]]] role_entities: List of role/entity pairs in the form `ROLE:entity`. See [GCS Bucket ACL documentation](https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls) for more details. Must be set if `predefined_acl` is not.
"""
pulumi.set(__self__, "bucket", bucket)
if default_acl is not None:
pulumi.set(__self__, "default_acl", default_acl)
if predefined_acl is not None:
pulumi.set(__self__, "predefined_acl", predefined_acl)
if role_entities is not None:
pulumi.set(__self__, "role_entities", role_entities)
@property
@pulumi.getter
def bucket(self) -> pulumi.Input[str]:
"""
The name of the bucket it applies to.
"""
return pulumi.get(self, "bucket")
@bucket.setter
def bucket(self, value: pulumi.Input[str]):
pulumi.set(self, "bucket", value)
@property
@pulumi.getter(name="defaultAcl")
def default_acl(self) -> Optional[pulumi.Input[str]]:
"""
Configure this ACL to be the default ACL.
"""
return pulumi.get(self, "default_acl")
@default_acl.setter
def default_acl(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "default_acl", value)
@property
@pulumi.getter(name="predefinedAcl")
def predefined_acl(self) -> Optional[pulumi.Input[str]]:
"""
The [canned GCS ACL](https://cloud.google.com/storage/docs/access-control/lists#predefined-acl) to apply. Must be set if `role_entity` is not.
"""
return pulumi.get(self, "predefined_acl")
@predefined_acl.setter
def predefined_acl(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "predefined_acl", value)
@property
@pulumi.getter(name="roleEntities")
def role_entities(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
List of role/entity pairs in the form `ROLE:entity`. See [GCS Bucket ACL documentation](https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls) for more details. Must be set if `predefined_acl` is not.
"""
return pulumi.get(self, "role_entities")
@role_entities.setter
def role_entities(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "role_entities", value)
@pulumi.input_type
class _BucketACLState:
def __init__(__self__, *,
bucket: Optional[pulumi.Input[str]] = None,
default_acl: Optional[pulumi.Input[str]] = None,
predefined_acl: Optional[pulumi.Input[str]] = None,
role_entities: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
"""
Input properties used for looking up and filtering BucketACL resources.
:param pulumi.Input[str] bucket: The name of the bucket it applies to.
:param pulumi.Input[str] default_acl: Configure this ACL to be the default ACL.
:param pulumi.Input[str] predefined_acl: The [canned GCS ACL](https://cloud.google.com/storage/docs/access-control/lists#predefined-acl) to apply. Must be set if `role_entity` is not.
:param pulumi.Input[Sequence[pulumi.Input[str]]] role_entities: List of role/entity pairs in the form `ROLE:entity`. See [GCS Bucket ACL documentation](https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls) for more details. Must be set if `predefined_acl` is not.
"""
if bucket is not None:
pulumi.set(__self__, "bucket", bucket)
if default_acl is not None:
pulumi.set(__self__, "default_acl", default_acl)
if predefined_acl is not None:
pulumi.set(__self__, "predefined_acl", predefined_acl)
if role_entities is not None:
pulumi.set(__self__, "role_entities", role_entities)
@property
@pulumi.getter
def bucket(self) -> Optional[pulumi.Input[str]]:
"""
The name of the bucket it applies to.
"""
return pulumi.get(self, "bucket")
@bucket.setter
def bucket(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "bucket", value)
@property
@pulumi.getter(name="defaultAcl")
def default_acl(self) -> Optional[pulumi.Input[str]]:
"""
Configure this ACL to be the default ACL.
"""
return pulumi.get(self, "default_acl")
@default_acl.setter
def default_acl(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "default_acl", value)
@property
@pulumi.getter(name="predefinedAcl")
def predefined_acl(self) -> Optional[pulumi.Input[str]]:
"""
The [canned GCS ACL](https://cloud.google.com/storage/docs/access-control/lists#predefined-acl) to apply. Must be set if `role_entity` is not.
"""
return pulumi.get(self, "predefined_acl")
@predefined_acl.setter
def predefined_acl(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "predefined_acl", value)
@property
@pulumi.getter(name="roleEntities")
def role_entities(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
List of role/entity pairs in the form `ROLE:entity`. See [GCS Bucket ACL documentation](https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls) for more details. Must be set if `predefined_acl` is not.
"""
return pulumi.get(self, "role_entities")
@role_entities.setter
def role_entities(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "role_entities", value)
class BucketACL(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
bucket: Optional[pulumi.Input[str]] = None,
default_acl: Optional[pulumi.Input[str]] = None,
predefined_acl: Optional[pulumi.Input[str]] = None,
role_entities: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
__props__=None):
"""
Authoritatively manages a bucket's ACLs in Google cloud storage service (GCS). For more information see
[the official documentation](https://cloud.google.com/storage/docs/access-control/lists)
and
[API](https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls).
Bucket ACLs can be managed non authoritatively using the `storage_bucket_access_control` resource. Do not use these two resources in conjunction to manage the same bucket.
Permissions can be granted either by ACLs or Cloud IAM policies. In general, permissions granted by Cloud IAM policies do not appear in ACLs, and permissions granted by ACLs do not appear in Cloud IAM policies. The only exception is for ACLs applied directly on a bucket and certain bucket-level Cloud IAM policies, as described in [Cloud IAM relation to ACLs](https://cloud.google.com/storage/docs/access-control/iam#acls).
**NOTE** This resource will not remove the `project-owners-<project_id>` entity from the `OWNER` role.
## Example Usage
Example creating an ACL on a bucket with one owner, and one reader.
```python
import pulumi
import pulumi_gcp as gcp
image_store = gcp.storage.Bucket("image-store", location="EU")
image_store_acl = gcp.storage.BucketACL("image-store-acl",
bucket=image_store.name,
role_entities=[
"OWNER:user-my.email@gmail.com",
"READER:group-mygroup",
])
```
## Import
This resource does not support import.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] bucket: The name of the bucket it applies to.
:param pulumi.Input[str] default_acl: Configure this ACL to be the default ACL.
:param pulumi.Input[str] predefined_acl: The [canned GCS ACL](https://cloud.google.com/storage/docs/access-control/lists#predefined-acl) to apply. Must be set if `role_entity` is not.
:param pulumi.Input[Sequence[pulumi.Input[str]]] role_entities: List of role/entity pairs in the form `ROLE:entity`. See [GCS Bucket ACL documentation](https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls) for more details. Must be set if `predefined_acl` is not.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: BucketACLArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Authoritatively manages a bucket's ACLs in Google cloud storage service (GCS). For more information see
[the official documentation](https://cloud.google.com/storage/docs/access-control/lists)
and
[API](https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls).
Bucket ACLs can be managed non authoritatively using the `storage_bucket_access_control` resource. Do not use these two resources in conjunction to manage the same bucket.
Permissions can be granted either by ACLs or Cloud IAM policies. In general, permissions granted by Cloud IAM policies do not appear in ACLs, and permissions granted by ACLs do not appear in Cloud IAM policies. The only exception is for ACLs applied directly on a bucket and certain bucket-level Cloud IAM policies, as described in [Cloud IAM relation to ACLs](https://cloud.google.com/storage/docs/access-control/iam#acls).
**NOTE** This resource will not remove the `project-owners-<project_id>` entity from the `OWNER` role.
## Example Usage
Example creating an ACL on a bucket with one owner, and one reader.
```python
import pulumi
import pulumi_gcp as gcp
image_store = gcp.storage.Bucket("image-store", location="EU")
image_store_acl = gcp.storage.BucketACL("image-store-acl",
bucket=image_store.name,
role_entities=[
"OWNER:user-my.email@gmail.com",
"READER:group-mygroup",
])
```
## Import
This resource does not support import.
:param str resource_name: The name of the resource.
:param BucketACLArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(BucketACLArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
bucket: Optional[pulumi.Input[str]] = None,
default_acl: Optional[pulumi.Input[str]] = None,
predefined_acl: Optional[pulumi.Input[str]] = None,
role_entities: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = BucketACLArgs.__new__(BucketACLArgs)
if bucket is None and not opts.urn:
raise TypeError("Missing required property 'bucket'")
__props__.__dict__["bucket"] = bucket
__props__.__dict__["default_acl"] = default_acl
__props__.__dict__["predefined_acl"] = predefined_acl
__props__.__dict__["role_entities"] = role_entities
super(BucketACL, __self__).__init__(
'gcp:storage/bucketACL:BucketACL',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
bucket: Optional[pulumi.Input[str]] = None,
default_acl: Optional[pulumi.Input[str]] = None,
predefined_acl: Optional[pulumi.Input[str]] = None,
role_entities: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None) -> 'BucketACL':
"""
Get an existing BucketACL resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] bucket: The name of the bucket it applies to.
:param pulumi.Input[str] default_acl: Configure this ACL to be the default ACL.
:param pulumi.Input[str] predefined_acl: The [canned GCS ACL](https://cloud.google.com/storage/docs/access-control/lists#predefined-acl) to apply. Must be set if `role_entity` is not.
:param pulumi.Input[Sequence[pulumi.Input[str]]] role_entities: List of role/entity pairs in the form `ROLE:entity`. See [GCS Bucket ACL documentation](https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls) for more details. Must be set if `predefined_acl` is not.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _BucketACLState.__new__(_BucketACLState)
__props__.__dict__["bucket"] = bucket
__props__.__dict__["default_acl"] = default_acl
__props__.__dict__["predefined_acl"] = predefined_acl
__props__.__dict__["role_entities"] = role_entities
return BucketACL(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def bucket(self) -> pulumi.Output[str]:
"""
The name of the bucket it applies to.
"""
return pulumi.get(self, "bucket")
@property
@pulumi.getter(name="defaultAcl")
def default_acl(self) -> pulumi.Output[Optional[str]]:
"""
Configure this ACL to be the default ACL.
"""
return pulumi.get(self, "default_acl")
@property
@pulumi.getter(name="predefinedAcl")
def predefined_acl(self) -> pulumi.Output[Optional[str]]:
"""
The [canned GCS ACL](https://cloud.google.com/storage/docs/access-control/lists#predefined-acl) to apply. Must be set if `role_entity` is not.
"""
return pulumi.get(self, "predefined_acl")
@property
@pulumi.getter(name="roleEntities")
def role_entities(self) -> pulumi.Output[Sequence[str]]:
"""
List of role/entity pairs in the form `ROLE:entity`. See [GCS Bucket ACL documentation](https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls) for more details. Must be set if `predefined_acl` is not.
"""
return pulumi.get(self, "role_entities")
| 48.125714
| 432
| 0.662076
|
e18dc4a97db7201d7a2fbdf99363a33c2a4329a8
| 2,048
|
py
|
Python
|
plugins/change_settings/start_wavelength.py
|
manuelprogramming/OSA
|
3a57ea944eef3e8680055a35e8cebd36b93dac51
|
[
"MIT",
"Unlicense"
] | 1
|
2022-01-06T21:00:01.000Z
|
2022-01-06T21:00:01.000Z
|
plugins/change_settings/start_wavelength.py
|
manuelprogramming/OSA
|
3a57ea944eef3e8680055a35e8cebd36b93dac51
|
[
"MIT",
"Unlicense"
] | null | null | null |
plugins/change_settings/start_wavelength.py
|
manuelprogramming/OSA
|
3a57ea944eef3e8680055a35e8cebd36b93dac51
|
[
"MIT",
"Unlicense"
] | null | null | null |
from dataclasses import dataclass
from osa import factory
from handlers.file import set_setting, get_setting
from handlers.result import BaseResult
@dataclass
class ChangeStartWavelength:
"""
Change the start_wavelength in the settings.json file,
wavelength (nm): 600.0 to 1750.0 Specify smaller value than Stop wavelength
"""
command: str
result: BaseResult
def do_work(self) -> BaseResult:
start_wavelength = self.ask_start_wavelength()
if isinstance(start_wavelength, str):
self._fail_result(start_wavelength)
return self.result
self._change_start_wavelength(start_wavelength)
return self.result
def _change_start_wavelength(self, start_wavelength: float) -> None:
stop_wavelength = get_setting("stop_wavelength")
if not self._wavelength_is_valid(start_wavelength, stop_wavelength):
self._fail_result(start_wavelength)
else:
set_setting("start_wavelength", start_wavelength)
self._success_result(start_wavelength)
def _success_result(self, start_wavelength: float) -> None:
self.result.msg = f"number for start_wavelength changed to '{start_wavelength}'"
self.result.value = start_wavelength
def _fail_result(self, start_wavelength: float or str) -> None:
self.result.msg = f"number for start_wavelength invalid '{start_wavelength}'"
self.result.value = start_wavelength
@staticmethod
def _wavelength_is_valid(start_wavelength: float, stop_wavelength: float) -> bool:
return (600.0 <= start_wavelength <= 1750.0) and (start_wavelength < stop_wavelength)
@staticmethod
    def ask_start_wavelength() -> float or str:
ans = input("#### Start wavelength (nm) 600.0 to 1750.0 Specify smaller value than Stop wavelength.\n")
try:
return round(float(ans), 1)
except ValueError:
return ans
def initialize() -> None:
factory.register("change_start_wavelength", ChangeStartWavelength)
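# Illustrative sketch (not part of the original plugin): the same validity rule as
# ChangeStartWavelength._wavelength_is_valid, written as a free function so it can
# be tried without the plugin machinery; the stop-wavelength default below is a
# placeholder value.
def _sketch_start_wavelength_is_valid(start_nm: float, stop_nm: float = 1700.0) -> bool:
    # Valid when inside the instrument's 600.0-1750.0 nm range and below the stop wavelength.
    return (600.0 <= start_nm <= 1750.0) and (start_nm < stop_nm)
# e.g. _sketch_start_wavelength_is_valid(650.0)  -> True
#      _sketch_start_wavelength_is_valid(1720.0) -> False (not below the stop wavelength)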
| 37.236364
| 111
| 0.702637
|
3088d42badf362d7b71c5a63dc3e1a3f2a1f8fa5
| 5,063
|
py
|
Python
|
CNN_Datasets/O_A/datasets/MFPTFFT.py
|
MasterXin2020/DL-based-Intelligent-Diagnosis-Benchmark
|
dd49b077984d15802681a9be723937342d6b59e3
|
[
"MIT"
] | 200
|
2019-12-01T16:20:14.000Z
|
2022-03-31T05:52:06.000Z
|
CNN_Datasets/O_A/datasets/MFPTFFT.py
|
MasterXin2020/DL-based-Intelligent-Diagnosis-Benchmark
|
dd49b077984d15802681a9be723937342d6b59e3
|
[
"MIT"
] | 9
|
2020-06-03T02:30:25.000Z
|
2022-03-18T08:50:10.000Z
|
CNN_Datasets/O_A/datasets/MFPTFFT.py
|
MasterXin2020/DL-based-Intelligent-Diagnosis-Benchmark
|
dd49b077984d15802681a9be723937342d6b59e3
|
[
"MIT"
] | 84
|
2019-12-12T03:46:19.000Z
|
2022-03-21T09:11:31.000Z
|
import os
import numpy as np
import pandas as pd
from scipy.io import loadmat
from sklearn.model_selection import train_test_split
from datasets.SequenceDatasets import dataset
from datasets.sequence_aug import *
from tqdm import tqdm
#Digital data was collected at 50,000 samples per second
signal_size=1024
#label
label1 = [1,2,3,4,5,6,7]
label2 = [ 8,9,10,11,12,13,14] #The failure data is labeled 1-14
#generate Training Dataset and Testing Dataset
def get_files(root, test=False):
'''
This function is used to generate the final training set and test set.
root:The location of the data set
'''
m = os.listdir(root)
datasetname = os.listdir(os.path.join("/tmp", root, m[0])) # '1 - Three Baseline Conditions'
# '2 - Three Outer Race Fault Conditions'
# '3 - Seven More Outer Race Fault Conditions'
# '4 - Seven Inner Race Fault Conditions'
# '5 - Analyses',
# '6 - Real World Examples
# Generate a list of data
dataset1 = os.listdir(os.path.join("/tmp", root, m[0], datasetname[0])) # 'Three Baseline Conditions'
dataset2 = os.listdir(os.path.join("/tmp", root, m[0], datasetname[2])) # 'Seven More Outer Race Fault Conditions'
dataset3 = os.listdir(os.path.join("/tmp", root, m[0], datasetname[3])) # 'Seven Inner Race Fault Conditions'
data_root1 = os.path.join('/tmp',root,m[0],datasetname[0]) #Path of Three Baseline Conditions
data_root2 = os.path.join('/tmp',root,m[0],datasetname[2]) #Path of Seven More Outer Race Fault Conditions
data_root3 = os.path.join('/tmp',root,m[0],datasetname[3]) #Path of Seven Inner Race Fault Conditions
path1=os.path.join('/tmp',data_root1,dataset1[0])
data, lab = data_load(path1,label=0) #The label for normal data is 0
for i in tqdm(range(len(dataset2))):
path2=os.path.join('/tmp',data_root2,dataset2[i])
data1, lab1 = data_load(path2,label=label1[i])
data += data1
lab += lab1
for j in tqdm(range(len(dataset3))):
path3=os.path.join('/tmp',data_root3,dataset3[j])
data2, lab2 = data_load(path3,label=label2[j])
data += data2
lab += lab2
return [data,lab]
def data_load(filename,label):
'''
This function is mainly used to generate test data and training data.
filename:Data location
'''
if label==0:
fl = (loadmat(filename)["bearing"][0][0][1]) #Take out the data
else:
fl = (loadmat(filename)["bearing"][0][0][2]) #Take out the data
fl = fl.reshape(-1,)
data=[]
lab=[]
start,end=0,signal_size
while end<=fl.shape[0]:
x = fl[start:end]
x = np.fft.fft(x)
x = np.abs(x) / len(x)
x = x[range(int(x.shape[0] / 2))]
x = x.reshape(-1,1)
data.append(x)
lab.append(label)
start +=signal_size
end +=signal_size
return data,lab
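# Illustrative sketch (not part of the original module): the same windowed-FFT
# preprocessing applied to a synthetic signal, assuming only numpy. Each window of
# `signal_size` samples is transformed, magnitude-normalised by the window length,
# and only the first half of the (symmetric) spectrum is kept.
def _sketch_fft_windows(signal, window=signal_size):
    segments = []
    start, end = 0, window
    while end <= signal.shape[0]:
        x = np.abs(np.fft.fft(signal[start:end])) / window
        segments.append(x[: window // 2].reshape(-1, 1))
        start += window
        end += window
    return segments
# e.g. _sketch_fft_windows(np.sin(np.linspace(0, 100, 4096))) returns 4 windows,
# each of shape (512, 1).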
def data_transforms(dataset_type="train", normlize_type="-1-1"):
transforms = {
'train': Compose([
Reshape(),
Normalize(normlize_type),
RandomAddGaussian(),
RandomScale(),
RandomStretch(),
RandomCrop(),
Retype()
]),
'val': Compose([
Reshape(),
Normalize(normlize_type),
Retype()
])
}
return transforms[dataset_type]
def train_test_split_order(data_pd, test_size=0.8, num_classes=10):
train_pd = pd.DataFrame(columns=('data', 'label'))
val_pd = pd.DataFrame(columns=('data', 'label'))
for i in range(num_classes):
data_pd_tmp = data_pd[data_pd['label'] == i].reset_index(drop=True)
train_pd = train_pd.append(data_pd_tmp.loc[:int((1-test_size)*data_pd_tmp.shape[0]), ['data', 'label']], ignore_index=True)
val_pd = val_pd.append(data_pd_tmp.loc[int((1-test_size)*data_pd_tmp.shape[0]):, ['data', 'label']], ignore_index=True)
return train_pd,val_pd
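# Illustrative note (not part of the original module): the split above is ordered
# rather than shuffled - for each class label the first (1 - test_size) fraction of
# rows goes to train and the rest to validation. With test_size=0.2 and 10 rows per
# class, rows 0-8 go to train and rows 8-9 to validation; the boundary row appears
# in both because pandas .loc slicing is inclusive at both ends.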
#--------------------------------------------------------------------------------------------------------------------
class MFPTFFT(object):
num_classes = 15
inputchannel = 1
def __init__(self, data_dir,normlizetype):
self.data_dir = data_dir
self.normlizetype = normlizetype
def data_preprare(self, test=False):
list_data = get_files(self.data_dir, test)
if test:
test_dataset = dataset(list_data=list_data, test=True, transform=None)
return test_dataset
else:
data_pd = pd.DataFrame({"data": list_data[0], "label": list_data[1]})
train_pd, val_pd = train_test_split_order(data_pd, test_size=0.2, num_classes= 15)
train_dataset = dataset(list_data=train_pd, transform=data_transforms('train',self.normlizetype))
val_dataset = dataset(list_data=val_pd, transform=data_transforms('val',self.normlizetype))
return train_dataset, val_dataset
| 38.067669
| 132
| 0.603595
|
4f1627c2087d411d912e9988640bc9359a267222
| 1,589
|
py
|
Python
|
test/core_parallel_actions.py
|
MaxSac/build
|
482c25f3a26171073c7e6c59f0427f2259a63fec
|
[
"BSL-1.0"
] | 11,356
|
2017-12-08T19:42:32.000Z
|
2022-03-31T16:55:25.000Z
|
test/core_parallel_actions.py
|
MaxSac/build
|
482c25f3a26171073c7e6c59f0427f2259a63fec
|
[
"BSL-1.0"
] | 2,402
|
2017-12-08T22:31:01.000Z
|
2022-03-28T19:25:52.000Z
|
test/core_parallel_actions.py
|
MaxSac/build
|
482c25f3a26171073c7e6c59f0427f2259a63fec
|
[
"BSL-1.0"
] | 1,343
|
2017-12-08T19:47:19.000Z
|
2022-03-26T11:31:36.000Z
|
#!/usr/bin/python
# Copyright 2006 Rene Rivera.
# Copyright 2011 Steven Watanabe
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
import BoostBuild
t = BoostBuild.Tester(["-d1"], pass_toolset=0)
t.write("sleep.bat", """\
::@timeout /T %1 /NOBREAK >nul
@ping 127.0.0.1 -n 2 -w 1000 >nul
@ping 127.0.0.1 -n %1 -w 1000 >nul
@exit /B 0
""")
t.write("file.jam", """\
if $(NT)
{
actions sleeper
{
echo [$(<:S)] 0
call sleep.bat 1
echo [$(<:S)] 1
call sleep.bat 1
echo [$(<:S)] 2
call sleep.bat $(<:B)
}
}
else
{
actions sleeper
{
echo "[$(<:S)] 0"
sleep 1
echo "[$(<:S)] 1"
sleep 1
echo "[$(<:S)] 2"
sleep $(<:B)
}
}
rule sleeper
{
DEPENDS $(<) : $(>) ;
}
NOTFILE front ;
sleeper 1.a : front ;
sleeper 2.a : front ;
sleeper 3.a : front ;
sleeper 4.a : front ;
NOTFILE choke ;
DEPENDS choke : 1.a 2.a 3.a 4.a ;
sleeper 1.b : choke ;
sleeper 2.b : choke ;
sleeper 3.b : choke ;
sleeper 4.b : choke ;
DEPENDS bottom : 1.b 2.b 3.b 4.b ;
DEPENDS all : bottom ;
""")
t.run_build_system(["-ffile.jam", "-j4"], stdout="""\
...found 12 targets...
...updating 8 targets...
sleeper 1.a
[.a] 0
[.a] 1
[.a] 2
sleeper 2.a
[.a] 0
[.a] 1
[.a] 2
sleeper 3.a
[.a] 0
[.a] 1
[.a] 2
sleeper 4.a
[.a] 0
[.a] 1
[.a] 2
sleeper 1.b
[.b] 0
[.b] 1
[.b] 2
sleeper 2.b
[.b] 0
[.b] 1
[.b] 2
sleeper 3.b
[.b] 0
[.b] 1
[.b] 2
sleeper 4.b
[.b] 0
[.b] 1
[.b] 2
...updated 8 targets...
""")
t.cleanup()
| 15.278846
| 81
| 0.541221
|
bed4b22c8f87383d22acb11ba932121590c75cf0
| 25,562
|
py
|
Python
|
utils/atlasUtility.py
|
varun-invent/feature-extractor
|
82a4f2e0a0523e0e6bcd5d7cd0d434e6d76951f0
|
[
"MIT"
] | null | null | null |
utils/atlasUtility.py
|
varun-invent/feature-extractor
|
82a4f2e0a0523e0e6bcd5d7cd0d434e6d76951f0
|
[
"MIT"
] | null | null | null |
utils/atlasUtility.py
|
varun-invent/feature-extractor
|
82a4f2e0a0523e0e6bcd5d7cd0d434e6d76951f0
|
[
"MIT"
] | null | null | null |
# coding: utf-8
import pandas as pd
import nibabel as nib
import numpy as np
import math
import xml.etree.ElementTree as ET
from tqdm import tqdm
import argparse
# In[66]:
# Improved
class queryAtlas:
'''
atlasPaths : List of paths to the nii files to be considered in order of preference
atlasLabelsPaths : List of paths to the xml files containing labels to be considered in order of preference
coord : List [x,y,z] of MNI coordinates
Usage:
>>> atlasPaths1 = ['hoAtlas/HarvardOxford-cort-maxprob-thr25-2mm.nii.gz',\
'hoAtlas/HarvardOxford-sub-maxprob-thr25-2mm.nii.gz',
'cerebellumAtlas/Cerebellum-MNIflirt-maxprob-thr25-1mm.nii.gz']
>>> atlasLabelsPaths1 = ['hoAtlas/HarvardOxford-Cortical.xml','hoAtlas/HarvardOxford-Subcortical.xml',\
'cerebellumAtlas/Cerebellum_MNIflirt.xml']
>>> q1 = queryAtlas(atlasPaths1, atlasLabelsPaths1)
>>> q1.getAtlasRegions([-6,62,-2])
(1, 'Frontal Pole', 0)
    Also, this class supports the use of a 4D probability map of the atlas to find the nearby
    maximum probability region within a 3-voxel cube around the query voxel if the region at a
    particular voxel is not present in the atlas.
Just provide a 4D nii.gz file path as atlas_path
To include the HO_subcortical atlas's WM or Cerebral cortex regions
in the region prediction set WM_HEMISPHERE_INFO=True while initializing the
class object:
>>> q1 = queryAtlas(atlasPaths1, atlasLabelsPaths1, True)
'''
def __init__(self,atlasPaths,atlasLabelsPaths,WM_HEMISPHERE_INFO=False,atlas_xml_zero_start_index=True):
self.atlasPaths = atlasPaths
self.atlasLabelsPaths = atlasLabelsPaths
self.itr = [0,1,-1,2,-2,3,-3] # represents the neighbourhood to search the queryVoxel
self.atlas_list = []
self.pixdim_list = []
self.WM_HEMISPHERE_INFO = WM_HEMISPHERE_INFO
""" When the atlas labels xml file contains index starting from zero and
increasing then this flag is set to True otherwise False. This is
useful while creating the atlas region name dictionary in roiName().
In the atlas provided in FSL such as Harvard oxford and cerebellum atlas
, the index starts from zero and in incremented for each region whereas
in AAL atlas, the index is same as label in Atlas
"""
self.ATLAS_XML_ZERO_START_INDEX = atlas_xml_zero_start_index
for index,atlasPath in enumerate(self.atlasPaths):
_atlas = nib.load(atlasPath)
atlas = _atlas.get_data()
self.atlas_list.append(atlas)
"""
Following Code to set the prob variable to True if user has
entered probability maps and False if fixed labeled atlas is entered
TODO Raise Exception if all the atlases do not have common dimensions.
"""
atlas_shape_len = len(atlas.shape)
if atlas_shape_len == 4:
self.prob = True
elif atlas_shape_len == 3:
self.prob = False
else:
print('Exception: Atlas of unknown shape')
raise Exception('Exception: Atlas of unknown shape. Exiting!')
print('Atlas read')
if _atlas.header['pixdim'][1] == 3:
pixdim = 3
elif _atlas.header['pixdim'][1] == 2:
pixdim = 2
elif _atlas.header['pixdim'][1] == 1:
pixdim = 1
else:
print('Unknown Pixel Dimension', _atlas.header['pixdim'][1])
raise Exception('Unknown Pixel Dimension', _atlas.header['pixdim'][1] )
self.pixdim_list.append(pixdim)
print('checked Pixel dimension')
@staticmethod
def MNI2XYZ1mm(mni):
"""
Converts the given MNI coordinates to X,Y,Z cartesian coordinates corresponding to the 1mm atlas
"""
x = - mni[0] + 90
y = mni[1] + 126
z = mni[2] + 72
return [x,y,z]
@staticmethod
def MNI2XYZ2mm(mni):
"""
Converts the given MNI coordinates to X,Y,Z cartesian coordinates corresponding to the 2mm atlas
"""
x = math.floor((- mni[0] + 90)/2.0)
y = math.floor((mni[1] + 126)/2.0)
z = math.floor((mni[2] + 72)/2.0)
return [x,y,z]
@staticmethod
def MNI2XYZ3mm(mni):
"""
Converts the given MNI coordinates to X,Y,Z cartesian coordinates corresponding to the 2mm atlas
"""
x = math.floor((- mni[0] + 90)/3.0)
y = math.floor((mni[1] + 126)/3.0)
z = math.floor((mni[2] + 72)/3.0)
return [x,y,z]
@staticmethod
def XYZ2MNI1mm(xyz):
"""
Converts the given X,Y,Z cartesian coordinates to MNI coordinates corresponding to the 1mm atlas
"""
mni_x = - xyz[0] + 90
mni_y = xyz[1] - 126
mni_z = xyz[2] -72
return [mni_x, mni_y, mni_z]
@staticmethod
def XYZ2MNI2mm(xyz):
"""
Converts the given X,Y,Z cartesian coordinates to MNI coordinates corresponding to the 2mm atlas
"""
mni_x = - 2*xyz[0] + 90
mni_y = 2*xyz[1] - 126
mni_z = 2*xyz[2] -72
return [mni_x, mni_y, mni_z]
@staticmethod
def XYZ2MNI3mm(xyz):
"""
Converts the given X,Y,Z cartesian coordinates to MNI coordinates corresponding to the 2mm atlas
"""
mni_x = - 3*xyz[0] + 90
mni_y = 3*xyz[1] - 126
mni_z = 3*xyz[2] -72
return [mni_x, mni_y, mni_z]
def roiName(self, atlasLabelsPath, roiNum):
'''
Takes as input the Atlas labels path and ROI Number and outputs the ROI name for that atlas
'''
atlasDict = {}
root = ET.parse(atlasLabelsPath).getroot()
elem = root.find('data')
for regionRow in elem.getchildren():
# roiNumber = int(regionRow.items()[2][1]) + 1 .items()
# gives the items in arbitary order so indexing changes therefore use key= 'index'
if self.ATLAS_XML_ZERO_START_INDEX:
roiNumber = int(regionRow.get(key='index')) + 1
else:
roiNumber = int(regionRow.get(key='index'))
roiName = regionRow.text
atlasDict[roiNumber] = roiName
return atlasDict[roiNum]
def getAtlasRegions(self, coordMni):
'''
Takes as input MNI coordinates and returns a tuple (ROI_number, Region_name, Atlas_index_used)
Algorithm:
Loops over multiple atlases
For each atlas get the maximum probability regions (2 here)
Select the max prob region with its increment vector which will be used later to calculate the distance
of each voxel fromt the query voxel.
Discard the first one if it is WM or Cerebral cortex and select the second if it is not WW or CCortex.
Save the [roiNumber, roiName ,final_coord ,final_itr, max_prob, index(representing atlas)] in an 'out' list
(Traverse this list to find the closest max probabiliy region across atlases)
Loop over the 'out' list
select the atlases that contain the closest regions
Loop over those regions to find the max probability region
return roiNumber, roiName, atlasIndex
'''
roiNumber = None
roiName = None
atlasIndex = 0
_roiNumber, _final_coord, _final_itr, _max_prob = [], None, None, None
out = []
for index,atlasPath in enumerate(self.atlasPaths):
if roiNumber == None or self.prob == True:
if self.pixdim_list[index] == 3:
x,y,z = self.MNI2XYZ3mm(coordMni)
elif self.pixdim_list[index] == 2:
x,y,z = self.MNI2XYZ2mm(coordMni)
elif self.pixdim_list[index] == 1:
x,y,z = self.MNI2XYZ1mm(coordMni)
else:
                    raise Exception('Unknown Pixel Dimension', self.pixdim_list[index])
if self.prob == False:
roiNumber = self.atlas_list[index][x,y,z]
if roiNumber == 0: # Coordinate lies outside the atlas
_roiNumber, _final_coord, _final_itr, _max_prob = self.get_neighbouring_coordinates(x,y,z,self.itr,index)
if _roiNumber != None:
roiNumber=_roiNumber[0]
final_coord=_final_coord[0]
final_itr=_final_itr[0]
max_prob=_max_prob[0]
else:
roiNumber = None
continue
else:
final_coord = [x,y,z]
final_itr = [0,0,0]
max_prob = 1
else:
# vec_of_prob = self.atlas_list[index][x,y,z,:]
# roiNumber = np.argmax(vec_of_prob) + 1 # [1 ... num_roi's]
# max_prob = vec_of_prob[roiNumber - 1]
# if True:#max_prob == 0: # # Coordinate is outside the atlas
_roiNumber, _final_coord, _final_itr, _max_prob = self.get_neighbouring_coordinates(x,y,z,self.itr,index, largest= 3)
if _roiNumber == None:
continue
# Getting the Highest probability region from 2 largest returned
roiNumber=_roiNumber[0]
final_coord=_final_coord[0]
final_itr=_final_itr[0]
max_prob=_max_prob[0]
# else:
# final_coord = [x,y,z]
# final_itr = [0,0,0]
# max_prob = 1
if roiNumber != 0:
roiName = self.roiName(self.atlasLabelsPaths[index], roiNumber)
roiName = roiName.strip()
# ----------------- When White Matter and Hemispheric information is not needed --------------------
# TODO Take as input from user about which ROI to ignore
if not self.WM_HEMISPHERE_INFO:
if self.prob == True:
if roiName == 'Right Cerebral White Matter' or roiName == 'Left Cerebral White Matter'\
or roiName == 'Left Cerebral Cortex' or roiName == 'Right Cerebral Cortex':
# Look for second largest in the same atlas
if len(_roiNumber) > 1: # If second highest prob region exists
# Getting the second Highest probability region
roiNumber=_roiNumber[1]
final_coord=_final_coord[1]
final_itr=_final_itr[1]
max_prob=_max_prob[1]
roiName = self.roiName(self.atlasLabelsPaths[index], roiNumber)
roiName = roiName.strip()
if roiName == 'Right Cerebral White Matter'\
or roiName == 'Left Cerebral White Matter'\
or roiName == 'Left Cerebral Cortex'\
or roiName == 'Right Cerebral Cortex':
if len(_roiNumber) > 2: # If Third highest prob region exists
roiNumber=_roiNumber[2]
final_coord=_final_coord[2]
final_itr=_final_itr[2]
max_prob=_max_prob[2]
roiName = self.roiName(self.atlasLabelsPaths[index], roiNumber)
roiName = roiName.strip()
if roiName == 'Right Cerebral White Matter'\
or roiName == 'Left Cerebral White Matter'\
or roiName == 'Left Cerebral Cortex'\
or roiName == 'Right Cerebral Cortex':
roiNumber = None # Nothing is assigned
else:
# When the third Highest probability region is relevant
out.append([roiNumber, roiName ,final_coord ,final_itr, max_prob, index])
else:
# When the second Highest probability region is relevant
out.append([roiNumber, roiName ,final_coord ,final_itr, max_prob, index])
else:
# Second highest probability region does not exist
continue
else:
# Coordinate region is neither WM nor Hemispheric Info
out.append([roiNumber, roiName ,final_coord ,final_itr, max_prob, index])
else:
# 3D Brain atlas is present i.e prob == False
if roiName == 'Right Cerebral White Matter' or roiName == 'Left Cerebral White Matter'\
or roiName == 'Left Cerebral Cortex' or roiName == 'Right Cerebral Cortex':
roiNumber = None
continue
else:
out.append([roiNumber, roiName ,final_coord ,final_itr, max_prob, index])
roiNumber = None # To move to next atlas
# ------------------------------------------------------------------------------------------------------
else:
# WM_HEMISPHERE_INFO is True i.e. this info is required
if self.prob:
out.append([roiNumber, roiName ,final_coord ,final_itr, max_prob, index])
else:
out.append([roiNumber, roiName ,final_coord ,final_itr, max_prob, index])
roiNumber = None # To move to next atlas
else:
# roiNumber is zero so set it to None and move to next atlas
roiNumber = None
"""
Loop over the 'out' list
select the atlases that contain the closest regions
Loop over those regions to find the max probability region
"""
# To find the minimum distance among all the arrays
final_output_idx = 0
final_min_dist = float('Inf')
for idx,output in enumerate(out):
dist = abs(output[3][0]) + abs(output[3][1]) + abs(output[3][2])
if final_min_dist > dist:
final_min_dist = dist
final_max_prob = 0
for idx,output in enumerate(out): # To find the max probability if there are multiple min arrays dist
dist = abs(output[3][0]) + abs(output[3][1]) + abs(output[3][2])
if dist == final_min_dist:
if final_max_prob < output[4]:
final_max_prob = output[4]
final_output_idx = idx
if len(out) == 0:
roiNumber, roiName, atlasIndex = 0, None, None
else:
roiNumber, roiName, atlasIndex = out[final_output_idx][0], out[final_output_idx][1], out[final_output_idx][5]
return int(roiNumber), roiName, atlasIndex
def _XYZ_2_MNI(self,xyz_list,atlas_index):
"""
Input: List of Lists. Each list is x,y,z is cartesian coordinates
Converts the cartesian coordinates to MNI
Output: List of Lists. Each list is x,y,z is MNI coordinates
"""
mni_list = []
for xyz in xyz_list:
if self.pixdim_list[atlas_index] == 3:
mni_list.append(self.XYZ2MNI3mm(xyz))
elif self.pixdim_list[atlas_index] == 2:
mni_list.append(self.XYZ2MNI2mm(xyz))
elif self.pixdim_list[atlas_index] == 1:
mni_list.append(self.XYZ2MNI1mm(xyz))
return mni_list
def _voxel_2_mm(self,voxel_coord_list,atlas_index):
"""
Input: List of Lists. Each list is x,y,z displacement in terms of voxels
Converts the number of voxels shift to mm shift. That is basically
it converts the voxels to milimeter.
Output: List of Lists. Each list is x,y,z displacement in terms of mm
"""
mm_cord_list = []
for voxel_coord in voxel_coord_list:
if self.pixdim_list[atlas_index] == 3:
mm_cord_list.append([i * 3 for i in voxel_coord]) # One Voxel = 3mm
elif self.pixdim_list[atlas_index] == 2:
mm_cord_list.append([i * 2 for i in voxel_coord]) # One Voxel = 2mm
elif self.pixdim_list[atlas_index] == 1:
mm_cord_list.append(voxel_coord)
return mm_cord_list
def get_neighbouring_coordinates(self,x,y,z, itr, atlas_index, largest=1):
"""
Takes X,Y,Z brain coordinates in image space and finds the region according to the given atlas.
        Using a cube of up to max(itr) voxels around the query voxel, it tries to find the most
        appropriate region according to the following criteria:
        The top `largest` regions are extracted for each atlas at the given voxel.
        If the given voxel does not have any information in the atlas, then the nearby voxels are searched
        for region information. The result is the nearest voxel's region. The nearest voxel is found by
        employing a distance metric given by `dist = abs(xi) + abs(yi) + abs(zi)` where xi, yi, zi are the
        increments in the location of the neighbourhood of the voxel under query.
Inputs:
Outputs:
final_roi_list: The top 2 ROI numbers
final_coord_list: Corresponding MNI Coordinates given as a list of lists
[[MNI_coordinates1],[MNI_coordinates2]]
final_itr_list: List of increments to reach at the target voxel
final_max_prob_list: List if probabilities of the corresponding regions selected
Algorithm: (for prob = True)
Loop over all voxel increments
Loop over how many largest regions are needed
Get the max region corresponding to the given voxel
If the max region lies inside the atlas
        Make sure we are not far from the region we have already found (i.e. prioritizing
        the distance from the query voxel over the probability of the neighbouring voxels).
        Populate the roi_list.
        if roi_list is not empty
        Populate the final_roi_list with the ROI list obtained above
Similarly populate the other final lists
"""
final_coord = [x,y,z]
old_dist = float('Inf')
final_itr = [0,0,0]
final_roi = 0
final_roi_list = []
final_coord_list = []
final_itr_list = []
final_max_prob_list = []
roi_list = []
coord_list = []
itr_list = []
max_prob_list = []
for xi in itr:
for yi in itr:
for zi in itr:
roi_list = []
coord_list = []
itr_list = []
max_prob_list = []
if self.prob:
vec_of_prob = np.array(
self.atlas_list[atlas_index][x-xi,y-yi,z-zi,:])
"""
                        It's an array of multiple probability values. A new
                        array is created using np.array() because setting the
                        largest value to 0 directly on vec_of_prob (a view into
                        the atlas data) prevented the same coordinates from being
                        queried again with the same class object, and returned
                        None from the second query onwards. """
else:
vec_of_prob = np.array(
[self.atlas_list[atlas_index][x-xi,y-yi,z-zi]])
# It's an array of a single value i.e the ROI number
largest = 1
"""Forced assignment as we are dealing with a
static atlas and not region probability maps """
for counter in range(largest):
roiNumber = np.argmax(vec_of_prob) + 1 # [1 ... num_roi's]
#TODO:
"""
Take as input from the user the ROI_ignore list to ignore.
                        If roiNumber is in ROI_ignore:
                            continue  # (keyword) This makes the loop jump to the next
                            # iteration. Need to check if it works.
"""
max_prob = vec_of_prob[roiNumber - 1]
if max_prob != 0: # the max roi lies inside the atlas
dist = abs(xi) + abs(yi) + abs(zi) # Distance metric
# check if the new distance 'dist' is less than the previously seen distance or not
if dist <= old_dist: #or final_itr == [0,0,0]
old_dist = dist
coord_list.append([x-xi, y-yi, z-zi])
final_itr = [xi,yi,zi]
itr_list.append([xi,yi,zi])
if self.prob:
roi_list.append(roiNumber)
max_prob_list.append(max_prob)
else:
roi_list.append(max_prob) # In fixed atlas, the probability/label value of the voxel denotes the region name
max_prob_list.append(1) # assign probability 1 to the fixed atlas region
vec_of_prob[roiNumber - 1] = 0 # to find second highest region
if len(roi_list) != 0:
final_roi_list = roi_list
final_coord_list = self._XYZ_2_MNI(coord_list, atlas_index)
final_itr_list = self._voxel_2_mm(itr_list, atlas_index)
final_max_prob_list = max_prob_list
if len(final_roi_list) == 0:
final_roi_list, final_coord_list, final_itr_list, final_max_prob_list = None, None, None, None
return final_roi_list, final_coord_list, final_itr_list, final_max_prob_list
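# Illustrative sketch (not part of the original module): a quick round-trip check of
# the 2mm MNI<->voxel-index conversions defined in the class above, using an
# arbitrary MNI coordinate.
def _sketch_mni_roundtrip_2mm(mni=(-6, 62, -2)):
    xyz = queryAtlas.MNI2XYZ2mm(list(mni))   # -> [48, 94, 35]
    back = queryAtlas.XYZ2MNI2mm(xyz)        # -> [-6, 62, -2]
    return xyz, back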
if __name__ == "__main__":
# Parser to parse commandline arguments
ap = argparse.ArgumentParser()
ap.add_argument("-mni", "--mni", nargs='+', required=True,
help="MNI Coordinates space seperated")
ap.add_argument("-p", "--prob", action='store_true', required=False,
help="-p True for using the probability maps")
args = vars(ap.parse_args())
# Reading the arguments
prob = args["prob"]
# Converting the numbers read as string to integers
MNI = list(map(int, args["mni"]))
base_path = '/home/varun/Projects/fmri/Autism-survey-connectivity-links-analysis/'
if prob:
atlas_paths = [
base_path + 'hoAtlas/HarvardOxford-cort-prob-1mm.nii.gz',
base_path + 'hoAtlas/HarvardOxford-sub-prob-1mm.nii.gz',
base_path + 'cerebellumAtlas/Cerebellum-MNIflirt-prob-1mm.nii.gz'
]
else:
atlas_paths = [
base_path + 'hoAtlas/HarvardOxford-cort-maxprob-thr25-1mm.nii.gz',
base_path + 'hoAtlas/HarvardOxford-sub-maxprob-thr25-1mm.nii.gz',
base_path + 'cerebellumAtlas/Cerebellum-MNIflirt-maxprob-thr25-1mm.nii.gz'
]
atlas_labels_paths = [
base_path + 'hoAtlas/HarvardOxford-Cortical.xml',
base_path + 'hoAtlas/HarvardOxford-Subcortical.xml',
base_path + 'cerebellumAtlas/Cerebellum_MNIflirt.xml'
]
obj = queryAtlas(atlas_paths,atlas_labels_paths)
print(obj.getAtlasRegions(MNI))
# Get the region from the above defined atlas
cont = True
while(cont):
MNI = input('Type space seperated MNI (Or Type q to quit): ')
if MNI == 'q':
cont = False
continue
# Converting the numbers read as string to integers
MNI = list(map(int, MNI.split()))
print(obj.getAtlasRegions(MNI))
# # In[72]:
#
# # atlasPath2 = ['juelichAtlas/Juelich-maxprob-thr25-1mm.nii.gz']
# atlasPath2 = ['juelichAtlas/Juelich-maxprob-thr0-1mm.nii.gz']
# # atlasPath2 = ['juelichAtlas/Juelich-prob-1mm.nii.gz']
#
# atlasLabelsPath2 = ['juelichAtlas/Juelich.xml']
# q2 = queryAtlas(atlasPath2,atlasLabelsPath2,False)
#
# # In[73]:
# q2.getAtlasRegions([33, -6, -6])
| 41.767974
| 144
| 0.539081
|
42301322b281030a4aad4bc3bf6865f483fb6049
| 3,550
|
py
|
Python
|
sdem/experiment.py
|
defaultobject/sdem
|
3e2c913454d45eb8c9d111fd9b80eae18905d164
|
[
"MIT"
] | null | null | null |
sdem/experiment.py
|
defaultobject/sdem
|
3e2c913454d45eb8c9d111fd9b80eae18905d164
|
[
"MIT"
] | null | null | null |
sdem/experiment.py
|
defaultobject/sdem
|
3e2c913454d45eb8c9d111fd9b80eae18905d164
|
[
"MIT"
] | null | null | null |
""" Wrapper around SacredExperiment. """
from sacred import Experiment as SacredExperiment
from sacred.observers import FileStorageObserver
import argparse
from pathlib import Path
import sys
from .computation import manager
from .computation import metrics
import inspect
import os
class Experiment(SacredExperiment):
def __init__(self, name="exp"):
caller_globals = inspect.stack()[1][0].f_globals
        # sacred uses inspect.stack() to see where the main function has been called from.
        # This is awkward when trying to call an experiment from a different class.
        # To get around this we check if the experiment has been run from the command line.
        # If not, then we change the working dir to the experiment's directory.
if caller_globals["__name__"] == "__main__":
super(Experiment, self).__init__(name)
else:
prev_cwd = os.getcwd()
os.chdir("../")
super(Experiment, self).__init__(name)
os.chdir(prev_cwd)
self.config_function = None
def configs(self, function):
self.config_function = function
def run_config(self, function, config, use_observer=True):
        self.observers = []  # reset observers
self.configurations = []
if use_observer:
self.observers.append(FileStorageObserver("runs"))
self.add_config(config)
captured_function = self.main(lambda: function(config))
#call sacred run method
self.run(captured_function.__name__)
def log_metrics(self, X, Y, prediction_fn, var_flag=True, log=True, prefix=None):
return metrics.log_regression_scalar_metrics(
self, X, Y, prediction_fn, var_flag=var_flag, log=log, prefix=prefix
)
def automain(self, function):
"""
        automain copies the functionality of sacred's automain. It will run the experiment using a specific config.
        When the file is run from the command line, the following command line arguments are supported:
int>=0: run specific config id [default = 0]
-1: run all experiment configs
"""
# check if the function was run through the command line or imported
# if imported we do not run the experiments
# TODO: check if required
# captured = self.main(function)
if function.__module__ != "__main__":
return
if self.config_function is None:
raise RuntimeError("No config function registered. ")
filename = Path(inspect.getfile(function))
parser = argparse.ArgumentParser()
parser.add_argument('i', type=int, default=-1, help='Experiment id to run')
parser.add_argument('--no-observer', action='store_true', default=False, help='Run without observer')
input_args = parser.parse_args()
use_observer = not(input_args.no_observer)
i = input_args.i
configs = self.config_function()
if i == -1:
# Run all experiments
for i, config in enumerate(configs):
config = manager.ensure_correct_fields_for_model_file_config(
filename, config, i
)
self.run_config(function, config, use_observer=use_observer)
else:
# Run specific experiment
config = manager.ensure_correct_fields_for_model_file_config(
filename, configs[i], i
)
self.run_config(function, config, use_observer=use_observer)
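# Illustrative sketch (not part of the original module): how a model script might
# use the wrapper above; the experiment name and config fields are placeholders.
#
#     ex = Experiment('my_model')
#
#     @ex.configs
#     def get_configs():
#         return [{'lr': 0.01, 'fold': 0}, {'lr': 0.01, 'fold': 1}]
#
#     @ex.automain
#     def main(config):
#         print('training with', config)
#
# Running `python my_model.py 0` would execute only the first config, while
# `python my_model.py -1 --no-observer` would run every config without the Sacred
# file observer.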
| 34.803922
| 109
| 0.644507
|
fc5614afb08b8b51e3aef599927897ec62a58f4c
| 6,648
|
py
|
Python
|
origin/origin_service.py
|
DanAustinGH/fHDHR_Locast
|
002117b666ad650c523aedb0209f1c996d576169
|
[
"WTFPL"
] | null | null | null |
origin/origin_service.py
|
DanAustinGH/fHDHR_Locast
|
002117b666ad650c523aedb0209f1c996d576169
|
[
"WTFPL"
] | null | null | null |
origin/origin_service.py
|
DanAustinGH/fHDHR_Locast
|
002117b666ad650c523aedb0209f1c996d576169
|
[
"WTFPL"
] | null | null | null |
import datetime
import re
import fHDHR.tools
import fHDHR.exceptions
class OriginService():
def __init__(self, fhdhr):
self.fhdhr = fhdhr
self.status_dict = {"donateExp": None}
self.location = {
"latitude": None,
"longitude": None,
"DMA": None,
"city": None,
"active": False
}
self.zipcode = self.fhdhr.config.dict["origin"]["override_zipcode"]
self.mock_location = self.format_mock_location()
self.login()
def login(self):
self.fhdhr.logger.info("Logging into Locast using username %s..." % self.fhdhr.config.dict["origin"]["username"])
self.token = self.get_token()
if not self.token:
raise fHDHR.exceptions.OriginSetupError("Locast Login Failed")
else:
self.fhdhr.logger.info("Locast Login Success")
self.status_dict["Login"] = "Success"
self.fhdhr.config.write(self.fhdhr.config.dict["main"]["dictpopname"], 'token', self.token)
return True
def get_token(self):
login_url = "https://api.locastnet.org/api/user/login"
login_headers = {'Content-Type': 'application/json'}
login_json = ('{"username":"' + self.fhdhr.config.dict["origin"]["username"] +
'","password":"' + self.fhdhr.config.dict["origin"]["password"] +
'"}').encode("utf-8")
try:
loginReq = self.fhdhr.web.session.post(login_url, data=login_json, headers=login_headers)
loginReq.raise_for_status()
except self.fhdhr.web.exceptions.HTTPError as err:
self.fhdhr.logger.error('Login Failed: %s' % err)
return None
loginRes = loginReq.json()
token = loginRes["token"]
try:
user_url = "https://api.locastnet.org/api/user/me"
user_headers = {
'Content-Type': 'application/json',
'authorization': 'Bearer %s' % token}
userReq = self.fhdhr.web.session.get(user_url, headers=user_headers)
userReq.raise_for_status()
except self.fhdhr.web.exceptions.HTTPError as err:
self.fhdhr.logger.error('Login Failed: %s' % err)
return None
userresp = userReq.json()
self.fhdhr.logger.info("User Info obtained.")
if (userresp['didDonate'] and
datetime.datetime.now() > datetime.datetime.fromtimestamp(userresp['donationExpire'] / 1000)):
self.fhdhr.logger.error("Donation expired")
return False
elif not userresp['didDonate']:
self.fhdhr.logger.error("User didn't donate")
return False
else:
donateExp = datetime.datetime.fromtimestamp(userresp['donationExpire'] / 1000)
self.fhdhr.logger.info("User donationExpire: {}".format(donateExp))
self.status_dict["donateExp"] = donateExp
if self.find_location():
self.fhdhr.logger.info("Got location as {} - DMA {} - Lat\Lon {}\{}"
.format(self.location['city'],
self.location['DMA'],
self.location['latitude'],
self.location['longitude']))
else:
return False
# Check that Locast reports this market is currently active and available.
if not self.location['active']:
            self.fhdhr.logger.info("Locast reports that this DMA\\Market area is not currently active!")
return False
return token
def set_location(self, geoRes):
self.location["latitude"] = str(geoRes['latitude'])
self.location["longitude"] = str(geoRes['longitude'])
self.location["DMA"] = str(geoRes['DMA'])
self.location["active"] = geoRes['active']
self.location["city"] = str(geoRes['name'])
def find_location(self):
zip_format = re.compile(r'^[0-9]{5}$')
# Check if the user provided override coords.
if self.mock_location:
return self.get_coord_location()
# Check if the user provided an override zipcode, and that it's valid.
elif self.zipcode and zip_format.match(self.zipcode):
return self.get_zip_location()
else:
# If no override zip, or not a valid ZIP, fallback to IP location.
return self.get_ip_location()
def get_geores_json(self, location_url, location_type):
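        # Shared helper: fetch a DMA/geolocation JSON document from the given
        # Locast endpoint and cache the result via set_location().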
location_headers = {'Content-Type': 'application/json'}
try:
req = self.fhdhr.web.session.get(location_url, headers=location_headers)
req.raise_for_status()
except self.fhdhr.web.exceptions.HTTPError:
self.fhdhr.logger.error("Unable to retrieve %s location." % location_type)
return False
geoRes = req.json()
self.set_location(geoRes)
return True
def get_zip_location(self):
"""Get geolocation via Locast, based on user provided zipcode."""
self.fhdhr.logger.info("Getting location via provided zipcode {}".format(self.zipcode))
location_url = 'https://api.locastnet.org/api/watch/dma/zip/{}'.format(self.zipcode)
return self.get_geores_json(location_url, "ZIP")
def get_ip_location(self):
"""Get geolocation via Locast by IP."""
self.fhdhr.logger.info("Getting location via IP Address.")
location_url = 'https://api.locastnet.org/api/watch/dma/ip'
return self.get_geores_json(location_url, "IP")
def get_coord_location(self):
        self.fhdhr.logger.info("Getting location via provided lat\\lon coordinates.")
# Get geolocation via Locast, using lat\lon coordinates.
lat = self.mock_location['latitude']
lon = self.mock_location['longitude']
location_url = 'https://api.locastnet.org/api/watch/dma/{}/{}'.format(lat, lon)
return self.get_geores_json(location_url, "Coordinate")
def format_mock_location(self):
if (not self.fhdhr.config.dict["origin"]["override_latitude"] or
not self.fhdhr.config.dict["origin"]["override_longitude"]):
return None
else:
loc_dict = {
"latitude": self.fhdhr.config.dict["origin"]["override_latitude"],
"longitude": self.fhdhr.config.dict["origin"]["override_longitude"],
}
return loc_dict
| 41.55 | 121 | 0.584988 |
c1547b1d9c4f119b7180d9d34765805bdfb88e87 | 548 | py | Python | lesson_2_task_3.py | anihorta/selenium_st | 57a8e205ba1bc745b5b34e1bd6c0b0362f3c52f9 | ["Apache-2.0"] | null | null | null | lesson_2_task_3.py | anihorta/selenium_st | 57a8e205ba1bc745b5b34e1bd6c0b0362f3c52f9 | ["Apache-2.0"] | null | null | null | lesson_2_task_3.py | anihorta/selenium_st | 57a8e205ba1bc745b5b34e1bd6c0b0362f3c52f9 | ["Apache-2.0"] | null | null | null |
import pytest
from selenium import webdriver
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
@pytest.fixture
def driver(request):
wd = webdriver.Chrome()
request.addfinalizer(wd.quit)
return wd
def test_example(driver):
driver.get("http://localhost/litecart/admin/login.php")
driver.find_element_by_name('username').send_keys('admin')
driver.find_element_by_name('password').send_keys('admin')
driver.find_element_by_name('login').click()
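# A minimal sketch (not part of the original lesson) of how the otherwise unused
# WebDriverWait / expected_conditions imports could confirm the login; the 'logout'
# element name is an assumption about the admin page, not a verified fact.
def wait_for_login(driver, timeout=10):
    from selenium.webdriver.common.by import By  # local import keeps the sketch self-contained
    # Block until an element named 'logout' is present, signalling a successful login.
    WebDriverWait(driver, timeout).until(
        EC.presence_of_element_located((By.NAME, "logout"))
    )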
| 26.095238 | 64 | 0.770073 |
4e9b0a70a57111f838130aa599ceb877557bf664 | 1,003 | py | Python | pandapower/control/__init__.py | junmuz/pandapower | 06dac12afb5725332ec497c2eda239d178e4882b | ["BSD-3-Clause"] | 104 | 2017-02-21T17:13:51.000Z | 2022-03-21T13:52:27.000Z | pandapower/control/__init__.py | junmuz/pandapower | 06dac12afb5725332ec497c2eda239d178e4882b | ["BSD-3-Clause"] | 126 | 2017-02-15T17:09:08.000Z | 2018-07-16T13:25:15.000Z | pandapower/control/__init__.py | junmuz/pandapower | 06dac12afb5725332ec497c2eda239d178e4882b | ["BSD-3-Clause"] | 57 | 2017-03-08T13:49:32.000Z | 2022-02-28T10:36:55.000Z |
import pandapower.control.basic_controller
import pandapower.control.controller
# --- Controller ---
from pandapower.control.controller.const_control import ConstControl
from pandapower.control.controller.characteristic_control import CharacteristicControl
from pandapower.control.controller.trafo.ContinuousTapControl import ContinuousTapControl
from pandapower.control.controller.trafo.DiscreteTapControl import DiscreteTapControl
from pandapower.control.controller.trafo.USetTapControl import USetTapControl
from pandapower.control.controller.trafo.TapDependentImpedance import TapDependentImpedance
from pandapower.control.controller.trafo_control import TrafoController
# --- Other ---
from pandapower.control.run_control import *
from pandapower.control.run_control import ControllerNotConverged
from pandapower.control.util.characteristic import Characteristic
from pandapower.control.util.auxiliary import get_controller_index
from pandapower.control.util.diagnostic import control_diagnostic
| 52.789474 | 91 | 0.881356 |
4e9237ea55e094e003b07c9e70821c06d7103535 | 132,417 | py | Python | autotest/ogr/ogr_geojson.py | ajolma/gdal | 19d847c8519919fcd1e7e7247644d28771034317 | ["MIT"] | null | null | null | autotest/ogr/ogr_geojson.py | ajolma/gdal | 19d847c8519919fcd1e7e7247644d28771034317 | ["MIT"] | null | null | null | autotest/ogr/ogr_geojson.py | ajolma/gdal | 19d847c8519919fcd1e7e7247644d28771034317 | ["MIT"] | 1 | 2019-11-01T15:17:09.000Z | 2019-11-01T15:17:09.000Z |
#!/usr/bin/env pytest
# -*- coding: utf-8 -*-
###############################################################################
# $Id$
#
# Project: GDAL/OGR Test Suite
# Purpose: GeoJSON driver test suite.
# Author: Mateusz Loskot <mateusz@loskot.net>
#
###############################################################################
# Copyright (c) 2007, Mateusz Loskot <mateusz@loskot.net>
# Copyright (c) 2009-2014, Even Rouault <even dot rouault at mines-paris dot org>
# Copyright (c) 2013, Kyle Shannon <kyle at pobox dot com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
###############################################################################
import json
import math
import os
from osgeo import osr
from osgeo import ogr
from osgeo import gdal
import gdaltest
import ogrtest
import pytest
###############################################################################
# Test utilities
def validate_layer(lyr, name, features, typ, fields, box):
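    # Sanity-check a layer against the expected name, feature count, geometry
    # type, field count and bounding box (box = (minx, maxx, miny, maxy)).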
if name is not None and name != lyr.GetName():
print('Wrong layer name')
return False
if features != lyr.GetFeatureCount():
print('Wrong number of features')
return False
lyrDefn = lyr.GetLayerDefn()
if lyrDefn is None:
print('Layer definition is none')
return False
if typ != lyrDefn.GetGeomType():
print('Wrong geometry type')
print(lyrDefn.GetGeomType())
return False
if fields != lyrDefn.GetFieldCount():
print('Wrong number of fields')
return False
extent = lyr.GetExtent()
minx = abs(extent[0] - box[0])
maxx = abs(extent[1] - box[1])
miny = abs(extent[2] - box[2])
maxy = abs(extent[3] - box[3])
if max(minx, maxx, miny, maxy) > 0.0001:
print('Wrong spatial extent of layer')
print(extent)
return False
return True
def verify_geojson_copy(name, fids, names):
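    # Re-open the GeoJSON copy written by copy_shape_to_geojson() and compare its
    # FID/NAME attributes and geometries against the cached source features.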
if gdaltest.gjpoint_feat is None:
print('Missing features collection')
return False
fname = os.path.join('tmp', name + '.geojson')
ds = ogr.Open(fname)
if ds is None:
print('Can not open \'' + fname + '\'')
return False
lyr = ds.GetLayer(0)
if lyr is None:
print('Missing layer')
return False
######################################################
# Test attributes
ret = ogrtest.check_features_against_list(lyr, 'FID', fids)
if ret != 1:
print('Wrong values in \'FID\' field')
return False
lyr.ResetReading()
ret = ogrtest.check_features_against_list(lyr, 'NAME', names)
if ret != 1:
print('Wrong values in \'NAME\' field')
return False
######################################################
# Test geometries
lyr.ResetReading()
for i in range(len(gdaltest.gjpoint_feat)):
orig_feat = gdaltest.gjpoint_feat[i]
feat = lyr.GetNextFeature()
if feat is None:
print('Failed trying to read feature')
return False
if ogrtest.check_feature_geometry(feat, orig_feat.GetGeometryRef(),
max_error=0.001) != 0:
print('Geometry test failed')
gdaltest.gjpoint_feat = None
return False
gdaltest.gjpoint_feat = None
lyr = None
return True
def copy_shape_to_geojson(gjname, compress=None):
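    # Copy data/<gjname>.shp into tmp/<gjname>.geojson, optionally routed through
    # the /vsigzip/, /vsizip/ or /vsistdout/ virtual filesystems.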
if gdaltest.geojson_drv is None:
return False
if compress is not None:
if compress[0:5] == '/vsig':
dst_name = os.path.join('/vsigzip/', 'tmp', gjname + '.geojson' + '.gz')
elif compress[0:4] == '/vsiz':
dst_name = os.path.join('/vsizip/', 'tmp', gjname + '.geojson' + '.zip')
elif compress == '/vsistdout/':
dst_name = compress
else:
return False
else:
dst_name = os.path.join('tmp', gjname + '.geojson')
ds = gdaltest.geojson_drv.CreateDataSource(dst_name)
if ds is None:
return False
######################################################
# Create layer
lyr = ds.CreateLayer(gjname)
if lyr is None:
return False
######################################################
    # Set up schema (all test shapefiles use a common schema)
ogrtest.quick_create_layer_def(lyr,
[('FID', ogr.OFTReal),
('NAME', ogr.OFTString)])
######################################################
# Copy in gjpoint.shp
dst_feat = ogr.Feature(feature_def=lyr.GetLayerDefn())
src_name = os.path.join('data', gjname + '.shp')
shp_ds = ogr.Open(src_name)
shp_lyr = shp_ds.GetLayer(0)
feat = shp_lyr.GetNextFeature()
gdaltest.gjpoint_feat = []
while feat is not None:
gdaltest.gjpoint_feat.append(feat)
dst_feat.SetFrom(feat)
lyr.CreateFeature(dst_feat)
feat = shp_lyr.GetNextFeature()
shp_lyr = None
lyr = None
ds = None
return True
###############################################################################
# Find GeoJSON driver
def test_ogr_geojson_1():
gdaltest.geojson_drv = ogr.GetDriverByName('GeoJSON')
if gdaltest.geojson_drv is not None:
return
pytest.fail()
###############################################################################
# Test file-based DS with standalone "Point" feature object.
def test_ogr_geojson_2():
if gdaltest.geojson_drv is None:
pytest.skip()
ds = ogr.Open('data/point.geojson')
assert ds is not None, 'Failed to open datasource'
    assert ds.GetLayerCount() == 1, 'Wrong number of layers'
lyr = ds.GetLayerByName('point')
assert lyr is not None, 'Missing layer called point'
extent = (100.0, 100.0, 0.0, 0.0)
rc = validate_layer(lyr, 'point', 1, ogr.wkbPoint, 0, extent)
assert rc
lyr = None
###############################################################################
# Test file-based DS with standalone "LineString" feature object.
def test_ogr_geojson_3():
if gdaltest.geojson_drv is None:
pytest.skip()
ds = ogr.Open('data/linestring.geojson')
assert ds is not None, 'Failed to open datasource'
    assert ds.GetLayerCount() == 1, 'Wrong number of layers'
lyr = ds.GetLayerByName('linestring')
assert lyr is not None, 'Missing layer called linestring'
extent = (100.0, 101.0, 0.0, 1.0)
rc = validate_layer(lyr, 'linestring', 1, ogr.wkbLineString, 0, extent)
assert rc
lyr = None
##############################################################################
# Test file-based DS with standalone "Polygon" feature object.
def test_ogr_geojson_4():
if gdaltest.geojson_drv is None:
pytest.skip()
ds = ogr.Open('data/polygon.geojson')
assert ds is not None, 'Failed to open datasource'
    assert ds.GetLayerCount() == 1, 'Wrong number of layers'
lyr = ds.GetLayerByName('polygon')
assert lyr is not None, 'Missing layer called polygon'
extent = (100.0, 101.0, 0.0, 1.0)
rc = validate_layer(lyr, 'polygon', 1, ogr.wkbPolygon, 0, extent)
assert rc
lyr = None
##############################################################################
# Test file-based DS with standalone "GeometryCollection" feature object.
def test_ogr_geojson_5():
if gdaltest.geojson_drv is None:
pytest.skip()
ds = ogr.Open('data/geometrycollection.geojson')
assert ds is not None, 'Failed to open datasource'
    assert ds.GetLayerCount() == 1, 'Wrong number of layers'
lyr = ds.GetLayerByName('geometrycollection')
assert lyr is not None, 'Missing layer called geometrycollection'
extent = (100.0, 102.0, 0.0, 1.0)
rc = validate_layer(lyr, 'geometrycollection', 1, ogr.wkbGeometryCollection, 0, extent)
assert rc
lyr = None
##############################################################################
# Test file-based DS with standalone "MultiPoint" feature object.
def test_ogr_geojson_6():
if gdaltest.geojson_drv is None:
pytest.skip()
ds = ogr.Open('data/multipoint.geojson')
assert ds is not None, 'Failed to open datasource'
    assert ds.GetLayerCount() == 1, 'Wrong number of layers'
lyr = ds.GetLayerByName('multipoint')
assert lyr is not None, 'Missing layer called multipoint'
extent = (100.0, 101.0, 0.0, 1.0)
rc = validate_layer(lyr, 'multipoint', 1, ogr.wkbMultiPoint, 0, extent)
assert rc
lyr = None
##############################################################################
# Test file-based DS with standalone "MultiLineString" feature object.
def test_ogr_geojson_7():
if gdaltest.geojson_drv is None:
pytest.skip()
ds = ogr.Open('data/multilinestring.geojson')
assert ds is not None, 'Failed to open datasource'
    assert ds.GetLayerCount() == 1, 'Wrong number of layers'
lyr = ds.GetLayerByName('multilinestring')
assert lyr is not None, 'Missing layer called multilinestring'
extent = (100.0, 103.0, 0.0, 3.0)
rc = validate_layer(lyr, 'multilinestring', 1, ogr.wkbMultiLineString, 0, extent)
assert rc
lyr = None
##############################################################################
# Test file-based DS with standalone "MultiPolygon" feature object.
def test_ogr_geojson_8():
if gdaltest.geojson_drv is None:
pytest.skip()
ds = ogr.Open('data/multipolygon.geojson')
assert ds is not None, 'Failed to open datasource'
    assert ds.GetLayerCount() == 1, 'Wrong number of layers'
lyr = ds.GetLayerByName('multipolygon')
assert lyr is not None, 'Missing layer called multipolygon'
extent = (100.0, 103.0, 0.0, 3.0)
rc = validate_layer(lyr, 'multipolygon', 1, ogr.wkbMultiPolygon, 0, extent)
assert rc
lyr = None
##############################################################################
# Test translation of data/gjpoint.shp to GeoJSON file
def test_ogr_geojson_9():
if gdaltest.geojson_drv is None:
pytest.skip()
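    # Each entry: [shapefile basename, expected FID values, expected NAME values].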
gdaltest.tests = [
['gjpoint', [1], ['Point 1']],
['gjline', [1], ['Line 1']],
['gjpoly', [1], ['Polygon 1']],
['gjmultipoint', [1], ['MultiPoint 1']],
['gjmultiline', [2], ['MultiLine 1']],
['gjmultipoly', [2], ['MultiPoly 1']]
]
for i in range(len(gdaltest.tests)):
test = gdaltest.tests[i]
rc = copy_shape_to_geojson(test[0])
assert rc, ('Failed making copy of ' + test[0] + '.shp')
rc = verify_geojson_copy(test[0], test[1], test[2])
assert rc, ('Verification of copy of ' + test[0] + '.shp failed')
##############################################################################
# Test translation of data/gjpoint.shp to GZip compressed GeoJSON file
def test_ogr_geojson_10():
if gdaltest.geojson_drv is None:
pytest.skip()
gdaltest.tests = [
['gjpoint', [1], ['Point 1']],
['gjline', [1], ['Line 1']],
['gjpoly', [1], ['Polygon 1']],
['gjmultipoint', [1], ['MultiPoint 1']],
['gjmultiline', [2], ['MultiLine 1']],
['gjmultipoly', [2], ['MultiPoly 1']]
]
for i in range(len(gdaltest.tests)):
test = gdaltest.tests[i]
rc = copy_shape_to_geojson(test[0], '/vsigzip/')
assert rc, ('Failed making copy of ' + test[0] + '.shp')
rc = verify_geojson_copy(test[0], test[1], test[2])
assert rc, ('Verification of copy of ' + test[0] + '.shp failed')
###############################################################################
def test_ogr_geojson_11():
if gdaltest.geojson_drv is None:
pytest.skip()
ds = ogr.Open('data/srs_name.geojson')
assert ds is not None, 'Failed to open datasource'
    assert ds.GetLayerCount() == 1, 'Wrong number of layers'
lyr = ds.GetLayerByName('srs_name')
assert lyr is not None, 'Missing layer called srs_name'
extent = (100.0, 102.0, 0.0, 1.0)
rc = validate_layer(lyr, 'srs_name', 1, ogr.wkbGeometryCollection, 0, extent)
assert rc
ref = lyr.GetSpatialRef()
pcs = int(ref.GetAuthorityCode('PROJCS'))
assert pcs == 26915, 'Spatial reference was not valid'
feature = lyr.GetNextFeature()
geometry = feature.GetGeometryRef().GetGeometryRef(0)
srs = geometry.GetSpatialReference()
pcs = int(srs.GetAuthorityCode('PROJCS'))
assert pcs == 26916, 'Spatial reference for individual geometry was not valid'
lyr = None
###############################################################################
# Test DS passed as name with standalone "Point" feature object (#3377)
def test_ogr_geojson_12():
if gdaltest.geojson_drv is None:
pytest.skip()
if os.name == 'nt':
pytest.skip()
import test_cli_utilities
if test_cli_utilities.get_ogrinfo_path() is None:
pytest.skip()
ret = gdaltest.runexternal(test_cli_utilities.get_ogrinfo_path() + ' -ro -al \'{"type": "Point","coordinates": [100.0, 0.0]}\'')
assert ret.find(' POINT (100 0)') != -1
###############################################################################
# Test writing to stdout (#3381)
def test_ogr_geojson_13():
if gdaltest.geojson_drv is None:
pytest.skip()
test = ['gjpoint', [1], ['Point 1']]
rc = copy_shape_to_geojson(test[0], '/vsistdout/')
assert rc, ('Failed making copy of ' + test[0] + '.shp')
###############################################################################
# Test reading & writing various degenerated geometries
def test_ogr_geojson_14():
if gdaltest.geojson_drv is None:
pytest.skip()
if int(gdal.VersionInfo('VERSION_NUM')) < 1800:
pytest.skip()
with gdaltest.error_handler():
ds = ogr.Open('data/ogr_geojson_14.geojson')
lyr = ds.GetLayer(0)
out_ds = gdaltest.geojson_drv.CreateDataSource('tmp/out_ogr_geojson_14.geojson')
out_lyr = out_ds.CreateLayer('lyr')
with gdaltest.error_handler():
for feat in lyr:
geom = feat.GetGeometryRef()
if geom is not None:
# print(geom)
out_feat = ogr.Feature(feature_def=out_lyr.GetLayerDefn())
out_feat.SetGeometry(geom)
out_lyr.CreateFeature(out_feat)
out_ds = None
###############################################################################
# Test Feature.ExportToJson (#3870)
def test_ogr_geojson_15():
feature_defn = ogr.FeatureDefn()
feature_defn.AddFieldDefn(ogr.FieldDefn("foo"))
field_defn = ogr.FieldDefn("boolfield", ogr.OFTInteger)
field_defn.SetSubType(ogr.OFSTBoolean)
feature_defn.AddFieldDefn(field_defn)
feature = ogr.Feature(feature_defn)
feature.SetField("foo", "bar")
feature.SetField("boolfield", True)
feature.SetFID(0)
geom = ogr.CreateGeometryFromWkt("POINT(1 2)")
feature.SetGeometry(geom)
try:
out = feature.ExportToJson()
except ImportError:
pytest.skip()
expected_out = """{"geometry": {"type": "Point", "coordinates": [1.0, 2.0]}, "type": "Feature", "properties": {"foo": "bar", "boolfield": true}, "id": 0}"""
if out != expected_out:
out_json = json.loads(out)
expected_out_json = json.loads(expected_out)
assert out_json == expected_out_json, out
out = feature.ExportToJson(as_object=True)
expected_out = {'geometry': {'type': 'Point', 'coordinates': [1.0, 2.0]}, 'type': 'Feature', 'properties': {'foo': 'bar', "boolfield": True}, 'id': 0}
assert out == expected_out
###############################################################################
# Test reading ESRI point file
def test_ogr_geojson_16():
if gdaltest.geojson_drv is None:
pytest.skip()
ds = ogr.Open('data/esripoint.json')
assert ds is not None, 'Failed to open datasource'
    assert ds.GetLayerCount() == 1, 'Wrong number of layers'
lyr = ds.GetLayerByName('esripoint')
assert lyr is not None, 'Missing layer called esripoint'
extent = (2, 2, 49, 49)
rc = validate_layer(lyr, 'esripoint', 1, ogr.wkbPoint, 4, extent)
assert rc
ref = lyr.GetSpatialRef()
gcs = int(ref.GetAuthorityCode('GEOGCS'))
assert gcs == 4326, "Spatial reference was not valid"
feature = lyr.GetNextFeature()
ref_geom = ogr.CreateGeometryFromWkt('POINT(2 49)')
if ogrtest.check_feature_geometry(feature, ref_geom) != 0:
feature.DumpReadable()
pytest.fail()
if feature.GetFID() != 1:
feature.DumpReadable()
pytest.fail()
if feature.GetFieldAsInteger('fooInt') != 2:
feature.DumpReadable()
pytest.fail()
if feature.GetFieldAsDouble('fooDouble') != 3.4:
feature.DumpReadable()
pytest.fail()
if feature.GetFieldAsString('fooString') != '56':
feature.DumpReadable()
pytest.fail()
lyr = None
ds = None
###############################################################################
# Test reading ESRI linestring file
def test_ogr_geojson_17():
if gdaltest.geojson_drv is None:
pytest.skip()
ds = ogr.Open('data/esrilinestring.json')
assert ds is not None, 'Failed to open datasource'
    assert ds.GetLayerCount() == 1, 'Wrong number of layers'
lyr = ds.GetLayer(0)
extent = (2, 3, 49, 50)
rc = validate_layer(lyr, None, 1, ogr.wkbLineString, 0, extent)
assert rc
feature = lyr.GetNextFeature()
ref_geom = ogr.CreateGeometryFromWkt('LINESTRING (2 49,3 50)')
if ogrtest.check_feature_geometry(feature, ref_geom) != 0:
feature.DumpReadable()
pytest.fail()
lyr = None
ds = None
# MultiLineString
ds = ogr.Open("""{
"geometryType": "esriGeometryPolyline",
"fields": [],
"features": [
{
"geometry": {
"paths" : [
[ [2,49],[2.1,49.1] ],
[ [3,50],[3.1,50.1] ]
]
}
}
]
}""")
lyr = ds.GetLayer(0)
feature = lyr.GetNextFeature()
ref_geom = ogr.CreateGeometryFromWkt('MULTILINESTRING ((2 49,2.1 49.1),(3 50,3.1 50.1))')
if ogrtest.check_feature_geometry(feature, ref_geom) != 0:
feature.DumpReadable()
pytest.fail()
###############################################################################
# Test reading ESRI polygon file
def test_ogr_geojson_18():
if gdaltest.geojson_drv is None:
pytest.skip()
ds = ogr.Open('data/esripolygon.json')
assert ds is not None, 'Failed to open datasource'
    assert ds.GetLayerCount() == 1, 'Wrong number of layers'
lyr = ds.GetLayer(0)
extent = (-3, 3, 49, 50)
rc = validate_layer(lyr, None, 1, ogr.wkbPolygon, 0, extent)
assert rc
feature = lyr.GetNextFeature()
ref_geom = ogr.CreateGeometryFromWkt('MULTIPOLYGON (((2 49,2 50,3 50,3 49,2 49),(2.1 49.1,2.1 49.9,2.9 49.9,2.9 49.1,2.1 49.1)),((-2 49,-2 50,-3 50,-3 49,-2 49)))')
if ogrtest.check_feature_geometry(feature, ref_geom) != 0:
feature.DumpReadable()
pytest.fail()
lyr = None
ds = None
ds = ogr.Open('data/esripolygonempty.json')
assert ds is not None, 'Failed to open datasource'
lyr = ds.GetLayer(0)
feature = lyr.GetNextFeature()
if feature.GetGeometryRef().ExportToWkt() != 'POLYGON EMPTY':
feature.DumpReadable()
pytest.fail()
lyr = None
ds = None
###############################################################################
# Test reading ESRI multipoint file
def test_ogr_geojson_19():
if gdaltest.geojson_drv is None:
pytest.skip()
ds = ogr.Open('data/esrimultipoint.json')
assert ds is not None, 'Failed to open datasource'
    assert ds.GetLayerCount() == 1, 'Wrong number of layers'
lyr = ds.GetLayer(0)
extent = (2, 3, 49, 50)
rc = validate_layer(lyr, None, 1, ogr.wkbMultiPoint, 4, extent)
assert rc
feature = lyr.GetNextFeature()
ref_geom = ogr.CreateGeometryFromWkt('MULTIPOINT (2 49,3 50)')
if ogrtest.check_feature_geometry(feature, ref_geom) != 0:
feature.DumpReadable()
pytest.fail()
lyr = None
ds = None
###############################################################################
# Test reading files with no extension (#4314)
def test_ogr_geojson_20():
if gdaltest.geojson_drv is None:
pytest.skip()
from glob import glob
geojson_files = glob('data/*.json')
geojson_files.extend(glob('data/*.geojson'))
for gj in geojson_files:
# create tmp file with no file extension
data = open(gj, 'rb').read()
f = gdal.VSIFOpenL('/vsimem/testgj', 'wb')
gdal.VSIFWriteL(data, 1, len(data), f)
gdal.VSIFCloseL(f)
gdal.PushErrorHandler('CPLQuietErrorHandler')
ds = ogr.Open('/vsimem/testgj')
gdal.PopErrorHandler()
if ds is None:
print(gj)
print(data.decode('LATIN1'))
pytest.fail('Failed to open datasource')
ds = None
gdal.Unlink('/vsimem/testgj')
###############################################################################
# Test reading output of geocouch spatiallist
def test_ogr_geojson_21():
if gdaltest.geojson_drv is None:
pytest.skip()
ds = ogr.Open("""{"type": "FeatureCollection", "features":[
{"type": "Feature",
"geometry": {"type":"Point","coordinates":[1,2]},
"properties": {"_id":"aid", "_rev":"arev", "type":"Feature",
"properties":{"intvalue" : 2, "floatvalue" : 3.2, "strvalue" : "foo", "properties": { "foo": "bar"}}}}]}""")
assert ds is not None, 'Failed to open datasource'
lyr = ds.GetLayerByName('OGRGeoJSON')
feature = lyr.GetNextFeature()
ref_geom = ogr.CreateGeometryFromWkt('POINT (1 2)')
if feature.GetFieldAsString("_id") != 'aid' or \
feature.GetFieldAsString("_rev") != 'arev' or \
feature.GetFieldAsInteger("intvalue") != 2 or \
ogrtest.check_feature_geometry(feature, ref_geom) != 0:
feature.DumpReadable()
pytest.fail()
lyr = None
ds = None
###############################################################################
# Same as ogr_geojson_21 with several features
def test_ogr_geojson_22():
if gdaltest.geojson_drv is None:
pytest.skip()
ds = ogr.Open("""{"type": "FeatureCollection", "features":[
{"type": "Feature",
"geometry": {"type":"Point","coordinates":[1,2]},
"properties": {"_id":"aid", "_rev":"arev", "type":"Feature",
"properties":{"intvalue" : 2, "floatvalue" : 3.2, "strvalue" : "foo"}}},
{"type": "Feature",
"geometry": {"type":"Point","coordinates":[3,4]},
"properties": {"_id":"aid2", "_rev":"arev2", "type":"Feature",
"properties":{"intvalue" : 3.5, "str2value" : "bar"}}}]}""")
assert ds is not None, 'Failed to open datasource'
lyr = ds.GetLayerByName('OGRGeoJSON')
feature = lyr.GetNextFeature()
ref_geom = ogr.CreateGeometryFromWkt('POINT (1 2)')
if feature.GetFieldAsString("_id") != 'aid' or \
feature.GetFieldAsString("_rev") != 'arev' or \
feature.GetFieldAsDouble("intvalue") != 2 or \
ogrtest.check_feature_geometry(feature, ref_geom) != 0:
feature.DumpReadable()
pytest.fail()
feature = lyr.GetNextFeature()
ref_geom = ogr.CreateGeometryFromWkt('POINT (3 4)')
if feature.GetFieldAsString("_id") != 'aid2' or \
feature.GetFieldAsString("_rev") != 'arev2' or \
feature.GetFieldAsDouble("intvalue") != 3.5 or \
feature.GetFieldAsString("str2value") != 'bar' or \
ogrtest.check_feature_geometry(feature, ref_geom) != 0:
feature.DumpReadable()
pytest.fail()
lyr = None
ds = None
###############################################################################
# Write GeoJSON with bbox and test SRS writing&reading back
def test_ogr_geojson_23():
if gdaltest.geojson_drv is None:
pytest.skip()
ds = gdaltest.geojson_drv.CreateDataSource('/vsimem/ogr_geojson_23.json')
sr = osr.SpatialReference()
sr.ImportFromEPSG(4322)
lyr = ds.CreateLayer('foo', srs=sr, options=['WRITE_BBOX=YES'])
feat = ogr.Feature(lyr.GetLayerDefn())
feat.SetGeometry(ogr.CreateGeometryFromWkt('POINT(1 10)'))
lyr.CreateFeature(feat)
feat = ogr.Feature(lyr.GetLayerDefn())
feat.SetGeometry(ogr.CreateGeometryFromWkt('POINT(2 20)'))
lyr.CreateFeature(feat)
lyr = None
ds = None
ds = ogr.Open('/vsimem/ogr_geojson_23.json')
lyr = ds.GetLayer(0)
sr_got = lyr.GetSpatialRef()
ds = None
sr.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)
assert sr_got.IsSame(sr), 'did not get expected SRS'
fp = gdal.VSIFOpenL('/vsimem/ogr_geojson_23.json', 'rb')
data = gdal.VSIFReadL(1, 10000, fp).decode('ascii')
gdal.VSIFCloseL(fp)
gdal.Unlink('/vsimem/ogr_geojson_23.json')
assert data.find('"bbox": [ 1, 10, 2, 20 ]') != -1, 'did not find global bbox'
assert data.find('"bbox": [ 1.0, 10.0, 1.0, 10.0 ]') != -1, \
'did not find first feature bbox'
###############################################################################
# Test alternate form of geojson
def test_ogr_geojson_24():
if gdaltest.geojson_drv is None:
pytest.skip()
content = """loadGeoJSON({"layerFoo": { "type": "Feature",
"geometry": {
"type": "Point",
"coordinates": [2, 49]
},
"name": "bar"
},
"layerBar": { "type": "FeatureCollection", "features" : [ { "type": "Feature",
"geometry": {
"type": "Point",
"coordinates": [2, 49]
},
"other_name": "baz"
}]}})"""
for i in range(2):
if i == 0:
ds = ogr.Open(content)
else:
gdal.FileFromMemBuffer('/vsimem/ogr_geojson_24.js', content)
ds = ogr.Open('/vsimem/ogr_geojson_24.js')
gdal.Unlink('/vsimem/ogr_geojson_24.js')
assert ds is not None, 'Failed to open datasource'
lyr = ds.GetLayerByName('layerFoo')
assert lyr is not None, 'cannot find layer'
feature = lyr.GetNextFeature()
ref_geom = ogr.CreateGeometryFromWkt('POINT (2 49)')
if feature.GetFieldAsString("name") != 'bar' or \
ogrtest.check_feature_geometry(feature, ref_geom) != 0:
feature.DumpReadable()
pytest.fail()
lyr = ds.GetLayerByName('layerBar')
assert lyr is not None, 'cannot find layer'
feature = lyr.GetNextFeature()
ref_geom = ogr.CreateGeometryFromWkt('POINT (2 49)')
if feature.GetFieldAsString("other_name") != 'baz' or \
ogrtest.check_feature_geometry(feature, ref_geom) != 0:
feature.DumpReadable()
pytest.fail()
ds = None
###############################################################################
# Test TopoJSON
def test_ogr_geojson_25():
if gdaltest.geojson_drv is None:
pytest.skip()
ds = ogr.Open('data/topojson1.topojson')
lyr = ds.GetLayer(0)
assert lyr.GetName() == 'a_layer'
feat = lyr.GetNextFeature()
assert ogrtest.check_feature_geometry(feat, 'LINESTRING (100 1000,110 1000,110 1100)') == 0
lyr = ds.GetLayer(1)
assert lyr.GetName() == 'TopoJSON'
expected_results = [
(None, None, 'POINT EMPTY'),
(None, None, 'POINT EMPTY'),
(None, None, 'POINT EMPTY'),
(None, None, 'POINT (100 1010)'),
(None, None, 'LINESTRING EMPTY'),
(None, None, 'LINESTRING EMPTY'),
(None, None, 'LINESTRING EMPTY'),
(None, None, 'LINESTRING EMPTY'),
(None, None, 'LINESTRING EMPTY'),
(None, None, 'LINESTRING EMPTY'),
(None, None, 'LINESTRING EMPTY'),
(None, None, 'LINESTRING EMPTY'),
(None, '0', 'LINESTRING EMPTY'),
(None, 'foo', 'LINESTRING EMPTY'),
('1', None, 'LINESTRING (100 1000,110 1000,110 1100)'),
('2', None, 'LINESTRING (110 1100,110 1000,100 1000)'),
(None, None, 'POLYGON EMPTY'),
(None, None, 'POLYGON EMPTY'),
(None, None, 'POLYGON EMPTY'),
(None, None, 'POLYGON ((100 1000,110 1000,110 1100,100 1100,100 1000),(101 1010,101 1090,109 1090,109 1010,101 1010))'),
(None, None, 'POLYGON ((110 1100,110 1000,100 1000,100 1100,110 1100),(101 1010,109 1010,109 1090,101 1090,101 1010))'),
(None, None, 'MULTIPOINT EMPTY'),
(None, None, 'MULTIPOINT EMPTY'),
(None, None, 'MULTIPOINT EMPTY'),
(None, None, 'MULTIPOINT EMPTY'),
(None, None, 'MULTIPOINT (100 1010,101 1020)'),
(None, None, 'MULTIPOLYGON EMPTY'),
(None, None, 'MULTIPOLYGON EMPTY'),
(None, None, 'MULTIPOLYGON EMPTY'),
(None, None, 'MULTIPOLYGON (((110 1100,110 1000,100 1000,100 1100,110 1100)),((101 1010,109 1010,109 1090,101 1090,101 1010)))'),
(None, None, 'MULTILINESTRING EMPTY'),
(None, None, 'MULTILINESTRING EMPTY'),
(None, None, 'MULTILINESTRING ((100 1000,110 1000,110 1100))'),
(None, None, 'MULTILINESTRING ((100 1000,110 1000,110 1100,100 1100,100 1000))'),
(None, None, 'MULTILINESTRING ((100 1000,110 1000,110 1100,100 1100,100 1000),(101 1010,101 1090,109 1090,109 1010,101 1010))'),
]
assert lyr.GetFeatureCount() == len(expected_results)
for i, exp_result in enumerate(expected_results):
feat = lyr.GetNextFeature()
if feat.GetField('id') != exp_result[0] or \
feat.GetField('name') != exp_result[1] or \
feat.GetGeometryRef().ExportToWkt() != exp_result[2]:
feat.DumpReadable()
print(exp_result)
print(feat.GetField('name'))
pytest.fail('failure at feat index %d' % i)
ds = None
ds = ogr.Open('data/topojson2.topojson')
lyr = ds.GetLayer(0)
assert lyr.GetName() == 'a_layer'
feat = lyr.GetNextFeature()
assert ogrtest.check_feature_geometry(feat, 'LINESTRING (100 1000,110 1000,110 1100)') == 0
lyr = ds.GetLayer(1)
assert lyr.GetName() == 'TopoJSON'
feat = lyr.GetNextFeature()
assert ogrtest.check_feature_geometry(feat, 'LINESTRING (100 1000,110 1000,110 1100)') == 0
ds = None
ds = ogr.Open('data/topojson3.topojson')
lyr = ds.GetLayer(0)
assert lyr.GetName() == 'a_layer'
feat = lyr.GetNextFeature()
assert ogrtest.check_feature_geometry(feat, 'LINESTRING (0 0,10 0,0 10,10 0,0 0)') == 0
lyr = ds.GetLayer(1)
assert lyr.GetName() == 'TopoJSON'
feat = lyr.GetNextFeature()
assert ogrtest.check_feature_geometry(feat, 'LINESTRING (0 0,10 0,0 10,10 0,0 0)') == 0
ds = None
###############################################################################
# Test 64bit support
def test_ogr_geojson_26():
if gdaltest.geojson_drv is None:
pytest.skip()
ds = ogr.Open("""{"type": "FeatureCollection", "features":[
{"type": "Feature", "id": 1,
"geometry": {"type":"Point","coordinates":[1,2]},
"properties": { "intvalue" : 1, "int64" : 1234567890123, "intlist" : [1] }},
{"type": "Feature", "id": 1234567890123,
"geometry": {"type":"Point","coordinates":[3,4]},
"properties": { "intvalue" : 1234567890123, "intlist" : [1, 1234567890123] }},
]}""")
assert ds is not None, 'Failed to open datasource'
lyr = ds.GetLayerByName('OGRGeoJSON')
assert lyr.GetMetadataItem(ogr.OLMD_FID64) is not None
feature = lyr.GetNextFeature()
if feature.GetFID() != 1:
feature.DumpReadable()
pytest.fail()
if feature.GetField("intvalue") != 1:
feature.DumpReadable()
pytest.fail()
if feature.GetField("int64") != 1234567890123:
feature.DumpReadable()
pytest.fail()
feature = lyr.GetNextFeature()
if feature.GetFID() != 1234567890123:
feature.DumpReadable()
pytest.fail()
if feature.GetField("intvalue") != 1234567890123:
feature.DumpReadable()
pytest.fail()
if feature.GetField("intlist") != [1, 1234567890123]:
feature.DumpReadable()
pytest.fail()
lyr = None
ds = None
ds = gdaltest.geojson_drv.CreateDataSource('/vsimem/ogr_geojson_26.json')
lyr = ds.CreateLayer('test')
lyr.CreateField(ogr.FieldDefn('int64', ogr.OFTInteger64))
lyr.CreateField(ogr.FieldDefn('int64list', ogr.OFTInteger64List))
f = ogr.Feature(lyr.GetLayerDefn())
f.SetFID(1234567890123)
f.SetField(0, 1234567890123)
f.SetFieldInteger64List(1, [1234567890123])
lyr.CreateFeature(f)
f = None
ds = None
fp = gdal.VSIFOpenL('/vsimem/ogr_geojson_26.json', 'rb')
data = gdal.VSIFReadL(1, 10000, fp).decode('ascii')
gdal.VSIFCloseL(fp)
gdal.Unlink('/vsimem/ogr_geojson_26.json')
assert '{ "type": "Feature", "id": 1234567890123, "properties": { "int64": 1234567890123, "int64list": [ 1234567890123 ] }, "geometry": null }' in data
###############################################################################
# Test workaround for 64bit values (returned as strings)
def test_ogr_geojson_27():
if gdaltest.geojson_drv is None:
pytest.skip()
gdal.PushErrorHandler('CPLQuietErrorHandler')
# Warning 1: Integer values probably ranging out of 64bit integer range
# have been found. Will be clamped to INT64_MIN/INT64_MAX
ds = ogr.Open("""{"type": "FeatureCollection", "features":[
{"type": "Feature",
"geometry": {"type":"Point","coordinates":[1,2]},
"properties": { "intvalue" : 1 }},
{"type": "Feature",
"geometry": {"type":"Point","coordinates":[3,4]},
"properties": { "intvalue" : 12345678901231234567890123 }},
]}""")
gdal.PopErrorHandler()
assert ds is not None, 'Failed to open datasource'
lyr = ds.GetLayerByName('OGRGeoJSON')
feature = lyr.GetNextFeature()
if feature.GetField("intvalue") != 1:
feature.DumpReadable()
pytest.fail()
feature = lyr.GetNextFeature()
if feature.GetField("intvalue") != 9223372036854775807:
feature.DumpReadable()
pytest.fail()
lyr = None
ds = None
###############################################################################
# Test reading ESRI point file with z value
def test_ogr_geojson_28():
if gdaltest.geojson_drv is None:
pytest.skip()
ds = ogr.Open('data/esrizpoint.json')
assert ds is not None, 'Failed to open datasource'
    assert ds.GetLayerCount() == 1, 'Wrong number of layers'
lyr = ds.GetLayer(0)
    # validate_layer() does not check z, but include it in the extent anyway
extent = (2, 2, 49, 49, 1, 1)
rc = validate_layer(lyr, None, 1, ogr.wkbPoint, 4, extent)
assert rc
ref = lyr.GetSpatialRef()
gcs = int(ref.GetAuthorityCode('GEOGCS'))
assert gcs == 4326, "Spatial reference was not valid"
feature = lyr.GetNextFeature()
ref_geom = ogr.CreateGeometryFromWkt('POINT(2 49 1)')
if ogrtest.check_feature_geometry(feature, ref_geom) != 0:
feature.DumpReadable()
pytest.fail()
if feature.GetFID() != 1:
feature.DumpReadable()
pytest.fail()
if feature.GetFieldAsInteger('fooInt') != 2:
feature.DumpReadable()
pytest.fail()
if feature.GetFieldAsDouble('fooDouble') != 3.4:
feature.DumpReadable()
pytest.fail()
if feature.GetFieldAsString('fooString') != '56':
feature.DumpReadable()
pytest.fail()
lyr = None
ds = None
###############################################################################
# Test reading ESRI linestring file with z
def test_ogr_geojson_29():
if gdaltest.geojson_drv is None:
pytest.skip()
ds = ogr.Open('data/esrizlinestring.json')
assert ds is not None, 'Failed to open datasource'
    assert ds.GetLayerCount() == 1, 'Wrong number of layers'
lyr = ds.GetLayer(0)
    # validate_layer() does not check z, but include it in the extent anyway
extent = (2, 3, 49, 50, 1, 2)
rc = validate_layer(lyr, None, 1, ogr.wkbLineString, 0, extent)
assert rc
feature = lyr.GetNextFeature()
ref_geom = ogr.CreateGeometryFromWkt('LINESTRING (2 49 1,3 50 2)')
if ogrtest.check_feature_geometry(feature, ref_geom) != 0:
feature.DumpReadable()
pytest.fail()
lyr = None
ds = None
###############################################################################
# Test reading ESRI multipoint file with z
def test_ogr_geojson_30():
if gdaltest.geojson_drv is None:
pytest.skip()
ds = ogr.Open('data/esrizmultipoint.json')
assert ds is not None, 'Failed to open datasource'
    assert ds.GetLayerCount() == 1, 'Wrong number of layers'
lyr = ds.GetLayer(0)
    # validate_layer() does not check z, but include it in the extent anyway
extent = (2, 3, 49, 50, 1, 2)
rc = validate_layer(lyr, None, 1, ogr.wkbMultiPoint, 4, extent)
assert rc
feature = lyr.GetNextFeature()
ref_geom = ogr.CreateGeometryFromWkt('MULTIPOINT (2 49 1,3 50 2)')
if ogrtest.check_feature_geometry(feature, ref_geom) != 0:
feature.DumpReadable()
pytest.fail()
lyr = None
ds = None
###############################################################################
# Test reading ESRI polygon file with z
def test_ogr_geojson_31():
if gdaltest.geojson_drv is None:
pytest.skip()
ds = ogr.Open('data/esrizpolygon.json')
assert ds is not None, 'Failed to open datasource'
    assert ds.GetLayerCount() == 1, 'Wrong number of layers'
lyr = ds.GetLayer(0)
    # validate_layer() does not check z, but include it in the extent anyway
extent = (2, 3, 49, 50, 1, 4)
rc = validate_layer(lyr, None, 1, ogr.wkbPolygon, 0, extent)
assert rc
feature = lyr.GetNextFeature()
ref_geom = ogr.CreateGeometryFromWkt('POLYGON ((2 49 1,2 50 2,3 50 3,3 49 4,2 49 1))')
if ogrtest.check_feature_geometry(feature, ref_geom) != 0:
feature.DumpReadable()
pytest.fail()
lyr = None
ds = None
###############################################################################
# Test reading ESRI multipoint file with m, but no z (hasM=true, hasZ omitted)
def test_ogr_geojson_32():
if gdaltest.geojson_drv is None:
pytest.skip()
ds = ogr.Open('data/esrihasmnozmultipoint.json')
assert ds is not None, 'Failed to open datasource'
    assert ds.GetLayerCount() == 1, 'Wrong number of layers'
lyr = ds.GetLayer(0)
extent = (2, 3, 49, 50)
rc = validate_layer(lyr, None, 1, ogr.wkbMultiPoint, 4, extent)
assert rc
feature = lyr.GetNextFeature()
ref_geom = ogr.CreateGeometryFromWkt('MULTIPOINT M ((2 49 1),(3 50 2))')
if ogrtest.check_feature_geometry(feature, ref_geom) != 0:
feature.DumpReadable()
pytest.fail()
lyr = None
ds = None
###############################################################################
# Test reading ESRI multipoint file with hasZ=true, but only 2 components.
def test_ogr_geojson_33():
if gdaltest.geojson_drv is None:
pytest.skip()
ds = ogr.Open('data/esriinvalidhaszmultipoint.json')
assert ds is not None, 'Failed to open datasource'
    assert ds.GetLayerCount() == 1, 'Wrong number of layers'
lyr = ds.GetLayer(0)
extent = (2, 3, 49, 50)
rc = validate_layer(lyr, None, 1, ogr.wkbMultiPoint, 4, extent)
assert rc
feature = lyr.GetNextFeature()
ref_geom = ogr.CreateGeometryFromWkt('MULTIPOINT (2 49,3 50)')
if ogrtest.check_feature_geometry(feature, ref_geom) != 0:
feature.DumpReadable()
pytest.fail()
lyr = None
ds = None
###############################################################################
# Test reading ESRI multipoint file with z and m
def test_ogr_geojson_34():
if gdaltest.geojson_drv is None:
pytest.skip()
ds = ogr.Open('data/esrizmmultipoint.json')
assert ds is not None, 'Failed to open datasource'
    assert ds.GetLayerCount() == 1, 'Wrong number of layers'
lyr = ds.GetLayer(0)
extent = (2, 3, 49, 50)
rc = validate_layer(lyr, None, 1, ogr.wkbMultiPoint, 4, extent)
assert rc
feature = lyr.GetNextFeature()
ref_geom = ogr.CreateGeometryFromWkt('MULTIPOINT ZM ((2 49 1 100),(3 50 2 100))')
if ogrtest.check_feature_geometry(feature, ref_geom) != 0:
feature.DumpReadable()
pytest.fail()
lyr = None
ds = None
###############################################################################
# Test handling of huge coordinates (#5377)
def test_ogr_geojson_35():
if gdaltest.geojson_drv is None:
pytest.skip()
ds = gdaltest.geojson_drv.CreateDataSource('/vsimem/ogr_geojson_35.json')
lyr = ds.CreateLayer('foo')
feat = ogr.Feature(lyr.GetLayerDefn())
feat.SetFID(1)
geom = ogr.Geometry(ogr.wkbPoint)
geom.AddPoint_2D(-1.79769313486231571e+308, -1.79769313486231571e+308)
feat.SetGeometry(geom)
lyr.CreateFeature(feat)
gdal.PushErrorHandler('CPLQuietErrorHandler')
feat = ogr.Feature(lyr.GetLayerDefn())
feat.SetFID(2)
geom = ogr.Geometry(ogr.wkbPoint)
geom.AddPoint(-1.7e308 * 2, 1.7e308 * 2, 1.7e308 * 2) # evaluates to -inf, inf
feat.SetGeometry(geom)
lyr.CreateFeature(feat)
feat = ogr.Feature(lyr.GetLayerDefn())
feat.SetFID(3)
geom = ogr.Geometry(ogr.wkbLineString)
geom.AddPoint_2D(0, 0)
geom.AddPoint_2D(-1.7e308 * 2, 1.7e308 * 2) # evaluates to -inf, inf
feat.SetGeometry(geom)
lyr.CreateFeature(feat)
feat = ogr.Feature(lyr.GetLayerDefn())
feat.SetFID(4)
geom = ogr.Geometry(ogr.wkbPolygon)
geom2 = ogr.Geometry(ogr.wkbLinearRing)
geom2.AddPoint_2D(0, 0)
geom2.AddPoint_2D(-1.7e308 * 2, 1.7e308 * 2) # evaluates to -inf, inf
geom.AddGeometry(geom2)
feat.SetGeometry(geom)
lyr.CreateFeature(feat)
feat = ogr.Feature(lyr.GetLayerDefn())
feat.SetFID(5)
geom = ogr.Geometry(ogr.wkbMultiPoint)
geom2 = ogr.Geometry(ogr.wkbPoint)
geom2.AddPoint_2D(0, 0)
geom2 = ogr.Geometry(ogr.wkbPoint)
geom2.AddPoint_2D(-1.7e308 * 2, 1.7e308 * 2) # evaluates to -inf, inf
geom.AddGeometry(geom2)
feat.SetGeometry(geom)
lyr.CreateFeature(feat)
feat = ogr.Feature(lyr.GetLayerDefn())
feat.SetFID(6)
geom = ogr.Geometry(ogr.wkbMultiLineString)
geom2 = ogr.Geometry(ogr.wkbLineString)
geom2.AddPoint_2D(0, 0)
geom2 = ogr.Geometry(ogr.wkbLineString)
geom2.AddPoint_2D(-1.7e308 * 2, 1.7e308 * 2) # evaluates to -inf, inf
geom.AddGeometry(geom2)
feat.SetGeometry(geom)
lyr.CreateFeature(feat)
feat = ogr.Feature(lyr.GetLayerDefn())
feat.SetFID(7)
geom = ogr.Geometry(ogr.wkbMultiPolygon)
geom2 = ogr.Geometry(ogr.wkbPolygon)
geom3 = ogr.Geometry(ogr.wkbLinearRing)
geom3.AddPoint_2D(0, 0)
geom2.AddGeometry(geom3)
geom2 = ogr.Geometry(ogr.wkbPolygon)
geom3 = ogr.Geometry(ogr.wkbLinearRing)
geom3.AddPoint_2D(-1.7e308 * 2, 1.7e308 * 2) # evaluates to -inf, inf
geom2.AddGeometry(geom3)
geom.AddGeometry(geom2)
feat.SetGeometry(geom)
lyr.CreateFeature(feat)
gdal.PopErrorHandler()
ds = None
fp = gdal.VSIFOpenL('/vsimem/ogr_geojson_35.json', 'rb')
data = gdal.VSIFReadL(1, 10000, fp).decode('ascii')
gdal.VSIFCloseL(fp)
gdal.Unlink('/vsimem/ogr_geojson_35.json')
assert not (data.find('-1.79') == -1 and data.find('e+308') == -1)
for ident in range(2, 8):
assert data.find('{ "type": "Feature", "id": %d, "properties": { }, "geometry": null }' % ident) != -1
###############################################################################
# Test reading file with UTF-8 BOM (which is supposed to be illegal in JSON...) (#5630)
def test_ogr_geojson_36():
if gdaltest.geojson_drv is None:
pytest.skip()
ds = ogr.Open('data/point_with_utf8bom.json')
assert ds is not None, 'Failed to open datasource'
ds = None
#########################################################################
# Test boolean type support
def test_ogr_geojson_37():
if gdaltest.geojson_drv is None:
pytest.skip()
# Test read support
ds = ogr.Open("""{"type": "FeatureCollection","features": [
{ "type": "Feature", "properties": { "bool" : false, "not_bool": false, "bool_list" : [false, true], "notbool_list" : [false, 3]}, "geometry": null },
{ "type": "Feature", "properties": { "bool" : true, "not_bool": 2, "bool_list" : [true] }, "geometry": null },
] }""")
lyr = ds.GetLayer(0)
feat_defn = lyr.GetLayerDefn()
assert (feat_defn.GetFieldDefn(feat_defn.GetFieldIndex('bool')).GetType() == ogr.OFTInteger and \
feat_defn.GetFieldDefn(feat_defn.GetFieldIndex('bool')).GetSubType() == ogr.OFSTBoolean)
assert feat_defn.GetFieldDefn(feat_defn.GetFieldIndex('not_bool')).GetSubType() == ogr.OFSTNone
assert (feat_defn.GetFieldDefn(feat_defn.GetFieldIndex('bool_list')).GetType() == ogr.OFTIntegerList and \
feat_defn.GetFieldDefn(feat_defn.GetFieldIndex('bool_list')).GetSubType() == ogr.OFSTBoolean)
assert feat_defn.GetFieldDefn(feat_defn.GetFieldIndex('notbool_list')).GetSubType() == ogr.OFSTNone
f = lyr.GetNextFeature()
if f.GetField('bool') != 0 or f.GetField('bool_list') != [0, 1]:
f.DumpReadable()
pytest.fail()
out_ds = ogr.GetDriverByName('GeoJSON').CreateDataSource('/vsimem/ogr_geojson_37.json')
out_lyr = out_ds.CreateLayer('test')
for i in range(feat_defn.GetFieldCount()):
out_lyr.CreateField(feat_defn.GetFieldDefn(i))
out_f = ogr.Feature(out_lyr.GetLayerDefn())
out_f.SetFrom(f)
out_lyr.CreateFeature(out_f)
out_ds = None
fp = gdal.VSIFOpenL('/vsimem/ogr_geojson_37.json', 'rb')
data = gdal.VSIFReadL(1, 10000, fp).decode('ascii')
gdal.VSIFCloseL(fp)
gdal.Unlink('/vsimem/ogr_geojson_37.json')
assert '"bool": false, "not_bool": 0, "bool_list": [ false, true ], "notbool_list": [ 0, 3 ]' in data
###############################################################################
# Test datetime/date/time type support
def test_ogr_geojson_38():
if gdaltest.geojson_drv is None:
pytest.skip()
# Test read support
ds = ogr.Open("""{"type": "FeatureCollection", "features": [
{ "type": "Feature", "properties": { "dt": "2014-11-20 12:34:56+0100", "dt2": "2014\/11\/20", "date":"2014\/11\/20", "time":"12:34:56", "no_dt": "2014-11-20 12:34:56+0100", "no_dt2": "2014-11-20 12:34:56+0100" }, "geometry": null },
{ "type": "Feature", "properties": { "dt": "2014\/11\/20", "dt2": "2014\/11\/20T12:34:56Z", "date":"2014-11-20", "time":"12:34:56", "no_dt": "foo", "no_dt2": 1 }, "geometry": null }
] }""")
lyr = ds.GetLayer(0)
feat_defn = lyr.GetLayerDefn()
assert feat_defn.GetFieldDefn(feat_defn.GetFieldIndex('dt')).GetType() == ogr.OFTDateTime
assert feat_defn.GetFieldDefn(feat_defn.GetFieldIndex('dt2')).GetType() == ogr.OFTDateTime
assert feat_defn.GetFieldDefn(feat_defn.GetFieldIndex('date')).GetType() == ogr.OFTDate
assert feat_defn.GetFieldDefn(feat_defn.GetFieldIndex('time')).GetType() == ogr.OFTTime
assert feat_defn.GetFieldDefn(feat_defn.GetFieldIndex('no_dt')).GetType() == ogr.OFTString
assert feat_defn.GetFieldDefn(feat_defn.GetFieldIndex('no_dt2')).GetType() == ogr.OFTString
f = lyr.GetNextFeature()
if f.GetField('dt') != '2014/11/20 12:34:56+01' or f.GetField('dt2') != '2014/11/20 00:00:00' or \
f.GetField('date') != '2014/11/20' or f.GetField('time') != '12:34:56':
f.DumpReadable()
pytest.fail()
f = lyr.GetNextFeature()
if f.GetField('dt') != '2014/11/20 00:00:00' or f.GetField('dt2') != '2014/11/20 12:34:56+00' or \
f.GetField('date') != '2014/11/20' or f.GetField('time') != '12:34:56':
f.DumpReadable()
pytest.fail()
###############################################################################
# Test id top-object level
def test_ogr_geojson_39():
if gdaltest.geojson_drv is None:
pytest.skip()
ds = ogr.Open("""{"type": "FeatureCollection", "features": [
{ "type": "Feature", "id" : "foo", "properties": { "bar" : "baz" }, "geometry": null },
] }""")
lyr = ds.GetLayer(0)
feat_defn = lyr.GetLayerDefn()
assert feat_defn.GetFieldDefn(0).GetName() == 'id' and feat_defn.GetFieldDefn(0).GetType() == ogr.OFTString
feat = lyr.GetNextFeature()
if feat.GetField('id') != 'foo' or feat.GetField('bar') != 'baz':
feat.DumpReadable()
pytest.fail()
    # Corner case: properties.id takes precedence over the top-level id (an arbitrary but deliberate driver choice)
ds = ogr.Open("""{"type": "FeatureCollection", "features": [
{ "type": "Feature", "id" : "foo", "properties": { "id" : 6 }, "geometry": null },
] }""")
lyr = ds.GetLayer(0)
feat_defn = lyr.GetLayerDefn()
assert feat_defn.GetFieldDefn(0).GetName() == 'id' and feat_defn.GetFieldDefn(0).GetType() == ogr.OFTInteger
feat = lyr.GetNextFeature()
if feat.GetField('id') != 6:
feat.DumpReadable()
pytest.fail()
# Same with 2 features
ds = ogr.Open("""{"type": "FeatureCollection", "features": [
{ "type": "Feature", "id" : "foo", "properties": { "id" : 6 }, "geometry": null },
{ "type": "Feature", "id" : "bar", "properties": { "id" : 7 }, "geometry": null }
] }""")
lyr = ds.GetLayer(0)
feat_defn = lyr.GetLayerDefn()
assert feat_defn.GetFieldDefn(0).GetName() == 'id' and feat_defn.GetFieldDefn(0).GetType() == ogr.OFTInteger
feat = lyr.GetNextFeature()
if feat.GetField('id') != 6:
feat.DumpReadable()
pytest.fail()
    # Corner case: properties.id takes precedence over the top-level id (an arbitrary but deliberate driver choice)
ds = ogr.Open("""{"type": "FeatureCollection", "features": [
{ "type": "Feature", "id" : "foo", "properties": { "id" : "baz" }, "geometry": null },
] }""")
lyr = ds.GetLayer(0)
feat_defn = lyr.GetLayerDefn()
assert feat_defn.GetFieldDefn(0).GetName() == 'id' and feat_defn.GetFieldDefn(0).GetType() == ogr.OFTString
feat = lyr.GetNextFeature()
if feat.GetField('id') != 'baz':
feat.DumpReadable()
pytest.fail()
# id and properties.ID (#6538)
ds = ogr.Open("""{"type": "FeatureCollection", "features": [
{ "type": "Feature", "id" : 1, "properties": { "ID": 2 }, "geometry": null },
] }""")
lyr = ds.GetLayer(0)
feat_defn = lyr.GetLayerDefn()
assert feat_defn.GetFieldDefn(0).GetName() == 'ID' and feat_defn.GetFieldDefn(0).GetType() == ogr.OFTInteger
feat = lyr.GetNextFeature()
if feat.GetFID() != 1 or feat.GetField('ID') != 2:
feat.DumpReadable()
pytest.fail()
# Test handling of duplicated id
gdal.ErrorReset()
with gdaltest.error_handler():
ds = ogr.Open("""{"type": "FeatureCollection", "features": [
{ "type": "Feature", "id" : 1, "properties": { "foo": "bar" }, "geometry": null },
{ "type": "Feature", "id" : 1, "properties": { "foo": "baz" }, "geometry": null },
{ "type": "Feature", "id" : 2, "properties": { "foo": "baw" }, "geometry": null }
] }""")
assert gdal.GetLastErrorMsg() != '', 'expected warning'
lyr = ds.GetLayer(0)
feat_defn = lyr.GetLayerDefn()
feat = lyr.GetNextFeature()
if feat.GetFID() != 1 or feat.GetField('foo') != 'bar':
feat.DumpReadable()
pytest.fail()
feat = lyr.GetNextFeature()
if feat.GetFID() != 2 or feat.GetField('foo') != 'baz':
feat.DumpReadable()
pytest.fail()
feat = lyr.GetNextFeature()
if feat.GetFID() != 3 or feat.GetField('foo') != 'baw':
feat.DumpReadable()
pytest.fail()
# negative id
ds = ogr.Open("""{"type": "FeatureCollection", "features": [
{ "type": "Feature", "id" : -1, "properties": { "foo": "bar" }, "geometry": null },
] }""")
lyr = ds.GetLayer(0)
feat_defn = lyr.GetLayerDefn()
assert feat_defn.GetFieldDefn(0).GetName() == 'id' and feat_defn.GetFieldDefn(0).GetType() == ogr.OFTInteger
feat = lyr.GetNextFeature()
if feat.GetField('id') != -1:
feat.DumpReadable()
pytest.fail()
# negative id 64bit
ds = ogr.Open("""{"type": "FeatureCollection", "features": [
{ "type": "Feature", "id" : -1234567890123, "properties": { "foo": "bar" }, "geometry": null },
{ "type": "Feature", "id" : -2, "properties": { "foo": "baz" }, "geometry": null },
] }""")
lyr = ds.GetLayer(0)
feat_defn = lyr.GetLayerDefn()
assert feat_defn.GetFieldDefn(0).GetName() == 'id' and feat_defn.GetFieldDefn(0).GetType() == ogr.OFTInteger64
feat = lyr.GetNextFeature()
if feat.GetField('id') != -1234567890123:
feat.DumpReadable()
pytest.fail()
# negative id
ds = ogr.Open("""{"type": "FeatureCollection", "features": [
{ "type": "Feature", "id" : -2, "properties": { "foo": "baz" }, "geometry": null },
{ "type": "Feature", "id" : -1234567890123, "properties": { "foo": "bar" }, "geometry": null },
] }""")
lyr = ds.GetLayer(0)
feat_defn = lyr.GetLayerDefn()
assert feat_defn.GetFieldDefn(0).GetName() == 'id' and feat_defn.GetFieldDefn(0).GetType() == ogr.OFTInteger64
feat = lyr.GetNextFeature()
if feat.GetField('id') != -2:
feat.DumpReadable()
pytest.fail()
# positive and then negative id
ds = ogr.Open("""{"type": "FeatureCollection", "features": [
{ "type": "Feature", "id" : 1, "properties": { "foo": "baz" }, "geometry": null },
{ "type": "Feature", "id" : -1, "properties": { "foo": "bar" }, "geometry": null },
] }""")
lyr = ds.GetLayer(0)
feat_defn = lyr.GetLayerDefn()
assert feat_defn.GetFieldDefn(1).GetName() == 'id' and feat_defn.GetFieldDefn(1).GetType() == ogr.OFTInteger
feat = lyr.GetNextFeature()
if feat.GetField('id') != 1:
feat.DumpReadable()
pytest.fail()
# mix of int and string id
ds = ogr.Open("""{"type": "FeatureCollection", "features": [
{ "type": "Feature", "id" : -2, "properties": { "foo": "baz" }, "geometry": null },
{ "type": "Feature", "id" : "str", "properties": { "foo": "bar" }, "geometry": null },
{ "type": "Feature", "id" : -3, "properties": { "foo": "baz" }, "geometry": null },
] }""")
lyr = ds.GetLayer(0)
feat_defn = lyr.GetLayerDefn()
assert feat_defn.GetFieldDefn(0).GetName() == 'id' and feat_defn.GetFieldDefn(0).GetType() == ogr.OFTString
feat = lyr.GetNextFeature()
if feat.GetField('id') != '-2':
feat.DumpReadable()
pytest.fail()
###############################################################################
# Test nested attributes
def test_ogr_geojson_40():
if gdaltest.geojson_drv is None:
pytest.skip()
ds = gdal.OpenEx("""{
"type": "FeatureCollection",
"features" :
[
{
"type": "Feature",
"geometry": {
"type": "Point",
"coordinates": [ 2, 49 ]
},
"properties": {
"a_property": 1,
"some_object": {
"a_property": 1,
"another_property": 2
}
}
},
{
"type": "Feature",
"geometry": {
"type": "Point",
"coordinates": [ 2, 49 ]
},
"properties": {
"a_property": "foo",
"some_object": {
"a_property": 1,
"another_property": 2.34
}
}
}
]
}""", gdal.OF_VECTOR, open_options=['FLATTEN_NESTED_ATTRIBUTES=YES', 'NESTED_ATTRIBUTE_SEPARATOR=.'])
lyr = ds.GetLayer(0)
feat = lyr.GetNextFeature()
feat = lyr.GetNextFeature()
if feat.GetField('a_property') != 'foo' or feat.GetField('some_object.a_property') != 1 or \
feat.GetField('some_object.another_property') != 2.34:
feat.DumpReadable()
pytest.fail()
###############################################################################
# Test ogr.CreateGeometryFromJson()
def test_ogr_geojson_41():
if gdaltest.geojson_drv is None:
pytest.skip()
# Check that by default we return a WGS 84 SRS
g = ogr.CreateGeometryFromJson("{ 'type': 'Point', 'coordinates' : [ 2, 49] }")
assert g.ExportToWkt() == 'POINT (2 49)'
srs = g.GetSpatialReference()
g = None
assert srs.ExportToWkt().find('WGS 84') >= 0
# But if a crs object is set (allowed originally, but not recommended!), we use it
g = ogr.CreateGeometryFromJson('{ "type": "Point", "coordinates" : [ 2, 49], "crs": { "type": "name", "properties": { "name": "urn:ogc:def:crs:EPSG::4322" } } }')
srs = g.GetSpatialReference()
assert srs.ExportToWkt().find('4322') >= 0
# But if a crs object is set to null, set no crs
g = ogr.CreateGeometryFromJson('{ "type": "Point", "coordinates" : [ 2, 49], "crs": null }')
srs = g.GetSpatialReference()
assert not srs
###############################################################################
# Test ESRI FeatureService scrolling
def test_ogr_geojson_42():
if gdaltest.geojson_drv is None:
pytest.skip()
gdal.SetConfigOption('CPL_CURL_ENABLE_VSIMEM', 'YES')
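    # CPL_CURL_ENABLE_VSIMEM=YES is used here so that the URL-style requests below
    # (test.json?resultRecordCount=...&resultOffset=...) can be answered from
    # /vsimem/ buffers instead of a live ESRI FeatureService.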
resultOffset0 = """
{ "type":"FeatureCollection",
"properties" : {
"exceededTransferLimit" : true
},
"features" :
[
{
"type": "Feature",
"geometry": {
"type": "Point",
"coordinates": [ 2, 49 ]
},
"properties": {
"id": 1,
"a_property": 1,
}
} ] }"""
gdal.FileFromMemBuffer('/vsimem/geojson/test.json?resultRecordCount=1', resultOffset0)
ds = ogr.Open('/vsimem/geojson/test.json?resultRecordCount=1')
lyr = ds.GetLayer(0)
f = lyr.GetNextFeature()
assert f is not None and f.GetFID() == 1
f = lyr.GetNextFeature()
assert f is None
ds = None
gdal.Unlink('/vsimem/geojson/test.json?resultRecordCount=1')
gdal.FileFromMemBuffer('/vsimem/geojson/test.json?resultRecordCount=10', resultOffset0)
gdal.PushErrorHandler()
ds = ogr.Open('/vsimem/geojson/test.json?resultRecordCount=10')
gdal.PopErrorHandler()
lyr = ds.GetLayer(0)
f = lyr.GetNextFeature()
assert f is not None and f.GetFID() == 1
f = lyr.GetNextFeature()
assert f is None
ds = None
gdal.Unlink('/vsimem/geojson/test.json?resultRecordCount=10')
gdal.FileFromMemBuffer('/vsimem/geojson/test.json?', resultOffset0)
gdal.FileFromMemBuffer('/vsimem/geojson/test.json?resultRecordCount=1&resultOffset=0', resultOffset0)
ds = ogr.Open('/vsimem/geojson/test.json?')
lyr = ds.GetLayer(0)
f = lyr.GetNextFeature()
assert f is not None and f.GetFID() == 1
f = lyr.GetNextFeature()
assert f is None
lyr.ResetReading()
f = lyr.GetNextFeature()
assert f is not None and f.GetFID() == 1
resultOffset1 = """
{ "type":"FeatureCollection",
"features" :
[
{
"type": "Feature",
"geometry": {
"type": "Point",
"coordinates": [ 2, 49 ]
},
"properties": {
"id": 2,
"a_property": 1,
}
} ] }"""
gdal.FileFromMemBuffer('/vsimem/geojson/test.json?resultRecordCount=1&resultOffset=1', resultOffset1)
f = lyr.GetNextFeature()
assert f is not None and f.GetFID() == 2
f = lyr.GetNextFeature()
assert f is None
gdal.PushErrorHandler()
fc = lyr.GetFeatureCount()
gdal.PopErrorHandler()
assert fc == 2
gdal.FileFromMemBuffer('/vsimem/geojson/test.json?resultRecordCount=1&returnCountOnly=true',
"""{ "count": 123456}""")
fc = lyr.GetFeatureCount()
assert fc == 123456
gdal.PushErrorHandler()
extent = lyr.GetExtent()
gdal.PopErrorHandler()
assert extent == (2, 2, 49, 49)
gdal.FileFromMemBuffer('/vsimem/geojson/test.json?resultRecordCount=1&returnExtentOnly=true&f=geojson',
"""{"type":"FeatureCollection","bbox":[1, 2, 3, 4],"features":[]}""")
extent = lyr.GetExtent()
assert extent == (1.0, 3.0, 2.0, 4.0)
assert lyr.TestCapability(ogr.OLCFastFeatureCount) == 1
assert lyr.TestCapability(ogr.OLCFastGetExtent) == 0
assert lyr.TestCapability('foo') == 0
# Test scrolling with ESRI json
resultOffset0 = """
{
"objectIdFieldName" : "objectid",
"geometryType" : "esriGeometryPoint",
"fields" : [
{
"name" : "objectid",
"alias" : "Object ID",
"type" : "esriFieldTypeOID"
},
],
"features" : [
{
"geometry" : {
"x" : 2,
"y" : 49,
"z" : 1
},
"attributes" : {
"objectid" : 1
}
}
],
"exceededTransferLimit": true
}
"""
resultOffset1 = """
{
"objectIdFieldName" : "objectid",
"geometryType" : "esriGeometryPoint",
"fields" : [
{
"name" : "objectid",
"alias" : "Object ID",
"type" : "esriFieldTypeOID"
},
],
"features" : [
{
"geometry": null,
"attributes" : {
"objectid" : 20
}
}
]
}
"""
gdal.FileFromMemBuffer('/vsimem/geojson/test.json?resultRecordCount=1', resultOffset0)
gdal.FileFromMemBuffer('/vsimem/geojson/test.json?resultRecordCount=1&resultOffset=1', resultOffset1)
ds = ogr.Open('/vsimem/geojson/test.json?resultRecordCount=1')
lyr = ds.GetLayer(0)
f = lyr.GetNextFeature()
assert f is not None and f.GetFID() == 1
f = lyr.GetNextFeature()
assert f is not None and f.GetFID() == 20
ds = None
gdal.Unlink('/vsimem/geojson/test.json?resultRecordCount=1')
gdal.Unlink('/vsimem/geojson/test.json?resultRecordCount=1&resultOffset=1')
###############################################################################
# Test Feature without geometry
def test_ogr_geojson_43():
if gdaltest.geojson_drv is None:
pytest.skip()
ds = ogr.Open("""{"type": "FeatureCollection", "features":[
{"type": "Feature", "properties": {"foo": "bar"}}]}""")
assert ds is not None, 'Failed to open datasource'
lyr = ds.GetLayerByName('OGRGeoJSON')
feature = lyr.GetNextFeature()
if feature.GetFieldAsString("foo") != 'bar':
feature.DumpReadable()
pytest.fail()
lyr = None
ds = None
###############################################################################
# Test null Feature (#6166)
def test_ogr_geojson_44():
if gdaltest.geojson_drv is None:
pytest.skip()
with gdaltest.error_handler():
ogr.Open("""{"type": "FeatureCollection", "features":[ null ]}""")
###############################################################################
# Test native data support
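# With the NATIVE_DATA open option, foreign members of the FeatureCollection are
# exposed as NATIVE_DATA layer metadata and per-feature native data, and the
# NATIVE_DATA / NATIVE_MEDIA_TYPE layer creation options allow writing them back.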
def test_ogr_geojson_45():
if gdaltest.geojson_drv is None:
pytest.skip()
# Test read support
content = """{"type": "FeatureCollection", "foo": "bar", "bar": "baz",
"features":[ { "type": "Feature", "foo": ["bar", "baz", 1.0, true, false,[],{}], "properties": { "myprop": "myvalue" }, "geometry": null } ]}"""
for i in range(2):
if i == 0:
ds = gdal.OpenEx(content, gdal.OF_VECTOR, open_options=['NATIVE_DATA=YES'])
else:
gdal.FileFromMemBuffer('/vsimem/ogr_geojson_45.json', content)
ds = gdal.OpenEx('/vsimem/ogr_geojson_45.json', gdal.OF_VECTOR, open_options=['NATIVE_DATA=YES'])
lyr = ds.GetLayer(0)
native_data = lyr.GetMetadataItem("NATIVE_DATA", "NATIVE_DATA")
assert native_data == '{ "foo": "bar", "bar": "baz" }'
native_media_type = lyr.GetMetadataItem("NATIVE_MEDIA_TYPE", "NATIVE_DATA")
assert native_media_type == 'application/vnd.geo+json'
f = lyr.GetNextFeature()
native_data = f.GetNativeData()
if i == 0:
expected = [
'{ "type": "Feature", "foo": [ "bar", "baz", 1.000000, true, false, [ ], { } ], "properties": { "myprop": "myvalue" }, "geometry": null }',
'{ "type": "Feature", "foo": [ "bar", "baz", 1.0, true, false, [ ], { } ], "properties": { "myprop": "myvalue" }, "geometry": null }']
else:
expected = ['{"type":"Feature","foo":["bar","baz",1.0,true,false,[],{}],"properties":{"myprop":"myvalue"},"geometry":null}']
assert native_data in expected
native_media_type = f.GetNativeMediaType()
assert native_media_type == 'application/vnd.geo+json'
ds = None
if i == 1:
gdal.Unlink('/vsimem/ogr_geojson_45.json')
ds = ogr.GetDriverByName('GeoJSON').CreateDataSource('/vsimem/ogr_geojson_45.json')
lyr = ds.CreateLayer('test', options=[
'NATIVE_DATA={ "type": "ignored", "bbox": [ 0, 0, 0, 0 ], "foo": "bar", "bar": "baz", "features": "ignored" }',
'NATIVE_MEDIA_TYPE=application/vnd.geo+json'])
f = ogr.Feature(lyr.GetLayerDefn())
json_geom = """{ "type": "GeometryCollection", "foo_gc": "bar_gc", "geometries" : [
{ "type": "Point", "foo_point": "bar_point", "coordinates": [0,1,2, 3] },
{ "type": "LineString", "foo_linestring": "bar_linestring", "coordinates": [[0,1,2, 4]] },
{ "type": "MultiPoint", "foo_multipoint": "bar_multipoint", "coordinates": [[0,1,2, 5]] },
{ "type": "MultiLineString", "foo_multilinestring": "bar_multilinestring", "coordinates": [[[0,1,2, 6]]] },
{ "type": "Polygon", "foo_polygon": "bar_polygon", "coordinates": [[[0,1,2, 7]]] },
{ "type": "MultiPolygon", "foo_multipolygon": "bar_multipolygon", "coordinates": [[[[0,1,2, 8]]]] }
] }"""
f.SetNativeData('{ "type": "ignored", "bbox": "ignored", "properties" : "ignored", "foo_feature": "bar_feature", "geometry": %s }' % json_geom)
f.SetNativeMediaType('application/vnd.geo+json')
f.SetGeometry(ogr.CreateGeometryFromJson(json_geom))
lyr.CreateFeature(f)
ds = None
fp = gdal.VSIFOpenL('/vsimem/ogr_geojson_45.json', 'rb')
data = gdal.VSIFReadL(1, 10000, fp).decode('ascii')
gdal.VSIFCloseL(fp)
gdal.Unlink('/vsimem/ogr_geojson_45.json')
assert ('"bbox": [ 0, 1, 2, 0, 1, 2 ],' in data and \
'"foo": "bar"' in data and '"bar": "baz"' in data and \
'"foo_feature": "bar_feature"' in data and \
'"foo_gc": "bar_gc"' in data and \
'"foo_point": "bar_point"' in data and '3' in data and \
'"foo_linestring": "bar_linestring"' in data and '4' in data and \
'"foo_multipoint": "bar_multipoint"' in data and '5' in data and \
'"foo_multilinestring": "bar_multilinestring"' in data and '6' in data and \
'"foo_polygon": "bar_polygon"' in data and '7' in data and \
'"foo_multipolygon": "bar_multipolygon"' in data and '8' in data)
# Test native support with string id
src_ds = gdal.OpenEx("""{
"type": "FeatureCollection",
"features": [
{ "type": "Feature",
"id": "foobarbaz",
"properties": {},
"geometry": null
}
]
}
""", open_options=['NATIVE_DATA=YES'])
gdal.VectorTranslate('/vsimem/out.json', src_ds, format='GeoJSON')
got = read_file('/vsimem/out.json')
gdal.Unlink('/vsimem/out.json')
expected = """{
"type": "FeatureCollection",
"features": [
{ "type": "Feature", "id": "foobarbaz", "properties": { }, "geometry": null }
]
}
"""
assert json.loads(got) == json.loads(expected)
# Test native support with numeric id
src_ds = gdal.OpenEx("""{
"type": "FeatureCollection",
"features": [
{ "type": "Feature",
"id": 1234657890123,
"properties": {},
"geometry": null
}
]
}
""", open_options=['NATIVE_DATA=YES'])
gdal.VectorTranslate('/vsimem/out.json', src_ds, format='GeoJSON')
got = read_file('/vsimem/out.json')
gdal.Unlink('/vsimem/out.json')
expected = """{
"type": "FeatureCollection",
"features": [
{ "type": "Feature", "id": 1234657890123, "properties": { }, "geometry": null }
]
}
"""
assert json.loads(got) == json.loads(expected)
###############################################################################
# Test that JSON content written as the value of a string field is serialized as-is
def test_ogr_geojson_46():
if gdaltest.geojson_drv is None:
pytest.skip()
ds = ogr.GetDriverByName('GeoJSON').CreateDataSource('/vsimem/ogr_geojson_46.json')
lyr = ds.CreateLayer('test')
lyr.CreateField(ogr.FieldDefn('myprop'))
f = ogr.Feature(lyr.GetLayerDefn())
f['myprop'] = '{ "a": "b" }'
lyr.CreateFeature(f)
ds = None
fp = gdal.VSIFOpenL('/vsimem/ogr_geojson_46.json', 'rb')
data = gdal.VSIFReadL(1, 10000, fp).decode('ascii')
gdal.VSIFCloseL(fp)
gdal.Unlink('/vsimem/ogr_geojson_46.json')
assert '{ "myprop": { "a": "b" } }' in data
###############################################################################
# Test update support
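# Opening an existing file with update=1 allows rewriting features in place
# (SetFeature) and appending new ones (CreateFeature), while preserving the
# foreign members of the original document.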
def test_ogr_geojson_47():
if gdaltest.geojson_drv is None:
pytest.skip()
# ERROR 6: Update from inline definition not supported
with gdaltest.error_handler():
ds = ogr.Open('{"type": "FeatureCollection", "features":[]}', update=1)
assert ds is None
gdal.FileFromMemBuffer('/vsimem/ogr_geojson_47.json',
"""{"type": "FeatureCollection", "foo": "bar",
"features":[ { "type": "Feature", "bar": "baz", "properties": { "myprop": "myvalue" }, "geometry": null } ]}""")
# Test read support
ds = ogr.Open('/vsimem/ogr_geojson_47.json', update=1)
lyr = ds.GetLayer(0)
f = lyr.GetNextFeature()
f.SetField("myprop", "another_value")
lyr.SetFeature(f)
ds = None
fp = gdal.VSIFOpenL('/vsimem/ogr_geojson_47.json', 'rb')
if fp is not None:
data = gdal.VSIFReadL(1, 10000, fp).decode('ascii')
gdal.VSIFCloseL(fp)
else:
data = None
    # we don't want a crs member if there is none in the source
assert ('"foo": "bar"' in data and '"bar": "baz"' in data and \
'crs' not in data and \
'"myprop": "another_value"' in data)
# Test append support
ds = ogr.Open('/vsimem/ogr_geojson_47.json', update=1)
lyr = ds.GetLayer(0)
f = ogr.Feature(lyr.GetLayerDefn())
f.SetGeometry(ogr.CreateGeometryFromWkt('POINT(1 2)'))
lyr.CreateFeature(f)
if f.GetFID() != 1:
f.DumpReadable()
pytest.fail()
f = ogr.Feature(lyr.GetLayerDefn())
f.SetGeometry(ogr.CreateGeometryFromWkt('POINT(2 3)'))
lyr.CreateFeature(f)
f = lyr.GetNextFeature()
if f.GetFID() != 0:
f.DumpReadable()
pytest.fail()
ds = None
# Test append support
ds = ogr.Open('/vsimem/ogr_geojson_47.json', update=1)
lyr = ds.GetLayer(0)
f = ogr.Feature(lyr.GetLayerDefn())
f.SetGeometry(ogr.CreateGeometryFromWkt('POINT(4 5)'))
lyr.CreateFeature(f)
f.SetField("myprop", "value_of_point_4_5")
lyr.SetFeature(f)
ds = None
ds = ogr.Open('/vsimem/ogr_geojson_47.json')
lyr = ds.GetLayer(0)
assert lyr.GetFeatureCount() == 4
ds = None
fp = gdal.VSIFOpenL('/vsimem/ogr_geojson_47.json', 'rb')
if fp is not None:
data = gdal.VSIFReadL(1, 10000, fp).decode('ascii')
gdal.VSIFCloseL(fp)
else:
data = None
    # we don't want a crs member if there is none in the source
assert ('"foo": "bar"' in data and '"bar": "baz"' in data and \
'crs' not in data and \
'"myprop": "another_value"' in data and \
'"myprop": "value_of_point_4_5"' in data and \
'id' not in data)
gdal.Unlink('/vsimem/ogr_geojson_47.json')
# Test appending to empty features array
gdal.FileFromMemBuffer('/vsimem/ogr_geojson_47.json', """{ "type": "FeatureCollection", "features": []}""")
ds = ogr.Open('/vsimem/ogr_geojson_47.json', update=1)
lyr = ds.GetLayer(0)
f = ogr.Feature(lyr.GetLayerDefn())
lyr.CreateFeature(f)
ds = None
ds = ogr.Open('/vsimem/ogr_geojson_47.json')
lyr = ds.GetLayer(0)
assert lyr.GetFeatureCount() == 1
ds = None
    # Test appending to an array ending with a non-feature element
gdal.FileFromMemBuffer('/vsimem/ogr_geojson_47.json', """{ "type": "FeatureCollection", "features": [ null ]}""")
ds = ogr.Open('/vsimem/ogr_geojson_47.json', update=1)
lyr = ds.GetLayer(0)
f = ogr.Feature(lyr.GetLayerDefn())
lyr.CreateFeature(f)
ds = None
ds = ogr.Open('/vsimem/ogr_geojson_47.json')
lyr = ds.GetLayer(0)
assert lyr.GetFeatureCount() == 1
ds = None
# Test appending to feature collection not ending with "features"
gdal.FileFromMemBuffer('/vsimem/ogr_geojson_47.json', """{ "type": "FeatureCollection", "features": [], "something": "else"}""")
ds = ogr.Open('/vsimem/ogr_geojson_47.json', update=1)
lyr = ds.GetLayer(0)
f = ogr.Feature(lyr.GetLayerDefn())
lyr.CreateFeature(f)
ds = None
ds = ogr.Open('/vsimem/ogr_geojson_47.json')
lyr = ds.GetLayer(0)
assert lyr.GetFeatureCount() == 1
ds = None
fp = gdal.VSIFOpenL('/vsimem/ogr_geojson_47.json', 'rb')
if fp is not None:
data = gdal.VSIFReadL(1, 10000, fp).decode('ascii')
gdal.VSIFCloseL(fp)
else:
data = None
assert 'something' in data
# Test appending to feature collection with "bbox"
gdal.FileFromMemBuffer('/vsimem/ogr_geojson_47.json', """{ "type": "FeatureCollection", "bbox": [0,0,0,0], "features": [ { "type": "Feature", "geometry": { "type": "Point", "coordinates": [0,0]} } ]}""")
ds = ogr.Open('/vsimem/ogr_geojson_47.json', update=1)
lyr = ds.GetLayer(0)
f = ogr.Feature(lyr.GetLayerDefn())
lyr.CreateFeature(f)
ds = None
ds = ogr.Open('/vsimem/ogr_geojson_47.json')
lyr = ds.GetLayer(0)
assert lyr.GetFeatureCount() == 2
ds = None
fp = gdal.VSIFOpenL('/vsimem/ogr_geojson_47.json', 'rb')
if fp is not None:
data = gdal.VSIFReadL(1, 10000, fp).decode('ascii')
gdal.VSIFCloseL(fp)
else:
data = None
assert 'bbox' in data
gdal.Unlink('/vsimem/ogr_geojson_47.json')
###############################################################################
# Test update support with file that has a single feature not in a FeatureCollection
def test_ogr_geojson_48():
if gdaltest.geojson_drv is None:
pytest.skip()
gdal.FileFromMemBuffer('/vsimem/ogr_geojson_48.json',
"""{ "type": "Feature", "bar": "baz", "bbox": [2,49,2,49], "properties": { "myprop": "myvalue" }, "geometry": {"type": "Point", "coordinates": [ 2, 49]} }""")
# Test read support
ds = ogr.Open('/vsimem/ogr_geojson_48.json', update=1)
lyr = ds.GetLayer(0)
f = lyr.GetNextFeature()
f.SetField("myprop", "another_value")
f.SetGeometry(ogr.CreateGeometryFromWkt('POINT (3 50)'))
lyr.SetFeature(f)
ds = None
fp = gdal.VSIFOpenL('/vsimem/ogr_geojson_48.json', 'rb')
if fp is not None:
data = gdal.VSIFReadL(1, 10000, fp).decode('ascii')
gdal.VSIFCloseL(fp)
else:
data = None
gdal.Unlink('/vsimem/ogr_geojson_48.json')
    # we don't want a crs member if there is none in the source
assert ('"bar": "baz"' in data and \
'"bbox": [ 3.0, 50.0, 3.0, 50.0 ]' in data and \
'crs' not in data and \
'FeatureCollection' not in data and \
'"myprop": "another_value"' in data)
###############################################################################
# Test ARRAY_AS_STRING
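# With the ARRAY_AS_STRING=YES open option, JSON array values are reported as
# OFTString fields holding their JSON serialization rather than as list fields.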
def test_ogr_geojson_49():
if gdaltest.geojson_drv is None:
pytest.skip()
gdal.FileFromMemBuffer('/vsimem/ogr_geojson_49.json',
"""{ "type": "Feature", "properties": { "foo": ["bar"] }, "geometry": null }""")
# Test read support
ds = gdal.OpenEx('/vsimem/ogr_geojson_49.json', open_options=['ARRAY_AS_STRING=YES'])
lyr = ds.GetLayer(0)
assert lyr.GetLayerDefn().GetFieldDefn(0).GetType() == ogr.OFTString
f = lyr.GetNextFeature()
if f['foo'] != '[ "bar" ]':
f.DumpReadable()
pytest.fail()
ds = None
gdal.Unlink('/vsimem/ogr_geojson_49.json')
###############################################################################
# Test that we serialize floating point values with enough significant figures
def test_ogr_geojson_50():
if gdaltest.geojson_drv is None:
pytest.skip()
ds = ogr.GetDriverByName('GeoJSON').CreateDataSource('/vsimem/ogr_geojson_50.json')
lyr = ds.CreateLayer('test')
lyr.CreateField(ogr.FieldDefn('val', ogr.OFTReal))
f = ogr.Feature(lyr.GetLayerDefn())
f['val'] = 1.23456789012456
lyr.CreateFeature(f)
# To test smart rounding
f = ogr.Feature(lyr.GetLayerDefn())
f['val'] = 5268.813
lyr.CreateFeature(f)
f = None
ds = None
fp = gdal.VSIFOpenL('/vsimem/ogr_geojson_50.json', 'rb')
data = gdal.VSIFReadL(1, 10000, fp).decode('ascii')
gdal.VSIFCloseL(fp)
gdal.Unlink('/vsimem/ogr_geojson_50.json')
assert '1.23456789012456' in data or '5268.813 ' in data
# If SIGNIFICANT_FIGURES is explicitly specified, and COORDINATE_PRECISION not,
# then it also applies to coordinates
ds = ogr.GetDriverByName('GeoJSON').CreateDataSource('/vsimem/ogr_geojson_50.json')
lyr = ds.CreateLayer('test', options=['SIGNIFICANT_FIGURES=17'])
lyr.CreateField(ogr.FieldDefn('val', ogr.OFTReal))
f = ogr.Feature(lyr.GetLayerDefn())
f.SetGeometry(ogr.CreateGeometryFromWkt('POINT (0.0000123456789012456 0)'))
lyr.CreateFeature(f)
f = None
ds = None
fp = gdal.VSIFOpenL('/vsimem/ogr_geojson_50.json', 'rb')
data = gdal.VSIFReadL(1, 10000, fp).decode('ascii')
gdal.VSIFCloseL(fp)
gdal.Unlink('/vsimem/ogr_geojson_50.json')
assert '1.23456789012456' in data or '-5' in data
# If SIGNIFICANT_FIGURES is explicitly specified, and COORDINATE_PRECISION too,
# then SIGNIFICANT_FIGURES only applies to non-coordinates floating point values.
ds = ogr.GetDriverByName('GeoJSON').CreateDataSource('/vsimem/ogr_geojson_50.json')
lyr = ds.CreateLayer('test', options=['COORDINATE_PRECISION=15', 'SIGNIFICANT_FIGURES=17'])
lyr.CreateField(ogr.FieldDefn('val', ogr.OFTReal))
f = ogr.Feature(lyr.GetLayerDefn())
f['val'] = 1.23456789012456
f.SetGeometry(ogr.CreateGeometryFromWkt('POINT (0.0000123456789012456 0)'))
lyr.CreateFeature(f)
f = None
ds = None
fp = gdal.VSIFOpenL('/vsimem/ogr_geojson_50.json', 'rb')
data = gdal.VSIFReadL(1, 10000, fp).decode('ascii')
gdal.VSIFCloseL(fp)
gdal.Unlink('/vsimem/ogr_geojson_50.json')
assert '0.00001234' in data and '1.23456789012456' in data
###############################################################################
# Test writing empty geometries
def test_ogr_geojson_51():
if gdaltest.geojson_drv is None:
pytest.skip()
ds = ogr.GetDriverByName('GeoJSON').CreateDataSource('/vsimem/ogr_geojson_51.json')
lyr = ds.CreateLayer('test')
lyr.CreateField(ogr.FieldDefn('id', ogr.OFTInteger))
f = ogr.Feature(lyr.GetLayerDefn())
f['id'] = 1
f.SetGeometry(ogr.CreateGeometryFromWkt('POINT EMPTY'))
lyr.CreateFeature(f)
f = ogr.Feature(lyr.GetLayerDefn())
f['id'] = 2
f.SetGeometry(ogr.CreateGeometryFromWkt('LINESTRING EMPTY'))
lyr.CreateFeature(f)
f = ogr.Feature(lyr.GetLayerDefn())
f['id'] = 3
f.SetGeometry(ogr.CreateGeometryFromWkt('POLYGON EMPTY'))
lyr.CreateFeature(f)
f = ogr.Feature(lyr.GetLayerDefn())
f['id'] = 4
f.SetGeometry(ogr.CreateGeometryFromWkt('MULTIPOINT EMPTY'))
lyr.CreateFeature(f)
f = ogr.Feature(lyr.GetLayerDefn())
f['id'] = 5
f.SetGeometry(ogr.CreateGeometryFromWkt('MULTILINESTRING EMPTY'))
lyr.CreateFeature(f)
f = ogr.Feature(lyr.GetLayerDefn())
f['id'] = 6
f.SetGeometry(ogr.CreateGeometryFromWkt('MULTIPOLYGON EMPTY'))
lyr.CreateFeature(f)
f = ogr.Feature(lyr.GetLayerDefn())
f['id'] = 7
f.SetGeometry(ogr.CreateGeometryFromWkt('GEOMETRYCOLLECTION EMPTY'))
lyr.CreateFeature(f)
f = ogr.Feature(lyr.GetLayerDefn())
ds = None
fp = gdal.VSIFOpenL('/vsimem/ogr_geojson_51.json', 'rb')
data = gdal.VSIFReadL(1, 10000, fp).decode('ascii')
gdal.VSIFCloseL(fp)
gdal.Unlink('/vsimem/ogr_geojson_51.json')
assert '{ "id": 1 }, "geometry": null' in data
assert '{ "id": 2 }, "geometry": { "type": "LineString", "coordinates": [ ] } }' in data
assert '{ "id": 3 }, "geometry": { "type": "Polygon", "coordinates": [ ] } }' in data
assert '{ "id": 4 }, "geometry": { "type": "MultiPoint", "coordinates": [ ] } }' in data
assert '{ "id": 5 }, "geometry": { "type": "MultiLineString", "coordinates": [ ] } }' in data
assert '{ "id": 6 }, "geometry": { "type": "MultiPolygon", "coordinates": [ ] } }' in data
assert '{ "id": 7 }, "geometry": { "type": "GeometryCollection", "geometries": [ ] } }' in data
###############################################################################
# Test NULL type detection
def test_ogr_geojson_52():
if gdaltest.geojson_drv is None:
pytest.skip()
ds = ogr.Open('data/nullvalues.geojson')
assert ds is not None, 'Failed to open datasource'
    assert ds.GetLayerCount() == 1, 'Wrong number of layers'
lyr = ds.GetLayerByName('nullvalues')
assert lyr is not None, 'Missing layer called nullvalues'
fld = lyr.GetLayerDefn().GetFieldDefn(0)
assert fld.GetNameRef() == 'int'
assert fld.GetType() == ogr.OFTInteger
fld = lyr.GetLayerDefn().GetFieldDefn(1)
assert fld.GetNameRef() == 'string'
assert fld.GetType() == ogr.OFTString
fld = lyr.GetLayerDefn().GetFieldDefn(2)
assert fld.GetNameRef() == 'double'
assert fld.GetType() == ogr.OFTReal
###############################################################################
# Test that M is ignored (this is a test of OGRLayer::CreateFeature() actually)
def test_ogr_geojson_53():
if gdaltest.geojson_drv is None:
pytest.skip()
ds = ogr.GetDriverByName('GeoJSON').CreateDataSource('/vsimem/ogr_geojson_53.json')
lyr = ds.CreateLayer('test')
f = ogr.Feature(lyr.GetLayerDefn())
f.SetGeometry(ogr.CreateGeometryFromWkt('POINT ZM (1 2 3 4)'))
lyr.CreateFeature(f)
ds = None
fp = gdal.VSIFOpenL('/vsimem/ogr_geojson_53.json', 'rb')
data = gdal.VSIFReadL(1, 10000, fp).decode('ascii')
gdal.VSIFCloseL(fp)
gdal.Unlink('/vsimem/ogr_geojson_53.json')
assert '{ "type": "Point", "coordinates": [ 1.0, 2.0, 3.0 ] }' in data
###############################################################################
# Test NULL type detection when first value is null
def test_ogr_geojson_54():
if gdaltest.geojson_drv is None:
pytest.skip()
ds = ogr.Open("""{
"type": "FeatureCollection",
"features": [
{ "type": "Feature", "properties": { "int": null, "string": null, "double": null, "dt" : null, "boolean": null, "null": null }, "geometry": null },
{ "type": "Feature", "properties": { "int": 168, "string": "string", "double": 1.23, "dt" : "2016-05-18T12:34:56Z", "boolean": true }, "geometry": null }
]
}
""")
lyr = ds.GetLayer(0)
fld = lyr.GetLayerDefn().GetFieldDefn(0)
assert fld.GetType() == ogr.OFTInteger
fld = lyr.GetLayerDefn().GetFieldDefn(1)
assert fld.GetType() == ogr.OFTString
fld = lyr.GetLayerDefn().GetFieldDefn(2)
assert fld.GetType() == ogr.OFTReal
fld = lyr.GetLayerDefn().GetFieldDefn(3)
assert fld.GetType() == ogr.OFTDateTime
fld = lyr.GetLayerDefn().GetFieldDefn(4)
assert fld.GetType() == ogr.OFTInteger
assert fld.GetSubType() == ogr.OFSTBoolean
assert fld.GetWidth() == 1
fld = lyr.GetLayerDefn().GetFieldDefn(5)
assert fld.GetType() == ogr.OFTString
###############################################################################
# Test RFC 7946
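# The RFC7946=YES layer creation option, exercised below, makes the writer emit
# RFC 7946 compliant output: WGS 84 coordinates limited to 7 decimals, bbox
# members, counter-clockwise outer rings, and only the members allowed by the
# specification at each level. read_file() is a small helper shared by the
# remaining tests.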
def read_file(filename):
f = gdal.VSIFOpenL(filename, "rb")
if f is None:
return None
content = gdal.VSIFReadL(1, 10000, f).decode('UTF-8')
gdal.VSIFCloseL(f)
return content
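# A minimal sketch (hypothetical helper, not used by the tests in this file):
# same read pattern as read_file() above, but sizing the buffer from
# gdal.VSIStatL() instead of assuming the output fits in 10000 bytes.
def _read_file_whole(filename):
    stat = gdal.VSIStatL(filename)
    if stat is None:
        return None
    f = gdal.VSIFOpenL(filename, 'rb')
    if f is None:
        return None
    # Read exactly the number of bytes reported by the stat call.
    content = gdal.VSIFReadL(1, stat.size, f).decode('UTF-8')
    gdal.VSIFCloseL(f)
    return content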
def test_ogr_geojson_55():
if gdaltest.geojson_drv is None:
pytest.skip()
# Basic test for standard bbox and coordinate truncation
gdal.VectorTranslate('/vsimem/out.json', """{
"type": "FeatureCollection",
"features": [
{ "type": "Feature", "id": 123, "properties": {}, "geometry": { "type": "Point", "coordinates": [2.123456789, 49] } },
{ "type": "Feature", "id": 124, "properties": {}, "geometry": { "type": "Point", "coordinates": [3, 50] } }
]
}""", options='-f GeoJSON -lco RFC7946=YES -lco WRITE_BBOX=YES -preserve_fid')
got = read_file('/vsimem/out.json')
gdal.Unlink('/vsimem/out.json')
expected = """{
"type": "FeatureCollection",
"bbox": [ 2.1234568, 49.0000000, 3.0000000, 50.0000000 ],
"features": [
{ "type": "Feature", "id": 123, "properties": { }, "bbox": [ 2.1234568, 49.0, 2.1234568, 49.0 ], "geometry": { "type": "Point", "coordinates": [ 2.1234568, 49.0 ] } },
{ "type": "Feature", "id": 124, "properties": { }, "bbox": [ 3.0, 50.0, 3.0, 50.0 ], "geometry": { "type": "Point", "coordinates": [ 3.0, 50.0 ] } }
]
}
"""
assert json.loads(got) == json.loads(expected)
# Test polygon winding order
gdal.VectorTranslate('/vsimem/out.json', """{
"type": "FeatureCollection",
"features": [
{ "type": "Feature", "geometry": { "type": "Polygon", "coordinates": [[[2,49],[3,49],[3,50],[2,50],[2,49]],[[2.1,49.1],[2.1,49.9],[2.9,49.9],[2.9,49.1],[2.1,49.1]]] } },
{ "type": "Feature", "geometry": { "type": "Polygon", "coordinates": [[[2,49],[2,50],[3,50],[3,49],[2,49]],[[2.1,49.1],[2.9,49.1],[2.9,49.9],[2.1,49.9],[2.1,49.1]]] } },
]
}
""", format='GeoJSON', layerCreationOptions=['RFC7946=YES', 'WRITE_BBOX=YES'])
got = read_file('/vsimem/out.json')
gdal.Unlink('/vsimem/out.json')
expected = """{
"type": "FeatureCollection",
"bbox": [ 2.0000000, 49.0000000, 3.0000000, 50.0000000 ],
"features": [
{ "type": "Feature", "properties": { }, "bbox": [ 2.0, 49.0, 3.0, 50.0 ], "geometry": { "type": "Polygon", "coordinates": [ [ [ 2.0, 49.0 ], [ 3.0, 49.0 ], [ 3.0, 50.0 ], [ 2.0, 50.0 ], [ 2.0, 49.0 ] ], [ [ 2.1, 49.1 ], [ 2.1, 49.9 ], [ 2.9, 49.9 ], [ 2.9, 49.1 ], [ 2.1, 49.1 ] ] ] } },
{ "type": "Feature", "properties": { }, "bbox": [ 2.0, 49.0, 3.0, 50.0 ], "geometry": { "type": "Polygon", "coordinates": [ [ [ 2.0, 49.0 ], [ 3.0, 49.0 ], [ 3.0, 50.0 ], [ 2.0, 50.0 ], [ 2.0, 49.0 ] ], [ [ 2.1, 49.1 ], [ 2.1, 49.9 ], [ 2.9, 49.9 ], [ 2.9, 49.1 ], [ 2.1, 49.1 ] ] ] } }
]
}
"""
assert json.loads(got) == json.loads(expected)
# Test foreign member
src_ds = gdal.OpenEx("""{
"type": "FeatureCollection",
"coordinates": "should not be found in output",
"geometries": "should not be found in output",
"geometry": "should not be found in output",
"properties": "should not be found in output",
"valid": "should be in output",
"crs": "should not be found in output",
"bbox": [0,0,0,0],
"features": [
{ "type": "Feature",
"id": ["not expected as child of features"],
"coordinates": "should not be found in output",
"geometries": "should not be found in output",
"features": "should not be found in output",
"valid": "should be in output",
"properties": { "foo": "bar" },
"geometry": {
"type": "Point",
"bbox": [0,0,0,0],
"geometry": "should not be found in output",
"properties": "should not be found in output",
"features": "should not be found in output",
"valid": "should be in output",
"coordinates": [2,49]
}
}
]
}
""", open_options=['NATIVE_DATA=YES'])
gdal.VectorTranslate('/vsimem/out.json', src_ds, format='GeoJSON', layerCreationOptions=['RFC7946=YES'])
got = read_file('/vsimem/out.json')
gdal.Unlink('/vsimem/out.json')
expected = """{
"type": "FeatureCollection",
"valid": "should be in output",
"bbox": [ 2.0000000, 49.0000000, 2.0000000, 49.0000000 ],
"features": [
{ "type": "Feature",
"valid": "should be in output",
"properties": { "id": [ "not expected as child of features" ], "foo": "bar" },
"geometry": { "type": "Point", "coordinates": [ 2.0, 49.0 ], "valid": "should be in output" } }
]
}
"""
assert json.loads(got) == json.loads(expected)
###############################################################################
# Test RFC 7946 (requires GEOS)
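# Antimeridian handling needs GEOS: longitudes beyond +/-180 are wrapped back
# into [-180, 180], and geometries that actually cross the antimeridian are
# split into MultiLineString / MultiPolygon parts.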
def test_ogr_geojson_56():
if gdaltest.geojson_drv is None:
pytest.skip()
if not ogrtest.have_geos():
pytest.skip()
# Test offsetting longitudes beyond antimeridian
gdal.VectorTranslate('/vsimem/out.json', """{
"type": "FeatureCollection",
"features": [
{ "type": "Feature", "geometry": { "type": "Point", "coordinates": [182, 49] } },
{ "type": "Feature", "geometry": { "type": "Point", "coordinates": [-183, 50] } },
{ "type": "Feature", "geometry": { "type": "LineString", "coordinates": [[-183, 51],[-182, 48]] } },
{ "type": "Feature", "geometry": { "type": "LineString", "coordinates": [[182, 52],[183, 47]] } },
{ "type": "Feature", "geometry": { "type": "Polygon", "coordinates": [[[-183, 51],[-183, 48],[-182, 48],[-183, 48],[-183, 51]]] } },
{ "type": "Feature", "geometry": { "type": "Polygon", "coordinates": [[[183, 51],[183, 48],[182, 48],[183, 48],[183, 51]]] } },
]
}""", format='GeoJSON', layerCreationOptions=['RFC7946=YES', 'WRITE_BBOX=YES'])
got = read_file('/vsimem/out.json')
gdal.Unlink('/vsimem/out.json')
expected = """{
"type": "FeatureCollection",
"bbox": [ -178.0000000, 47.0000000, 178.0000000, 52.0000000 ],
"features": [
{ "type": "Feature", "properties": { }, "bbox": [ -178.0, 49.0, -178.0, 49.0 ], "geometry": { "type": "Point", "coordinates": [ -178.0, 49.0 ] } },
{ "type": "Feature", "properties": { }, "bbox": [ 177.0, 50.0, 177.0, 50.0 ], "geometry": { "type": "Point", "coordinates": [ 177.0, 50.0 ] } },
{ "type": "Feature", "properties": { }, "bbox": [ 177.0, 48.0, 178.0, 51.0 ], "geometry": { "type": "LineString", "coordinates": [ [ 177.0, 51.0 ], [ 178.0, 48.0 ] ] } },
{ "type": "Feature", "properties": { }, "bbox": [ -178.0, 47.0, -177.0, 52.0 ], "geometry": { "type": "LineString", "coordinates": [ [ -178.0, 52.0 ], [ -177.0, 47.0 ] ] } },
{ "type": "Feature", "properties": { }, "bbox": [ 177.0, 48.0, 178.0, 51.0 ], "geometry": { "type": "Polygon", "coordinates": [ [ [ 177.0, 51.0 ], [ 177.0, 48.0 ], [ 178.0, 48.0 ], [ 177.0, 48.0 ], [ 177.0, 51.0 ] ] ] } },
{ "type": "Feature", "properties": { }, "bbox": [ -178.0, 48.0, -177.0, 51.0 ], "geometry": { "type": "Polygon", "coordinates": [ [ [ -177.0, 51.0 ], [ -177.0, 48.0 ], [ -178.0, 48.0 ], [ -177.0, 48.0 ], [ -177.0, 51.0 ] ] ] } }
]
}
"""
assert json.loads(got) == json.loads(expected)
# Test geometries across the antimeridian
gdal.VectorTranslate('/vsimem/out.json', """{
"type": "FeatureCollection",
"features": [
{ "type": "Feature", "geometry": { "type": "LineString", "coordinates": [[179, 51],[-179, 48]] } },
{ "type": "Feature", "geometry": { "type": "LineString", "coordinates": [[-179, 52],[179, 47]] } },
{ "type": "Feature", "geometry": { "type": "MultiLineString", "coordinates": [ [ [ 179.0, 51.0 ], [ 180.0, 49.5 ] ], [ [ -180.0, 49.5 ], [ -179.0, 48.0 ] ] ] } },
{ "type": "Feature", "geometry": { "type": "Polygon", "coordinates": [[[177, 51],[-175, 51],[-175, 48],[177, 48],[177, 51]]] } },
{ "type": "Feature", "geometry": { "type": "MultiPolygon", "coordinates": [ [ [ [ 177.0, 51.0 ], [ 177.0, 48.0 ], [ 180.0, 48.0 ], [ 180.0, 51.0 ], [ 177.0, 51.0 ] ] ], [ [ [ -180.0, 51.0 ], [ -180.0, 48.0 ], [ -175.0, 48.0 ], [ -175.0, 51.0 ], [ -180.0, 51.0 ] ] ] ] } }
]
}""", format='GeoJSON', layerCreationOptions=['RFC7946=YES', 'WRITE_BBOX=YES'])
got = read_file('/vsimem/out.json')
gdal.Unlink('/vsimem/out.json')
expected = """{
"type": "FeatureCollection",
"bbox": [ 177.0000000, 47.0000000, -175.0000000, 52.0000000 ],
"features": [
{ "type": "Feature", "properties": { }, "bbox": [ 179.0, 48.0, -179.0, 51.0 ], "geometry": { "type": "MultiLineString", "coordinates": [ [ [ 179.0, 51.0 ], [ 180.0, 49.5 ] ], [ [ -180.0, 49.5 ], [ -179.0, 48.0 ] ] ] } },
{ "type": "Feature", "properties": { }, "bbox": [ 179.0, 47.0, -179.0, 52.0 ], "geometry": { "type": "MultiLineString", "coordinates": [ [ [ -179.0, 52.0 ], [ -180.0, 49.5 ] ], [ [ 180.0, 49.5 ], [ 179.0, 47.0 ] ] ] } },
{ "type": "Feature", "properties": { }, "bbox": [ 179.0, 48.0, -179.0, 51.0 ], "geometry": { "type": "MultiLineString", "coordinates": [ [ [ 179.0, 51.0 ], [ 180.0, 49.5 ] ], [ [ -180.0, 49.5 ], [ -179.0, 48.0 ] ] ] } },
{ "type": "Feature", "properties": { }, "bbox": [ 177.0, 48.0, -175.0, 51.0 ], "geometry": { "type": "MultiPolygon", "coordinates": [ [ [ [ 177.0, 51.0 ], [ 177.0, 48.0 ], [ 180.0, 48.0 ], [ 180.0, 51.0 ], [ 177.0, 51.0 ] ] ], [ [ [ -180.0, 51.0 ], [ -180.0, 48.0 ], [ -175.0, 48.0 ], [ -175.0, 51.0 ], [ -180.0, 51.0 ] ] ] ] } },
{ "type": "Feature", "properties": { }, "bbox": [ 177.0, 48.0, -175.0, 51.0 ], "geometry": { "type": "MultiPolygon", "coordinates": [ [ [ [ 177.0, 51.0 ], [ 177.0, 48.0 ], [ 180.0, 48.0 ], [ 180.0, 51.0 ], [ 177.0, 51.0 ] ] ], [ [ [ -180.0, 51.0 ], [ -180.0, 48.0 ], [ -175.0, 48.0 ], [ -175.0, 51.0 ], [ -180.0, 51.0 ] ] ] ] } }
]
}
"""
assert json.loads(got) == json.loads(expected)
###############################################################################
# Test RFC 7946 and reprojection
def test_ogr_geojson_57():
if gdaltest.geojson_drv is None:
pytest.skip()
if not ogrtest.have_geos():
pytest.skip()
    # Standard case: EPSG:32662: WGS 84 / Plate Carree
src_ds = gdal.GetDriverByName('Memory').Create('', 0, 0, 0)
sr = osr.SpatialReference()
sr.SetFromUserInput('+proj=eqc +lat_ts=0 +lat_0=0 +lon_0=0 +x_0=0 +y_0=0 +datum=WGS84 +units=m +no_defs')
lyr = src_ds.CreateLayer('test', srs=sr)
f = ogr.Feature(lyr.GetLayerDefn())
f.SetGeometry(ogr.CreateGeometryFromWkt('POLYGON((2000000 2000000,2000000 -2000000,-2000000 -2000000,-2000000 2000000,2000000 2000000))'))
lyr.CreateFeature(f)
gdal.VectorTranslate('/vsimem/out.json', src_ds, format='GeoJSON', layerCreationOptions=['WRITE_NAME=NO', 'RFC7946=YES', 'WRITE_BBOX=YES'])
got = read_file('/vsimem/out.json')
gdal.Unlink('/vsimem/out.json')
expected = """{
"type": "FeatureCollection",
"bbox": [ -17.9663057, -17.9663057, 17.9663057, 17.9663057 ],
"features": [
{ "type": "Feature", "properties": { }, "bbox": [ -17.9663057, -17.9663057, 17.9663057, 17.9663057 ], "geometry": { "type": "Polygon", "coordinates": [ [ [ 17.9663057, 17.9663057 ], [ -17.9663057, 17.9663057 ], [ -17.9663057, -17.9663057 ], [ 17.9663057, -17.9663057 ], [ 17.9663057, 17.9663057 ] ] ] } }
]
}
"""
assert json.loads(got) == json.loads(expected)
# Polar case: EPSG:3995: WGS 84 / Arctic Polar Stereographic
src_ds = gdal.GetDriverByName('Memory').Create('', 0, 0, 0)
sr = osr.SpatialReference()
sr.SetFromUserInput('+proj=stere +lat_0=90 +lat_ts=71 +lon_0=0 +k=1 +x_0=0 +y_0=0 +datum=WGS84 +units=m +no_defs')
lyr = src_ds.CreateLayer('test', srs=sr)
f = ogr.Feature(lyr.GetLayerDefn())
f.SetGeometry(ogr.CreateGeometryFromWkt('POLYGON((2000000 2000000,2000000 -2000000,-2000000 -2000000,-2000000 2000000,2000000 2000000))'))
lyr.CreateFeature(f)
f = ogr.Feature(lyr.GetLayerDefn())
f.SetGeometry(ogr.CreateGeometryFromWkt('POLYGON((-2000000 -2000000,-1000000 -2000000,-1000000 2000000,-2000000 2000000,-2000000 -2000000))'))
lyr.CreateFeature(f)
gdal.VectorTranslate('/vsimem/out.json', src_ds, format='GeoJSON', layerCreationOptions=['WRITE_NAME=NO', 'RFC7946=YES', 'WRITE_BBOX=YES'])
got = read_file('/vsimem/out.json')
gdal.Unlink('/vsimem/out.json')
expected = """{
"type": "FeatureCollection",
"bbox": [ -180.0000000, 64.3861643, 180.0000000, 90.0000000 ],
"features": [
{ "type": "Feature", "properties": { }, "bbox": [ -180.0, 64.3861643, 180.0, 90.0 ], "geometry": { "type": "Polygon", "coordinates": [ [ [ 135.0, 64.3861643 ], [ 180.0, 71.7425119 ], [ 180.0, 90.0 ], [ -180.0, 90.0 ], [ -180.0, 71.7425119 ], [ -135.0, 64.3861643 ], [ -45.0, 64.3861643 ], [ 45.0, 64.3861643 ], [ 135.0, 64.3861643 ] ] ] } },
{ "type": "Feature", "properties": { }, "bbox": [ -153.4349488, 64.3861643, -26.5650512, 69.6286694 ], "geometry": { "type": "Polygon", "coordinates": [ [ [ -45.0, 64.3861643 ], [ -26.5650512, 69.6286694 ], [ -153.4349488, 69.6286694 ], [ -135.0, 64.3861643 ], [ -45.0, 64.3861643 ] ] ] } }
]
}
"""
assert json.loads(got) == json.loads(expected)
# Polar case: slice of spherical cap (not intersecting antimeridian, west hemisphere)
src_ds = gdal.GetDriverByName('Memory').Create('', 0, 0, 0)
sr = osr.SpatialReference()
sr.SetFromUserInput('+proj=stere +lat_0=90 +lat_ts=71 +lon_0=0 +k=1 +x_0=0 +y_0=0 +datum=WGS84 +units=m +no_defs')
lyr = src_ds.CreateLayer('test', srs=sr)
f = ogr.Feature(lyr.GetLayerDefn())
f.SetGeometry(ogr.CreateGeometryFromWkt('POLYGON((-2000000 2000000,0 0,-2000000 -2000000,-2000000 2000000))'))
lyr.CreateFeature(f)
gdal.VectorTranslate('/vsimem/out.json', src_ds, format='GeoJSON', layerCreationOptions=['WRITE_NAME=NO', 'RFC7946=YES', 'WRITE_BBOX=YES'])
got = read_file('/vsimem/out.json')
gdal.Unlink('/vsimem/out.json')
expected = """{
"type": "FeatureCollection",
"bbox": [ -135.0000000, 64.3861643, -45.0000000, 90.0000000 ],
"features": [
{ "type": "Feature", "properties": { }, "bbox": [ -135.0, 64.3861643, -45.0, 90.0 ], "geometry": { "type": "Polygon", "coordinates": [ [ [ -135.0, 64.3861643 ], [ -45.0, 64.3861643 ], [ -45.0, 90.0 ], [ -135.0, 90.0 ], [ -135.0, 64.3861643 ] ] ] } }
]
}
"""
assert json.loads(got) == json.loads(expected)
# Polar case: slice of spherical cap (not intersecting antimeridian, east hemisphere)
src_ds = gdal.GetDriverByName('Memory').Create('', 0, 0, 0)
sr = osr.SpatialReference()
sr.SetFromUserInput('+proj=stere +lat_0=90 +lat_ts=71 +lon_0=0 +k=1 +x_0=0 +y_0=0 +datum=WGS84 +units=m +no_defs')
lyr = src_ds.CreateLayer('test', srs=sr)
f = ogr.Feature(lyr.GetLayerDefn())
f.SetGeometry(ogr.CreateGeometryFromWkt('MULTIPOLYGON(((2000000 2000000,0 0,2000000 -2000000,2000000 2000000)))'))
lyr.CreateFeature(f)
gdal.VectorTranslate('/vsimem/out.json', src_ds, format='GeoJSON', layerCreationOptions=['WRITE_NAME=NO', 'RFC7946=YES', 'WRITE_BBOX=YES'])
got = read_file('/vsimem/out.json')
gdal.Unlink('/vsimem/out.json')
expected = """{
"type": "FeatureCollection",
"bbox": [ 45.0000000, 64.3861643, 135.0000000, 90.0000000 ],
"features": [
{ "type": "Feature", "properties": { }, "bbox": [ 45.0, 64.3861643, 135.0, 90.0 ], "geometry": { "type": "Polygon", "coordinates": [ [ [ 135.0, 64.3861643 ], [ 135.0, 90.0 ], [ 45.0, 90.0 ], [ 45.0, 64.3861643 ], [ 135.0, 64.3861643 ] ] ] } }
]
}
"""
assert json.loads(got) == json.loads(expected)
# Polar case: slice of spherical cap crossing the antimeridian
src_ds = gdal.GetDriverByName('Memory').Create('', 0, 0, 0)
sr = osr.SpatialReference()
sr.SetFromUserInput('+proj=stere +lat_0=90 +lat_ts=71 +lon_0=0 +k=1 +x_0=0 +y_0=0 +datum=WGS84 +units=m +no_defs')
lyr = src_ds.CreateLayer('test', srs=sr)
f = ogr.Feature(lyr.GetLayerDefn())
f.SetGeometry(ogr.CreateGeometryFromWkt('POLYGON((100000 100000,-100000 100000,0 0,100000 100000))'))
lyr.CreateFeature(f)
gdal.VectorTranslate('/vsimem/out.json', src_ds, format='GeoJSON', layerCreationOptions=['WRITE_NAME=NO', 'RFC7946=YES', 'WRITE_BBOX=YES'])
got = read_file('/vsimem/out.json')
gdal.Unlink('/vsimem/out.json')
expected = """{
"type": "FeatureCollection",
"bbox": [ 135.0000000, 88.6984598, -135.0000000, 90.0000000 ],
"features": [
{ "type": "Feature", "properties": { }, "bbox": [ 135.0, 88.6984598, -135.0, 90.0 ], "geometry": { "type": "MultiPolygon", "coordinates": [ [ [ [ 180.0, 89.0796531 ], [ 180.0, 90.0 ], [ 135.0, 88.6984598 ], [ 180.0, 89.0796531 ] ] ], [ [ [ -180.0, 90.0 ], [ -180.0, 89.0796531 ], [ -135.0, 88.6984598 ] ] ] ] } }
]
}
"""
assert json.loads(got) == json.loads(expected)
# Polar case: EPSG:3031: WGS 84 / Antarctic Polar Stereographic
src_ds = gdal.GetDriverByName('Memory').Create('', 0, 0, 0)
sr = osr.SpatialReference()
sr.SetFromUserInput('+proj=stere +lat_0=-90 +lat_ts=-71 +lon_0=0 +k=1 +x_0=0 +y_0=0 +datum=WGS84 +units=m +no_defs')
lyr = src_ds.CreateLayer('test', srs=sr)
f = ogr.Feature(lyr.GetLayerDefn())
f.SetGeometry(ogr.CreateGeometryFromWkt('MULTIPOLYGON(((2000000 2000000,2000000 -2000000,-2000000 -2000000,-2000000 2000000,2000000 2000000)))'))
lyr.CreateFeature(f)
gdal.VectorTranslate('/vsimem/out.json', src_ds, format='GeoJSON', layerCreationOptions=['WRITE_NAME=NO', 'RFC7946=YES', 'WRITE_BBOX=YES'])
got = read_file('/vsimem/out.json')
gdal.Unlink('/vsimem/out.json')
expected = """{
"type": "FeatureCollection",
"bbox": [ -180.0000000, -90.0000000, 180.0000000, -64.3861643 ],
"features": [
{ "type": "Feature", "properties": { }, "bbox": [ -180.0, -90.0, 180.0, -64.3861643 ], "geometry": { "type": "Polygon", "coordinates": [ [ [ 45.0, -64.3861643 ], [ -45.0, -64.3861643 ], [ -135.0, -64.3861643 ], [ -180.0, -71.7425119 ], [ -180.0, -90.0 ], [ 180.0, -90.0 ], [ 180.0, -71.7425119 ], [ 135.0, -64.3861643 ], [ 45.0, -64.3861643 ] ] ] } }
]
}
"""
assert json.loads(got) == json.loads(expected)
# Antimeridian case: EPSG:32660: WGS 84 / UTM zone 60N with polygon and line crossing
src_ds = gdal.GetDriverByName('Memory').Create('', 0, 0, 0)
sr = osr.SpatialReference()
sr.SetFromUserInput('+proj=utm +zone=60 +datum=WGS84 +units=m +no_defs')
lyr = src_ds.CreateLayer('test', srs=sr)
f = ogr.Feature(lyr.GetLayerDefn())
f.SetGeometry(ogr.CreateGeometryFromWkt('POLYGON((670000 4000000,850000 4000000,850000 4100000,670000 4100000,670000 4000000))'))
lyr.CreateFeature(f)
f = ogr.Feature(lyr.GetLayerDefn())
f.SetGeometry(ogr.CreateGeometryFromWkt('MULTILINESTRING((670000 4000000,850000 4100000))'))
lyr.CreateFeature(f)
f = ogr.Feature(lyr.GetLayerDefn())
f.SetGeometry(ogr.CreateGeometryFromWkt('LINESTRING(670000 0,850000 0)'))
lyr.CreateFeature(f)
gdal.VectorTranslate('/vsimem/out.json', src_ds, format='GeoJSON', layerCreationOptions=['WRITE_NAME=NO', 'RFC7946=YES', 'WRITE_BBOX=YES'])
got = read_file('/vsimem/out.json')
gdal.Unlink('/vsimem/out.json')
expected = """{
"type": "FeatureCollection",
"bbox": [ 178.5275649, 0.0000000, -179.0681936, 37.0308258 ],
"features": [
{ "type": "Feature", "properties": { }, "bbox": [ 178.8892102, 36.0816324, -179.0681936, 37.0308258 ], "geometry": { "type": "MultiPolygon", "coordinates": [ [ [ [ 180.0, 36.1071354 ], [ 180.0, 37.0082839 ], [ 178.9112998, 37.0308258 ], [ 178.8892102, 36.1298163 ], [ 180.0, 36.1071354 ] ] ], [ [ [ -179.0681936, 36.9810434 ], [ -180.0, 37.0082839 ], [ -180.0, 36.1071354 ], [ -179.1135277, 36.0816324 ], [ -179.0681936, 36.9810434 ] ] ] ] } },
{ "type": "Feature", "properties": { }, "bbox": [ 178.8892102, 36.1298163, -179.0681936, 36.9810434 ], "geometry": { "type": "MultiLineString", "coordinates": [ [ [ 178.8892102, 36.1298163 ], [ 180.0, 36.5995612 ] ], [ [ -180.0, 36.5995612 ], [ -179.0681936, 36.9810434 ] ] ] } },
{ "type": "Feature", "properties": { }, "bbox": [ 178.5275649, 0.0, -179.8562277, 0.0 ], "geometry": { "type": "MultiLineString", "coordinates": [ [ [ 178.5275649, 0.0 ], [ 180.0, 0.0 ] ], [ [ -180.0, 0.0 ], [ -179.8562277, 0.0 ] ] ] } }
]
}
"""
# with proj 4.9.3
expected2 = """{
"type": "FeatureCollection",
"bbox": [ 178.5275649, 0.0000000, -179.0681936, 37.0308258 ],
"features": [
{ "type": "Feature", "properties": { }, "bbox": [ 178.8892102, 36.0816324, -179.0681936, 37.0308258 ], "geometry": { "type": "MultiPolygon", "coordinates": [ [ [ [ -179.0681936, 36.9810434 ], [ -180.0, 37.0082839 ], [ -180.0, 36.1071354 ], [ -179.1135277, 36.0816324 ], [ -179.0681936, 36.9810434 ] ] ], [ [ [ 178.8892102, 36.1298163 ], [ 180.0, 36.1071354 ], [ 180.0, 37.0082839 ], [ 178.9112998, 37.0308258 ], [ 178.8892102, 36.1298163 ] ] ] ] } },
{ "type": "Feature", "properties": { }, "bbox": [ 178.8892102, 36.1298163, -179.0681936, 36.9810434 ], "geometry": { "type": "MultiLineString", "coordinates": [ [ [ 178.8892102, 36.1298163 ], [ 180.0, 36.5995612 ] ], [ [ -180.0, 36.5995612 ], [ -179.0681936, 36.9810434 ] ] ] } },
{ "type": "Feature", "properties": { }, "bbox": [ 178.5275649, 0.0, -179.8562277, 0.0 ], "geometry": { "type": "MultiLineString", "coordinates": [ [ [ 178.5275649, 0.0 ], [ 180.0, 0.0 ] ], [ [ -180.0, 0.0 ], [ -179.8562277, 0.0 ] ] ] } }
]
}
"""
assert json.loads(got) == json.loads(expected) or json.loads(got) == json.loads(expected2)
    # Antimeridian case: EPSG:32660: WGS 84 / UTM zone 60N with a polygon west of the antimeridian
src_ds = gdal.GetDriverByName('Memory').Create('', 0, 0, 0)
sr = osr.SpatialReference()
sr.SetFromUserInput('+proj=utm +zone=60 +datum=WGS84 +units=m +no_defs')
lyr = src_ds.CreateLayer('test', srs=sr)
f = ogr.Feature(lyr.GetLayerDefn())
f.SetGeometry(ogr.CreateGeometryFromWkt('POLYGON((670000 4000000,700000 4000000,700000 4100000,670000 4100000,670000 4000000))'))
lyr.CreateFeature(f)
gdal.VectorTranslate('/vsimem/out.json', src_ds, format='GeoJSON', layerCreationOptions=['WRITE_NAME=NO', 'RFC7946=YES', 'WRITE_BBOX=YES'])
got = read_file('/vsimem/out.json')
gdal.Unlink('/vsimem/out.json')
expected = """{
"type": "FeatureCollection",
"bbox": [ 178.8892102, 36.1240958, 179.2483693, 37.0308258 ],
"features": [
{ "type": "Feature", "properties": { }, "bbox": [ 178.8892102, 36.1240958, 179.2483693, 37.0308258 ], "geometry": { "type": "Polygon", "coordinates": [ [ [ 178.8892102, 36.1298163 ], [ 179.2223914, 36.1240958 ], [ 179.2483693, 37.0249155 ], [ 178.9112998, 37.0308258 ], [ 178.8892102, 36.1298163 ] ] ] } }
]
}
"""
assert json.loads(got) == json.loads(expected)
###############################################################################
# Test using the name member of FeatureCollection
def test_ogr_geojson_58():
if gdaltest.geojson_drv is None:
pytest.skip()
ds = ogr.Open('{ "type": "FeatureCollection", "name": "layer_name", "features": []}')
assert ds is not None, 'Failed to open datasource'
lyr = ds.GetLayerByName('layer_name')
assert lyr is not None, 'Missing layer called layer_name'
ds = None
ds = ogr.GetDriverByName('GeoJSON').CreateDataSource('/vsimem/ogr_geojson_58.json')
lyr = ds.CreateLayer('foo')
ds = None
ds = ogr.Open('/vsimem/ogr_geojson_58.json')
assert ds.GetLayerByName('foo') is not None, 'Missing layer called foo'
ds = None
gdal.Unlink('/vsimem/ogr_geojson_58.json')
###############################################################################
# Test using the description member of FeatureCollection
def test_ogr_geojson_59():
if gdaltest.geojson_drv is None:
pytest.skip()
ds = ogr.Open('{ "type": "FeatureCollection", "description": "my_description", "features": []}')
assert ds is not None, 'Failed to open datasource'
lyr = ds.GetLayer(0)
assert lyr.GetMetadataItem('DESCRIPTION') == 'my_description', \
'Did not get DESCRIPTION'
ds = None
ds = ogr.GetDriverByName('GeoJSON').CreateDataSource('/vsimem/ogr_geojson_59.json')
lyr = ds.CreateLayer('foo', options=['DESCRIPTION=my desc'])
ds = None
ds = ogr.Open('/vsimem/ogr_geojson_59.json')
lyr = ds.GetLayerByName('foo')
assert lyr.GetMetadataItem('DESCRIPTION') == 'my desc', 'Did not get DESCRIPTION'
ds = None
gdal.Unlink('/vsimem/ogr_geojson_59.json')
###############################################################################
# Test null vs unset field
def test_ogr_geojson_60():
if gdaltest.geojson_drv is None:
pytest.skip()
ds = gdal.OpenEx("""{ "type": "FeatureCollection", "features": [
{ "type": "Feature", "properties" : { "foo" : "bar" } },
{ "type": "Feature", "properties" : { "foo": null } },
{ "type": "Feature", "properties" : { } } ] }""")
lyr = ds.GetLayer(0)
f = lyr.GetNextFeature()
if f['foo'] != 'bar':
f.DumpReadable()
pytest.fail()
f = lyr.GetNextFeature()
if not f.IsFieldNull('foo'):
f.DumpReadable()
pytest.fail()
f = lyr.GetNextFeature()
if f.IsFieldSet('foo'):
f.DumpReadable()
pytest.fail()
# Test writing side
gdal.VectorTranslate('/vsimem/ogr_geojson_60.json', ds, format='GeoJSON')
fp = gdal.VSIFOpenL('/vsimem/ogr_geojson_60.json', 'rb')
data = gdal.VSIFReadL(1, 10000, fp).decode('ascii')
gdal.VSIFCloseL(fp)
gdal.Unlink('/vsimem/ogr_geojson_60.json')
assert ('"properties": { "foo": "bar" }' in data and \
'"properties": { "foo": null }' in data and \
'"properties": { }' in data)
###############################################################################
# Test corner cases
def test_ogr_geojson_61():
    # Invalid JSON
gdal.FileFromMemBuffer('/vsimem/ogr_geojson_61.json',
"""{ "type": "FeatureCollection", "features": [""")
with gdaltest.error_handler():
ds = gdal.OpenEx('/vsimem/ogr_geojson_61.json')
assert ds is None
gdal.Unlink('/vsimem/ogr_geojson_61.json')
# Invalid single geometry
with gdaltest.error_handler():
ds = gdal.OpenEx("""{ "type": "Point", "x" : { "coordinates" : null } } """)
assert ds is None
# Empty property name
gdal.FileFromMemBuffer('/vsimem/ogr_geojson_61.json',
"""{ "type": "FeatureCollection", "features": [ { "type": "Feature", "properties": {"": 1}, "geometry": null }] }""")
ds = gdal.OpenEx('/vsimem/ogr_geojson_61.json')
lyr = ds.GetLayer(0)
f = lyr.GetNextFeature()
assert f.GetField("") == 1
ds = None
gdal.Unlink('/vsimem/ogr_geojson_61.json')
###############################################################################
# Test crs object
def test_ogr_geojson_62():
# crs type=name tests
gdal.OpenEx("""{ "type": "FeatureCollection", "crs": { "type":"name" }, "features":[] }""")
gdal.OpenEx("""{ "type": "FeatureCollection", "crs": { "type":"name", "properties":null }, "features":[] }""")
gdal.OpenEx("""{ "type": "FeatureCollection", "crs": { "type":"name", "properties":1 }, "features":[] }""")
gdal.OpenEx("""{ "type": "FeatureCollection", "crs": { "type":"name", "properties":{"name":null} }, "features":[] }""")
gdal.OpenEx("""{ "type": "FeatureCollection", "crs": { "type":"name", "properties":{"name":1} }, "features":[] }""")
gdal.OpenEx("""{ "type": "FeatureCollection", "crs": { "type":"name", "properties":{"name":"x"} }, "features":[] }""")
ds = gdal.OpenEx("""{ "type": "FeatureCollection", "crs": { "type":"name", "properties":{"name": "urn:ogc:def:crs:EPSG::32631"} }, "features":[] }""")
lyr = ds.GetLayer(0)
srs = lyr.GetSpatialRef()
assert srs.ExportToWkt().find('32631') >= 0
# crs type=EPSG (not even documented in GJ2008 spec!) tests. Just for coverage completeness
gdal.OpenEx("""{ "type": "FeatureCollection", "crs": { "type":"EPSG" }, "features":[] }""")
gdal.OpenEx("""{ "type": "FeatureCollection", "crs": { "type":"EPSG", "properties":null }, "features":[] }""")
gdal.OpenEx("""{ "type": "FeatureCollection", "crs": { "type":"EPSG", "properties":1 }, "features":[] }""")
gdal.OpenEx("""{ "type": "FeatureCollection", "crs": { "type":"EPSG", "properties":{"code":null} }, "features":[] }""")
with gdaltest.error_handler():
gdal.OpenEx("""{ "type": "FeatureCollection", "crs": { "type":"EPSG", "properties":{"code":1} }, "features":[] }""")
with gdaltest.error_handler():
gdal.OpenEx("""{ "type": "FeatureCollection", "crs": { "type":"EPSG", "properties":{"code":"x"} }, "features":[] }""")
ds = gdal.OpenEx("""{ "type": "FeatureCollection", "crs": { "type":"EPSG", "properties":{"code": 32631} }, "features":[] }""")
lyr = ds.GetLayer(0)
srs = lyr.GetSpatialRef()
assert srs.ExportToWkt().find('32631') >= 0
# crs type=link tests
gdal.OpenEx("""{ "type": "FeatureCollection", "crs": { "type":"link" }, "features":[] }""")
gdal.OpenEx("""{ "type": "FeatureCollection", "crs": { "type":"link", "properties":null }, "features":[] }""")
gdal.OpenEx("""{ "type": "FeatureCollection", "crs": { "type":"link", "properties":1 }, "features":[] }""")
gdal.OpenEx("""{ "type": "FeatureCollection", "crs": { "type":"link", "properties":{"href":null} }, "features":[] }""")
with gdaltest.error_handler():
gdal.OpenEx("""{ "type": "FeatureCollection", "crs": { "type":"link", "properties":{"href":1} }, "features":[] }""")
with gdaltest.error_handler():
gdal.OpenEx("""{ "type": "FeatureCollection", "crs": { "type":"link", "properties":{"href": "1"} }, "features":[] }""")
# crs type=OGC (not even documented in GJ2008 spec!) tests. Just for coverage completeness
gdal.OpenEx("""{ "type": "FeatureCollection", "crs": { "type":"OGC" }, "features":[] }""")
gdal.OpenEx("""{ "type": "FeatureCollection", "crs": { "type":"OGC", "properties":null }, "features":[] }""")
gdal.OpenEx("""{ "type": "FeatureCollection", "crs": { "type":"OGC", "properties":1 }, "features":[] }""")
gdal.OpenEx("""{ "type": "FeatureCollection", "crs": { "type":"OGC", "properties":{"urn":null} }, "features":[] }""")
with gdaltest.error_handler():
gdal.OpenEx("""{ "type": "FeatureCollection", "crs": { "type":"OGC", "properties":{"urn":1} }, "features":[] }""")
with gdaltest.error_handler():
gdal.OpenEx("""{ "type": "FeatureCollection", "crs": { "type":"OGC", "properties":{"urn":"x"} }, "features":[] }""")
ds = gdal.OpenEx("""{ "type": "FeatureCollection", "crs": { "type":"OGC", "properties":{"urn": "urn:ogc:def:crs:EPSG::32631"} }, "features":[] }""")
lyr = ds.GetLayer(0)
srs = lyr.GetSpatialRef()
assert srs.ExportToWkt().find('32631') >= 0
###############################################################################
# Extensive test of field type promotion
def test_ogr_geojson_63():
ds_ref = ogr.Open('data/test_type_promotion_ref.json')
lyr_ref = ds_ref.GetLayer(0)
ds = ogr.Open('data/test_type_promotion.json')
lyr = ds.GetLayer(0)
return ogrtest.compare_layers(lyr, lyr_ref)
###############################################################################
# Test exporting XYM / XYZM (#6935)
def test_ogr_geojson_64():
g = ogr.CreateGeometryFromWkt('POINT ZM(1 2 3 4)')
assert (ogrtest.check_feature_geometry(ogr.CreateGeometryFromJson(g.ExportToJson()),
ogr.CreateGeometryFromWkt('POINT Z(1 2 3)')) == 0)
g = ogr.CreateGeometryFromWkt('POINT M(1 2 3)')
assert (ogrtest.check_feature_geometry(ogr.CreateGeometryFromJson(g.ExportToJson()),
ogr.CreateGeometryFromWkt('POINT (1 2)')) == 0)
g = ogr.CreateGeometryFromWkt('LINESTRING ZM(1 2 3 4,5 6 7 8)')
assert (ogrtest.check_feature_geometry(ogr.CreateGeometryFromJson(g.ExportToJson()),
ogr.CreateGeometryFromWkt('LINESTRING Z(1 2 3,5 6 7)')) == 0)
g = ogr.CreateGeometryFromWkt('LINESTRING M(1 2 3,4 5 6)')
assert (ogrtest.check_feature_geometry(ogr.CreateGeometryFromJson(g.ExportToJson()),
ogr.CreateGeometryFromWkt('LINESTRING (1 2,4 5)')) == 0)
###############################################################################
# Test feature geometry CRS when CRS set on the FeatureCollection
# See https://github.com/r-spatial/sf/issues/449#issuecomment-319369945
def test_ogr_geojson_65():
ds = ogr.Open("""{
"type": "FeatureCollection",
"crs": { "type": "name", "properties": { "name": "urn:ogc:def:crs:EPSG::32631" } },
"features": [{
"type": "Feature",
"geometry": {
"type": "Point",
"coordinates": [500000,4500000]},
"properties": {
}}]}""")
lyr = ds.GetLayer(0)
f = lyr.GetNextFeature()
srs = f.GetGeometryRef().GetSpatialReference()
pcs = int(srs.GetAuthorityCode('PROJCS'))
assert pcs == 32631, 'Spatial reference for individual geometry was not valid'
###############################################################################
# Test features with properties not being a dictionary
def test_ogr_geojson_66():
ds = ogr.Open("""{
"type": "FeatureCollection",
"features": [
{
"type": "Feature",
"geometry": null,
"properties": null
},
{
"type": "Feature",
"geometry": null,
"properties": []
}
]}""")
lyr = ds.GetLayer(0)
assert lyr.GetLayerDefn().GetFieldCount() == 0
###############################################################################
# Test reading GeoJSON files starting with {"features":[{"geometry":.... (#7198)
def test_ogr_geojson_67():
ds = ogr.Open('data/grenada.geojson')
assert ds is not None
assert ds.GetDriver().GetName() == 'GeoJSON'
lyr = ds.GetLayer(0)
assert lyr.GetFeatureCount() == 1
###############################################################################
# Test reading ESRIJSON files starting with {"features":[{"geometry":.... (#7198)
def test_ogr_geojson_68():
ds = ogr.Open('data/esrijsonstartingwithfeaturesgeometry.json')
assert ds is not None
assert ds.GetDriver().GetName() == 'ESRIJSON'
lyr = ds.GetLayer(0)
assert lyr.GetFeatureCount() == 1
###############################################################################
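# Test the ID_FIELD and ID_TYPE layer creation options: ID_FIELD promotes a
# regular attribute to the "id" member, and ID_TYPE forces the id to be written
# as String or Integer.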
def test_ogr_geojson_id_field_and_id_type():
gdal.VectorTranslate('/vsimem/out.json', 'data/poly.shp', options='-f GeoJSON -lco ID_TYPE=String -preserve_fid -limit 1 -fid 2')
got = read_file('/vsimem/out.json')
assert '"id": "2", "properties": { "AREA": 261752.781, "EAS_ID": 171, "PRFEDEA": "35043414" }' in got
gdal.VectorTranslate('/vsimem/out.json', 'data/poly.shp', options='-f GeoJSON -lco ID_TYPE=Integer -preserve_fid -limit 1 -fid 2')
got = read_file('/vsimem/out.json')
assert '"id": 2, "properties": { "AREA": 261752.781, "EAS_ID": 171, "PRFEDEA": "35043414" }' in got
gdal.VectorTranslate('/vsimem/out.json', 'data/poly.shp', format='GeoJSON', layerCreationOptions=['ID_FIELD=EAS_ID'], limit=1)
got = read_file('/vsimem/out.json')
assert '"id": 168, "properties": { "AREA": 215229.266, "PRFEDEA": "35043411" }' in got
src_ds = gdal.OpenEx('/vsimem/out.json', open_options=['NATIVE_DATA=YES'])
gdal.VectorTranslate('/vsimem/out2.json', src_ds, format='GeoJSON')
src_ds = None
got = read_file('/vsimem/out2.json')
gdal.Unlink('/vsimem/out2.json')
assert '"id": 168, "properties": { "AREA": 215229.266, "PRFEDEA": "35043411" }' in got
src_ds = gdal.OpenEx('/vsimem/out.json', open_options=['NATIVE_DATA=YES'])
gdal.VectorTranslate('/vsimem/out2.json', src_ds, format='GeoJSON', layerCreationOptions=['ID_TYPE=String'])
src_ds = None
got = read_file('/vsimem/out2.json')
gdal.Unlink('/vsimem/out2.json')
assert '"id": "168", "properties": { "AREA": 215229.266, "PRFEDEA": "35043411" }' in got
src_ds = gdal.OpenEx('/vsimem/out.json', open_options=['NATIVE_DATA=YES'])
gdal.VectorTranslate('/vsimem/out2.json', src_ds, format='GeoJSON', layerCreationOptions=['ID_TYPE=Integer'])
src_ds = None
got = read_file('/vsimem/out2.json')
gdal.Unlink('/vsimem/out2.json')
assert '"id": 168, "properties": { "AREA": 215229.266, "PRFEDEA": "35043411" }' in got
gdal.Unlink('/vsimem/out.json')
gdal.VectorTranslate('/vsimem/out.json', 'data/poly.shp', format='GeoJSON', layerCreationOptions=['ID_FIELD=EAS_ID', 'ID_TYPE=String'], limit=1)
got = read_file('/vsimem/out.json')
assert '"id": "168", "properties": { "AREA": 215229.266, "PRFEDEA": "35043411" }' in got
src_ds = gdal.OpenEx('/vsimem/out.json', open_options=['NATIVE_DATA=YES'])
gdal.VectorTranslate('/vsimem/out2.json', src_ds, format='GeoJSON')
src_ds = None
got = read_file('/vsimem/out2.json')
gdal.Unlink('/vsimem/out2.json')
assert '"id": "168", "properties": { "AREA": 215229.266, "PRFEDEA": "35043411" }' in got
src_ds = gdal.OpenEx('/vsimem/out.json', open_options=['NATIVE_DATA=YES'])
gdal.VectorTranslate('/vsimem/out2.json', src_ds, format='GeoJSON', layerCreationOptions=['ID_TYPE=String'])
src_ds = None
got = read_file('/vsimem/out2.json')
gdal.Unlink('/vsimem/out2.json')
assert '"id": "168", "properties": { "AREA": 215229.266, "PRFEDEA": "35043411" }' in got
src_ds = gdal.OpenEx('/vsimem/out.json', open_options=['NATIVE_DATA=YES'])
gdal.VectorTranslate('/vsimem/out2.json', src_ds, format='GeoJSON', layerCreationOptions=['ID_TYPE=Integer'])
src_ds = None
got = read_file('/vsimem/out2.json')
gdal.Unlink('/vsimem/out2.json')
assert '"id": 168, "properties": { "AREA": 215229.266, "PRFEDEA": "35043411" }' in got
gdal.Unlink('/vsimem/out.json')
gdal.VectorTranslate('/vsimem/out.json', 'data/poly.shp', format='GeoJSON', layerCreationOptions=['ID_FIELD=PRFEDEA'], limit=1)
got = read_file('/vsimem/out.json')
gdal.Unlink('/vsimem/out.json')
assert '"id": "35043411", "properties": { "AREA": 215229.266, "EAS_ID": 168 }' in got
gdal.VectorTranslate('/vsimem/out.json', 'data/poly.shp', format='GeoJSON', layerCreationOptions=['ID_FIELD=PRFEDEA', 'ID_TYPE=Integer'], limit=1)
got = read_file('/vsimem/out.json')
gdal.Unlink('/vsimem/out.json')
assert '"id": 35043411, "properties": { "AREA": 215229.266, "EAS_ID": 168 }' in got
###############################################################################
def test_ogr_geojson_geom_export_failure():
g = ogr.CreateGeometryFromWkt('POINT EMPTY')
geojson = g.ExportToJson()
assert geojson is None
g = ogr.CreateGeometryFromWkt('GEOMETRYCOLLECTION(TIN EMPTY)')
geojson = json.loads(g.ExportToJson())
assert geojson == {"type": "GeometryCollection", "geometries": None}
g = ogr.Geometry(ogr.wkbLineString)
g.AddPoint_2D(float('nan'), 0)
with gdaltest.error_handler():
geojson = g.ExportToJson()
assert geojson is None
g = ogr.Geometry(ogr.wkbPolygon)
lr = ogr.Geometry(ogr.wkbLinearRing)
lr.AddPoint_2D(0, 0)
lr.AddPoint_2D(0, 1)
lr.AddPoint_2D(1, 1)
lr.AddPoint_2D(0, 0)
g.AddGeometry(lr)
lr = ogr.Geometry(ogr.wkbLinearRing)
lr.AddPoint_2D(0, 0)
lr.AddPoint_2D(float('nan'), 1)
lr.AddPoint_2D(1, 1)
lr.AddPoint_2D(0, 0)
g.AddGeometry(lr)
with gdaltest.error_handler():
geojson = g.ExportToJson()
assert geojson is None
###############################################################################
def test_ogr_geojson_starting_with_crs():
ds = ogr.Open("""{
"crs": { "type": "name", "properties": { "name": "urn:ogc:def:crs:EPSG::32631" } },
"type": "FeatureCollection",
"features": [{
"type": "Feature",
"geometry": {
"type": "Point",
"coordinates": [500000,4500000]},
"properties": {
}}]}""")
assert ds is not None
###############################################################################
# Test we properly flush the file in SyncToDisk() in append situations
def test_ogr_geojson_append_flush():
tmpfilename = 'tmp/ogr_geojson_append_flush.json'
f = gdal.VSIFOpenL(tmpfilename, 'wb')
content = """{
"type": "FeatureCollection",
"features": [
{ "type": "Feature", "properties": { "x": 1, "y": 2, "z": 3, "w": 4 }, "geometry": { "type": "Point", "coordinates": [ 0, 0 ] } } ] }"""
gdal.VSIFWriteL(content, 1, len(content), f)
gdal.VSIFCloseL(f)
ds = ogr.Open(tmpfilename, update=1)
lyr = ds.GetLayer(0)
f = ogr.Feature(lyr.GetLayerDefn())
f['x'] = 10
lyr.CreateFeature(f)
lyr.SyncToDisk()
ds2 = ogr.Open(tmpfilename, update=1)
lyr = ds2.GetLayer(0)
lyr.GetNextFeature()
f = lyr.GetNextFeature()
assert f is not None and f['x'] == 10
ds = None
ds2 = None
gdal.Unlink(tmpfilename)
###############################################################################
def test_ogr_geojson_empty_geometrycollection():
g = ogr.CreateGeometryFromJson('{"type": "GeometryCollection", "geometries": []}')
assert g.ExportToWkt() == 'GEOMETRYCOLLECTION EMPTY'
###############################################################################
def test_ogr_esrijson_without_geometryType():
ds = ogr.Open("""{
"spatialReference" : {
"wkid" : 4326
},
"fields": [],
"fieldAliases": {},
"features" : [
{
"geometry" : {
"x" : 2,
"y" : 49
},
"attributes" : {
}
}
]
}
""")
lyr = ds.GetLayer(0)
f = lyr.GetNextFeature()
assert f.GetGeometryRef() is not None
###############################################################################
def test_ogr_geojson_read_fields_with_different_case():
ds = ogr.Open("""{
"type": "FeatureCollection",
"features": [
{ "type": "Feature", "id": "my_id", "geometry": null, "properties":
{ "ID": "MY_ID", "x": "foo", "X": "FOO"} }
]}""")
lyr = ds.GetLayer(0)
f = lyr.GetNextFeature()
if f.GetField(0) != 'my_id' or f.GetField(1) != 'MY_ID' or f.GetField(2) != 'foo' or f.GetField(3) != 'FOO':
f.DumpReadable()
pytest.fail()
###############################################################################
# Test bugfix for https://github.com/OSGeo/gdal/issues/1068
def test_ogr_geojson_clip_geometries_rfc7946():
if not ogrtest.have_geos():
pytest.skip()
tmpfilename = '/vsimem/out.json'
gdal.VectorTranslate(tmpfilename, """{
"type": "FeatureCollection",
"features": [
{ "type": "Feature", "geometry": {"type":"Polygon","coordinates":[[[-220,-20],[-220,30],[16,30],[16,-20],[-220,-20]]]} },
{ "type": "Feature", "geometry": {"type":"Polygon","coordinates":[[[220,40],[220,70],[-16,70],[-16,40],[220,40]]]} },
{ "type": "Feature", "geometry": {"type":"Polygon","coordinates":[[[170,-40],[170,-70],[-16,70],[-16,-40],[170,-40]]]} }
]
}""", options='-f GeoJSON -lco RFC7946=YES')
ds = ogr.Open(tmpfilename)
lyr = ds.GetLayer(0)
f = lyr.GetNextFeature()
ref_geom = ogr.CreateGeometryFromWkt('MULTIPOLYGON (((-180 30,-180 -20,16 -20,16 30,-180 30)),((140 -20,180 -20,180 30,140 30,140 -20)))')
if ogrtest.check_feature_geometry(f, ref_geom) != 0:
f.DumpReadable()
pytest.fail()
f = lyr.GetNextFeature()
ref_geom = ogr.CreateGeometryFromWkt('MULTIPOLYGON (((180 40,180 70,-16 70,-16 40,180 40)),((-180 70,-180 40,-140 40,-140 70,-180 70)))')
if ogrtest.check_feature_geometry(f, ref_geom) != 0:
f.DumpReadable()
pytest.fail()
f = lyr.GetNextFeature()
ref_geom = ogr.CreateGeometryFromWkt('POLYGON ((170 -40,-16 -40,-16 70,170 -70,170 -40))')
if ogrtest.check_feature_geometry(f, ref_geom) != 0:
f.DumpReadable()
pytest.fail()
ds = None
gdal.Unlink(tmpfilename)
###############################################################################
# Test bugfix for https://github.com/OSGeo/gdal/issues/1109
def test_ogr_geojson_non_finite():
json_content = """{
"type": "FeatureCollection",
"features": [
{ "type": "Feature", "properties": { "inf_prop": infinity, "minus_inf_prop": -infinity, "nan_prop": nan }, "geometry": null }
]
}"""
with gdaltest.error_handler():
ds = ogr.Open(json_content)
if ds is None:
# Might fail with older libjson-c versions
pytest.skip()
lyr = ds.GetLayer(0)
f = lyr.GetNextFeature()
for i in range(3):
assert lyr.GetLayerDefn().GetFieldDefn(i).GetType() == ogr.OFTReal
if f['inf_prop'] != float('inf'):
f.DumpReadable()
pytest.fail()
if f['minus_inf_prop'] != float('-inf'):
f.DumpReadable()
pytest.fail()
if not math.isnan(f['nan_prop']):
f.DumpReadable()
pytest.fail(str(f['nan_prop']))
ds = None
tmpfilename = '/vsimem/out.json'
with gdaltest.error_handler():
gdal.VectorTranslate(tmpfilename, json_content, options='-f GeoJSON')
ds = ogr.Open(tmpfilename)
lyr = ds.GetLayer(0)
assert lyr.GetLayerDefn().GetFieldCount() == 0
ds = None
gdal.VectorTranslate(tmpfilename, json_content, options='-f GeoJSON -lco WRITE_NON_FINITE_VALUES=YES')
ds = ogr.Open(tmpfilename)
lyr = ds.GetLayer(0)
assert lyr.GetLayerDefn().GetFieldCount() == 3
f = lyr.GetNextFeature()
if f['inf_prop'] != float('inf'):
f.DumpReadable()
pytest.fail()
if f['minus_inf_prop'] != float('-inf'):
f.DumpReadable()
pytest.fail()
if not math.isnan(f['nan_prop']):
f.DumpReadable()
pytest.fail(str(f['nan_prop']))
ds = None
gdal.Unlink(tmpfilename)
###############################################################################
def test_ogr_geojson_random_reading_with_id():
json_content = """{
"type": "FeatureCollection",
"features": [
{ "type": "Feature", "id": 1, "properties": { "a": "a" }, "geometry": null },
{ "type": "Feature", "id": 2, "properties": { "a": "bc" }, "geometry": null }
]
}"""
tmpfilename = '/vsimem/temp.json'
gdal.FileFromMemBuffer(tmpfilename, json_content)
ds = ogr.Open(tmpfilename)
lyr = ds.GetLayer(0)
f1_ref = lyr.GetNextFeature()
f2_ref = lyr.GetNextFeature()
f1 = lyr.GetFeature(1)
f2 = lyr.GetFeature(2)
assert f1.Equal(f1_ref)
assert f2.Equal(f2_ref)
assert not lyr.GetFeature(3)
ds = None
gdal.Unlink(tmpfilename)
###############################################################################
def test_ogr_geojson_random_reading_without_id():
json_content = """{
"type": "FeatureCollection",
"features": [
{ "type": "Feature", "properties": { "a": "a" }, "geometry": null },
{ "type": "Feature", "properties": { "a": "bc" }, "geometry": null }
]
}"""
tmpfilename = '/vsimem/temp.json'
gdal.FileFromMemBuffer(tmpfilename, json_content)
ds = ogr.Open(tmpfilename)
lyr = ds.GetLayer(0)
f1_ref = lyr.GetNextFeature()
f2_ref = lyr.GetNextFeature()
f1 = lyr.GetFeature(0)
f2 = lyr.GetFeature(1)
assert f1.Equal(f1_ref)
assert f2.Equal(f2_ref)
assert not lyr.GetFeature(2)
ds = None
gdal.Unlink(tmpfilename)
###############################################################################
def test_ogr_geojson_single_feature_random_reading_with_id():
json_content = """
{ "type": "Feature", "id": 1, "properties": { "a": "a" }, "geometry": null }
}"""
tmpfilename = '/vsimem/temp.json'
gdal.FileFromMemBuffer(tmpfilename, json_content)
ds = ogr.Open(tmpfilename)
lyr = ds.GetLayer(0)
f1_ref = lyr.GetNextFeature()
f1 = lyr.GetFeature(1)
assert f1.Equal(f1_ref)
ds = None
gdal.Unlink(tmpfilename)
###############################################################################
def test_ogr_geojson_cleanup():
gdal.SetConfigOption('CPL_CURL_ENABLE_VSIMEM', None)
try:
if gdaltest.tests is not None:
gdal.PushErrorHandler('CPLQuietErrorHandler')
for i in range(len(gdaltest.tests)):
fname = os.path.join('tmp', gdaltest.tests[i][0] + '.geojson')
ogr.GetDriverByName('GeoJSON').DeleteDataSource(fname)
fname = os.path.join('tmp', gdaltest.tests[i][0] + '.geojson.gz')
gdal.Unlink(fname)
fname = os.path.join('tmp', gdaltest.tests[i][0] + '.geojson.gz.properties')
gdal.Unlink(fname)
gdal.PopErrorHandler()
gdaltest.tests = None
except:
pass
try:
os.remove('tmp/out_ogr_geojson_14.geojson')
except OSError:
pass
for f in gdal.ReadDir('/vsimem/geojson'):
gdal.Unlink('/vsimem/geojson/' + f)
| 35.653473
| 450
| 0.585129
|
b6b7eb9186d818cb0c80bd98cdfd75ed91c96f42
| 402
|
py
|
Python
|
leetcode/june-challenge/Day25-Find_the_Duplicate_Number.py
|
youngchaena/Problem-Solving
|
93995f96954e7e2fba3ba383c7bd43912e19e07d
|
[
"MIT"
] | null | null | null |
leetcode/june-challenge/Day25-Find_the_Duplicate_Number.py
|
youngchaena/Problem-Solving
|
93995f96954e7e2fba3ba383c7bd43912e19e07d
|
[
"MIT"
] | null | null | null |
leetcode/june-challenge/Day25-Find_the_Duplicate_Number.py
|
youngchaena/Problem-Solving
|
93995f96954e7e2fba3ba383c7bd43912e19e07d
|
[
"MIT"
] | null | null | null |
# Day 25 - Find the Duplicate Number
# Floyd's tortoise-and-hare cycle detection: treat nums as a linked list where
# index i points to nums[i]; the duplicate value is the entrance of the cycle.
from typing import List
class Solution:
    def findDuplicate(self, nums: List[int]) -> int:
        slow = fast = finder = 0
        while True:
            # Phase 1: slow advances one step, fast advances two, until they
            # meet somewhere inside the cycle.
            slow = nums[slow]
            fast = nums[nums[fast]]
            if slow == fast:
                # Phase 2: walk a second pointer from the start; it meets the
                # slow pointer exactly at the cycle entrance, i.e. the duplicate.
                while finder != slow:
                    finder = nums[finder]
                    slow = nums[slow]
                return finder
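# Illustrative usage sketch (not part of the original solution); the sample
# input below is made up for demonstration only.
def _example_find_duplicate() -> int:
    nums = [1, 3, 4, 2, 2]          # exactly one value (2) appears twice
    return Solution().findDuplicate(nums)  # -> 2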
| 30.923077
| 52
| 0.475124
|
de7973fa45dd18150b317f1ebdea474989ffb553
| 6,077
|
py
|
Python
|
pysnmp/SYSLOG-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 11
|
2021-02-02T16:27:16.000Z
|
2021-08-31T06:22:49.000Z
|
pysnmp/SYSLOG-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 75
|
2021-02-24T17:30:31.000Z
|
2021-12-08T00:01:18.000Z
|
pysnmp/SYSLOG-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 10
|
2019-04-30T05:51:36.000Z
|
2022-02-16T03:33:41.000Z
|
#
# PySNMP MIB module SYSLOG-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/SYSLOG-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 21:06:54 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, ValueSizeConstraint, ConstraintsIntersection, SingleValueConstraint, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "ValueSizeConstraint", "ConstraintsIntersection", "SingleValueConstraint", "ValueRangeConstraint")
dlink_common_mgmt, = mibBuilder.importSymbols("DLINK-ID-REC-MIB", "dlink-common-mgmt")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
Counter64, Gauge32, Integer32, ObjectIdentity, ModuleIdentity, MibScalar, MibTable, MibTableRow, MibTableColumn, MibIdentifier, IpAddress, Bits, Counter32, iso, TimeTicks, NotificationType, Unsigned32 = mibBuilder.importSymbols("SNMPv2-SMI", "Counter64", "Gauge32", "Integer32", "ObjectIdentity", "ModuleIdentity", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "MibIdentifier", "IpAddress", "Bits", "Counter32", "iso", "TimeTicks", "NotificationType", "Unsigned32")
TextualConvention, RowStatus, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "RowStatus", "DisplayString")
swSysLogMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 171, 12, 12))
if mibBuilder.loadTexts: swSysLogMIB.setLastUpdated('0007150000Z')
if mibBuilder.loadTexts: swSysLogMIB.setOrganization(' ')
swSysLogCtrlState = MibScalar((1, 3, 6, 1, 4, 1, 171, 12, 12, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swSysLogCtrlState.setStatus('current')
swSysLogServerTable = MibTable((1, 3, 6, 1, 4, 1, 171, 12, 12, 2), )
if mibBuilder.loadTexts: swSysLogServerTable.setStatus('current')
swSysLogServerEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 12, 12, 2, 1), ).setIndexNames((0, "SYSLOG-MIB", "swSysLogServerIndex"))
if mibBuilder.loadTexts: swSysLogServerEntry.setStatus('current')
swSysLogServerIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 12, 12, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swSysLogServerIndex.setStatus('current')
swSysLogServerIPAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 12, 12, 2, 1, 2), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swSysLogServerIPAddress.setStatus('current')
swSysLogServerFacility = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 12, 12, 2, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("local0", 0), ("local1", 1), ("local2", 2), ("local3", 3), ("local4", 4), ("local5", 5), ("local6", 6), ("local7", 7)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swSysLogServerFacility.setStatus('current')
swSysLogServerSeverity = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 12, 12, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("all", 1), ("warning", 2), ("informational", 3)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swSysLogServerSeverity.setStatus('current')
swSysLogServerUDPPort = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 12, 12, 2, 1, 5), Integer32().clone(514)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swSysLogServerUDPPort.setStatus('current')
swSysLogServerState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 12, 12, 2, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swSysLogServerState.setStatus('current')
swSysLogServerRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 12, 12, 2, 1, 7), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swSysLogServerRowStatus.setStatus('current')
swLogSaveCtrl = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 12, 12, 3))
swLogSaveMethod = MibScalar((1, 3, 6, 1, 4, 1, 171, 12, 12, 3, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("time-interval", 1), ("on-demand", 2), ("log-trigger", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swLogSaveMethod.setStatus('current')
swLogSaveTimeInterval = MibScalar((1, 3, 6, 1, 4, 1, 171, 12, 12, 3, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swLogSaveTimeInterval.setStatus('current')
swSysLogCtrl = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 12, 12, 4))
swSysLogCtrlClearLog = MibScalar((1, 3, 6, 1, 4, 1, 171, 12, 12, 4, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("other", 1), ("start", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swSysLogCtrlClearLog.setStatus('current')
mibBuilder.exportSymbols("SYSLOG-MIB", swSysLogServerIndex=swSysLogServerIndex, swSysLogCtrl=swSysLogCtrl, swSysLogServerRowStatus=swSysLogServerRowStatus, swSysLogMIB=swSysLogMIB, swSysLogServerIPAddress=swSysLogServerIPAddress, swLogSaveTimeInterval=swLogSaveTimeInterval, PYSNMP_MODULE_ID=swSysLogMIB, swSysLogCtrlState=swSysLogCtrlState, swSysLogServerSeverity=swSysLogServerSeverity, swSysLogServerEntry=swSysLogServerEntry, swSysLogServerState=swSysLogServerState, swLogSaveMethod=swLogSaveMethod, swLogSaveCtrl=swLogSaveCtrl, swSysLogServerFacility=swSysLogServerFacility, swSysLogCtrlClearLog=swSysLogCtrlClearLog, swSysLogServerUDPPort=swSysLogServerUDPPort, swSysLogServerTable=swSysLogServerTable)
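# Illustrative sketch (not part of the generated module): one way this compiled
# MIB could be consumed through pysnmp's MIB builder. The directory path below
# is an assumption for demonstration only.
#
#   from pysnmp.smi import builder
#
#   mib_builder = builder.MibBuilder()
#   mib_builder.addMibSources(builder.DirMibSource('.'))  # folder containing SYSLOG-MIB.py
#   mib_builder.loadModules('SYSLOG-MIB')
#   (ctrl_state,) = mib_builder.importSymbols('SYSLOG-MIB', 'swSysLogCtrlState')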
| 129.297872
| 708
| 0.766332
|
451bc4291b23607b5954e5508057021389d01f38
| 10,512
|
py
|
Python
|
tests/csa_json_to_stix/test_class.py
|
krishna2345/stix-shifter
|
f1315cbfb25216db6e3348b0ef1220d920d29f2e
|
[
"Apache-2.0"
] | null | null | null |
tests/csa_json_to_stix/test_class.py
|
krishna2345/stix-shifter
|
f1315cbfb25216db6e3348b0ef1220d920d29f2e
|
[
"Apache-2.0"
] | null | null | null |
tests/csa_json_to_stix/test_class.py
|
krishna2345/stix-shifter
|
f1315cbfb25216db6e3348b0ef1220d920d29f2e
|
[
"Apache-2.0"
] | 2
|
2019-06-26T19:23:52.000Z
|
2019-07-09T15:33:16.000Z
|
from stix_shifter.stix_translation.src.json_to_stix import json_to_stix_translator
from stix_shifter.stix_translation.src.utils import transformers
from stix_shifter.stix_translation.src.modules.csa import csa_translator
import json
import unittest
from os import path
interface = csa_translator.Translator()
map_file = open(interface.mapping_filepath).read()
map_data = json.loads(map_file)
data_source = {
"type": "identity",
"id": "identity--3532c56d-ea72-48be-a2ad-1a53f4c9c6d3",
"name": "QRadar",
"identity_class": "events"
}
options = {}
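# Illustrative sketch (not part of the original tests): the objects above are
# everything convert_to_stix needs; a call mirrors the ones made in the tests
# below, e.g.
#
#   sample = [{"starttime": 1531169112, "eventcount": 5}]
#   bundle = json_to_stix_translator.convert_to_stix(
#       data_source, map_data, sample, transformers.get_all_transformers(), options)
#   # bundle['objects'][0] is the identity object, [1] the observed-data object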
class TestTransform(object):
@staticmethod
def get_first(itr, constraint):
return next(
(obj for obj in itr if constraint(obj)),
None
)
@staticmethod
def get_first_of_type(itr, typ):
return TestTransform.get_first(itr, lambda o: type(o) == dict and o.get('type') == typ)
def test_common_prop(self):
# transformer = None
data = {"starttime": 1531169112, "eventcount": 5}
result_bundle = json_to_stix_translator.convert_to_stix(
data_source, map_data, [data], transformers.get_all_transformers(), options)
assert(result_bundle['type'] == 'bundle')
result_bundle_objects = result_bundle['objects']
result_bundle_identity = result_bundle_objects[0]
assert(result_bundle_identity['type'] == data_source['type'])
assert(result_bundle_identity['id'] == data_source['id'])
assert(result_bundle_identity['name'] == data_source['name'])
assert(result_bundle_identity['identity_class']
== data_source['identity_class'])
observed_data = result_bundle_objects[1]
assert(observed_data['id'] is not None)
assert(observed_data['type'] == "observed-data")
assert(observed_data['created_by_ref'] == result_bundle_identity['id'])
assert(observed_data['number_observed'] == 5)
assert(observed_data['created'] is not None)
assert(observed_data['modified'] is not None)
assert(observed_data['first_observed'] is not None)
assert(observed_data['last_observed'] is not None)
def test_cybox_observables(self):
# transformer = None
payload = "SomeBase64Payload"
user_id = "someuserid2018"
url = "https://example.com"
domain = "example.com"
source_ip = "127.0.0.1"
destination_ip = "255.255.255.1"
data = {"sourceip": source_ip, "destinationip": destination_ip, "url": url,
"domain": domain, "payload": payload, "username": user_id, "protocol": 'TCP', "sourceport": 3000, "destinationport": 2000}
# data = {"Network": {"A" : source_ip}, "destinationip": destination_ip, "url": url,
# "domain": domain, "payload": payload, "username": user_id, "protocol": 'TCP', "sourceport": 3000, "destinationport": 2000}
result_bundle = json_to_stix_translator.convert_to_stix(
data_source, map_data, [data], transformers.get_all_transformers(), options)
assert(result_bundle['type'] == 'bundle')
result_bundle_objects = result_bundle['objects']
observed_data = result_bundle_objects[1]
assert('objects' in observed_data)
objects = observed_data['objects']
# Test that each data element is properly mapped and input into the STIX JSON
for key, value in objects.items():
assert(int(key) in list(range(0, len(objects))))
# Todo: handle case where there is both a source and destination ip, there will be more than one ipv4-addr
if(value['type'] == 'ipv4-addr'):
# assert(
# value['value'] == source_ip), "Wrong value returned " + key + ":" + str(value)
assert(True)
elif(value['type'] == 'url'):
assert(value['value'] == url), "Wrong value returned " + \
key + ":" + str(value)
elif(value['type'] == 'domain-name'):
assert(
value['value'] == domain), "Wrong value returned " + key + ":" + str(value)
elif(value['type'] == 'artifact'):
assert(
value['payload_bin'] == payload), "Wrong value returned " + key + ":" + str(value)
elif(value['type'] == 'user-account'):
assert(
value['user_id'] == user_id), "Wrong value returned " + key + ":" + str(value)
# Todo: should not be returned since the address passed in isn't ipv6, still needs to be fixed in logic
elif(value['type'] == 'ipv6-addr'):
# assert(
# value['value'] == source_ip), "Wrong value returned " + key + ":" + str(value)
assert(True)
elif(value['type'] == 'network-traffic'):
assert(int(value['src_ref']) in list(
range(0, len(objects)))), "Wrong value returned " + key + ":" + str(value)
assert(type(value['src_ref'])
is str), "Reference value should be a string"
assert(int(value['dst_ref']) in list(
range(0, len(objects)))), "Wrong value returned " + key + ":" + str(value)
assert(type(value['dst_ref'])
is str), "Reference value should be a string"
assert(value['protocols'] == ['tcp'])
assert(value['src_port'] == 3000)
assert(value['dst_port'] == 2000)
else:
assert(False), "Returned a non-mapped value " + \
key + ":" + str(value)
def test_custom_props(self):
transformer = None
data = {"logsourceid": 126, "qid": 55500004,
"identityip": "0.0.0.0", "magnitude": 4, "logsourcename": "someLogSourceName"}
result_bundle = json_to_stix_translator.convert_to_stix(
data_source, map_data, [data], transformers.get_all_transformers(), options)
observed_data = result_bundle['objects'][1]
assert('x_com_ibm_ariel' in observed_data)
custom_props = observed_data['x_com_ibm_ariel']
assert(custom_props['identity_ip'] == data['identityip'])
assert(custom_props['log_source_id'] == data['logsourceid'])
assert(custom_props['qid'] == data['qid'])
assert(custom_props['magnitude'] == data['magnitude'])
assert(custom_props['log_source_name'] == data['logsourcename'])
def test_at_props(self):
transformer = None
actio = "actioniam-identity.serviceid-apikey.login"
isoti = "2018-07-16T15:00+0000"
alcht = "alchtenida:560ea90982962784957b94135af14810"
tarid = "taridcrn:v1:bluemix:public:iam-identity::a/560ea90982962784957b94135af14810::apikey:ApiKey-b1ebb918c"
tarna = "auto-generated-apikey-4df4db90-06ef-4efa-9d3f-bc7ccf247326"
event = "d9b5a3a9-8fb8-4d6d-90cd-0a50f4842c6a"
messa = "message is a very long string"
activ = "activity"
sampl = "sample-response"
alcha = "alchaccid560ea90982962784957b"
initi = "iam-ServiceId-f48385a1"
initn = "ServiceId-f48385a1"
initc = "apikey"
data = {
# "initiator_name": initn,
# "initiator_id": initi,
"ALCH_ACCOUNT_ID": alcha,
"responseData": sampl,
"eventType": activ,
"message": messa,
"type": "ActivityTracker",
"event_uuid": event,
"tags": [],
# "target_name": tarna,
# "target_id": tarid,
"ALCH_TENANT_ID": alcht,
"logmet_cluster": "topic3-elasticsearch_3",
"@timestamp": "2018-07-16T15:00:03.062Z",
"typeURI": "http://schemas.dmtf.org/cloud/audit/1.0/event",
"@version": "1",
"eventTime": isoti,
"action": actio,
"requestData": "requestdata",
"outcome": "success"
}
result_bundle = json_to_stix_translator.convert_to_stix(
data_source, map_data, [data], transformers.get_all_transformers(), options)
        print(result_bundle)
observed_data = result_bundle['objects'][1]
assert(result_bundle['type'] == 'bundle')
result_bundle_objects = result_bundle['objects']
observed_data = result_bundle_objects[1]
assert('objects' in observed_data)
objects = observed_data['objects']
# Test that each data element is properly mapped and input into the STIX JSON
for key, value in objects.items():
assert(int(key) in list(range(0, len(objects))))
if(value['type'] == 'initiator_credential_type'):
assert(value['value'] == initc), "Wrong value returned " + key + ":" + str(value)
assert(True)
elif(value['type'] == 'initiator_name'):
assert(value['value'] == initn), "Wrong value returned " + \
key + ":" + str(value)
elif(value['type'] == 'initiator_id'):
assert(
value['value'] == initi), "Wrong value returned " + key + ":" + str(value)
elif(value['type'] == 'ALCH_ACCOUNT_ID'):
assert(
value['value'] == alcha), "Wrong value returned " + key + ":" + str(value)
elif(value['type'] == 'message'):
assert(
value['value'] == messa), "Wrong value returned " + key + ":" + str(value)
elif(value['type'] == 'event_uuid'):
assert(
value['value'] == event), "Wrong value returned " + key + ":" + str(value)
elif(value['type'] == 'ALCH_TENANT_ID'):
assert(
value['value'] == alcht), "Wrong value returned " + key + ":" + str(value)
elif(value['type'] == 'action'):
assert(
value['value'] == actio), "Wrong value returned " + key + ":" + str(value)
elif(value['type'] == 'eventTime'):
assert(
value['value'] == isoti), "Wrong value returned " + key + ":" + str(value)
else:
assert(False), "Returned a non-mapped value " + \
key + ":" + str(value)
| 46.513274
| 139
| 0.564688
|
a4d48f5a18b76e4b0585fb1165ca4a0d43a9bba2
| 60,921
|
py
|
Python
|
fortnitepy/http.py
|
Luc1412/fortnitepy
|
08825ede2226f02f4047cfed63d5f1e153a5bb62
|
[
"MIT"
] | 127
|
2019-07-15T15:55:30.000Z
|
2022-03-22T07:39:29.000Z
|
fortnitepy/http.py
|
Luc1412/fortnitepy
|
08825ede2226f02f4047cfed63d5f1e153a5bb62
|
[
"MIT"
] | 65
|
2019-07-15T22:48:35.000Z
|
2022-01-30T05:18:36.000Z
|
fortnitepy/http.py
|
Luc1412/fortnitepy
|
08825ede2226f02f4047cfed63d5f1e153a5bb62
|
[
"MIT"
] | 83
|
2019-07-18T12:37:58.000Z
|
2022-03-19T20:56:47.000Z
|
# -*- coding: utf-8 -*-
"""
MIT License
Copyright (c) 2019-2021 Terbau
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import aiohttp
import asyncio
import logging
import json
import re
import time
import functools
from typing import TYPE_CHECKING, List, Optional, Any, Union, Tuple
from urllib.parse import quote as urllibquote
from .utils import MaybeLock
from .errors import HTTPException
if TYPE_CHECKING:
from .client import Client
log = logging.getLogger(__name__)
GRAPHQL_HTML_ERROR_PATTERN = re.compile(
r'<title>((\d+).*)<\/title>',
re.MULTILINE
)
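# Illustrative note (not part of the original module): this pattern is meant to
# pull an HTTP status and message out of an HTML error page returned by the
# GraphQL endpoint. For a made-up title such as '<title>403 Forbidden</title>',
# group(1) would be '403 Forbidden' and group(2) would be '403'.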
def quote(string: str) -> str:
string = urllibquote(string)
string = string.replace('/', '%2F')
return string
class HTTPRetryConfig:
"""Config for how HTTPClient should handle retries.
.. warning::
Messing with these values could potentially make retries spammy.
Worst case scenario of this would be that either your ip or
your account could be limited due to high traffic. Change these
values with caution!
Parameters
----------
max_retry_attempts: :class:`int`
        The maximum number of retry attempts for a request. Defaults to ``5``.
.. note::
This is ignored when handling capacity throttling.
    max_wait_time: Optional[:class:`float`]
        The maximum number of seconds to wait for a request before
        raising the original exception. This works by keeping track of the
        total number of seconds waited for the request, regardless of the
        number of attempts. If ``None`` this is ignored. Defaults to ``65``.
handle_rate_limits: :class:`bool`
Whether or not the client should handle rate limit errors and wait
the received ``Retry-After`` before automatically retrying the request.
Defaults to ``True``.
.. note::
This option is only for throttling errors with a Retry-After value.
max_retry_after: :class:`float`
        The largest ``Retry-After`` value, in seconds, the client will handle. If a throttled
error with a higher Retry-After than this value is received, then
the original :exc:`HTTPException` is raised instead. *Only matters
when ``handle_rate_limits`` is ``True``*
other_requests_wait: :class:`bool`
Whether other requests to a rate limited endpoint should wait
for the rate limit to disappear before requesting. Defaults to
``True``. *Only matters when ``handle_rate_limits`` is ``True``*
handle_capacity_throttling: :class:`bool`
Whether or not the client should automatically handle capacity
throttling errors. These occur when the prod server you
are requesting from has no available capacity to process a
request and therefore returns with a throttle error
without a Retry-After. Defaults to ``True``.
backoff_start: :class:`float`
The initial seconds to wait for the exponential backoff. Defaults
to ``1``. *Only matters when ``handle_capacity_throttling`` is
``True``*
backoff_factor: :class:`float`
The multiplying factor used for the exponential backoff when a
request fails. Defaults to ``1.5``. *Only matters when
``handle_capacity_throttling`` is ``True``*
backoff_cap: :class:`float`
The cap for the exponential backoff to avoid having
unrealistically high wait times. Defaults to ``20``. *Only matters
when ``handle_capacity_throttling`` is ``True``*
"""
def __init__(self, **kwargs):
self.max_retry_attempts = kwargs.get('max_retry_attempts', 5)
self.max_wait_time = kwargs.get('max_wait_time', 65)
self.handle_rate_limits = kwargs.get('handle_rate_limits', True)
self.max_retry_after = kwargs.get('max_retry_after', 60)
self.other_requests_wait = kwargs.get('other_requests_wait', True)
self.handle_capacity_throttling = kwargs.get('handle_capacity_throttling', True) # noqa
self.backoff_start = kwargs.get('backoff_start', 1)
self.backoff_factor = kwargs.get('backoff_factor', 1.5)
self.backoff_cap = kwargs.get('backoff_cap', 20)
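# Illustrative sketch (not part of the original module): the values below are
# made up to show how the knobs interact. With backoff_start=0.5 and
# backoff_factor=2.0 the capacity-throttling waits grow as 1s, 2s, 4s, ... and
# are only applied while they stay under backoff_cap.
def _example_retry_config() -> HTTPRetryConfig:
    return HTTPRetryConfig(
        max_retry_attempts=3,     # give up after three attempts
        max_wait_time=30,         # never wait more than 30s in total per request
        handle_rate_limits=True,  # sleep through Retry-After rate limits
        max_retry_after=20,       # but only if Retry-After is at most 20s
        backoff_start=0.5,        # first capacity-throttle backoff
        backoff_factor=2.0,       # doubled on each further throttle
        backoff_cap=10,           # no capacity wait once the backoff exceeds 10s
    )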
class GraphQLRequest:
def __init__(self, query: str, *,
operation_name: str = None,
variables: dict = None
) -> None:
self.query = query
self.operation_name = operation_name
self.variables = variables
def _to_camel_case(self, text: str) -> str:
components = text.split('_')
return components[0] + ''.join(x.title() for x in components[1:])
def __iter__(self) -> str:
for key, value in self.__dict__.items():
if value is None:
continue
yield (self._to_camel_case(key), value)
def as_dict(self) -> dict:
return dict(self)
def as_multiple_payload(self) -> dict:
return {
'operationName': (self.operation_name
or self.get_operation_name_by_query()),
'variables': self.variables,
'query': self.query
}
def get_operation_name_by_query(self) -> str:
return re.search(r'(?:mutation|query) (\w+)', self.query).group(1)
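# Illustrative sketch (not part of the original module): the query string and
# variables are made up; only the payload shape matters here.
def _example_graphql_payload() -> dict:
    req = GraphQLRequest(
        'query ExampleQuery($id: String!) { Account { account(id: $id) { id } } }',
        variables={'id': 'some-account-id'},
    )
    # No explicit operation_name was given, so it is derived from the query
    # text itself ("ExampleQuery") by get_operation_name_by_query().
    return req.as_multiple_payload()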
class Route:
"""Represents a route to use for a http request. This should
be subclassed by new routes and the class attributes ``BASE`` and
optionally ``AUTH`` should be overridden.
.. warning::
Usually there is no reason to subclass and implement routes
yourself as most of them are already implemented. Take a look
at http.py if you're interested in knowing all of the predefined
routes.
Available authentication placeholders:
- `IOS_BASIC_TOKEN`
- `FORTNITE_BASIC_TOKEN`
- `IOS_ACCESS_TOKEN`
- `FORTNITE_ACCESS_TOKEN`
Example usage: ::
class SocialBanPublicService(fortnitepy.Route):
BASE = 'https://social-ban-public-service-prod.ol.epicgames.com'
AUTH = 'FORTNITE_ACCESS_TOKEN'
route = SocialBanPublicService(
'/socialban/api/public/v1/{user_id}',
user_id='c7af4984a77a498b83d8b16d475d76bc'
)
resp = await client.http.get(route)
# resp would look something like this:
# {
# "bans" : [],
# "warnings" : []
# }
Parameters
----------
    path: :class:`str`
        The path used for the request.
        .. warning::
            You should always use named
            formatting for arguments; instead of calling `.format()` on the
            path yourself, pass the format kwargs as kwargs to the route.
            This might seem counterintuitive but it is important to ensure
            rate limit retry reliability.
auth: Optional[:class:`str`]
The authentication to use for the request. If ``None`` the default
auth specified for the route is used.
**params: Any
The variables to format the path with passed alongside their name.
Attributes
----------
path: :class:`str`
The requests path.
params: Dict[:class:`str`, Any]
A mapping of the params passed.
base: :class:`str`
The base of the request url.
auth: Optional[:class:`str`]
The auth placeholder.
url: :class:`str`
The formatted url.
sanitized_url: :class:`str`
The yet to be formatted url.
"""
BASE = ''
AUTH = None
def __init__(self, path: str = '', *,
auth: str = None,
**params: Any) -> None:
self.path = path
self.params = {k: (quote(v) if isinstance(v, str) else v)
for k, v in params.items()}
if self.BASE == '':
raise ValueError('Route must have a base')
self.sanitized_url = url = self.BASE + self.path
self.url = url.format(**self.params) if self.params else url
if auth:
self.AUTH = auth
self.base = self.BASE
self.auth = self.AUTH
class EpicGamesGraphQL(Route):
BASE = 'https://graphql.epicgames.com/graphql'
AUTH = 'FORTNITE_ACCESS_TOKEN'
class EpicGames(Route):
BASE = 'https://www.epicgames.com'
AUTH = None
class PaymentWebsite(Route):
BASE = 'https://payment-website-pci.ol.epicgames.com'
AUTH = None
class LightswitchPublicService(Route):
BASE = 'https://lightswitch-public-service-prod06.ol.epicgames.com'
AUTH = 'IOS_ACCESS_TOKEN'
class UserSearchService(Route):
BASE = 'https://user-search-service-prod.ol.epicgames.com'
AUTH = 'FORTNITE_ACCESS_TOKEN'
class AccountPublicService(Route):
BASE = 'https://account-public-service-prod.ol.epicgames.com'
AUTH = 'FORTNITE_ACCESS_TOKEN'
class EulatrackingPublicService(Route):
BASE = 'https://eulatracking-public-service-prod-m.ol.epicgames.com'
AUTH = 'FORTNITE_ACCESS_TOKEN'
class AffiliatePublicService(Route):
BASE = 'https://affiliate-public-service-prod.ol.epicgames.com'
AUTH = 'FORTNITE_ACCESS_TOKEN'
class EventsPublicService(Route):
BASE = 'https://events-public-service-live.ol.epicgames.com'
AUTH = 'FORTNITE_ACCESS_TOKEN'
class FortniteContentWebsite(Route):
BASE = 'https://fortnitecontent-website-prod07.ol.epicgames.com'
AUTH = 'FORTNITE_ACCESS_TOKEN'
class FortnitePublicService(Route):
BASE = 'https://fortnite-public-service-prod11.ol.epicgames.com'
AUTH = 'FORTNITE_ACCESS_TOKEN'
class FriendsPublicService(Route):
BASE = 'https://friends-public-service-prod.ol.epicgames.com'
AUTH = 'FORTNITE_ACCESS_TOKEN'
class PartyService(Route):
BASE = 'https://party-service-prod.ol.epicgames.com'
AUTH = 'FORTNITE_ACCESS_TOKEN'
class PresencePublicService(Route):
BASE = 'https://presence-public-service-prod.ol.epicgames.com'
AUTH = 'FORTNITE_ACCESS_TOKEN'
class StatsproxyPublicService(Route):
BASE = 'https://statsproxy-public-service-live.ol.epicgames.com'
AUTH = 'FORTNITE_ACCESS_TOKEN'
def create_aiohttp_closed_event(session) -> asyncio.Event:
"""Work around aiohttp issue that doesn't properly close transports on exit.
See https://github.com/aio-libs/aiohttp/issues/1925#issuecomment-639080209
Returns:
An event that will be set once all transports have been properly closed.
"""
transports = 0
all_is_lost = asyncio.Event()
def connection_lost(exc, orig_lost):
nonlocal transports
try:
orig_lost(exc)
finally:
transports -= 1
if transports == 0:
all_is_lost.set()
def eof_received(orig_eof_received):
try:
orig_eof_received()
except AttributeError:
# It may happen that eof_received() is called after
# _app_protocol and _transport are set to None.
pass
for conn in session.connector._conns.values():
for handler, _ in conn:
proto = getattr(handler.transport, "_ssl_protocol", None)
if proto is None:
continue
transports += 1
orig_lost = proto.connection_lost
orig_eof_received = proto.eof_received
proto.connection_lost = functools.partial(
connection_lost, orig_lost=orig_lost
)
proto.eof_received = functools.partial(
eof_received, orig_eof_received=orig_eof_received
)
if transports == 0:
all_is_lost.set()
return all_is_lost
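# Illustrative note (not part of the original module): HTTPClient.close() below
# is the intended consumer of this helper -- it creates the event before
# closing the session and then waits (with a short timeout) for every SSL
# transport to report connection_lost before returning.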
class HTTPClient:
def __init__(self, client: 'Client', *,
connector: aiohttp.BaseConnector = None,
retry_config: Optional[HTTPRetryConfig] = None) -> None:
self.client = client
self.connector = connector
self.retry_config = retry_config or HTTPRetryConfig()
self._jar = aiohttp.CookieJar()
self.headers = {}
self.device_id = self.client.auth.device_id
self._endpoint_events = {}
# How many refreshes (max_refresh_attempts) to attempt in
# a time window (refresh_attempt_window) before closing.
self.max_refresh_attempts = 3
self.refresh_attempt_window = 20
self.create_connection()
@staticmethod
async def json_or_text(response: aiohttp.ClientResponse) -> Union[str,
dict]:
text = await response.text(encoding='utf-8')
if 'application/json' in response.headers.get('content-type', ''):
return json.loads(text)
return text
@property
def user_agent(self) -> str:
return 'Fortnite/{0.client.build} {0.client.os}'.format(self)
def get_auth(self, auth: str) -> str:
u_auth = auth.upper()
if u_auth == 'IOS_BASIC_TOKEN':
return 'basic {0}'.format(self.client.auth.ios_token)
elif u_auth == 'FORTNITE_BASIC_TOKEN':
return 'basic {0}'.format(self.client.auth.fortnite_token)
elif u_auth == 'IOS_ACCESS_TOKEN':
return self.client.auth.ios_authorization
elif u_auth == 'FORTNITE_ACCESS_TOKEN':
return self.client.auth.authorization
return auth
def add_header(self, key: str, val: Any) -> None:
self.headers[key] = val
def remove_header(self, key: str) -> Any:
return self.headers.pop(key)
async def close(self) -> None:
self._jar.clear()
if self.__session:
event = create_aiohttp_closed_event(self.__session)
await self.__session.close()
try:
await asyncio.wait_for(event.wait(), timeout=2)
except asyncio.TimeoutError:
pass
def create_connection(self) -> None:
self.__session = aiohttp.ClientSession(
connector=self.connector,
connector_owner=self.connector is None,
cookie_jar=self._jar
)
async def request(self, method: str, url: str,
**kwargs: Any
) -> Tuple[aiohttp.ClientResponse, Union[str, dict]]:
try:
params = kwargs['params']
if isinstance(params, dict):
kwargs['params'] = {k: (str(v).lower() if isinstance(v, bool)
else v) for k, v in params.items()}
else:
kwargs['params'] = [(k, (str(v).lower() if isinstance(v, bool)
else v)) for k, v in params]
except KeyError:
pass
pre_time = time.time()
async with self.__session.request(method, url, **kwargs) as r:
log.debug('{0} {1} has returned {2.status} in {3:.2f}s'.format(
method,
url,
r,
time.time() - pre_time
))
data = await self.json_or_text(r)
return r, data
async def _fn_request(self, method: str,
route: Union[Route, str],
auth: Optional[str] = None,
graphql: Optional[Union[Route, List[Route]]] = None,
**kwargs: Any) -> Any:
url = route.url if not isinstance(route, str) else route
headers = {**kwargs.get('headers', {}), **self.headers}
headers['User-Agent'] = self.user_agent
auth = auth or route.AUTH
if auth is not None:
headers['Authorization'] = self.get_auth(auth)
device_id = kwargs.pop('device_id', None)
if device_id is not None:
headers['X-Epic-Device-ID'] = (self.device_id if device_id is True
else device_id)
if graphql is not None:
is_multiple = isinstance(graphql, (list, tuple))
if not is_multiple:
graphql = (graphql,)
kwargs['json'] = [gql_query.as_multiple_payload()
for gql_query in graphql]
kwargs['headers'] = headers
try:
del kwargs['config']
except KeyError:
pass
raw = kwargs.pop('raw', False)
r, data = await self.request(method, url, **kwargs)
if raw:
return r
if graphql is not None:
if isinstance(data, str):
m = GRAPHQL_HTML_ERROR_PATTERN.search(data)
error_data = ({
'serviceResponse': '',
'message': 'Unknown reason' if m is None else m.group(1)
},)
if m is not None:
error_data[0]['serviceResponse'] = json.dumps({
'errorStatus': int(m.group(2))
})
elif isinstance(data, dict):
if data['status'] >= 400:
message = data['message']
error_data = ({
'serviceResponse': json.dumps({
'errorCode': message
}),
'message': message
},)
else:
error_data = None
for child_data in data:
if 'errors' in child_data:
error_data = child_data['errors']
break
if error_data is not None:
selected = error_data[0]
obj = {'errorMessage': selected['message']}
service_response = selected['serviceResponse']
if service_response == '':
error_payload = {}
else:
error_payload = json.loads(service_response)
if isinstance(error_payload, str):
m = GRAPHQL_HTML_ERROR_PATTERN.search(error_payload)
message = 'Unknown reason' if m is None else m.group(1)
error_payload = {
'errorMessage': message,
}
if m is not None:
error_payload['errorStatus'] = int(m.group(2))
raise HTTPException(
r,
route,
{**obj, **error_payload},
headers
)
def get_payload(d):
return next(iter(d['data'].values()))
if len(data) == 1:
return get_payload(data[0])
return [get_payload(d) for d in data]
if 'errorCode' in data or r.status >= 400:
if isinstance(data, str):
data = {
'errorMessage': data if data else 'Unknown {}'.format(
r.status
)
}
raise HTTPException(r, route, data, headers)
return data
def get_retry_after(self, exc):
retry_after = exc.response.headers.get('Retry-After')
if retry_after is not None:
return int(retry_after)
# For some reason graphql response headers doesn't contain
# rate limit headers so we have to get retry after from
# the message vars.
try:
return int(exc.message_vars[0])
except (ValueError, IndexError):
return None
async def fn_request(self, method: str,
route: Union[Route, str],
auth: Optional[str] = None,
graphql: Union[Route, List[Route]] = None,
priority: int = 0,
**kwargs: Any) -> Any:
if self.client.is_closed():
raise RuntimeError('Client is closed.')
cfg = self.retry_config
if isinstance(route, Route):
url = route.url
url_key = (method, route.sanitized_url)
else:
url = route
url_key = None
tries = 0
total_slept = 0
backoff = cfg.backoff_start
while True:
sleep_time = 0
tries += 1
endpoint_event = self._endpoint_events.get(url_key)
if endpoint_event is not None:
log.debug('Waiting for {0:.2f}s before requesting {1} {2}.'.format( # noqa
endpoint_event.ends_at - time.time(),
method,
url,
))
await endpoint_event.wait()
endpoint_event = None
lock = self.client._reauth_lock
if priority <= 0:
await lock.wait()
if lock.failed:
raise asyncio.CancelledError(
'Client is shutting down.'
)
try:
return await self._fn_request(
method,
route,
auth,
graphql,
**kwargs
)
except HTTPException as exc:
if self.client._closing:
raise
if tries >= cfg.max_retry_attempts:
raise
code = exc.message_code
if graphql:
gql_server_error = exc.raw.get('errorStatus') in {500, 502}
else:
gql_server_error = False
catch = (
'errors.com.epicgames.common.oauth.invalid_token',
'errors.com.epicgames.common.authentication.token_verification_failed', # noqa
'error.graphql.401',
)
if code in catch:
_auth = auth or route.AUTH
if exc.request_headers['Authorization'] != self.get_auth(_auth): # noqa
continue
force_attempts = self.max_refresh_attempts
retry = True
def should_force():
ts = self.client._refresh_times
if len(ts) > force_attempts:
self.client._refresh_times = ts[-force_attempts:]
try:
old = self.client._refresh_times[-force_attempts]
except IndexError:
return True
else:
cur = time.time()
return cur - old > self.refresh_attempt_window
if priority > lock.priority - 1:
async with MaybeLock(lock):
if should_force():
try:
await self.client.auth.do_refresh()
except asyncio.CancelledError:
lock.failed = True
retry = False
except Exception:
if self.client.can_restart():
await self.client.restart()
else:
lock.failed = True
retry = False
else:
retry = False
else:
if lock.locked():
await lock.wait()
if lock.failed:
raise asyncio.CancelledError(
'Client is shutting down.'
)
else:
retry = False
if retry:
continue
else:
try:
e = RuntimeError('Oauth token invalid.')
e.__cause__ = exc
self.client._exception_future.set_exception(e)
except asyncio.InvalidStateError:
pass
raise asyncio.CancelledError(
'Client is shutting down.'
)
elif code == 'errors.com.epicgames.common.throttled' or exc.status == 429: # noqa
retry_after = self.get_retry_after(exc)
if retry_after is not None and cfg.handle_rate_limits:
if retry_after <= cfg.max_retry_after:
sleep_time = retry_after + 0.5
if cfg.other_requests_wait and url_key is not None:
if url_key not in self._endpoint_events:
endpoint_event = asyncio.Event()
endpoint_event.ends_at = time.time() + sleep_time # noqa
self._endpoint_events[url_key] = endpoint_event # noqa
else:
tries -= 1 # backoff tries shouldn't count
if cfg.handle_capacity_throttling:
backoff *= cfg.backoff_factor
if backoff <= cfg.backoff_cap:
sleep_time = backoff
elif (code == 'errors.com.epicgames.common.concurrent_modification_error' # noqa
or code == 'errors.com.epicgames.common.server_error'
or gql_server_error): # noqa
sleep_time = 0.5 + (tries - 1) * 2
if sleep_time > 0:
total_slept += sleep_time
if cfg.max_wait_time and total_slept > cfg.max_wait_time:
raise
log.debug('Retrying {0} {1} in {2:.2f}s.'.format(
method,
url,
sleep_time
))
await asyncio.sleep(sleep_time)
continue
raise
except aiohttp.ServerDisconnectedError:
await asyncio.sleep(0.5 + (tries - 1) * 2)
continue
except OSError as exc:
if exc.errno in (54, 10054):
continue
raise
finally:
if endpoint_event is not None:
del self._endpoint_events[url_key]
endpoint_event.set()
async def get(self, route: Union[Route, str],
auth: Optional[str] = None,
**kwargs: Any) -> Any:
return await self.fn_request('GET', route, auth, **kwargs)
async def post(self, route: Union[Route, str],
auth: Optional[str] = None,
**kwargs: Any) -> Any:
return await self.fn_request('POST', route, auth, **kwargs)
async def delete(self, route: Union[Route, str],
auth: Optional[str] = None,
**kwargs: Any) -> Any:
return await self.fn_request('DELETE', route, auth, **kwargs)
async def patch(self, route: Union[Route, str],
auth: Optional[str] = None,
**kwargs: Any) -> Any:
return await self.fn_request('PATCH', route, auth, **kwargs)
async def put(self, route: Union[Route, str],
auth: Optional[str] = None,
**kwargs: Any) -> Any:
return await self.fn_request('PUT', route, auth, **kwargs)
async def graphql_request(self, graphql: Union[GraphQLRequest,
List[GraphQLRequest]],
auth: Optional[str] = None,
**kwargs: Any) -> Any:
return await self.fn_request('POST', EpicGamesGraphQL(), auth, graphql,
**kwargs)
###################################
# Epicgames GraphQL #
###################################
async def graphql_friends_set_alias(self) -> Any:
variables = {
"friendId": "65db72079052463cb345d23ee27ae6a1",
"alias": "Hallo1233"
}
query = """
mutation FriendsMutation($friendId: String!, $alias: String!) {
Friends {
setAlias(friendId: $friendId, alias: $alias) {
success
}
}
}"""
return await self.graphql_request(
GraphQLRequest(query, variables=variables))
async def graphql_initialize_friends_request(self) -> List[dict]:
queries = (
GraphQLRequest(
query="""
query FriendsQuery($displayNames: Boolean!) {
Friends {
summary(displayNames: $displayNames) {
friends {
alias
note
favorite
...friendFields
}
incoming {
...friendFields
}
outgoing {
...friendFields
}
blocklist {
...friendFields
}
}
}
}
fragment friendFields on Friend {
accountId
displayName
account {
externalAuths {
type
accountId
externalAuthId
externalDisplayName
}
}
}
""",
variables={
'displayNames': True
}
),
GraphQLRequest(
query="""
query PresenceV2Query($namespace: String!, $circle: String!) {
PresenceV2 {
getLastOnlineSummary(namespace: $namespace, circle: $circle) {
summary { #Type: [LastOnline]
friendId #Type: String
last_online #Type: String
}
}
}
}
""", # noqa
variables={
'namespace': 'Fortnite',
'circle': 'friends'
}
)
)
return await self.graphql_request(queries)
###################################
# Epicgames #
###################################
async def epicgames_get_csrf(self) -> aiohttp.ClientResponse:
return await self.get(EpicGames('/id/api/csrf'), raw=True)
async def epicgames_reputation(self, xsrf_token: str) -> Any:
headers = {
'x-xsrf-token': xsrf_token
}
return await self.get(EpicGames('/id/api/reputation'), headers=headers)
async def epicgames_login(self, email: str,
password: str,
xsrf_token: str) -> Any:
headers = {
'x-xsrf-token': xsrf_token
}
payload = {
'email': email,
'password': password,
'rememberMe': False,
'captcha': ''
}
cookies = {
'EPIC_COUNTRY': 'US'
}
return await self.post(EpicGames('/id/api/login'),
headers=headers,
data=payload,
cookies=cookies)
async def epicgames_mfa_login(self, method: str,
code: str,
xsrf_token: str) -> Any:
headers = {
'x-xsrf-token': xsrf_token
}
payload = {
'code': code,
'method': method,
'rememberDevice': False
}
return await self.post(EpicGames('/id/api/login/mfa'),
headers=headers,
data=payload)
async def epicgames_redirect(self, xsrf_token: str) -> Any:
headers = {
'x-xsrf-token': xsrf_token
}
return await self.get(EpicGames('/id/api/redirect'), headers=headers)
async def epicgames_get_exchange_data(self, xsrf_token: str) -> dict:
headers = {
'x-xsrf-token': xsrf_token
}
r = EpicGames('/id/api/exchange/generate')
return await self.post(r, headers=headers)
###################################
# Payment Website #
###################################
async def payment_website_search_sac_by_slug(self, slug: str) -> Any:
params = {
'slug': slug
}
r = PaymentWebsite('/affiliate/search-by-slug', auth=None)
return await self.get(r, params=params)
###################################
# Lightswitch #
###################################
async def lightswitch_get_status(self, *,
service_id: Optional[str] = None) -> list:
params = {'serviceId': service_id} if service_id else None
r = LightswitchPublicService('/lightswitch/api/service/bulk/status')
return await self.get(r, params=params)
###################################
# User Search #
###################################
async def user_search_by_prefix(self, client_id: str, prefix: str, platform: str) -> list:
params = {
'prefix': prefix,
'platform': platform
}
r = UserSearchService('/api/v1/search/{client_id}', client_id=client_id)
return await self.get(r, params=params)
###################################
# Account #
###################################
async def account_get_exchange_data(self, auth: str,
**kwargs: Any) -> dict:
r = AccountPublicService('/account/api/oauth/exchange')
return await self.get(r, auth=auth, **kwargs)
async def account_oauth_grant(self, **kwargs: Any) -> dict:
r = AccountPublicService('/account/api/oauth/token')
return await self.post(r, **kwargs)
async def account_generate_device_auth(self, client_id: str) -> dict:
r = AccountPublicService(
'/account/api/public/account/{client_id}/deviceAuth',
client_id=client_id
)
return await self.post(r, auth="IOS_ACCESS_TOKEN", json={})
async def account_get_device_auths(self, client_id: str) -> list:
r = AccountPublicService(
'/account/api/public/account/{client_id}/deviceAuth',
client_id=client_id,
auth="IOS_ACCESS_TOKEN"
)
return await self.get(r)
async def account_lookup_device_auth(self, client_id: str,
device_id: str) -> dict:
r = AccountPublicService(
'/account/api/public/account/{client_id}/deviceAuth/{device_id}',
client_id=client_id,
device_id=device_id,
auth="IOS_ACCESS_TOKEN"
)
return await self.get(r)
async def account_delete_device_auth(self, client_id: str,
device_id: str) -> None:
r = AccountPublicService(
'/account/api/public/account/{client_id}/deviceAuth/{device_id}',
client_id=client_id,
device_id=device_id,
auth="IOS_ACCESS_TOKEN"
)
return await self.delete(r)
async def account_sessions_kill_token(self, token: str, auth=None) -> Any:
r = AccountPublicService(
'/account/api/oauth/sessions/kill/{token}',
token=token
)
return await self.delete(r, auth='bearer {0}'.format(token))
async def account_sessions_kill(self, kill_type: str,
auth='IOS_ACCESS_TOKEN',
**kwargs: Any) -> Any:
params = {
'killType': kill_type
}
r = AccountPublicService('/account/api/oauth/sessions/kill')
return await self.delete(r, params=params, auth=auth, **kwargs)
async def account_get_by_display_name(self, display_name: str) -> dict:
r = AccountPublicService(
'/account/api/public/account/displayName/{display_name}',
display_name=display_name
)
return await self.get(r)
async def account_get_by_user_id(self, user_id: str, *,
auth: Optional[str] = None,
**kwargs: Any) -> dict:
r = AccountPublicService(
'/account/api/public/account/{user_id}',
user_id=user_id
)
return await self.get(r, auth=auth, **kwargs)
async def account_get_by_email(self, email: str) -> dict:
r = AccountPublicService(
'/account/api/public/account/email/{email}',
email=email
)
return await self.get(r, auth='IOS_ACCESS_TOKEN')
async def account_get_external_auths_by_id(self, user_id: str,
**kwargs: Any) -> list:
r = AccountPublicService(
'/account/api/public/account/{user_id}/externalAuths',
user_id=user_id
)
return await self.get(r, **kwargs)
async def account_get_multiple_by_user_id(self,
user_ids: List[str]) -> list:
params = [('accountId', user_id) for user_id in user_ids]
r = AccountPublicService('/account/api/public/account')
return await self.get(r, params=params)
async def account_graphql_get_multiple_by_user_id(self,
user_ids: List[str],
**kwargs: Any
) -> dict:
return await self.graphql_request(GraphQLRequest(
query="""
query AccountQuery($accountIds: [String]!) {
Account {
# Get details about an account given an account ID
accounts(accountIds: $accountIds) { #Type: [Account]
# The AccountID for this account
id #Type: String
# The epic display name for this account
displayName #Type: String
# External auths associated with this account
externalAuths { #Type: [ExternalAuth]
type #Type: String
accountId #Type: String
externalAuthId #Type: String
externalDisplayName #Type: String
}
}
}
}
""",
variables={
'accountIds': user_ids
}
), **kwargs)
async def account_graphql_get_by_display_name(self,
display_name: str) -> dict:
return await self.graphql_request(GraphQLRequest(
query="""
query AccountQuery($displayName: String!) {
Account {
account(displayName: $displayName) {
id
displayName
externalAuths {
type
accountId
externalAuthId
externalDisplayName
}
}
}
}
""",
variables={
'displayName': display_name
}
))
async def account_graphql_get_clients_external_auths(self,
**kwargs: Any
) -> dict:
return await self.graphql_request(GraphQLRequest(
query="""
query AccountQuery {
Account {
myAccount {
externalAuths {
type
accountId
externalAuthId
externalDisplayName
}
}
}
}
"""
), **kwargs)
###################################
# Eula Tracking #
###################################
async def eulatracking_get_data(self, **kwargs: Any) -> dict:
r = EulatrackingPublicService(
'/eulatracking/api/public/agreements/fn/account/{client_id}',
client_id=self.client.user.id
)
return await self.get(r, **kwargs)
async def eulatracking_accept(self, version: int, *,
locale: str = 'en',
**kwargs: Any) -> Any:
params = {
'locale': locale
}
r = EulatrackingPublicService(
('/eulatracking/api/public/agreements/fn/version/{version}'
'/account/{client_id}/accept'),
version=version,
client_id=self.client.user.id
)
return await self.post(r, params=params, **kwargs)
###################################
# Fortnite Public #
###################################
async def fortnite_grant_access(self, **kwargs: Any) -> Any:
r = FortnitePublicService(
'/fortnite/api/game/v2/grant_access/{client_id}',
client_id=self.client.user.id
)
return await self.post(r, json={}, **kwargs)
async def fortnite_get_store_catalog(self) -> dict:
r = FortnitePublicService('/fortnite/api/storefront/v2/catalog')
return await self.get(r)
async def fortnite_check_gift_eligibility(self,
user_id: str,
offer_id: str) -> Any:
r = FortnitePublicService(
'/fortnite/api/storefront/v2/gift/check_eligibility/recipient/{user_id}/offer/{offer_id}', # noqa
user_id=user_id,
offer_id=offer_id,
)
return await self.get(r)
async def fortnite_get_timeline(self) -> dict:
r = FortnitePublicService('/fortnite/api/calendar/v1/timeline')
return await self.get(r)
###################################
# Fortnite Content #
###################################
async def fortnitecontent_get(self) -> dict:
r = FortniteContentWebsite('/content/api/pages/fortnite-game')
return await self.get(r)
###################################
# Friends #
###################################
async def friends_get_all(self, *,
include_pending: bool = False,
**kwargs) -> list:
params = {
'includePending': include_pending
}
r = FriendsPublicService('/friends/api/public/friends/{client_id}',
client_id=self.client.user.id)
return await self.get(r, params=params, **kwargs)
async def friends_add_or_accept(self, user_id: str) -> Any:
r = FriendsPublicService(
'/friends/api/v1/{client_id}/friends/{user_id}',
client_id=self.client.user.id,
user_id=user_id
)
return await self.post(r)
async def friends_remove_or_decline(self, user_id: str) -> Any:
r = FriendsPublicService(
'/friends/api/v1/{client_id}/friends/{user_id}',
client_id=self.client.user.id,
user_id=user_id
)
return await self.delete(r)
async def friends_get_blocklist(self) -> list:
r = FriendsPublicService('/friends/api/v1/{client_id}/blocklist',
client_id=self.client.user.id)
return await self.get(r)
async def friends_set_nickname(self, user_id: str, nickname: str) -> Any:
r = FriendsPublicService(
'/friends/api/v1/{client_id}/friends/{user_id}/alias',
client_id=self.client.user.id,
user_id=user_id
)
return await self.put(r, data=nickname)
async def friends_remove_nickname(self, user_id: str) -> Any:
r = FriendsPublicService(
'/friends/api/v1/{client_id}/friends/{user_id}/alias',
client_id=self.client.user.id,
user_id=user_id
)
return await self.delete(r)
async def friends_set_note(self, user_id: str, note: str) -> Any:
r = FriendsPublicService(
'/friends/api/v1/{client_id}/friends/{user_id}/note',
client_id=self.client.user.id,
user_id=user_id
)
return await self.put(r, data=note)
async def friends_remove_note(self, user_id: str) -> Any:
r = FriendsPublicService(
'/friends/api/v1/{client_id}/friends/{user_id}/note',
client_id=self.client.user.id,
user_id=user_id
)
return await self.delete(r)
async def friends_get_summary(self, **kwargs) -> dict:
r = FriendsPublicService('/friends/api/v1/{client_id}/summary',
client_id=self.client.user.id)
return await self.get(r, **kwargs)
async def friends_block(self, user_id: str) -> Any:
r = FriendsPublicService(
'/friends/api/v1/{client_id}/blocklist/{user_id}',
client_id=self.client.user.id,
user_id=user_id
)
return await self.post(r)
async def friends_unblock(self, user_id: str) -> Any:
r = FriendsPublicService(
'/friends/api/v1/{client_id}/blocklist/{user_id}',
client_id=self.client.user.id,
user_id=user_id
)
return await self.delete(r)
async def friends_get_mutual(self, user_id: str) -> Any:
r = FriendsPublicService(
'/friends/api/v1/{client_id}/friends/{user_id}/mutual',
client_id=self.client.user.id,
user_id=user_id
)
return await self.get(r)
###################################
# Presence #
###################################
async def presence_get_last_online(self, **kwargs) -> dict:
r = PresencePublicService('/presence/api/v1/_/{client_id}/last-online',
client_id=self.client.user.id)
return await self.get(r, **kwargs)
###################################
# Stats #
###################################
async def stats_get_v2(self, user_id: str, *,
start_time: Optional[int] = None,
end_time: Optional[int] = None) -> dict:
params = {}
if start_time:
params['startTime'] = start_time
if end_time:
params['endTime'] = end_time
r = StatsproxyPublicService(
'/statsproxy/api/statsv2/account/{user_id}',
user_id=user_id
)
return await self.get(r, params=params)
async def stats_get_multiple_v2(self, ids: List[str], stats: List[str], *,
category: Optional[str] = None,
start_time: Optional[int] = None,
end_time: Optional[int] = None) -> list:
payload = {
'appId': 'fortnite',
'owners': ids,
'stats': stats
}
params = {}
        if category is not None:
            params['category'] = category
if start_time:
payload['startDate'] = start_time
if end_time:
payload['endDate'] = end_time
r = StatsproxyPublicService('/statsproxy/api/statsv2/query')
return await self.post(r, json=payload, params=params)
async def stats_get_leaderboard_v2(self, stat: str) -> dict:
r = StatsproxyPublicService(
'/statsproxy/api/statsv2/leaderboards/{stat}',
stat=stat
)
return await self.get(r)
###################################
# Party #
###################################
async def party_disconnect(self, party_id: str, user_id: str):
r = PartyService(
            '/party/api/v1/Fortnite/parties/{party_id}/members/{user_id}/disconnect',  # noqa
party_id=party_id,
user_id=user_id,
)
return await self.post(r)
async def party_send_invite(self, party_id: str,
user_id: str,
send_ping: bool = True) -> Any:
conn_type = self.client.default_party_member_config.cls.CONN_TYPE
payload = {
'urn:epic:cfg:build-id_s': self.client.party_build_id,
'urn:epic:conn:platform_s': self.client.platform.value,
'urn:epic:conn:type_s': conn_type,
'urn:epic:invite:platformdata_s': '',
'urn:epic:member:dn_s': self.client.user.display_name,
}
params = {
'sendPing': send_ping
}
r = PartyService(
'/party/api/v1/Fortnite/parties/{party_id}/invites/{user_id}',
party_id=party_id,
user_id=user_id
)
return await self.post(r, json=payload, params=params)
async def party_delete_invite(self, party_id: str, user_id: str) -> Any:
r = PartyService(
'/party/api/v1/Fortnite/parties/{party_id}/invites/{user_id}',
party_id=party_id,
user_id=user_id
)
return await self.delete(r)
    # NOTE: Deprecated since Fortnite v11.30. Use param sendPing=True with
# send_invite
# NOTE: Now used for sending invites from private parties
async def party_send_ping(self, user_id: str) -> Any:
r = PartyService(
'/party/api/v1/Fortnite/user/{user_id}/pings/{client_id}',
user_id=user_id,
client_id=self.client.user.id
)
return await self.post(r, json={})
async def party_delete_ping(self, user_id: str) -> Any:
r = PartyService(
'/party/api/v1/Fortnite/user/{client_id}/pings/{user_id}',
client_id=self.client.user.id,
user_id=user_id
)
return await self.delete(r)
async def party_decline_invite(self, party_id: str) -> Any:
r = PartyService(
('/party/api/v1/Fortnite/parties/{party_id}/invites/'
'{client_id}/decline'),
party_id=party_id,
client_id=self.client.user.id
)
return await self.post(r, json={})
async def party_member_confirm(self, party_id: str, user_id: str) -> Any:
r = PartyService(
('/party/api/v1/Fortnite/parties/{party_id}/members/'
'{user_id}/confirm'),
party_id=party_id,
user_id=user_id
)
return await self.post(r, json={})
async def party_member_reject(self, party_id: str, user_id: str) -> Any:
r = PartyService(
('/party/api/v1/Fortnite/parties/{party_id}/members/'
'{user_id}/reject'),
party_id=party_id,
user_id=user_id
)
return await self.post(r, json={})
async def party_promote_member(self, party_id: str, user_id: str) -> Any:
r = PartyService(
('/party/api/v1/Fortnite/parties/{party_id}/members/'
'{user_id}/promote'),
party_id=party_id,
user_id=user_id
)
return await self.post(r, json={})
async def party_kick_member(self, party_id: str, user_id: str) -> Any:
r = PartyService(
'/party/api/v1/Fortnite/parties/{party_id}/members/{user_id}',
party_id=party_id,
user_id=user_id
)
return await self.delete(r)
async def party_leave(self, party_id: str, **kwargs: Any) -> Any:
conn_type = self.client.default_party_member_config.cls.CONN_TYPE
payload = {
'connection': {
'id': self.client.user.jid,
'meta': {
'urn:epic:conn:platform_s': self.client.platform.value,
'urn:epic:conn:type_s': conn_type,
},
},
'meta': {
'urn:epic:member:dn_s': self.client.user.display_name,
'urn:epic:member:type_s': conn_type,
'urn:epic:member:platform_s': self.client.platform.value,
'urn:epic:member:joinrequest_j': json.dumps({
'CrossplayPreference_i': '1'
}),
}
}
r = PartyService(
'/party/api/v1/Fortnite/parties/{party_id}/members/{client_id}',
party_id=party_id,
client_id=self.client.user.id
)
return await self.delete(r, json=payload, **kwargs)
async def party_join_request(self, party_id: str) -> Any:
conf = self.client.default_party_member_config
conn_type = conf.cls.CONN_TYPE
payload = {
'connection': {
'id': str(self.client.xmpp.xmpp_client.local_jid),
'meta': {
'urn:epic:conn:platform_s': self.client.platform.value,
'urn:epic:conn:type_s': conn_type,
},
'yield_leadership': conf.yield_leadership,
'offline_ttl': conf.offline_ttl,
},
'meta': {
'urn:epic:member:dn_s': self.client.user.display_name,
'urn:epic:member:joinrequestusers_j': json.dumps({
'users': [
{
'id': self.client.user.id,
'dn': self.client.user.display_name,
'plat': self.client.platform.value,
'data': json.dumps({
'CrossplayPreference': '1',
'SubGame_u': '1',
})
}
]
}),
},
}
r = PartyService(
('/party/api/v1/Fortnite/parties/{party_id}/members/'
'{client_id}/join'),
party_id=party_id,
client_id=self.client.user.id
)
return await self.post(r, json=payload)
async def party_send_intention(self, user_id: str) -> dict:
payload = {
'urn:epic:invite:platformdata_s': '',
}
r = PartyService(
'/party/api/v1/Fortnite/members/{user_id}/intentions/{client_id}',
client_id=self.client.user.id,
user_id=user_id
)
return await self.post(r, json=payload)
async def party_lookup(self, party_id: str, **kwargs: Any) -> dict:
r = PartyService('/party/api/v1/Fortnite/parties/{party_id}',
party_id=party_id)
return await self.get(r, **kwargs)
async def party_lookup_user(self, user_id: str, **kwargs: Any) -> dict:
r = PartyService('/party/api/v1/Fortnite/user/{user_id}',
user_id=user_id)
return await self.get(r, **kwargs)
async def party_lookup_ping(self, user_id: str) -> list:
r = PartyService(
('/party/api/v1/Fortnite/user/{client_id}/pings/'
'{user_id}/parties'),
client_id=self.client.user.id,
user_id=user_id
)
return await self.get(r)
async def party_create(self, config: dict, **kwargs: Any) -> dict:
conf = self.client.default_party_member_config
conn_type = conf.cls.CONN_TYPE
_chat_enabled = str(config['chat_enabled']).lower()
payload = {
'config': {
'join_confirmation': config['join_confirmation'],
'joinability': config['joinability'],
'max_size': config['max_size']
},
'join_info': {
'connection': {
'id': str(self.client.xmpp.xmpp_client.local_jid),
'meta': {
'urn:epic:conn:platform_s': self.client.platform.value,
'urn:epic:conn:type_s': conn_type
},
'yield_leadership': conf.yield_leadership,
'offline_ttl': conf.offline_ttl,
},
},
'meta': {
'urn:epic:cfg:accepting-members_b': False,
'urn:epic:cfg:build-id_s': str(self.client.party_build_id),
'urn:epic:cfg:can-join_b': True,
'urn:epic:cfg:chat-enabled_b': _chat_enabled,
'urn:epic:cfg:invite-perm_s': 'Noone',
'urn:epic:cfg:join-request-action_s': 'Manual',
'urn:epic:cfg:not-accepting-members-reason_i': 0,
'urn:epic:cfg:party-type-id_s': 'default',
'urn:epic:cfg:presence-perm_s': 'Noone',
}
}
r = PartyService('/party/api/v1/Fortnite/parties')
return await self.post(r, json=payload, **kwargs)
async def party_update_member_meta(self, party_id: str,
user_id: str,
updated_meta: dict,
deleted_meta: list,
overridden_meta: dict,
revision: int,
override: dict = {},
**kwargs: Any) -> Any:
payload = {
'delete': deleted_meta,
'update': updated_meta,
'override': overridden_meta,
'revision': revision,
}
r = PartyService(
('/party/api/v1/Fortnite/parties/{party_id}/members/'
'{user_id}/meta'),
party_id=party_id,
user_id=user_id
)
return await self.patch(r, json=payload, **kwargs)
async def party_update_meta(self, party_id: str,
updated_meta: dict,
deleted_meta: list,
overridden_meta: dict,
revision: int,
config: dict = {},
**kwargs: Any) -> Any:
payload = {
'meta': {
'delete': deleted_meta,
'update': updated_meta,
'override': overridden_meta
},
'revision': revision,
}
if config:
payload['config'] = config
r = PartyService('/party/api/v1/Fortnite/parties/{party_id}',
party_id=party_id)
return await self.patch(r, json=payload, **kwargs)
| 36.09064
| 110
| 0.511006
|
9ecf081ed0b079a370968a8c10ace03c3012852f
| 108
|
py
|
Python
|
example3.py
|
touilleMan/pythecode
|
84da4b3d9de87c4c1ec5daf285028a61fcf7e80e
|
[
"WTFPL"
] | 2
|
2021-11-08T02:45:12.000Z
|
2021-11-08T09:41:04.000Z
|
example3.py
|
touilleMan/pythecode
|
84da4b3d9de87c4c1ec5daf285028a61fcf7e80e
|
[
"WTFPL"
] | null | null | null |
example3.py
|
touilleMan/pythecode
|
84da4b3d9de87c4c1ec5daf285028a61fcf7e80e
|
[
"WTFPL"
] | null | null | null |
def k(x, a=1, b=2, c=3):
print(x, a, b, c)
d = (a + b) * c
return 'res', d, x
print(k(10, 20))
| 15.428571
| 24
| 0.416667
|
c52bbb00d23df7037698c3a747adb61e86903a19
| 90,576
|
py
|
Python
|
srunner/scenariomanager/scenarioatomics/atomic_behaviors.py
|
Soolek/scenario_runner
|
7a50628b50e9458ab2895ce252cf882f55731717
|
[
"MIT"
] | null | null | null |
srunner/scenariomanager/scenarioatomics/atomic_behaviors.py
|
Soolek/scenario_runner
|
7a50628b50e9458ab2895ce252cf882f55731717
|
[
"MIT"
] | null | null | null |
srunner/scenariomanager/scenarioatomics/atomic_behaviors.py
|
Soolek/scenario_runner
|
7a50628b50e9458ab2895ce252cf882f55731717
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# Copyright (c) 2018-2020 Intel Corporation
#
# This work is licensed under the terms of the MIT license.
# For a copy, see <https://opensource.org/licenses/MIT>.
"""
This module provides all atomic scenario behaviors required to realize
complex, realistic scenarios such as "follow a leading vehicle", "lane change",
etc.
The atomic behaviors are implemented with py_trees.
"""
from __future__ import print_function
import copy
import math
import operator
import os
import random
import time
import subprocess
import numpy as np
import py_trees
from py_trees.blackboard import Blackboard
import carla
from agents.navigation.basic_agent import BasicAgent, LocalPlanner
from agents.navigation.local_planner import RoadOption
from srunner.scenariomanager.carla_data_provider import CarlaDataProvider
from srunner.scenariomanager.actorcontrols.actor_control import ActorControl
from srunner.scenariomanager.timer import GameTime
from srunner.tools.scenario_helper import detect_lane_obstacle
from srunner.tools.scenario_helper import generate_target_waypoint_list_multilane
import srunner.tools
EPSILON = 0.001
def calculate_distance(location, other_location, global_planner=None):
"""
    Method to calculate the distance between two locations
Note: It uses the direct distance between the current location and the
target location to estimate the time to arrival.
To be accurate, it would have to use the distance along the
(shortest) route between the two locations.
"""
if global_planner:
distance = 0
# Get the route
route = global_planner.trace_route(location, other_location)
# Get the distance of the route
for i in range(1, len(route)):
curr_loc = route[i][0].transform.location
prev_loc = route[i - 1][0].transform.location
distance += curr_loc.distance(prev_loc)
return distance
return location.distance(other_location)
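# A minimal usage sketch for calculate_distance (added for illustration; not part
# of the original scenario_runner module). Without a global planner the helper
# falls back to the straight-line distance, which is all this example exercises.
def _example_calculate_distance():
    start = carla.Location(x=0.0, y=0.0, z=0.0)
    end = carla.Location(x=3.0, y=4.0, z=0.0)
    # Straight-line distance between the two illustrative points (-> 5.0).
    return calculate_distance(start, end)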
def get_actor_control(actor):
"""
Method to return the type of control to the actor.
"""
control = actor.get_control()
actor_type = actor.type_id.split('.')[0]
if not isinstance(actor, carla.Walker):
control.steering = 0
else:
control.speed = 0
return control, actor_type
class AtomicBehavior(py_trees.behaviour.Behaviour):
"""
Base class for all atomic behaviors used to setup a scenario
*All behaviors should use this class as parent*
Important parameters:
- name: Name of the atomic behavior
"""
def __init__(self, name, actor=None):
"""
Default init. Has to be called via super from derived class
"""
super(AtomicBehavior, self).__init__(name)
self.logger.debug("%s.__init__()" % (self.__class__.__name__))
self.name = name
self._actor = actor
def setup(self, unused_timeout=15):
"""
Default setup
"""
self.logger.debug("%s.setup()" % (self.__class__.__name__))
return True
def initialise(self):
"""
Initialise setup terminates WaypointFollowers
Check whether WF for this actor is running and terminate all active WFs
"""
if self._actor is not None:
try:
check_attr = operator.attrgetter("running_WF_actor_{}".format(self._actor.id))
terminate_wf = copy.copy(check_attr(py_trees.blackboard.Blackboard()))
py_trees.blackboard.Blackboard().set(
"terminate_WF_actor_{}".format(self._actor.id), terminate_wf, overwrite=True)
except AttributeError:
# It is ok to continue, if the Blackboard variable does not exist
pass
self.logger.debug("%s.initialise()" % (self.__class__.__name__))
def terminate(self, new_status):
"""
Default terminate. Can be extended in derived class
"""
self.logger.debug("%s.terminate()[%s->%s]" % (self.__class__.__name__, self.status, new_status))
class RunScript(AtomicBehavior):
"""
This is an atomic behavior to start execution of an additional script.
Args:
script (str): String containing the interpreter, scriptpath and arguments
Example: "python /path/to/script.py --arg1"
        base_path (str): String containing the base path for the script
Attributes:
_script (str): String containing the interpreter, scriptpath and arguments
Example: "python /path/to/script.py --arg1"
        _base_path (str): String containing the base path for the script
Example: "/path/to/"
Note:
This is intended for the use with OpenSCENARIO. Be aware of security side effects.
"""
def __init__(self, script, base_path=None, name="RunScript"):
"""
Setup parameters
"""
super(RunScript, self).__init__(name)
self.logger.debug("%s.__init__()" % (self.__class__.__name__))
self._script = script
self._base_path = base_path
def update(self):
"""
Start script
"""
path = None
script_components = self._script.split(' ')
if len(script_components) > 1:
path = script_components[1]
if not os.path.isfile(path):
path = os.path.join(self._base_path, path)
if not os.path.isfile(path):
new_status = py_trees.common.Status.FAILURE
print("Script file does not exists {}".format(path))
else:
subprocess.Popen(self._script, shell=True, cwd=self._base_path)
new_status = py_trees.common.Status.SUCCESS
self.logger.debug("%s.update()[%s->%s]" % (self.__class__.__name__, self.status, new_status))
return new_status
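# Hedged usage sketch for RunScript (illustrative only; the interpreter, script
# path and base path below are assumptions mirroring the class docstring, not
# files shipped with scenario_runner).
def _example_run_script_usage():
    # The whole command line is passed as a single string.
    return RunScript("python /path/to/script.py --arg1", base_path="/path/to/")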
class ChangeWeather(AtomicBehavior):
"""
Atomic to write a new weather configuration into the blackboard.
Used in combination with WeatherBehavior() to have a continuous weather simulation.
The behavior immediately terminates with SUCCESS after updating the blackboard.
Args:
weather (srunner.scenariomanager.weather_sim.Weather): New weather settings.
name (string): Name of the behavior.
Defaults to 'UpdateWeather'.
Attributes:
_weather (srunner.scenariomanager.weather_sim.Weather): Weather settings.
"""
def __init__(self, weather, name="ChangeWeather"):
"""
Setup parameters
"""
super(ChangeWeather, self).__init__(name)
self._weather = weather
def update(self):
"""
Write weather into blackboard and exit with success
returns:
py_trees.common.Status.SUCCESS
"""
py_trees.blackboard.Blackboard().set("CarlaWeather", self._weather, overwrite=True)
return py_trees.common.Status.SUCCESS
class ChangeRoadFriction(AtomicBehavior):
"""
Atomic to update the road friction in CARLA.
The behavior immediately terminates with SUCCESS after updating the friction.
Args:
friction (float): New friction coefficient.
name (string): Name of the behavior.
Defaults to 'UpdateRoadFriction'.
Attributes:
_friction (float): Friction coefficient.
"""
def __init__(self, friction, name="ChangeRoadFriction"):
"""
Setup parameters
"""
super(ChangeRoadFriction, self).__init__(name)
self._friction = friction
def update(self):
"""
Update road friction. Spawns new friction blueprint and removes the old one, if existing.
returns:
py_trees.common.Status.SUCCESS
"""
for actor in CarlaDataProvider.get_world().get_actors().filter('static.trigger.friction'):
actor.destroy()
friction_bp = CarlaDataProvider.get_world().get_blueprint_library().find('static.trigger.friction')
extent = carla.Location(1000000.0, 1000000.0, 1000000.0)
friction_bp.set_attribute('friction', str(self._friction))
friction_bp.set_attribute('extent_x', str(extent.x))
friction_bp.set_attribute('extent_y', str(extent.y))
friction_bp.set_attribute('extent_z', str(extent.z))
# Spawn Trigger Friction
transform = carla.Transform()
transform.location = carla.Location(-10000.0, -10000.0, 0.0)
CarlaDataProvider.get_world().spawn_actor(friction_bp, transform)
return py_trees.common.Status.SUCCESS
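# Hedged usage sketch for ChangeRoadFriction (illustrative only). The chosen
# coefficient is an arbitrary example value for a slippery surface.
def _example_change_road_friction_usage():
    # Terminates with SUCCESS immediately after re-spawning the friction trigger.
    return ChangeRoadFriction(friction=0.4)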
class ChangeActorControl(AtomicBehavior):
"""
Atomic to change the longitudinal/lateral control logic for an actor.
The (actor, controller) pair is stored inside the Blackboard.
    The behavior immediately terminates with SUCCESS after registering the controller.
Args:
actor (carla.Actor): Actor that should be controlled by the controller.
control_py_module (string): Name of the python module containing the implementation
of the controller.
args (dictionary): Additional arguments for the controller.
name (string): Name of the behavior.
Defaults to 'ChangeActorControl'.
Attributes:
_actor_control (ActorControl): Instance of the actor control.
"""
def __init__(self, actor, control_py_module, args, name="ChangeActorControl"):
"""
Setup actor controller.
"""
super(ChangeActorControl, self).__init__(name, actor)
self._actor_control = ActorControl(actor, control_py_module=control_py_module, args=args)
def update(self):
"""
        Write (actor, controller) pair to Blackboard, or update the controller
if actor already exists as a key.
returns:
py_trees.common.Status.SUCCESS
"""
actor_dict = {}
try:
check_actors = operator.attrgetter("ActorsWithController")
actor_dict = check_actors(py_trees.blackboard.Blackboard())
except AttributeError:
pass
if actor_dict:
if self._actor.id in actor_dict:
actor_dict[self._actor.id].reset()
actor_dict[self._actor.id] = self._actor_control
py_trees.blackboard.Blackboard().set("ActorsWithController", actor_dict, overwrite=True)
return py_trees.common.Status.SUCCESS
class UpdateAllActorControls(AtomicBehavior):
"""
Atomic to update (run one control loop step) all actor controls.
The behavior is always in RUNNING state.
Args:
name (string): Name of the behavior.
Defaults to 'UpdateAllActorControls'.
"""
def __init__(self, name="UpdateAllActorControls"):
"""
Constructor
"""
super(UpdateAllActorControls, self).__init__(name)
def update(self):
"""
Execute one control loop step for all actor controls.
returns:
py_trees.common.Status.RUNNING
"""
actor_dict = {}
try:
check_actors = operator.attrgetter("ActorsWithController")
actor_dict = check_actors(py_trees.blackboard.Blackboard())
except AttributeError:
pass
for actor_id in actor_dict:
actor_dict[actor_id].run_step()
return py_trees.common.Status.RUNNING
class ChangeActorTargetSpeed(AtomicBehavior):
"""
Atomic to change the target speed for an actor controller.
The behavior is in RUNNING state until the distance/duration
conditions are satisfied, or if a second ChangeActorTargetSpeed atomic
for the same actor is triggered.
Args:
actor (carla.Actor): Controlled actor.
target_speed (float): New target speed [m/s].
init_speed (boolean): Flag to indicate if the speed is the initial actor speed.
Defaults to False.
duration (float): Duration of the maneuver [s].
Defaults to None.
distance (float): Distance of the maneuver [m].
Defaults to None.
relative_actor (carla.Actor): If the target speed setting should be relative to another actor.
Defaults to None.
value (float): Offset, if the target speed setting should be relative to another actor.
Defaults to None.
value_type (string): Either 'Delta' or 'Factor' influencing how the offset to the reference actors
velocity is applied. Defaults to None.
continuous (boolean): If True, the atomic remains in RUNNING, independent of duration or distance.
Defaults to False.
name (string): Name of the behavior.
Defaults to 'ChangeActorTargetSpeed'.
Attributes:
_target_speed (float): New target speed [m/s].
_init_speed (boolean): Flag to indicate if the speed is the initial actor speed.
Defaults to False.
_start_time (float): Start time of the atomic [s].
Defaults to None.
_start_location (carla.Location): Start location of the atomic.
Defaults to None.
_duration (float): Duration of the maneuver [s].
Defaults to None.
_distance (float): Distance of the maneuver [m].
Defaults to None.
_relative_actor (carla.Actor): If the target speed setting should be relative to another actor.
Defaults to None.
_value (float): Offset, if the target speed setting should be relative to another actor.
Defaults to None.
_value_type (string): Either 'Delta' or 'Factor' influencing how the offset to the reference actors
velocity is applied. Defaults to None.
_continuous (boolean): If True, the atomic remains in RUNNING, independent of duration or distance.
Defaults to False.
"""
def __init__(self, actor, target_speed, init_speed=False,
duration=None, distance=None, relative_actor=None,
value=None, value_type=None, continuous=False, name="ChangeActorTargetSpeed"):
"""
Setup parameters
"""
super(ChangeActorTargetSpeed, self).__init__(name, actor)
self._target_speed = target_speed
self._init_speed = init_speed
self._start_time = None
self._start_location = None
self._relative_actor = relative_actor
self._value = value
self._value_type = value_type
self._continuous = continuous
self._duration = duration
self._distance = distance
def initialise(self):
"""
Set initial parameters such as _start_time and _start_location,
and get (actor, controller) pair from Blackboard.
May throw if actor is not available as key for the ActorsWithController
dictionary from Blackboard.
"""
actor_dict = {}
try:
check_actors = operator.attrgetter("ActorsWithController")
actor_dict = check_actors(py_trees.blackboard.Blackboard())
except AttributeError:
pass
        if not actor_dict or self._actor.id not in actor_dict:
raise RuntimeError("Actor not found in ActorsWithController BlackBoard")
self._start_time = GameTime.get_time()
self._start_location = CarlaDataProvider.get_location(self._actor)
actor_dict[self._actor.id].update_target_speed(self._target_speed, start_time=self._start_time)
if self._init_speed:
actor_dict[self._actor.id].set_init_speed()
super(ChangeActorTargetSpeed, self).initialise()
def update(self):
"""
Check the actor's current state and update target speed, if it is relative to another actor.
returns:
            py_trees.common.Status.SUCCESS, if the duration or distance driven exceeded the limits, or
                if another ChangeActorTargetSpeed atomic for the same actor was triggered.
            py_trees.common.Status.FAILURE, if the actor is not found in ActorsWithController Blackboard dictionary.
            py_trees.common.Status.RUNNING, else.
"""
        actor_dict = {}
        try:
            check_actors = operator.attrgetter("ActorsWithController")
            actor_dict = check_actors(py_trees.blackboard.Blackboard())
        except AttributeError:
            pass
        if not actor_dict or self._actor.id not in actor_dict:
            return py_trees.common.Status.FAILURE
if actor_dict[self._actor.id].get_last_longitudinal_command() != self._start_time:
return py_trees.common.Status.SUCCESS
new_status = py_trees.common.Status.RUNNING
if self._relative_actor:
relative_velocity = CarlaDataProvider.get_velocity(self._relative_actor)
# get target velocity
if self._value_type == 'delta':
actor_dict[self._actor.id].update_target_speed(relative_velocity + self._value)
elif self._value_type == 'factor':
actor_dict[self._actor.id].update_target_speed(relative_velocity * self._value)
else:
print('self._value_type must be delta or factor')
# check duration and driven_distance
if not self._continuous:
if (self._duration is not None) and (GameTime.get_time() - self._start_time > self._duration):
new_status = py_trees.common.Status.SUCCESS
driven_distance = CarlaDataProvider.get_location(self._actor).distance(self._start_location)
if (self._distance is not None) and (driven_distance > self._distance):
new_status = py_trees.common.Status.SUCCESS
if self._distance is None and self._duration is None:
new_status = py_trees.common.Status.SUCCESS
return new_status
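# Hedged usage sketch for ChangeActorTargetSpeed (illustrative only). `vehicle`
# and `lead_vehicle` are assumed to be carla.Actor instances whose controllers
# were previously registered via ChangeActorControl.
def _example_change_actor_target_speed_usage(vehicle, lead_vehicle):
    # Drive 5 m/s faster than the lead vehicle ('delta' offset) for at most 100 m.
    return ChangeActorTargetSpeed(vehicle,
                                  target_speed=0.0,
                                  relative_actor=lead_vehicle,
                                  value=5.0,
                                  value_type='delta',
                                  distance=100)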
class ChangeActorWaypoints(AtomicBehavior):
"""
Atomic to change the waypoints for an actor controller.
The behavior is in RUNNING state until the last waypoint is reached,
or if a second ChangeActorWaypoints atomic for the same actor is triggered.
Args:
actor (carla.Actor): Controlled actor.
waypoints (List of carla.Transform): List of waypoints (CARLA transforms).
name (string): Name of the behavior.
Defaults to 'ChangeActorWaypoints'.
Attributes:
_waypoints (List of carla.Transform): List of waypoints (CARLA transforms).
_start_time (float): Start time of the atomic [s].
Defaults to None.
"""
def __init__(self, actor, waypoints, name="ChangeActorWaypoints"):
"""
Setup parameters
"""
super(ChangeActorWaypoints, self).__init__(name, actor)
self._waypoints = waypoints
self._start_time = None
def initialise(self):
"""
Set _start_time and get (actor, controller) pair from Blackboard.
Set waypoint list for actor controller.
May throw if actor is not available as key for the ActorsWithController
dictionary from Blackboard.
"""
actor_dict = {}
try:
check_actors = operator.attrgetter("ActorsWithController")
actor_dict = check_actors(py_trees.blackboard.Blackboard())
except AttributeError:
pass
        if not actor_dict or self._actor.id not in actor_dict:
raise RuntimeError("Actor not found in ActorsWithController BlackBoard")
self._start_time = GameTime.get_time()
actor_dict[self._actor.id].update_waypoints(self._waypoints, start_time=self._start_time)
super(ChangeActorWaypoints, self).initialise()
def update(self):
"""
Check the actor's state along the waypoint route.
returns:
py_trees.common.Status.SUCCESS, if the final waypoint was reached, or
if another ChangeActorWaypoints atomic for the same actor was triggered.
py_trees.common.Status.FAILURE, if the actor is not found in ActorsWithController Blackboard dictionary.
            py_trees.common.Status.RUNNING, else.
"""
        actor_dict = {}
        try:
            check_actors = operator.attrgetter("ActorsWithController")
            actor_dict = check_actors(py_trees.blackboard.Blackboard())
        except AttributeError:
            pass
        if not actor_dict or self._actor.id not in actor_dict:
            return py_trees.common.Status.FAILURE
if actor_dict[self._actor.id].get_last_waypoint_command() != self._start_time:
return py_trees.common.Status.SUCCESS
new_status = py_trees.common.Status.RUNNING
if actor_dict[self._actor.id].check_reached_waypoint_goal():
new_status = py_trees.common.Status.SUCCESS
return new_status
class ChangeActorLateralMotion(AtomicBehavior):
"""
Atomic to change the waypoints for an actor controller.
The behavior is in RUNNING state until the driven distance exceeds
distance_lane_change, or if a second ChangeActorLateralMotion atomic
for the same actor is triggered.
Args:
actor (carla.Actor): Controlled actor.
direction (string): Lane change direction ('left' or 'right').
Defaults to 'left'.
        distance_lane_change (float): Distance of the lane change [meters].
Defaults to 25.
name (string): Name of the behavior.
Defaults to 'ChangeActorLateralMotion'.
Attributes:
_waypoints (List of carla.Transform): List of waypoints representing the lane change (CARLA transforms).
_direction (string): Lane change direction ('left' or 'right').
_distance_same_lane (float): Distance on the same lane before the lane change starts [meters]
Constant to 5.
        _distance_other_lane (float): Max. distance on the target lane after the lane change [meters]
Constant to 100.
_distance_lane_change (float): Max. total distance of the lane change [meters].
_pos_before_lane_change: carla.Location of the actor before the lane change.
Defaults to None.
_target_lane_id (int): Id of the target lane
Defaults to None.
_start_time (float): Start time of the atomic [s].
Defaults to None.
"""
def __init__(self, actor, direction='left', distance_lane_change=25, name="ChangeActorLateralMotion"):
"""
Setup parameters
"""
super(ChangeActorLateralMotion, self).__init__(name, actor)
self._waypoints = []
self._direction = direction
self._distance_same_lane = 5
self._distance_other_lane = 100
self._distance_lane_change = distance_lane_change
self._pos_before_lane_change = None
self._target_lane_id = None
self._start_time = None
def initialise(self):
"""
Set _start_time and get (actor, controller) pair from Blackboard.
        Generate a waypoint list (route) which represents the lane change. Set
this waypoint list for the actor controller.
May throw if actor is not available as key for the ActorsWithController
dictionary from Blackboard.
"""
actor_dict = {}
try:
check_actors = operator.attrgetter("ActorsWithController")
actor_dict = check_actors(py_trees.blackboard.Blackboard())
except AttributeError:
pass
        if not actor_dict or self._actor.id not in actor_dict:
raise RuntimeError("Actor not found in ActorsWithController BlackBoard")
self._start_time = GameTime.get_time()
# get start position
position_actor = CarlaDataProvider.get_map().get_waypoint(CarlaDataProvider.get_location(self._actor))
# calculate plan with scenario_helper function
plan, self._target_lane_id = generate_target_waypoint_list_multilane(
position_actor, self._direction, self._distance_same_lane,
self._distance_other_lane, self._distance_lane_change, check='false')
for elem in plan:
self._waypoints.append(elem[0].transform)
actor_dict[self._actor.id].update_waypoints(self._waypoints, start_time=self._start_time)
super(ChangeActorLateralMotion, self).initialise()
def update(self):
"""
Check the actor's current state and if the lane change was completed
returns:
py_trees.common.Status.SUCCESS, if lane change was successful, or
if another ChangeActorLateralMotion atomic for the same actor was triggered.
py_trees.common.Status.FAILURE, if the actor is not found in ActorsWithController Blackboard dictionary.
            py_trees.common.Status.RUNNING, else.
"""
        actor_dict = {}
        try:
            check_actors = operator.attrgetter("ActorsWithController")
            actor_dict = check_actors(py_trees.blackboard.Blackboard())
        except AttributeError:
            pass
        if not actor_dict or self._actor.id not in actor_dict:
            return py_trees.common.Status.FAILURE
if actor_dict[self._actor.id].get_last_waypoint_command() != self._start_time:
return py_trees.common.Status.SUCCESS
new_status = py_trees.common.Status.RUNNING
current_position_actor = CarlaDataProvider.get_map().get_waypoint(self._actor.get_location())
current_lane_id = current_position_actor.lane_id
if current_lane_id == self._target_lane_id:
# driving on new lane
distance = current_position_actor.transform.location.distance(self._pos_before_lane_change)
if distance > self._distance_other_lane:
# long enough distance on new lane --> SUCCESS
new_status = py_trees.common.Status.SUCCESS
else:
# no lane change yet
self._pos_before_lane_change = current_position_actor.transform.location
return new_status
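# Hedged usage sketch for ChangeActorLateralMotion (illustrative only). `vehicle`
# is assumed to be a carla.Actor with a registered controller.
def _example_change_actor_lateral_motion_usage(vehicle):
    # Request a lane change to the right, completed within roughly 30 meters.
    return ChangeActorLateralMotion(vehicle, direction='right', distance_lane_change=30)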
class ActorTransformSetterToOSCPosition(AtomicBehavior):
"""
OpenSCENARIO atomic
This class contains an atomic behavior to set the transform of an OpenSCENARIO actor.
Important parameters:
- actor: CARLA actor to execute the behavior
- osc_position: OpenSCENARIO position
- physics [optional]: If physics is true, the actor physics will be reactivated upon success
The behavior terminates when actor is set to the new actor transform (closer than 1 meter)
NOTE:
It is very important to ensure that the actor location is spawned to the new transform because of the
    appearance of a rare runtime processing error. WaypointFollower with LocalPlanner,
might fail if new_status is set to success before the actor is really positioned at the new transform.
Therefore: calculate_distance(actor, transform) < 1 meter
"""
def __init__(self, actor, osc_position, physics=True, name="ActorTransformSetterToOSCPosition"):
"""
Setup parameters
"""
super(ActorTransformSetterToOSCPosition, self).__init__(name, actor)
self._osc_position = osc_position
self._physics = physics
self._osc_transform = None
def initialise(self):
super(ActorTransformSetterToOSCPosition, self).initialise()
if self._actor.is_alive:
self._actor.set_velocity(carla.Vector3D(0, 0, 0))
self._actor.set_angular_velocity(carla.Vector3D(0, 0, 0))
def update(self):
"""
Transform actor
"""
new_status = py_trees.common.Status.RUNNING
# calculate transform with method in openscenario_parser.py
self._osc_transform = srunner.tools.openscenario_parser.OpenScenarioParser.convert_position_to_transform(
self._osc_position)
self._actor.set_transform(self._osc_transform)
if not self._actor.is_alive:
new_status = py_trees.common.Status.FAILURE
if calculate_distance(self._actor.get_location(), self._osc_transform.location) < 1.0:
if self._physics:
self._actor.set_simulate_physics(enabled=True)
new_status = py_trees.common.Status.SUCCESS
return new_status
class AccelerateToVelocity(AtomicBehavior):
"""
This class contains an atomic acceleration behavior. The controlled
traffic participant will accelerate with _throttle_value_ until reaching
a given _target_velocity_
Important parameters:
- actor: CARLA actor to execute the behavior
- throttle_value: The amount of throttle used to accelerate in [0,1]
- target_velocity: The target velocity the actor should reach in m/s
The behavior will terminate, if the actor's velocity is at least target_velocity
"""
def __init__(self, actor, throttle_value, target_velocity, name="Acceleration"):
"""
Setup parameters including acceleration value (via throttle_value)
and target velocity
"""
super(AccelerateToVelocity, self).__init__(name, actor)
self.logger.debug("%s.__init__()" % (self.__class__.__name__))
self._control, self._type = get_actor_control(actor)
self._throttle_value = throttle_value
self._target_velocity = target_velocity
def initialise(self):
# In case of walkers, we have to extract the current heading
if self._type == 'walker':
self._control.speed = self._target_velocity
self._control.direction = CarlaDataProvider.get_transform(self._actor).get_forward_vector()
super(AccelerateToVelocity, self).initialise()
def update(self):
"""
Set throttle to throttle_value, as long as velocity is < target_velocity
"""
new_status = py_trees.common.Status.RUNNING
if self._type == 'vehicle':
if CarlaDataProvider.get_velocity(self._actor) < self._target_velocity:
self._control.throttle = self._throttle_value
else:
new_status = py_trees.common.Status.SUCCESS
self._control.throttle = 0
self._actor.apply_control(self._control)
self.logger.debug("%s.update()[%s->%s]" % (self.__class__.__name__, self.status, new_status))
return new_status
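# Hedged usage sketch for AccelerateToVelocity (illustrative only). `vehicle`
# is assumed to be a carla.Actor spawned elsewhere.
def _example_accelerate_to_velocity_usage(vehicle):
    # Apply 75 % throttle until the actor reaches 10 m/s.
    return AccelerateToVelocity(vehicle, throttle_value=0.75, target_velocity=10.0)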
class AccelerateToCatchUp(AtomicBehavior):
"""
This class contains an atomic acceleration behavior.
The car will accelerate until it is faster than another car, in order to catch up distance.
This behaviour is especially useful before a lane change (e.g. LaneChange atom).
Important parameters:
- actor: CARLA actor to execute the behaviour
- other_actor: Reference CARLA actor, actor you want to catch up to
- throttle_value: acceleration value between 0.0 and 1.0
- delta_velocity: speed up to the velocity of other actor plus delta_velocity
- trigger_distance: distance between the actors
- max_distance: driven distance to catch up has to be smaller than max_distance
    The behaviour will terminate successfully when the two actors are within trigger_distance.
If max_distance is driven by the actor before actors are in trigger_distance,
then the behaviour ends with a failure.
"""
def __init__(self, actor, other_actor, throttle_value=1, delta_velocity=10, trigger_distance=5,
max_distance=500, name="AccelerateToCatchUp"):
"""
Setup parameters
        The target_speed is calculated on the fly.
"""
super(AccelerateToCatchUp, self).__init__(name, actor)
self._other_actor = other_actor
self._throttle_value = throttle_value
self._delta_velocity = delta_velocity # 1m/s=3.6km/h
self._trigger_distance = trigger_distance
self._max_distance = max_distance
self._control, self._type = get_actor_control(actor)
self._initial_actor_pos = None
def initialise(self):
# get initial actor position
self._initial_actor_pos = CarlaDataProvider.get_location(self._actor)
super(AccelerateToCatchUp, self).initialise()
def update(self):
# get actor speed
actor_speed = CarlaDataProvider.get_velocity(self._actor)
target_speed = CarlaDataProvider.get_velocity(self._other_actor) + self._delta_velocity
# distance between actors
distance = CarlaDataProvider.get_location(self._actor).distance(
CarlaDataProvider.get_location(self._other_actor))
# driven distance of actor
driven_distance = CarlaDataProvider.get_location(self._actor).distance(self._initial_actor_pos)
if actor_speed < target_speed:
# set throttle to throttle_value to accelerate
self._control.throttle = self._throttle_value
if actor_speed >= target_speed:
# keep velocity until the actors are in trigger distance
self._control.throttle = 0
self._actor.apply_control(self._control)
# new status:
if distance <= self._trigger_distance:
new_status = py_trees.common.Status.SUCCESS
elif driven_distance > self._max_distance:
new_status = py_trees.common.Status.FAILURE
else:
new_status = py_trees.common.Status.RUNNING
return new_status
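# Hedged usage sketch for AccelerateToCatchUp (illustrative only). `vehicle`
# and `vehicle_ahead` are assumed to be carla.Actor instances spawned elsewhere.
def _example_accelerate_to_catch_up_usage(vehicle, vehicle_ahead):
    # Speed up to 10 m/s above the other actor until both are within 5 m,
    # failing if more than 500 m are driven without catching up.
    return AccelerateToCatchUp(vehicle, vehicle_ahead,
                               throttle_value=1,
                               delta_velocity=10,
                               trigger_distance=5,
                               max_distance=500)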
class KeepVelocity(AtomicBehavior):
"""
This class contains an atomic behavior to keep the provided velocity.
The controlled traffic participant will accelerate as fast as possible
until reaching a given _target_velocity_, which is then maintained for
as long as this behavior is active.
Important parameters:
- actor: CARLA actor to execute the behavior
- target_velocity: The target velocity the actor should reach
- duration[optional]: Duration in seconds of this behavior
- distance[optional]: Maximum distance in meters covered by the actor during this behavior
A termination can be enforced by providing distance or duration values.
Alternatively, a parallel termination behavior has to be used.
"""
def __init__(self, actor, target_velocity, duration=float("inf"), distance=float("inf"), name="KeepVelocity"):
"""
Setup parameters including acceleration value (via throttle_value)
and target velocity
"""
super(KeepVelocity, self).__init__(name, actor)
self.logger.debug("%s.__init__()" % (self.__class__.__name__))
self._target_velocity = target_velocity
self._control, self._type = get_actor_control(actor)
self._map = self._actor.get_world().get_map()
self._waypoint = self._map.get_waypoint(self._actor.get_location())
self._duration = duration
self._target_distance = distance
self._distance = 0
self._start_time = 0
self._location = None
def initialise(self):
self._location = CarlaDataProvider.get_location(self._actor)
self._start_time = GameTime.get_time()
# In case of walkers, we have to extract the current heading
if self._type == 'walker':
self._control.speed = self._target_velocity
self._control.direction = CarlaDataProvider.get_transform(self._actor).get_forward_vector()
super(KeepVelocity, self).initialise()
def update(self):
"""
As long as the stop condition (duration or distance) is not violated, set a new vehicle control
For vehicles: set throttle to throttle_value, as long as velocity is < target_velocity
For walkers: simply apply the given self._control
"""
new_status = py_trees.common.Status.RUNNING
if self._type == 'vehicle':
if CarlaDataProvider.get_velocity(self._actor) < self._target_velocity:
self._control.throttle = 1.0
else:
self._control.throttle = 0.0
self._actor.apply_control(self._control)
new_location = CarlaDataProvider.get_location(self._actor)
self._distance += calculate_distance(self._location, new_location)
self._location = new_location
if self._distance > self._target_distance:
new_status = py_trees.common.Status.SUCCESS
if GameTime.get_time() - self._start_time > self._duration:
new_status = py_trees.common.Status.SUCCESS
self.logger.debug("%s.update()[%s->%s]" % (self.__class__.__name__, self.status, new_status))
return new_status
def terminate(self, new_status):
"""
On termination of this behavior, the throttle should be set back to 0.,
to avoid further acceleration.
"""
if self._type == 'vehicle':
self._control.throttle = 0.0
elif self._type == 'walker':
self._control.speed = 0.0
if self._actor is not None and self._actor.is_alive:
self._actor.apply_control(self._control)
super(KeepVelocity, self).terminate(new_status)
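# Hedged usage sketch for KeepVelocity (illustrative only). It follows the
# docstring's advice to bound the behavior, here with a duration, so no parallel
# termination behavior is needed. `vehicle` is assumed to be a carla.Actor, and
# StopVehicle (defined further below in this module) is only resolved at call time.
def _example_keep_velocity_usage(vehicle):
    sequence = py_trees.composites.Sequence("ExampleKeepVelocity")
    # Hold 10 m/s for 5 seconds, then brake to a full stop.
    sequence.add_child(KeepVelocity(vehicle, target_velocity=10.0, duration=5.0))
    sequence.add_child(StopVehicle(vehicle, brake_value=1.0))
    return sequence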
class DriveAtUntil(AtomicBehavior):
def __init__(self, actor, targetActor, speedMs, untilDistance=float("inf"), name="DriveAtUntil"):
super(DriveAtUntil, self).__init__(name, actor)
self.logger.debug("%s.__init__()" % (self.__class__.__name__))
self._control, self._type = get_actor_control(actor)
self._map = self._actor.get_world().get_map()
self._waypoint = self._map.get_waypoint(self._actor.get_location())
self._targetActor = targetActor
self._speedMs = speedMs
self._untilDistance = untilDistance
def initialise(self):
self._location = CarlaDataProvider.get_location(self._actor)
super(DriveAtUntil, self).initialise()
def update(self):
new_status = py_trees.common.Status.RUNNING
ego_location = CarlaDataProvider.get_location(self._actor)
target_location = CarlaDataProvider.get_location(self._targetActor)
speedDiff = self._speedMs - CarlaDataProvider.get_velocity(self._actor)
self._control.throttle = min(1, max(0, speedDiff + 0.5))
loc_diff = target_location - ego_location
loc_yaw = math.atan2(loc_diff.y, loc_diff.x)
ego_yaw = math.radians(self._actor.get_transform().rotation.yaw)
yaw_diff = (loc_yaw - ego_yaw + math.pi) % (2*math.pi) - math.pi
self._control.steer = yaw_diff
self._actor.apply_control(self._control)
distance = calculate_distance(ego_location, target_location)
if distance <= self._untilDistance:
new_status = py_trees.common.Status.SUCCESS
self.logger.debug("%s.update()[%s->%s]" % (self.__class__.__name__, self.status, new_status))
return new_status
def terminate(self, new_status):
self._control.throttle = 0.0
self._control.steer = 0.0
if self._actor is not None and self._actor.is_alive:
self._actor.apply_control(self._control)
super(DriveAtUntil, self).terminate(new_status)
class ChangeAutoPilot(AtomicBehavior):
"""
This class contains an atomic behavior to disable/enable the use of the autopilot.
Important parameters:
- actor: CARLA actor to execute the behavior
- activate: True (=enable autopilot) or False (=disable autopilot)
- lane_change: Traffic Manager parameter. True (=enable lane changes) or False (=disable lane changes)
- distance_between_vehicles: Traffic Manager parameter
- max_speed: Traffic Manager parameter. Max speed of the actor. This will only work for road segments
with the same speed limit as the first one
The behavior terminates after changing the autopilot state
"""
def __init__(self, actor, activate, parameters=None, name="ChangeAutoPilot"):
"""
Setup parameters
"""
super(ChangeAutoPilot, self).__init__(name, actor)
self.logger.debug("%s.__init__()" % (self.__class__.__name__))
self._activate = activate
self._tm = CarlaDataProvider.get_client().get_trafficmanager(
CarlaDataProvider.get_traffic_manager_port())
self._parameters = parameters
def update(self):
"""
De/activate autopilot
"""
self._actor.set_autopilot(self._activate)
if self._parameters is not None:
if "auto_lane_change" in self._parameters:
lane_change = self._parameters["auto_lane_change"]
self._tm.auto_lane_change(self._actor, lane_change)
if "max_speed" in self._parameters:
max_speed = self._parameters["max_speed"]
max_road_speed = self._actor.get_speed_limit()
if max_road_speed is not None:
percentage = (max_road_speed - max_speed) / max_road_speed * 100.0
self._tm.vehicle_percentage_speed_difference(self._actor, percentage)
else:
print("ChangeAutopilot: Unable to find the vehicle's speed limit")
if "distance_between_vehicles" in self._parameters:
dist_vehicles = self._parameters["distance_between_vehicles"]
self._tm.distance_to_leading_vehicle(self._actor, dist_vehicles)
if "force_lane_change" in self._parameters:
force_lane_change = self._parameters["force_lane_change"]
self._tm.force_lane_change(self._actor, force_lane_change)
if "ignore_vehicles_percentage" in self._parameters:
ignore_vehicles = self._parameters["ignore_vehicles_percentage"]
self._tm.ignore_vehicles_percentage(self._actor, ignore_vehicles)
new_status = py_trees.common.Status.SUCCESS
self.logger.debug("%s.update()[%s->%s]" % (self.__class__.__name__, self.status, new_status))
return new_status
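# Hedged usage sketch for ChangeAutoPilot (illustrative only). The dictionary
# keys are the ones handled in update() above; `vehicle` is assumed to be a
# carla.Actor that the Traffic Manager can control.
def _example_change_autopilot_usage(vehicle):
    parameters = {
        "auto_lane_change": False,
        "max_speed": 15,  # same unit as the road speed limit reported by CARLA
        "distance_between_vehicles": 10,
    }
    return ChangeAutoPilot(vehicle, activate=True, parameters=parameters)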
class StopVehicle(AtomicBehavior):
"""
This class contains an atomic stopping behavior. The controlled traffic
    participant will decelerate with _brake_value_ until reaching a full stop.
Important parameters:
- actor: CARLA actor to execute the behavior
- brake_value: Brake value in [0,1] applied
The behavior terminates when the actor stopped moving
"""
def __init__(self, actor, brake_value, name="Stopping"):
"""
Setup _actor and maximum braking value
"""
super(StopVehicle, self).__init__(name, actor)
self.logger.debug("%s.__init__()" % (self.__class__.__name__))
self._control, self._type = get_actor_control(actor)
if self._type == 'walker':
self._control.speed = 0
self._brake_value = brake_value
def update(self):
"""
Set brake to brake_value until reaching full stop
"""
new_status = py_trees.common.Status.RUNNING
if self._type == 'vehicle':
if CarlaDataProvider.get_velocity(self._actor) > EPSILON:
self._control.brake = self._brake_value
else:
new_status = py_trees.common.Status.SUCCESS
self._control.brake = 0
else:
new_status = py_trees.common.Status.SUCCESS
self._actor.apply_control(self._control)
self.logger.debug("%s.update()[%s->%s]" % (self.__class__.__name__, self.status, new_status))
return new_status
class SyncArrival(AtomicBehavior):
"""
This class contains an atomic behavior to
set velocity of actor so that it reaches location at the same time as
actor_reference. The behavior assumes that the two actors are moving
towards location in a straight line.
Important parameters:
- actor: CARLA actor to execute the behavior
- actor_reference: Reference actor with which arrival is synchronized
- target_location: CARLA location where the actors should "meet"
- gain[optional]: Coefficient for actor's throttle and break controls
Note: In parallel to this behavior a termination behavior has to be used
          to continue the synchronization for a certain duration, or for a
certain distance, etc.
"""
def __init__(self, actor, actor_reference, target_location, gain=1, name="SyncArrival"):
"""
Setup required parameters
"""
super(SyncArrival, self).__init__(name, actor)
self.logger.debug("%s.__init__()" % (self.__class__.__name__))
self._control = carla.VehicleControl()
self._actor_reference = actor_reference
self._target_location = target_location
self._gain = gain
self._control.steering = 0
def update(self):
"""
Dynamic control update for actor velocity
"""
new_status = py_trees.common.Status.RUNNING
distance_reference = calculate_distance(CarlaDataProvider.get_location(self._actor_reference),
self._target_location)
distance = calculate_distance(CarlaDataProvider.get_location(self._actor),
self._target_location)
velocity_reference = CarlaDataProvider.get_velocity(self._actor_reference)
time_reference = float('inf')
if velocity_reference > 0:
time_reference = distance_reference / velocity_reference
velocity_current = CarlaDataProvider.get_velocity(self._actor)
time_current = float('inf')
if velocity_current > 0:
time_current = distance / velocity_current
control_value = (self._gain) * (time_current - time_reference)
if control_value > 0:
self._control.throttle = min([control_value, 1])
self._control.brake = 0
else:
self._control.throttle = 0
self._control.brake = min([abs(control_value), 1])
self._actor.apply_control(self._control)
self.logger.debug("%s.update()[%s->%s]" % (self.__class__.__name__, self.status, new_status))
return new_status
def terminate(self, new_status):
"""
On termination of this behavior, the throttle should be set back to 0.,
to avoid further acceleration.
"""
if self._actor is not None and self._actor.is_alive:
self._control.throttle = 0.0
self._control.brake = 0.0
self._actor.apply_control(self._control)
super(SyncArrival, self).terminate(new_status)
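# Hedged usage sketch for SyncArrival (illustrative only). `vehicle` and
# `reference_vehicle` are assumed to be carla.Actor instances; the meeting point
# is an arbitrary illustrative location. As the class docstring notes, a parallel
# termination behavior must run alongside this atomic.
def _example_sync_arrival_usage(vehicle, reference_vehicle):
    meeting_point = carla.Location(x=100.0, y=50.0, z=0.0)
    return SyncArrival(vehicle, reference_vehicle, meeting_point, gain=1)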
class AddNoiseToVehicle(AtomicBehavior):
"""
This class contains an atomic jitter behavior.
To add noise to steer as well as throttle of the vehicle.
Important parameters:
- actor: CARLA actor to execute the behavior
- steer_value: Applied steering noise in [0,1]
- throttle_value: Applied throttle noise in [0,1]
The behavior terminates after setting the new actor controls
"""
def __init__(self, actor, steer_value, throttle_value, name="Jittering"):
"""
        Setup actor, maximum steer value and throttle value
"""
super(AddNoiseToVehicle, self).__init__(name, actor)
self.logger.debug("%s.__init__()" % (self.__class__.__name__))
self._control = carla.VehicleControl()
self._steer_value = steer_value
self._throttle_value = throttle_value
def update(self):
"""
Set steer to steer_value and throttle to throttle_value until reaching full stop
"""
self._control = self._actor.get_control()
self._control.steer = self._steer_value
self._control.throttle = self._throttle_value
new_status = py_trees.common.Status.SUCCESS
self.logger.debug("%s.update()[%s->%s]" % (self.__class__.__name__, self.status, new_status))
self._actor.apply_control(self._control)
return new_status
class ChangeNoiseParameters(AtomicBehavior):
"""
This class contains an atomic jitter behavior.
To add noise to steer as well as throttle of the vehicle.
    This behavior should be used in conjunction with AddNoiseToVehicle
The behavior terminates after one iteration
"""
def __init__(self, new_steer_noise, new_throttle_noise,
noise_mean, noise_std, dynamic_mean_for_steer, dynamic_mean_for_throttle, name="ChangeJittering"):
"""
        Setup actor, maximum steer value and throttle value
"""
super(ChangeNoiseParameters, self).__init__(name)
self.logger.debug("%s.__init__()" % (self.__class__.__name__))
self._new_steer_noise = new_steer_noise
self._new_throttle_noise = new_throttle_noise
self._noise_mean = noise_mean
self._noise_std = noise_std
self._dynamic_mean_for_steer = dynamic_mean_for_steer
self._dynamic_mean_for_throttle = dynamic_mean_for_throttle
self._noise_to_apply = abs(random.gauss(self._noise_mean, self._noise_std))
def update(self):
"""
Change the noise parameters from the structure copy that it receives.
"""
self._new_steer_noise[0] = min(0, -(self._noise_to_apply - self._dynamic_mean_for_steer))
self._new_throttle_noise[0] = min(self._noise_to_apply + self._dynamic_mean_for_throttle, 1)
new_status = py_trees.common.Status.SUCCESS
self.logger.debug("%s.update()[%s->%s]" % (self.__class__.__name__, self.status, new_status))
return new_status
class BasicAgentBehavior(AtomicBehavior):
"""
This class contains an atomic behavior, which uses the
basic_agent from CARLA to control the actor until
reaching a target location.
Important parameters:
- actor: CARLA actor to execute the behavior
- target_location: Is the desired target location (carla.location),
the actor should move to
The behavior terminates after reaching the target_location (within 2 meters)
"""
_acceptable_target_distance = 2
def __init__(self, actor, target_location, name="BasicAgentBehavior"):
"""
Setup actor and maximum steer value
"""
super(BasicAgentBehavior, self).__init__(name, actor)
self.logger.debug("%s.__init__()" % (self.__class__.__name__))
self._agent = BasicAgent(actor) # pylint: disable=undefined-variable
self._agent.set_destination((target_location.x, target_location.y, target_location.z))
self._control = carla.VehicleControl()
self._target_location = target_location
def update(self):
new_status = py_trees.common.Status.RUNNING
self._control = self._agent.run_step()
location = CarlaDataProvider.get_location(self._actor)
if calculate_distance(location, self._target_location) < self._acceptable_target_distance:
new_status = py_trees.common.Status.SUCCESS
self.logger.debug("%s.update()[%s->%s]" % (self.__class__.__name__, self.status, new_status))
self._actor.apply_control(self._control)
return new_status
def terminate(self, new_status):
self._control.throttle = 0.0
self._control.brake = 0.0
self._actor.apply_control(self._control)
super(BasicAgentBehavior, self).terminate(new_status)
class Idle(AtomicBehavior):
"""
This class contains an idle behavior scenario
Important parameters:
- duration[optional]: Duration in seconds of this behavior
A termination can be enforced by providing a duration value.
Alternatively, a parallel termination behavior has to be used.
"""
def __init__(self, duration=float("inf"), name="Idle"):
"""
        Setup the (optional) duration
"""
super(Idle, self).__init__(name)
self._duration = duration
self._start_time = 0
self.logger.debug("%s.__init__()" % (self.__class__.__name__))
def initialise(self):
"""
Set start time
"""
self._start_time = GameTime.get_time()
super(Idle, self).initialise()
def update(self):
"""
Keep running until termination condition is satisfied
"""
new_status = py_trees.common.Status.RUNNING
if GameTime.get_time() - self._start_time > self._duration:
new_status = py_trees.common.Status.SUCCESS
return new_status
class WaypointFollower(AtomicBehavior):
"""
This is an atomic behavior to follow waypoints while maintaining a given speed.
    If no plan is provided, the actor will follow its forward waypoints indefinitely.
Otherwise, the behavior will end with SUCCESS upon reaching the end of the plan.
If no target velocity is provided, the actor continues with its current velocity.
Args:
actor (carla.Actor): CARLA actor to execute the behavior.
target_speed (float, optional): Desired speed of the actor in m/s. Defaults to None.
plan ([carla.Location] or [(carla.Waypoint, carla.agent.navigation.local_planner)], optional):
Waypoint plan the actor should follow. Defaults to None.
blackboard_queue_name (str, optional):
Blackboard variable name, if additional actors should be created on-the-fly. Defaults to None.
avoid_collision (bool, optional):
Enable/Disable(=default) collision avoidance for vehicles/bikes. Defaults to False.
name (str, optional): Name of the behavior. Defaults to "FollowWaypoints".
Attributes:
actor (carla.Actor): CARLA actor to execute the behavior.
name (str, optional): Name of the behavior.
_target_speed (float, optional): Desired speed of the actor in m/s. Defaults to None.
_plan ([carla.Location] or [(carla.Waypoint, carla.agent.navigation.local_planner)]):
Waypoint plan the actor should follow. Defaults to None.
_blackboard_queue_name (str):
Blackboard variable name, if additional actors should be created on-the-fly. Defaults to None.
_avoid_collision (bool): Enable/Disable(=default) collision avoidance for vehicles/bikes. Defaults to False.
        _actor_dict: Dictionary of all actors, and their corresponding plans (e.g. {actor: plan}).
        _local_planner_dict: Dictionary of all actors, and their corresponding local planners.
Either "Walker" for pedestrians, or a carla.agent.navigation.LocalPlanner for other actors.
_args_lateral_dict: Parameters for the PID of the used carla.agent.navigation.LocalPlanner.
_unique_id: Unique ID of the behavior based on timestamp in nanoseconds.
Note:
OpenScenario:
        The WaypointFollower atomic must be given an individual name if multiple consecutive WFs are used.
        Blackboard variables holding lists are used to coordinate consecutive WaypointFollower behaviors.
        Active WaypointFollowers are terminated in initialise() of AtomicBehavior, because any
        following behavior must terminate the currently running WaypointFollower.
"""
def __init__(self, actor, target_speed=None, plan=None, blackboard_queue_name=None,
avoid_collision=False, name="FollowWaypoints"):
"""
Set up actor and local planner
"""
super(WaypointFollower, self).__init__(name, actor)
self._actor_dict = {}
self._actor_dict[actor] = None
self._target_speed = target_speed
self._local_planner_dict = {}
self._local_planner_dict[actor] = None
self._plan = plan
self._blackboard_queue_name = blackboard_queue_name
if blackboard_queue_name is not None:
self._queue = Blackboard().get(blackboard_queue_name)
self._args_lateral_dict = {'K_P': 1.0, 'K_D': 0.01, 'K_I': 0.0, 'dt': 0.05}
self._avoid_collision = avoid_collision
self._unique_id = 0
def initialise(self):
"""
Delayed one-time initialization
Checks if another WaypointFollower behavior is already running for this actor.
If this is the case, a termination signal is sent to the running behavior.
"""
super(WaypointFollower, self).initialise()
self._unique_id = int(round(time.time() * 1e9))
try:
# check whether WF for this actor is already running and add new WF to running_WF list
check_attr = operator.attrgetter("running_WF_actor_{}".format(self._actor.id))
running = check_attr(py_trees.blackboard.Blackboard())
active_wf = copy.copy(running)
active_wf.append(self._unique_id)
py_trees.blackboard.Blackboard().set(
"running_WF_actor_{}".format(self._actor.id), active_wf, overwrite=True)
except AttributeError:
# no WF is active for this actor
py_trees.blackboard.Blackboard().set("terminate_WF_actor_{}".format(self._actor.id), [], overwrite=True)
py_trees.blackboard.Blackboard().set(
"running_WF_actor_{}".format(self._actor.id), [self._unique_id], overwrite=True)
for actor in self._actor_dict:
self._apply_local_planner(actor)
return True
def _apply_local_planner(self, actor):
"""
Convert the plan into locations for walkers (pedestrians), or to a waypoint list for other actors.
For non-walkers, activate the carla.agent.navigation.LocalPlanner module.
"""
        if self._target_speed is None:
            self._target_speed = CarlaDataProvider.get_velocity(actor)
if isinstance(actor, carla.Walker):
self._local_planner_dict[actor] = "Walker"
if self._plan is not None:
if isinstance(self._plan[0], carla.Location):
self._actor_dict[actor] = self._plan
else:
self._actor_dict[actor] = [element[0].transform.location for element in self._plan]
else:
local_planner = LocalPlanner( # pylint: disable=undefined-variable
actor, opt_dict={
'target_speed': self._target_speed * 3.6,
'lateral_control_dict': self._args_lateral_dict})
if self._plan is not None:
if isinstance(self._plan[0], carla.Location):
plan = []
for location in self._plan:
waypoint = CarlaDataProvider.get_map().get_waypoint(location,
project_to_road=True,
lane_type=carla.LaneType.Any)
plan.append((waypoint, RoadOption.LANEFOLLOW))
local_planner.set_global_plan(plan)
else:
local_planner.set_global_plan(self._plan)
self._local_planner_dict[actor] = local_planner
self._actor_dict[actor] = self._plan
def update(self):
"""
Compute next control step for the given waypoint plan, obtain and apply control to actor
"""
new_status = py_trees.common.Status.RUNNING
check_term = operator.attrgetter("terminate_WF_actor_{}".format(self._actor.id))
terminate_wf = check_term(py_trees.blackboard.Blackboard())
check_run = operator.attrgetter("running_WF_actor_{}".format(self._actor.id))
active_wf = check_run(py_trees.blackboard.Blackboard())
        # Terminate this WF if its unique_id is listed in terminate_wf.
        # Only one WF should be active, therefore all previous WFs have to be terminated.
if self._unique_id in terminate_wf:
terminate_wf.remove(self._unique_id)
if self._unique_id in active_wf:
active_wf.remove(self._unique_id)
py_trees.blackboard.Blackboard().set(
"terminate_WF_actor_{}".format(self._actor.id), terminate_wf, overwrite=True)
py_trees.blackboard.Blackboard().set(
"running_WF_actor_{}".format(self._actor.id), active_wf, overwrite=True)
new_status = py_trees.common.Status.SUCCESS
return new_status
if self._blackboard_queue_name is not None:
while not self._queue.empty():
actor = self._queue.get()
if actor is not None and actor not in self._actor_dict:
self._apply_local_planner(actor)
success = True
for actor in self._local_planner_dict:
local_planner = self._local_planner_dict[actor] if actor else None
if actor is not None and actor.is_alive and local_planner is not None:
# Check if the actor is a vehicle/bike
if not isinstance(actor, carla.Walker):
control = local_planner.run_step(debug=False)
if self._avoid_collision and detect_lane_obstacle(actor):
control.throttle = 0.0
control.brake = 1.0
actor.apply_control(control)
# Check if the actor reached the end of the plan
# @TODO replace access to private _waypoints_queue with public getter
if local_planner._waypoints_queue: # pylint: disable=protected-access
success = False
# If the actor is a pedestrian, we have to use the WalkerAIController
# The walker is sent to the next waypoint in its plan
else:
actor_location = CarlaDataProvider.get_location(actor)
success = False
if self._actor_dict[actor]:
location = self._actor_dict[actor][0]
direction = location - actor_location
direction_norm = math.sqrt(direction.x ** 2 + direction.y ** 2)
control = actor.get_control()
control.speed = self._target_speed
control.direction = direction / direction_norm
actor.apply_control(control)
if direction_norm < 1.0:
self._actor_dict[actor] = self._actor_dict[actor][1:]
                            if not self._actor_dict[actor]:  # plan exhausted
success = True
else:
control = actor.get_control()
control.speed = self._target_speed
control.direction = CarlaDataProvider.get_transform(actor).rotation.get_forward_vector()
actor.apply_control(control)
if success:
new_status = py_trees.common.Status.SUCCESS
return new_status
def terminate(self, new_status):
"""
On termination of this behavior,
the controls should be set back to 0.
"""
for actor in self._local_planner_dict:
if actor is not None and actor.is_alive:
control, _ = get_actor_control(actor)
actor.apply_control(control)
local_planner = self._local_planner_dict[actor]
if local_planner is not None and local_planner != "Walker":
local_planner.reset_vehicle()
local_planner = None
self._local_planner_dict = {}
self._actor_dict = {}
super(WaypointFollower, self).terminate(new_status)
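# Illustrative usage sketch: how WaypointFollower can be combined with an Idle timeout
# inside a py_trees parallel composite, as required for behaviors that do not terminate
# on their own. `ego_vehicle` and `route_plan` are assumed to be provided by the calling
# scenario, and the ParallelPolicy constant follows the py_trees 0.8.x API used by
# ScenarioRunner (newer py_trees versions instantiate a policy object instead).
def _example_waypoint_follower_subtree(ego_vehicle, route_plan):
    """Follow route_plan at 8 m/s, but give up after 60 seconds."""
    follower = WaypointFollower(ego_vehicle, target_speed=8.0, plan=route_plan,
                                avoid_collision=True, name="ExampleWF")
    timeout = Idle(duration=60, name="ExampleTimeout")
    subtree = py_trees.composites.Parallel(
        "ExampleWFWithTimeout",
        policy=py_trees.common.ParallelPolicy.SUCCESS_ON_ONE)
    subtree.add_children([follower, timeout])
    return subtree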
class LaneChange(WaypointFollower):
"""
This class inherits from the class WaypointFollower.
This class contains an atomic lane change behavior to a parallel lane.
The vehicle follows a waypoint plan to the other lane, which is calculated in the initialise method.
This waypoint plan is calculated with a scenario helper function.
Important parameters:
- actor: CARLA actor to execute the behavior
- speed: speed of the actor for the lane change, in m/s
- direction: 'right' or 'left', depending on which lane to change
- distance_same_lane: straight distance before lane change, in m
- distance_other_lane: straight distance after lane change, in m
- distance_lane_change: straight distance for the lane change itself, in m
    The total distance driven is greater than the sum of distance_same_lane and distance_other_lane:
    it is the sum of distance_same_lane, distance_other_lane and the distance covered by the lane change itself.
    The lane change distance defaults to 25m measured as a straight line, so the actually driven distance is slightly greater.
A parallel termination behavior has to be used.
"""
def __init__(self, actor, speed=10, direction='left',
distance_same_lane=5, distance_other_lane=100, distance_lane_change=25, name='LaneChange'):
self._direction = direction
self._distance_same_lane = distance_same_lane
self._distance_other_lane = distance_other_lane
self._distance_lane_change = distance_lane_change
self._target_lane_id = None
self._distance_new_lane = 0
self._pos_before_lane_change = None
super(LaneChange, self).__init__(actor, target_speed=speed, name=name)
def initialise(self):
# get start position
position_actor = CarlaDataProvider.get_map().get_waypoint(self._actor.get_location())
# calculate plan with scenario_helper function
self._plan, self._target_lane_id = generate_target_waypoint_list_multilane(
position_actor, self._direction, self._distance_same_lane,
self._distance_other_lane, self._distance_lane_change, check='true')
super(LaneChange, self).initialise()
def update(self):
status = super(LaneChange, self).update()
current_position_actor = CarlaDataProvider.get_map().get_waypoint(self._actor.get_location())
current_lane_id = current_position_actor.lane_id
if current_lane_id == self._target_lane_id:
# driving on new lane
distance = current_position_actor.transform.location.distance(self._pos_before_lane_change)
if distance > self._distance_other_lane:
# long enough distance on new lane --> SUCCESS
status = py_trees.common.Status.SUCCESS
else:
# no lane change yet
self._pos_before_lane_change = current_position_actor.transform.location
return status
class SetInitSpeed(AtomicBehavior):
"""
    This class contains an atomic behavior to set the initial speed (init_speed) of an actor,
    succeeding immediately after initialization.
"""
def __init__(self, actor, init_speed=10, name='SetInitSpeed'):
self._init_speed = init_speed
self._terminate = None
self._actor = actor
super(SetInitSpeed, self).__init__(name, actor)
def initialise(self):
"""
        Initialize the actor's speed
"""
transform = self._actor.get_transform()
yaw = transform.rotation.yaw * (math.pi / 180)
vx = math.cos(yaw) * self._init_speed
vy = math.sin(yaw) * self._init_speed
self._actor.set_velocity(carla.Vector3D(vx, vy, 0))
def update(self):
"""
Nothing to update, end the behavior
"""
return py_trees.common.Status.SUCCESS
class HandBrakeVehicle(AtomicBehavior):
"""
This class contains an atomic hand brake behavior.
To set the hand brake value of the vehicle.
Important parameters:
- vehicle: CARLA actor to execute the behavior
- hand_brake_value to be applied in [0,1]
The behavior terminates after setting the hand brake value
"""
def __init__(self, vehicle, hand_brake_value, name="Braking"):
"""
Setup vehicle control and brake value
"""
super(HandBrakeVehicle, self).__init__(name)
self.logger.debug("%s.__init__()" % (self.__class__.__name__))
self._vehicle = vehicle
self._control, self._type = get_actor_control(vehicle)
self._hand_brake_value = hand_brake_value
def update(self):
"""
Set handbrake
"""
new_status = py_trees.common.Status.SUCCESS
if self._type == 'vehicle':
self._control.hand_brake = self._hand_brake_value
self._vehicle.apply_control(self._control)
else:
self._hand_brake_value = None
self.logger.debug("%s.update()[%s->%s]" %
(self.__class__.__name__, self.status, new_status))
self._vehicle.apply_control(self._control)
return new_status
class ActorDestroy(AtomicBehavior):
"""
This class contains an actor destroy behavior.
Given an actor this behavior will delete it.
Important parameters:
- actor: CARLA actor to be deleted
The behavior terminates after removing the actor
"""
def __init__(self, actor, name="ActorDestroy"):
"""
Setup actor
"""
super(ActorDestroy, self).__init__(name, actor)
self.logger.debug("%s.__init__()" % (self.__class__.__name__))
def update(self):
new_status = py_trees.common.Status.RUNNING
if self._actor:
CarlaDataProvider.remove_actor_by_id(self._actor.id)
self._actor = None
new_status = py_trees.common.Status.SUCCESS
return new_status
class ActorTransformSetter(AtomicBehavior):
"""
This class contains an atomic behavior to set the transform
of an actor.
Important parameters:
- actor: CARLA actor to execute the behavior
- transform: New target transform (position + orientation) of the actor
- physics [optional]: If physics is true, the actor physics will be reactivated upon success
The behavior terminates when actor is set to the new actor transform (closer than 1 meter)
NOTE:
    It is very important to ensure that the actor is actually placed at the new transform, because of
    a rare runtime processing error: a WaypointFollower with LocalPlanner
    might fail if new_status is set to SUCCESS before the actor is really positioned at the new transform.
    Therefore: calculate_distance(actor, transform) < 1 meter is required.
"""
def __init__(self, actor, transform, physics=True, name="ActorTransformSetter"):
"""
Init
"""
super(ActorTransformSetter, self).__init__(name, actor)
self._transform = transform
self._physics = physics
self.logger.debug("%s.__init__()" % (self.__class__.__name__))
def initialise(self):
if self._actor.is_alive:
self._actor.set_velocity(carla.Vector3D(0, 0, 0))
self._actor.set_angular_velocity(carla.Vector3D(0, 0, 0))
self._actor.set_transform(self._transform)
super(ActorTransformSetter, self).initialise()
def update(self):
"""
Transform actor
"""
new_status = py_trees.common.Status.RUNNING
        if not self._actor.is_alive:
            new_status = py_trees.common.Status.FAILURE
            return new_status
if calculate_distance(self._actor.get_location(), self._transform.location) < 1.0:
if self._physics:
self._actor.set_simulate_physics(enabled=True)
new_status = py_trees.common.Status.SUCCESS
return new_status
class TrafficLightStateSetter(AtomicBehavior):
"""
This class contains an atomic behavior to set the state of a given traffic light
Args:
actor (carla.TrafficLight): ID of the traffic light that shall be changed
state (carla.TrafficLightState): New target state
The behavior terminates after trying to set the new state
"""
def __init__(self, actor, state, name="TrafficLightStateSetter"):
"""
Init
"""
super(TrafficLightStateSetter, self).__init__(name)
self._actor = actor if "traffic_light" in actor.type_id else None
self._state = state
self.logger.debug("%s.__init__()" % (self.__class__.__name__))
def update(self):
"""
Change the state of the traffic light
"""
if self._actor is None:
return py_trees.common.Status.FAILURE
new_status = py_trees.common.Status.RUNNING
if self._actor.is_alive:
self._actor.set_state(self._state)
new_status = py_trees.common.Status.SUCCESS
else:
# For some reason the actor is gone...
new_status = py_trees.common.Status.FAILURE
return new_status
class ActorSource(AtomicBehavior):
"""
Implementation for a behavior that will indefinitely create actors
at a given transform if no other actor exists in a given radius
from the transform.
Important parameters:
- actor_type_list: Type of CARLA actors to be spawned
- transform: Spawn location
- threshold: Min available free distance between other actors and the spawn location
- blackboard_queue_name: Name of the blackboard used to control this behavior
- actor_limit [optional]: Maximum number of actors to be spawned (default=7)
A parallel termination behavior has to be used.
"""
def __init__(self, actor_type_list, transform, threshold, blackboard_queue_name,
actor_limit=7, name="ActorSource"):
"""
Setup class members
"""
super(ActorSource, self).__init__(name)
self._world = CarlaDataProvider.get_world()
self._actor_types = actor_type_list
self._spawn_point = transform
self._threshold = threshold
self._queue = Blackboard().get(blackboard_queue_name)
self._actor_limit = actor_limit
self._last_blocking_actor = None
def update(self):
new_status = py_trees.common.Status.RUNNING
if self._actor_limit > 0:
world_actors = self._world.get_actors()
spawn_point_blocked = False
if (self._last_blocking_actor and
self._spawn_point.location.distance(self._last_blocking_actor.get_location()) < self._threshold):
spawn_point_blocked = True
if not spawn_point_blocked:
for actor in world_actors:
if self._spawn_point.location.distance(actor.get_location()) < self._threshold:
spawn_point_blocked = True
self._last_blocking_actor = actor
break
if not spawn_point_blocked:
try:
new_actor = CarlaDataProvider.request_new_actor(
np.random.choice(self._actor_types), self._spawn_point)
self._actor_limit -= 1
self._queue.put(new_actor)
except: # pylint: disable=bare-except
print("ActorSource unable to spawn actor")
return new_status
class ActorSink(AtomicBehavior):
"""
Implementation for a behavior that will indefinitely destroy actors
that wander near a given location within a specified threshold.
Important parameters:
- sink_location: Location (carla.location) at which actors will be deleted
- threshold: Distance around sink_location in which actors will be deleted
A parallel termination behavior has to be used.
"""
def __init__(self, sink_location, threshold, name="ActorSink"):
"""
Setup class members
"""
super(ActorSink, self).__init__(name)
self._sink_location = sink_location
self._threshold = threshold
def update(self):
new_status = py_trees.common.Status.RUNNING
CarlaDataProvider.remove_actors_in_surrounding(self._sink_location, self._threshold)
return new_status
class TrafficLightManipulator(AtomicBehavior):
"""
Atomic behavior that manipulates traffic lights around the ego_vehicle to trigger scenarios 7 to 10.
    This is done by setting 2 of the traffic lights at the intersection to green (with some complex precomputation
to set everything up).
Important parameters:
- ego_vehicle: CARLA actor that controls this behavior
- subtype: string that gathers information of the route and scenario number
(check SUBTYPE_CONFIG_TRANSLATION below)
"""
RED = carla.TrafficLightState.Red
YELLOW = carla.TrafficLightState.Yellow
GREEN = carla.TrafficLightState.Green
# Time constants
RED_TIME = 1.5 # Minimum time the ego vehicle waits in red (seconds)
YELLOW_TIME = 2 # Time spent at yellow state (seconds)
RESET_TIME = 6 # Time waited before resetting all the junction (seconds)
# Experimental values
TRIGGER_DISTANCE = 10 # Distance that makes all vehicles in the lane enter the junction (meters)
DIST_TO_WAITING_TIME = 0.04 # Used to wait longer at larger intersections (s/m)
INT_CONF_OPP1 = {'ego': RED, 'ref': RED, 'left': RED, 'right': RED, 'opposite': GREEN}
INT_CONF_OPP2 = {'ego': GREEN, 'ref': GREEN, 'left': RED, 'right': RED, 'opposite': GREEN}
INT_CONF_LFT1 = {'ego': RED, 'ref': RED, 'left': GREEN, 'right': RED, 'opposite': RED}
INT_CONF_LFT2 = {'ego': GREEN, 'ref': GREEN, 'left': GREEN, 'right': RED, 'opposite': RED}
INT_CONF_RGT1 = {'ego': RED, 'ref': RED, 'left': RED, 'right': GREEN, 'opposite': RED}
INT_CONF_RGT2 = {'ego': GREEN, 'ref': GREEN, 'left': RED, 'right': GREEN, 'opposite': RED}
INT_CONF_REF1 = {'ego': GREEN, 'ref': GREEN, 'left': RED, 'right': RED, 'opposite': RED}
INT_CONF_REF2 = {'ego': YELLOW, 'ref': YELLOW, 'left': RED, 'right': RED, 'opposite': RED}
# Depending on the scenario, IN ORDER OF IMPORTANCE, the traffic light changed
# The list has to contain only items of the INT_CONF
SUBTYPE_CONFIG_TRANSLATION = {
'S7left': ['left', 'opposite', 'right'],
'S7right': ['left', 'opposite'],
'S7opposite': ['right', 'left', 'opposite'],
'S8left': ['opposite'],
'S9right': ['left', 'opposite']
}
CONFIG_TLM_TRANSLATION = {
'left': [INT_CONF_LFT1, INT_CONF_LFT2],
'right': [INT_CONF_RGT1, INT_CONF_RGT2],
'opposite': [INT_CONF_OPP1, INT_CONF_OPP2]
}
def __init__(self, ego_vehicle, subtype, debug=False, name="TrafficLightManipulator"):
super(TrafficLightManipulator, self).__init__(name)
self.ego_vehicle = ego_vehicle
self.subtype = subtype
self.current_step = 1
self.debug = debug
self.traffic_light = None
self.annotations = None
self.configuration = None
self.prev_junction_state = None
self.junction_location = None
self.seconds_waited = 0
self.prev_time = None
self.max_trigger_distance = None
self.waiting_time = None
self.inside_junction = False
self.logger.debug("%s.__init__()" % (self.__class__.__name__))
def update(self):
new_status = py_trees.common.Status.RUNNING
# 1) Set up the parameters
if self.current_step == 1:
# Traffic light affecting the ego vehicle
self.traffic_light = CarlaDataProvider.get_next_traffic_light(self.ego_vehicle, use_cached_location=False)
if not self.traffic_light:
# nothing else to do in this iteration...
return new_status
# "Topology" of the intersection
self.annotations = CarlaDataProvider.annotate_trafficlight_in_group(self.traffic_light)
# Which traffic light will be modified (apart from the ego lane)
self.configuration = self.get_traffic_light_configuration(self.subtype, self.annotations)
if self.configuration is None:
self.current_step = 0 # End the behavior
return new_status
# Modify the intersection. Store the previous state
self.prev_junction_state = self.set_intersection_state(self.INT_CONF_REF1)
self.current_step += 1
if self.debug:
print("--- All set up")
# 2) Modify the ego lane to yellow when closeby
elif self.current_step == 2:
ego_location = CarlaDataProvider.get_location(self.ego_vehicle)
if self.junction_location is None:
ego_waypoint = CarlaDataProvider.get_map().get_waypoint(ego_location)
junction_waypoint = ego_waypoint.next(0.5)[0]
while not junction_waypoint.is_junction:
next_wp = junction_waypoint.next(0.5)[0]
junction_waypoint = next_wp
self.junction_location = junction_waypoint.transform.location
distance = ego_location.distance(self.junction_location)
# Failure check
if self.max_trigger_distance is None:
self.max_trigger_distance = distance + 1
if distance > self.max_trigger_distance:
self.current_step = 0
elif distance < self.TRIGGER_DISTANCE:
_ = self.set_intersection_state(self.INT_CONF_REF2)
self.current_step += 1
if self.debug:
print("--- Distance until traffic light changes: {}".format(distance))
# 3) Modify the ego lane to red and the chosen one to green after several seconds
elif self.current_step == 3:
if self.passed_enough_time(self.YELLOW_TIME):
_ = self.set_intersection_state(self.CONFIG_TLM_TRANSLATION[self.configuration][0])
self.current_step += 1
# 4) Wait a bit to let vehicles enter the intersection, then set the ego lane to green
elif self.current_step == 4:
# Get the time in red, dependent on the intersection dimensions
if self.waiting_time is None:
self.waiting_time = self.get_waiting_time(self.annotations, self.configuration)
if self.passed_enough_time(self.waiting_time):
_ = self.set_intersection_state(self.CONFIG_TLM_TRANSLATION[self.configuration][1])
self.current_step += 1
# 5) Wait for the end of the intersection
elif self.current_step == 5:
            # the traffic light has been manipulated, wait until the vehicle finishes crossing the intersection
ego_location = CarlaDataProvider.get_location(self.ego_vehicle)
ego_waypoint = CarlaDataProvider.get_map().get_waypoint(ego_location)
if not self.inside_junction:
if ego_waypoint.is_junction:
# Wait for the ego_vehicle to enter a junction
self.inside_junction = True
else:
if self.debug:
print("--- Waiting to ENTER a junction")
else:
if ego_waypoint.is_junction:
if self.debug:
print("--- Waiting to EXIT a junction")
else:
# And to leave it
self.inside_junction = False
self.current_step += 1
# 6) At the end (or if something failed), reset to the previous state
else:
if self.prev_junction_state:
CarlaDataProvider.reset_lights(self.prev_junction_state)
if self.debug:
print("--- Returning the intersection to its previous state")
self.variable_cleanup()
new_status = py_trees.common.Status.SUCCESS
return new_status
def passed_enough_time(self, time_limit):
"""
Returns true or false depending on the time that has passed from the
first time this function was called
"""
# Start the timer
if self.prev_time is None:
self.prev_time = GameTime.get_time()
timestamp = GameTime.get_time()
self.seconds_waited += (timestamp - self.prev_time)
self.prev_time = timestamp
if self.debug:
print("--- Waited seconds: {}".format(self.seconds_waited))
if self.seconds_waited >= time_limit:
self.seconds_waited = 0
self.prev_time = None
return True
return False
def set_intersection_state(self, choice):
"""
Changes the intersection to the desired state
"""
prev_state = CarlaDataProvider.update_light_states(
self.traffic_light,
self.annotations,
choice,
freeze=True)
return prev_state
def get_waiting_time(self, annotation, direction):
"""
Calculates the time the ego traffic light will remain red
to let vehicles enter the junction
"""
tl = annotation[direction][0]
ego_tl = annotation["ref"][0]
tl_location = CarlaDataProvider.get_trafficlight_trigger_location(tl)
ego_tl_location = CarlaDataProvider.get_trafficlight_trigger_location(ego_tl)
distance = ego_tl_location.distance(tl_location)
return self.RED_TIME + distance * self.DIST_TO_WAITING_TIME
def get_traffic_light_configuration(self, subtype, annotations):
"""
Checks the list of possible altered traffic lights and gets
the first one that exists in the intersection
Important parameters:
- subtype: Subtype of the scenario
        - annotations: list of the traffic lights of the junction, with their direction (right, left...)
"""
configuration = None
if subtype in self.SUBTYPE_CONFIG_TRANSLATION:
possible_configurations = self.SUBTYPE_CONFIG_TRANSLATION[self.subtype]
while possible_configurations:
                # Choose the first one and remove it from the list
configuration = possible_configurations[0]
possible_configurations = possible_configurations[1:]
if configuration in annotations:
if annotations[configuration]:
# Found a valid configuration
break
else:
# The traffic light doesn't exist, get another one
configuration = None
else:
if self.debug:
print("This configuration name is wrong")
configuration = None
if configuration is None and self.debug:
print("This subtype has no traffic light available")
else:
if self.debug:
print("This subtype is unknown")
return configuration
def variable_cleanup(self):
"""
        Resets all variables to the initial state
"""
self.current_step = 1
self.traffic_light = None
self.annotations = None
self.configuration = None
self.prev_junction_state = None
self.junction_location = None
self.max_trigger_distance = None
self.waiting_time = None
self.inside_junction = False
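    # Illustrative walk-through of update() above (comments only): for subtype 'S7left',
    # once a traffic light and a valid configuration have been found, the junction is
    # cycled roughly as follows:
    #   step 1: INT_CONF_REF1 - ego/ref lane green, all other lanes red
    #   step 2: INT_CONF_REF2 - ego/ref lane yellow, once the ego vehicle is within
    #           TRIGGER_DISTANCE of the junction
    #   step 3: INT_CONF_LFT1 - ego lane red, 'left' lane green ('left' being the first
    #           entry of SUBTYPE_CONFIG_TRANSLATION['S7left'] present in the annotations)
    #   step 4: INT_CONF_LFT2 - ego lane green again, after get_waiting_time() seconds
    #   step 5: wait for the ego vehicle to enter and then leave the junction
    #   step 6: restore the stored light states via CarlaDataProvider.reset_lights()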
class ScenarioTriggerer(AtomicBehavior):
"""
Handles the triggering of the scenarios that are part of a route.
Initializes a list of blackboard variables to False, and only sets them to True when
the ego vehicle is very close to the scenarios
"""
WINDOWS_SIZE = 5
def __init__(self, actor, route, blackboard_list, distance,
repeat_scenarios=False, debug=False, name="ScenarioTriggerer"):
"""
Setup class members
"""
super(ScenarioTriggerer, self).__init__(name)
self._world = CarlaDataProvider.get_world()
self._map = CarlaDataProvider.get_map()
self._repeat = repeat_scenarios
self._debug = debug
self._actor = actor
self._route = route
self._distance = distance
self._blackboard_list = blackboard_list
self._triggered_scenarios = [] # List of already done scenarios
self._current_index = 0
self._route_length = len(self._route)
self._waypoints, _ = zip(*self._route)
def update(self):
new_status = py_trees.common.Status.RUNNING
location = CarlaDataProvider.get_location(self._actor)
if location is None:
return new_status
lower_bound = self._current_index
upper_bound = min(self._current_index + self.WINDOWS_SIZE + 1, self._route_length)
shortest_distance = float('inf')
closest_index = -1
for index in range(lower_bound, upper_bound):
ref_waypoint = self._waypoints[index]
ref_location = ref_waypoint.location
dist_to_route = ref_location.distance(location)
if dist_to_route <= shortest_distance:
closest_index = index
shortest_distance = dist_to_route
if closest_index == -1 or shortest_distance == float('inf'):
return new_status
# Update the ego position at the route
self._current_index = closest_index
route_location = self._waypoints[closest_index].location
# Check which scenarios can be triggered
blackboard = py_trees.blackboard.Blackboard()
for black_var_name, scen_location in self._blackboard_list:
# Close enough
scen_distance = route_location.distance(scen_location)
condition1 = bool(scen_distance < self._distance)
# Not being currently done
value = blackboard.get(black_var_name)
condition2 = bool(not value)
# Already done, if needed
condition3 = bool(self._repeat or black_var_name not in self._triggered_scenarios)
if condition1 and condition2 and condition3:
_ = blackboard.set(black_var_name, True)
self._triggered_scenarios.append(black_var_name)
if self._debug:
self._world.debug.draw_point(
scen_location + carla.Location(z=4),
size=0.5,
life_time=0.5,
color=carla.Color(255, 255, 0)
)
self._world.debug.draw_string(
scen_location + carla.Location(z=5),
str(black_var_name),
False,
color=carla.Color(0, 0, 0),
life_time=1000
)
return new_status
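# Illustrative usage sketch: how a route scenario could wire up a ScenarioTriggerer.
# The blackboard variable names and route indices below are made-up placeholders,
# and the route is assumed to be a list of (carla.Transform, RoadOption) tuples;
# in practice all of this is produced by the route parser.
def _example_scenario_triggerer(ego_vehicle, route):
    """Flag two scenarios on the blackboard 15 m before the ego vehicle reaches them."""
    blackboard_list = [
        ["ScenarioTrigger_S7left_1", route[10][0].location],
        ["ScenarioTrigger_S8left_1", route[42][0].location],
    ]
    # Initialise the trigger variables to False, so condition2 in update() holds at first
    blackboard = py_trees.blackboard.Blackboard()
    for variable_name, _ in blackboard_list:
        blackboard.set(variable_name, False, overwrite=True)
    return ScenarioTriggerer(ego_vehicle, route, blackboard_list, distance=15)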
| 38.62516
| 120
| 0.650989
|
66014bda2cc91b85af8020a1218d790c90f75fc9
| 1,108
|
py
|
Python
|
gans/discriminator_model.py
|
slaily/deep-learning-bits
|
cb9ce7ec539efbdfcaa023d141466f919bd31b71
|
[
"MIT"
] | null | null | null |
gans/discriminator_model.py
|
slaily/deep-learning-bits
|
cb9ce7ec539efbdfcaa023d141466f919bd31b71
|
[
"MIT"
] | null | null | null |
gans/discriminator_model.py
|
slaily/deep-learning-bits
|
cb9ce7ec539efbdfcaa023d141466f919bd31b71
|
[
"MIT"
] | null | null | null |
"""
The GAN discriminator network
"""
import keras
import numpy as np
from keras import layers
# Input image dimensions -- assumed to be 32x32 RGB, matching the (32, 32, 3)
# input mentioned in the comment further below
height, width, channels = 32, 32, 3
discriminator_input = layers.Input(shape=(height, width, channels))
x = layers.Conv2D(128, 3)(discriminator_input)
x = layers.LeakyReLU()(x)
x = layers.Conv2D(128, 4, strides=2)(x)
x = layers.LeakyReLU()(x)
x = layers.Conv2D(128, 4, strides=2)(x)
x = layers.LeakyReLU()(x)
x = layers.Conv2D(128, 4, strides=2)(x)
x = layers.LeakyReLU()(x)
x = layers.Flatten()(x)
# One dropout layer: an important trick!
x = layers.Dropout(0.4)(x)
# Classification layer
x = layers.Dense(1, activation='sigmoid')(x)
# Instantiates the discriminator model, which turns a (32, 32, 3)
# input into a binary classification decision (fake/real)
discriminator = keras.models.Model(discriminator_input, x)
discriminator.summary()
discriminator_optimizer = keras.optimizers.RMSprop(
lr=0.0008,
clipvalue=1.0, # Uses gradient clipping (by value) in the optimizer
decay=1e-8 # To stabilize training, uses learning-rate decay
)
discriminator.compile(
optimizer=discriminator_optimizer,
loss='binary_crossentropy'
)
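# Illustrative sketch of how this discriminator is typically used in one GAN training
# step. `generator`, `real_images` and `latent_dim` are assumed to exist elsewhere in
# the GAN setup; labelling fakes as 1 and reals as 0, plus the label noise, are common
# training tricks rather than requirements.
def example_discriminator_step(generator, real_images, latent_dim, batch_size=20):
    random_latent_vectors = np.random.normal(size=(batch_size, latent_dim))
    generated_images = generator.predict(random_latent_vectors)
    combined_images = np.concatenate([generated_images, real_images])
    labels = np.concatenate([np.ones((batch_size, 1)),
                             np.zeros((real_images.shape[0], 1))])
    labels += 0.05 * np.random.random(labels.shape)  # add random noise to the labels
    return discriminator.train_on_batch(combined_images, labels)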
| 29.945946
| 72
| 0.733755
|
7dfe45820236f1804aeec2a8774835ca59d63c24
| 1,375
|
py
|
Python
|
openweave/tlv/schema/error.py
|
robszewczyk/openweave-tlv-schema
|
c0acbccce4fcaf213a09261f79d6a141ae94f7e8
|
[
"Apache-2.0"
] | 1
|
2020-05-19T22:52:27.000Z
|
2020-05-19T22:52:27.000Z
|
openweave/tlv/schema/error.py
|
robszewczyk/openweave-tlv-schema
|
c0acbccce4fcaf213a09261f79d6a141ae94f7e8
|
[
"Apache-2.0"
] | null | null | null |
openweave/tlv/schema/error.py
|
robszewczyk/openweave-tlv-schema
|
c0acbccce4fcaf213a09261f79d6a141ae94f7e8
|
[
"Apache-2.0"
] | 1
|
2021-02-15T16:14:17.000Z
|
2021-02-15T16:14:17.000Z
|
#
# Copyright (c) 2020 Google LLC.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# @file
# Weave TLV Schema errors.
#
import os
class WeaveTLVSchemaError(Exception):
def __init__(self, msg, detail=None, sourceRef=None):
super(WeaveTLVSchemaError, self).__init__(msg)
self.detail = detail
self.sourceRef = sourceRef
def format(self, withTextMarker=True, withDetail=True):
res = 'ERROR: ' + str(self)
if withDetail and self.detail is not None:
res = res + "\nNOTE: " + self.detail
if self.sourceRef:
res = '%s: %s' % (self.sourceRef.filePosStr(), res)
if withTextMarker:
res += '\n\n' + self.sourceRef.lineSummaryStr()
return res
class AmbiguousTagError(Exception):
pass
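# Illustrative usage sketch (comments only; `source_ref` stands for a hypothetical
# object exposing the filePosStr() and lineSummaryStr() methods that format() expects):
#
#   err = WeaveTLVSchemaError('unexpected type in FIELD GROUP',
#                             detail='expected a STRUCTURE type',
#                             sourceRef=source_ref)
#   print(err.format())
#
# This would print "<file>:<line>:<col>: ERROR: unexpected type in FIELD GROUP",
# followed by a "NOTE:" line with the detail and a summary of the offending source line.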
| 31.976744
| 77
| 0.648727
|
2facd8f817078d4ea3f04615f6f581489a870ffd
| 14,944
|
py
|
Python
|
skimage/util/dtype.py
|
portugueslab/scikit-image
|
0fa3bcb118bb208a0cc7d3e8b96cd96c1ce7a75b
|
[
"BSD-3-Clause"
] | 2
|
2020-02-24T02:24:43.000Z
|
2021-12-19T11:44:34.000Z
|
skimage/util/dtype.py
|
portugueslab/scikit-image
|
0fa3bcb118bb208a0cc7d3e8b96cd96c1ce7a75b
|
[
"BSD-3-Clause"
] | null | null | null |
skimage/util/dtype.py
|
portugueslab/scikit-image
|
0fa3bcb118bb208a0cc7d3e8b96cd96c1ce7a75b
|
[
"BSD-3-Clause"
] | 2
|
2019-06-16T06:38:28.000Z
|
2021-12-19T11:44:48.000Z
|
from __future__ import division
import numpy as np
from warnings import warn
__all__ = ['img_as_float32', 'img_as_float64', 'img_as_float',
'img_as_int', 'img_as_uint', 'img_as_ubyte',
'img_as_bool', 'dtype_limits']
dtype_range = {np.bool_: (False, True),
np.bool8: (False, True),
np.uint8: (0, 255),
np.uint16: (0, 65535),
np.uint32: (0, 2**32 - 1),
np.uint64: (0, 2**64 - 1),
np.int8: (-128, 127),
np.int16: (-32768, 32767),
np.int32: (-2**31, 2**31 - 1),
np.int64: (-2**63, 2**63 - 1),
np.float16: (-1, 1),
np.float32: (-1, 1),
np.float64: (-1, 1)}
_supported_types = (np.bool_, np.bool8,
np.uint8, np.uint16, np.uint32, np.uint64,
np.int8, np.int16, np.int32, np.int64,
np.float16, np.float32, np.float64)
def dtype_limits(image, clip_negative=None):
"""Return intensity limits, i.e. (min, max) tuple, of the image's dtype.
Parameters
----------
image : ndarray
Input image.
clip_negative : bool, optional
If True, clip the negative range (i.e. return 0 for min intensity)
even if the image dtype allows negative values.
The default behavior (None) is equivalent to True.
Returns
-------
imin, imax : tuple
Lower and upper intensity limits.
"""
if clip_negative is None:
clip_negative = True
warn('The default of `clip_negative` in `skimage.util.dtype_limits` '
'will change to `False` in version 0.15.')
imin, imax = dtype_range[image.dtype.type]
if clip_negative:
imin = 0
return imin, imax
def convert(image, dtype, force_copy=False, uniform=False):
"""
Convert an image to the requested data-type.
Warnings are issued in case of precision loss, or when negative values
are clipped during conversion to unsigned integer types (sign loss).
Floating point values are expected to be normalized and will be clipped
to the range [0.0, 1.0] or [-1.0, 1.0] when converting to unsigned or
signed integers respectively.
Numbers are not shifted to the negative side when converting from
unsigned to signed integer types. Negative values will be clipped when
converting to unsigned integers.
Parameters
----------
image : ndarray
Input image.
dtype : dtype
Target data-type.
force_copy : bool, optional
Force a copy of the data, irrespective of its current dtype.
uniform : bool, optional
Uniformly quantize the floating point range to the integer range.
By default (uniform=False) floating point values are scaled and
rounded to the nearest integers, which minimizes back and forth
conversion errors.
References
----------
.. [1] DirectX data conversion rules.
http://msdn.microsoft.com/en-us/library/windows/desktop/dd607323%28v=vs.85%29.aspx
.. [2] Data Conversions. In "OpenGL ES 2.0 Specification v2.0.25",
pp 7-8. Khronos Group, 2010.
.. [3] Proper treatment of pixels as integers. A.W. Paeth.
In "Graphics Gems I", pp 249-256. Morgan Kaufmann, 1990.
.. [4] Dirty Pixels. J. Blinn. In "Jim Blinn's corner: Dirty Pixels",
pp 47-57. Morgan Kaufmann, 1998.
"""
image = np.asarray(image)
dtypeobj_in = image.dtype
dtypeobj_out = np.dtype(dtype)
dtype_in = dtypeobj_in.type
dtype_out = dtypeobj_out.type
kind_in = dtypeobj_in.kind
kind_out = dtypeobj_out.kind
itemsize_in = dtypeobj_in.itemsize
itemsize_out = dtypeobj_out.itemsize
if dtype_in == dtype_out:
if force_copy:
image = image.copy()
return image
if not (dtype_in in _supported_types and dtype_out in _supported_types):
raise ValueError("Can not convert from {} to {}."
.format(dtypeobj_in, dtypeobj_out))
def sign_loss():
warn("Possible sign loss when converting negative image of type "
"{} to positive image of type {}."
.format(dtypeobj_in, dtypeobj_out))
def prec_loss():
warn("Possible precision loss when converting from {} to {}"
.format(dtypeobj_in, dtypeobj_out))
def _dtype_itemsize(itemsize, *dtypes):
# Return first of `dtypes` with itemsize greater than `itemsize`
return next(dt for dt in dtypes if np.dtype(dt).itemsize >= itemsize)
def _dtype_bits(kind, bits, itemsize=1):
# Return dtype of `kind` that can store a `bits` wide unsigned int
def compare(x, y, kind='u'):
if kind == 'u':
return x <= y
else:
return x < y
s = next(i for i in (itemsize, ) + (2, 4, 8) if compare(bits, i * 8,
kind=kind))
return np.dtype(kind + str(s))
def _scale(a, n, m, copy=True):
"""Scale an array of unsigned/positive integers from `n` to `m` bits.
Numbers can be represented exactly only if `m` is a multiple of `n`.
Parameters
----------
a : ndarray
Input image array.
n : int
Number of bits currently used to encode the values in `a`.
m : int
Desired number of bits to encode the values in `out`.
copy : bool, optional
If True, allocates and returns new array. Otherwise, modifies
`a` in place.
Returns
-------
out : array
Output image array. Has the same kind as `a`.
"""
kind = a.dtype.kind
if n > m and a.max() < 2 ** m:
mnew = int(np.ceil(m / 2) * 2)
if mnew > m:
dtype = "int{}".format(mnew)
else:
dtype = "uint{}".format(mnew)
n = int(np.ceil(n / 2) * 2)
warn("Downcasting {} to {} without scaling because max "
"value {} fits in {}".format(a.dtype, dtype, a.max(), dtype))
return a.astype(_dtype_bits(kind, m))
elif n == m:
return a.copy() if copy else a
elif n > m:
# downscale with precision loss
prec_loss()
if copy:
b = np.empty(a.shape, _dtype_bits(kind, m))
np.floor_divide(a, 2**(n - m), out=b, dtype=a.dtype,
casting='unsafe')
return b
else:
a //= 2**(n - m)
return a
elif m % n == 0:
# exact upscale to a multiple of `n` bits
if copy:
b = np.empty(a.shape, _dtype_bits(kind, m))
np.multiply(a, (2**m - 1) // (2**n - 1), out=b, dtype=b.dtype)
return b
else:
a = a.astype(_dtype_bits(kind, m, a.dtype.itemsize), copy=False)
a *= (2**m - 1) // (2**n - 1)
return a
else:
# upscale to a multiple of `n` bits,
# then downscale with precision loss
prec_loss()
o = (m // n + 1) * n
if copy:
b = np.empty(a.shape, _dtype_bits(kind, o))
np.multiply(a, (2**o - 1) // (2**n - 1), out=b, dtype=b.dtype)
b //= 2**(o - m)
return b
else:
a = a.astype(_dtype_bits(kind, o, a.dtype.itemsize), copy=False)
a *= (2**o - 1) // (2**n - 1)
a //= 2**(o - m)
return a
if kind_in in 'ui':
imin_in = np.iinfo(dtype_in).min
imax_in = np.iinfo(dtype_in).max
if kind_out in 'ui':
imin_out = np.iinfo(dtype_out).min
imax_out = np.iinfo(dtype_out).max
# any -> binary
if kind_out == 'b':
if kind_in in "fi":
sign_loss()
prec_loss()
return image > dtype_in(dtype_range[dtype_in][1] / 2)
# binary -> any
if kind_in == 'b':
result = image.astype(dtype_out)
if kind_out != 'f':
result *= dtype_out(dtype_range[dtype_out][1])
return result
# float -> any
if kind_in == 'f':
if np.min(image) < -1.0 or np.max(image) > 1.0:
raise ValueError("Images of type float must be between -1 and 1.")
if kind_out == 'f':
# float -> float
if itemsize_in > itemsize_out:
prec_loss()
return image.astype(dtype_out)
# floating point -> integer
prec_loss()
# use float type that can represent output integer type
image = image.astype(_dtype_itemsize(itemsize_out, dtype_in,
np.float32, np.float64))
if not uniform:
if kind_out == 'u':
image *= imax_out
else:
image *= imax_out - imin_out
image -= 1.0
image /= 2.0
np.rint(image, out=image)
np.clip(image, imin_out, imax_out, out=image)
elif kind_out == 'u':
image *= imax_out + 1
np.clip(image, 0, imax_out, out=image)
else:
image *= (imax_out - imin_out + 1.0) / 2.0
np.floor(image, out=image)
np.clip(image, imin_out, imax_out, out=image)
return image.astype(dtype_out)
# signed/unsigned int -> float
if kind_out == 'f':
if itemsize_in >= itemsize_out:
prec_loss()
# use float type that can exactly represent input integers
image = image.astype(_dtype_itemsize(itemsize_in, dtype_out,
np.float32, np.float64))
if kind_in == 'u':
image /= imax_in
# DirectX uses this conversion also for signed ints
# if imin_in:
# np.maximum(image, -1.0, out=image)
else:
image *= 2.0
image += 1.0
image /= imax_in - imin_in
return np.asarray(image, dtype_out)
# unsigned int -> signed/unsigned int
if kind_in == 'u':
if kind_out == 'i':
# unsigned int -> signed int
image = _scale(image, 8 * itemsize_in, 8 * itemsize_out - 1)
return image.view(dtype_out)
else:
# unsigned int -> unsigned int
return _scale(image, 8 * itemsize_in, 8 * itemsize_out)
# signed int -> unsigned int
if kind_out == 'u':
sign_loss()
image = _scale(image, 8 * itemsize_in - 1, 8 * itemsize_out)
result = np.empty(image.shape, dtype_out)
np.maximum(image, 0, out=result, dtype=image.dtype, casting='unsafe')
return result
# signed int -> signed int
if itemsize_in > itemsize_out:
return _scale(image, 8 * itemsize_in - 1, 8 * itemsize_out - 1)
image = image.astype(_dtype_bits('i', itemsize_out * 8))
image -= imin_in
image = _scale(image, 8 * itemsize_in, 8 * itemsize_out, copy=False)
image += imin_out
return image.astype(dtype_out)
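# Illustrative example (not part of the public API): expected results when applying
# convert() to a small uint8 image; the values in the comments follow directly from
# the scaling rules above.
def _convert_example():
    img_u8 = np.array([0, 128, 255], dtype=np.uint8)
    img_f = convert(img_u8, np.float64)    # [0.0, 0.50196..., 1.0] (divided by 255)
    img_u16 = convert(img_u8, np.uint16)   # [0, 32896, 65535] (exact upscale by 257)
    return img_f, img_u16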
def img_as_float32(image, force_copy=False):
"""Convert an image to single-precision (32-bit) floating point format.
Parameters
----------
image : ndarray
Input image.
force_copy : bool, optional
Force a copy of the data, irrespective of its current dtype.
Returns
-------
out : ndarray of float32
Output image.
Notes
-----
The range of a floating point image is [0.0, 1.0] or [-1.0, 1.0] when
converting from unsigned or signed datatypes, respectively.
If the input image has a float type, intensity values are not modified
and can be outside the ranges [0.0, 1.0] or [-1.0, 1.0].
"""
return convert(image, np.float32, force_copy)
def img_as_float64(image, force_copy=False):
"""Convert an image to double-precision (64-bit) floating point format.
Parameters
----------
image : ndarray
Input image.
force_copy : bool, optional
Force a copy of the data, irrespective of its current dtype.
Returns
-------
out : ndarray of float64
Output image.
Notes
-----
The range of a floating point image is [0.0, 1.0] or [-1.0, 1.0] when
converting from unsigned or signed datatypes, respectively.
If the input image has a float type, intensity values are not modified
and can be outside the ranges [0.0, 1.0] or [-1.0, 1.0].
"""
return convert(image, np.float64, force_copy)
img_as_float = img_as_float64
def img_as_uint(image, force_copy=False):
"""Convert an image to 16-bit unsigned integer format.
Parameters
----------
image : ndarray
Input image.
force_copy : bool, optional
Force a copy of the data, irrespective of its current dtype.
Returns
-------
out : ndarray of uint16
Output image.
Notes
-----
Negative input values will be clipped.
Positive values are scaled between 0 and 65535.
"""
return convert(image, np.uint16, force_copy)
def img_as_int(image, force_copy=False):
"""Convert an image to 16-bit signed integer format.
Parameters
----------
image : ndarray
Input image.
force_copy : bool, optional
Force a copy of the data, irrespective of its current dtype.
Returns
-------
out : ndarray of uint16
Output image.
Notes
-----
The values are scaled between -32768 and 32767.
If the input data-type is positive-only (e.g., uint8), then
the output image will still only have positive values.
"""
return convert(image, np.int16, force_copy)
def img_as_ubyte(image, force_copy=False):
"""Convert an image to 8-bit unsigned integer format.
Parameters
----------
image : ndarray
Input image.
force_copy : bool, optional
Force a copy of the data, irrespective of its current dtype.
Returns
-------
out : ndarray of ubyte (uint8)
Output image.
Notes
-----
Negative input values will be clipped.
Positive values are scaled between 0 and 255.
"""
return convert(image, np.uint8, force_copy)
def img_as_bool(image, force_copy=False):
"""Convert an image to boolean format.
Parameters
----------
image : ndarray
Input image.
force_copy : bool, optional
Force a copy of the data, irrespective of its current dtype.
Returns
-------
out : ndarray of bool (`bool_`)
Output image.
Notes
-----
The upper half of the input dtype's positive range is True, and the lower
half is False. All negative values (if present) are False.
"""
return convert(image, np.bool_, force_copy)
| 32.206897
| 93
| 0.561229
|
ebb1fd4d04a78b923252181bfd4441d1e1cda4ad
| 8,610
|
py
|
Python
|
tensorflow_probability/python/distributions/exponential_test.py
|
sanket-kamthe/probability
|
c22b6201155c2e58d08a4ad30641d1aff59fbe7c
|
[
"Apache-2.0"
] | null | null | null |
tensorflow_probability/python/distributions/exponential_test.py
|
sanket-kamthe/probability
|
c22b6201155c2e58d08a4ad30641d1aff59fbe7c
|
[
"Apache-2.0"
] | null | null | null |
tensorflow_probability/python/distributions/exponential_test.py
|
sanket-kamthe/probability
|
c22b6201155c2e58d08a4ad30641d1aff59fbe7c
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for initializers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
import numpy as np
from scipy import stats as sp_stats
import tensorflow.compat.v2 as tf
import tensorflow_probability as tfp
from tensorflow_probability.python.distributions import exponential as exponential_lib
from tensorflow_probability.python.internal import test_util
tfd = tfp.distributions
@test_util.test_all_tf_execution_regimes
class ExponentialTest(test_util.TestCase):
def testExponentialLogPDF(self):
batch_size = 6
lam = tf.constant([2.0] * batch_size)
lam_v = 2.0
x = np.array([2.5, 2.5, 4.0, 0.1, 1.0, 2.0], dtype=np.float32)
exponential = exponential_lib.Exponential(rate=lam, validate_args=True)
log_pdf = exponential.log_prob(x)
self.assertEqual(log_pdf.shape, (6,))
pdf = exponential.prob(x)
self.assertEqual(pdf.shape, (6,))
expected_log_pdf = sp_stats.expon.logpdf(x, scale=1 / lam_v)
self.assertAllClose(self.evaluate(log_pdf), expected_log_pdf)
self.assertAllClose(self.evaluate(pdf), np.exp(expected_log_pdf))
def testExponentialLogPDFBoundary(self):
# Check that Log PDF is finite at 0.
rate = np.array([0.1, 0.5, 1., 2., 5., 10.], dtype=np.float32)
exponential = exponential_lib.Exponential(rate=rate, validate_args=False)
log_pdf = exponential.log_prob(0.)
self.assertAllClose(np.log(rate), self.evaluate(log_pdf))
def testExponentialCDF(self):
batch_size = 6
lam = tf.constant([2.0] * batch_size)
lam_v = 2.0
x = np.array([2.5, 2.5, 4.0, 0.1, 1.0, 2.0], dtype=np.float32)
exponential = exponential_lib.Exponential(rate=lam, validate_args=True)
cdf = exponential.cdf(x)
self.assertEqual(cdf.shape, (6,))
expected_cdf = sp_stats.expon.cdf(x, scale=1 / lam_v)
self.assertAllClose(self.evaluate(cdf), expected_cdf)
def testExponentialLogSurvival(self):
batch_size = 7
lam = tf.constant([2.0] * batch_size)
lam_v = 2.0
x = np.array([2.5, 2.5, 4.0, 0.1, 1.0, 2.0, 10.0], dtype=np.float32)
exponential = exponential_lib.Exponential(rate=lam, validate_args=True)
log_survival = exponential.log_survival_function(x)
self.assertEqual(log_survival.shape, (7,))
expected_log_survival = sp_stats.expon.logsf(x, scale=1 / lam_v)
self.assertAllClose(self.evaluate(log_survival), expected_log_survival)
def testExponentialMean(self):
lam_v = np.array([1.0, 4.0, 2.5])
exponential = exponential_lib.Exponential(rate=lam_v, validate_args=True)
self.assertEqual(exponential.mean().shape, (3,))
expected_mean = sp_stats.expon.mean(scale=1 / lam_v)
self.assertAllClose(self.evaluate(exponential.mean()), expected_mean)
def testExponentialVariance(self):
lam_v = np.array([1.0, 4.0, 2.5])
exponential = exponential_lib.Exponential(rate=lam_v, validate_args=True)
self.assertEqual(exponential.variance().shape, (3,))
expected_variance = sp_stats.expon.var(scale=1 / lam_v)
self.assertAllClose(
self.evaluate(exponential.variance()), expected_variance)
def testExponentialEntropy(self):
lam_v = np.array([1.0, 4.0, 2.5])
exponential = exponential_lib.Exponential(rate=lam_v, validate_args=True)
self.assertEqual(exponential.entropy().shape, (3,))
expected_entropy = sp_stats.expon.entropy(scale=1 / lam_v)
self.assertAllClose(self.evaluate(exponential.entropy()), expected_entropy)
def testExponentialSample(self):
lam = tf.constant([3.0, 4.0])
lam_v = [3.0, 4.0]
n = tf.constant(100000)
exponential = exponential_lib.Exponential(rate=lam, validate_args=True)
samples = exponential.sample(n, seed=test_util.test_seed())
sample_values = self.evaluate(samples)
self.assertEqual(sample_values.shape, (100000, 2))
self.assertFalse(np.any(sample_values < 0.0))
for i in range(2):
self.assertLess(
sp_stats.kstest(sample_values[:, i],
sp_stats.expon(scale=1.0 / lam_v[i]).cdf)[0], 0.01)
def testExponentialSampleMultiDimensional(self):
batch_size = 2
lam_v = [3.0, 22.0]
lam = tf.constant([lam_v] * batch_size)
exponential = exponential_lib.Exponential(rate=lam, validate_args=True)
n = 100000
samples = exponential.sample(n, seed=test_util.test_seed())
self.assertEqual(samples.shape, (n, batch_size, 2))
sample_values = self.evaluate(samples)
self.assertFalse(np.any(sample_values < 0.0))
for i in range(2):
self.assertLess(
sp_stats.kstest(sample_values[:, 0, i],
sp_stats.expon(scale=1.0 / lam_v[i]).cdf)[0], 0.01)
self.assertLess(
sp_stats.kstest(sample_values[:, 1, i],
sp_stats.expon(scale=1.0 / lam_v[i]).cdf)[0], 0.01)
def testFullyReparameterized(self):
lam = tf.constant([0.1, 1.0])
_, grad_lam = tfp.math.value_and_gradient(
lambda l: exponential_lib.Exponential(rate=lam, validate_args=True). # pylint: disable=g-long-lambda
sample(100), lam)
self.assertIsNotNone(grad_lam)
def testExponentialExponentialKL(self):
a_rate = np.arange(0.5, 1.6, 0.1)
b_rate = np.arange(0.5, 1.6, 0.1)
# This reshape is intended to expand the number of test cases.
a_rate = a_rate.reshape((len(a_rate), 1))
b_rate = b_rate.reshape((1, len(b_rate)))
a = exponential_lib.Exponential(rate=a_rate, validate_args=True)
b = exponential_lib.Exponential(rate=b_rate, validate_args=True)
# Consistent with
# http://www.mast.queensu.ca/~communications/Papers/gil-msc11.pdf, page 108
true_kl = np.log(a_rate) - np.log(b_rate) + (b_rate - a_rate) / a_rate
kl = tfd.kl_divergence(a, b)
x = a.sample(int(4e5), seed=test_util.test_seed())
kl_sample = tf.reduce_mean(a.log_prob(x) - b.log_prob(x), axis=0)
kl_, kl_sample_ = self.evaluate([kl, kl_sample])
self.assertAllClose(true_kl, kl_, atol=0., rtol=1e-12)
self.assertAllClose(true_kl, kl_sample_, atol=0., rtol=8e-2)
zero_kl = tfd.kl_divergence(a, a)
true_zero_kl_, zero_kl_ = self.evaluate([tf.zeros_like(zero_kl), zero_kl])
self.assertAllEqual(true_zero_kl_, zero_kl_)
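  # Derivation sketch for the closed form above, assuming the usual parameterization
  # p_a(x) = a * exp(-a * x) and p_b(x) = b * exp(-b * x):
  #   KL(a || b) = E_a[log p_a(X) - log p_b(X)]
  #              = E_a[log(a) - log(b) - (a - b) * X]
  #              = log(a) - log(b) - (a - b) * E_a[X]
  #              = log(a) - log(b) + (b - a) / a,
  # since E_a[X] = 1 / a; this matches `true_kl` computed in the test above.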
def testGradientThroughRate(self):
rate = tf.Variable(3.)
d = tfd.Exponential(rate=rate)
with tf.GradientTape() as tape:
loss = -d.log_prob([1., 2., 4.])
grad = tape.gradient(loss, d.trainable_variables)
self.assertLen(grad, 1)
self.assertAllNotNone(grad)
def testAssertsPositiveRate(self):
rate = tf.Variable([1., 2., -3.])
self.evaluate(rate.initializer)
with self.assertRaisesOpError("Argument `rate` must be positive."):
d = tfd.Exponential(rate=rate, validate_args=True)
self.evaluate(d.sample())
def testAssertsPositiveRateAfterMutation(self):
rate = tf.Variable([1., 2., 3.])
self.evaluate(rate.initializer)
d = tfd.Exponential(rate=rate, validate_args=True)
self.evaluate(d.mean())
with self.assertRaisesOpError("Argument `rate` must be positive."):
with tf.control_dependencies([rate.assign([1., 2., -3.])]):
self.evaluate(d.sample())
  def testExponentialQuantile(self):
exponential = exponential_lib.Exponential(rate=[1., 2.], validate_args=True)
# Corner cases.
result = self.evaluate(exponential.quantile([0., 1.]))
self.assertAllClose(result, [0., np.inf])
# Two sample values calculated by hand.
result = self.evaluate(exponential.quantile(0.5))
self.assertAllClose(result, [0.693147, 0.346574])
def testExponentialQuantileIsInverseOfCdf(self):
exponential = exponential_lib.Exponential(
rate=[1., 2.], validate_args=False)
values = [2 * [t / 10.] for t in range(0, 11)]
result = self.evaluate(exponential.cdf(exponential.quantile(values)))
self.assertAllClose(result, values)
if __name__ == "__main__":
tf.test.main()
| 37.598253
| 109
| 0.692567
|
312873a453df4cdfba6c0b25f900bc4a3d61c322
| 993
|
py
|
Python
|
subMenus/miscMenu.py
|
PeterVeltman/ArduinoLGServiceRemote
|
9f990ef2ddcaf963ffdb52c201fdbbdd3731c743
|
[
"MIT"
] | 7
|
2019-10-08T02:48:27.000Z
|
2021-12-02T15:46:09.000Z
|
subMenus/miscMenu.py
|
PeterVeltman/ArduinoLGServiceRemote
|
9f990ef2ddcaf963ffdb52c201fdbbdd3731c743
|
[
"MIT"
] | null | null | null |
subMenus/miscMenu.py
|
PeterVeltman/ArduinoLGServiceRemote
|
9f990ef2ddcaf963ffdb52c201fdbbdd3731c743
|
[
"MIT"
] | null | null | null |
global remoteObj
def pressStill():
remoteObj.setKey(18)
def pressRatio():
remoteObj.setKey(19)
def pressUSB():
remoteObj.setKey(20)
def pressHelp():
remoteObj.setKey(21)
def pressKeySUp():
remoteObj.setKey(22)
def pressKeySDown():
remoteObj.setKey(23)
def pressRed():
remoteObj.setKey(29)
def pressGreen():
remoteObj.setKey(30)
def pressYellow():
remoteObj.setKey(31)
def pressBlue():
remoteObj.setKey(32)
def miscMenu(mainMenu,remObj):
global remoteObj
remoteObj = remObj
options = [("Still",pressStill),
("Ratio",pressRatio),
("USB",pressUSB),
("Help",pressHelp),
("KeySUp",pressKeySUp),
("KeySDown",pressKeySDown),
("Red",pressRed),
("Green",pressGreen),
("Yellow",pressYellow),
("Blue",pressBlue),
("Exit",mainMenu.miscMenu.close)]
mainMenu.miscMenu.set_options(options)
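# Usage sketch (hypothetical; `mainMenu` and `remote` stand in for whatever
# menu framework and remote object the rest of the program provides):
#
#   from subMenus import miscMenu
#   miscMenu.miscMenu(mainMenu, remote)  # populates mainMenu.miscMenu with the options above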
avg_line_length: 20.265306 | max_line_length: 48 | alphanum_fraction: 0.58711

hexsha: fd10a8afa1aec1a66a980c58f5aa564aa4f91de3 | size: 6,955 | ext: py | lang: Python
max_stars_repo_path: sdk/python/pulumi_azure_nextgen/servicebus/migration_config.py | max_stars_repo_name: pulumi/pulumi-azure-nextgen | max_stars_repo_head_hexsha: 452736b0a1cf584c2d4c04666e017af6e9b2c15c | max_stars_repo_licenses: ["Apache-2.0"] | max_stars_count: 31 | max_stars_repo_stars_event_min_datetime: 2020-09-21T09:41:01.000Z | max_stars_repo_stars_event_max_datetime: 2021-02-26T13:21:59.000Z
max_issues_repo_path: sdk/python/pulumi_azure_nextgen/servicebus/migration_config.py | max_issues_repo_name: pulumi/pulumi-azure-nextgen | max_issues_repo_head_hexsha: 452736b0a1cf584c2d4c04666e017af6e9b2c15c | max_issues_repo_licenses: ["Apache-2.0"] | max_issues_count: 231 | max_issues_repo_issues_event_min_datetime: 2020-09-21T09:38:45.000Z | max_issues_repo_issues_event_max_datetime: 2021-03-01T11:16:03.000Z
max_forks_repo_path: sdk/python/pulumi_azure_nextgen/servicebus/migration_config.py | max_forks_repo_name: pulumi/pulumi-azure-nextgen | max_forks_repo_head_hexsha: 452736b0a1cf584c2d4c04666e017af6e9b2c15c | max_forks_repo_licenses: ["Apache-2.0"] | max_forks_count: 4 | max_forks_repo_forks_event_min_datetime: 2020-09-29T14:14:59.000Z | max_forks_repo_forks_event_max_datetime: 2021-02-10T20:38:16.000Z
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from .. import _utilities, _tables
__all__ = ['MigrationConfig']
class MigrationConfig(pulumi.CustomResource):
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
config_name: Optional[pulumi.Input[str]] = None,
namespace_name: Optional[pulumi.Input[str]] = None,
post_migration_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
target_namespace: Optional[pulumi.Input[str]] = None,
__props__=None,
__name__=None,
__opts__=None):
"""
Single item in List or Get Migration Config operation
API Version: 2017-04-01.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] config_name: The configuration name. Should always be "$default".
:param pulumi.Input[str] namespace_name: The namespace name
:param pulumi.Input[str] post_migration_name: Name to access Standard Namespace after migration
:param pulumi.Input[str] resource_group_name: Name of the Resource group within the Azure subscription.
:param pulumi.Input[str] target_namespace: Existing premium Namespace ARM Id name which has no entities, will be used for migration
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
__props__['config_name'] = config_name
if namespace_name is None and not opts.urn:
raise TypeError("Missing required property 'namespace_name'")
__props__['namespace_name'] = namespace_name
if post_migration_name is None and not opts.urn:
raise TypeError("Missing required property 'post_migration_name'")
__props__['post_migration_name'] = post_migration_name
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__['resource_group_name'] = resource_group_name
if target_namespace is None and not opts.urn:
raise TypeError("Missing required property 'target_namespace'")
__props__['target_namespace'] = target_namespace
__props__['migration_state'] = None
__props__['name'] = None
__props__['pending_replication_operations_count'] = None
__props__['provisioning_state'] = None
__props__['type'] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:servicebus/latest:MigrationConfig"), pulumi.Alias(type_="azure-nextgen:servicebus/v20170401:MigrationConfig"), pulumi.Alias(type_="azure-nextgen:servicebus/v20180101preview:MigrationConfig")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(MigrationConfig, __self__).__init__(
'azure-nextgen:servicebus:MigrationConfig',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'MigrationConfig':
"""
Get an existing MigrationConfig resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
return MigrationConfig(resource_name, opts=opts, __props__=__props__)
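# Example lookup (illustrative; the resource name and ARM id are hypothetical):
#   existing = MigrationConfig.get(
#       "existingMigration",
#       id="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.ServiceBus/namespaces/<ns>/migrationConfigurations/$default")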
@property
@pulumi.getter(name="migrationState")
def migration_state(self) -> pulumi.Output[str]:
"""
State in which Standard to Premium Migration is, possible values : Unknown, Reverting, Completing, Initiating, Syncing, Active
"""
return pulumi.get(self, "migration_state")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Resource name
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="pendingReplicationOperationsCount")
def pending_replication_operations_count(self) -> pulumi.Output[float]:
"""
Number of entities pending to be replicated.
"""
return pulumi.get(self, "pending_replication_operations_count")
@property
@pulumi.getter(name="postMigrationName")
def post_migration_name(self) -> pulumi.Output[str]:
"""
Name to access Standard Namespace after migration
"""
return pulumi.get(self, "post_migration_name")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> pulumi.Output[str]:
"""
Provisioning state of Migration Configuration
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="targetNamespace")
def target_namespace(self) -> pulumi.Output[str]:
"""
Existing premium Namespace ARM Id name which has no entities, will be used for migration
"""
return pulumi.get(self, "target_namespace")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Resource type
"""
return pulumi.get(self, "type")
def translate_output_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
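# Usage sketch (illustrative; the resource names and the target namespace ARM id
# are hypothetical, and a running Pulumi Azure program context is assumed):
#
#   import pulumi
#   from pulumi_azure_nextgen.servicebus import MigrationConfig
#
#   migration = MigrationConfig(
#       "standardToPremiumMigration",
#       config_name="$default",
#       namespace_name="my-standard-namespace",
#       post_migration_name="my-standard-namespace-post",
#       resource_group_name="my-resource-group",
#       target_namespace="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.ServiceBus/namespaces/my-premium-namespace")
#   pulumi.export("migrationState", migration.migration_state)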
avg_line_length: 43.198758 | max_line_length: 279 | alphanum_fraction: 0.656794