"""
Merge Sort
1. Divide the unsorted list into n sublists, each containing one element (a
list of one element is considered sorted).
2. Repeatedly merge sublists to
produce new sorted sublists until there is only one sublist remaining. This
will be the sorted list.
"""
def merge_sort(arr):
"""
    merge sort
    Time: O(n log n)
    Space: O(n)
"""
if len(arr) <= 1:
return arr
mid = len(arr) // 2
left = merge_sort(arr[:mid])
right = merge_sort(arr[mid:])
    # Merge the two sorted halves back into arr
i = j = k = 0
while i < len(left) and j < len(right):
if left[i] < right[j]:
arr[k] = left[i]
i += 1
else:
arr[k] = right[j]
j += 1
k += 1
# Checking if any element was left
while i < len(left):
arr[k] = left[i]
i += 1
k += 1
while j < len(right):
arr[k] = right[j]
j += 1
k += 1
return arr
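# A minimal usage sketch (the sample list below is made up for illustration):
if __name__ == "__main__":
    sample = [5, 2, 9, 1, 5, 6]
    print(merge_sort(sample))  # -> [1, 2, 5, 5, 6, 9]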
|
n = soma = cont = 0
while True:
    n = int(input('Enter a number: '))
if n == 999:
break
cont += 1
soma += n
print(f'{cont} numbers were entered, and the total sum is {soma}.')
|
class IndexingError(Exception):
"""Exception raised for errors in the indexing flow.
Attributes:
type -- One of 'user', 'user_replica_set', 'user_library', 'tracks', 'social_features', 'playlists'
blocknumber -- block number of error
blockhash -- block hash of error
txhash -- transaction hash of error
message -- error message
"""
def __init__(self, type, blocknumber, blockhash, txhash, message):
super().__init__(message)
self.type = type
self.blocknumber = blocknumber
self.blockhash = blockhash
self.txhash = txhash
self.message = message
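# A hypothetical usage sketch (not from the original indexer); the values below are
# placeholders that only illustrate how the exception carries its context fields.
if __name__ == "__main__":
    try:
        raise IndexingError(
            type="tracks",
            blocknumber=123456,
            blockhash="0xabc",
            txhash="0xdef",
            message="failed to index track",
        )
    except IndexingError as e:
        print(e.type, e.blocknumber, e.message)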
|
"""
Entradas:
lista-->list-->lista
elemento->str-->elemento
Salidas
lista-->lista
"""
with open('frutas.txt', 'r') as frutas:
    lista_frutas = []
    for i in frutas:
        lista_frutas.append(i.strip())
def eliminar_un_caracter(lista: list, elemento: str):
auxilar = []
for i in lista:
a = i.replace(elemento, "")
auxilar.append(a)
return auxilar
if __name__ == "__main__":
nueva = eliminar_un_caracter(lista_frutas, ("a"))
print(nueva)
|
##########################################################################################
# district data structure
##########################################################################################
class DistrictData:
def __init__(self):
self.data = ""
self.stato = ""
self.codice_regione = ""
self.denominazione_regione = ""
self.codice_provincia = ""
self.denominazione_provincia = ""
self.sigla_provincia = ""
self.lat = ""
self.long = ""
self.totale_casi = 0
self.note = ""
self.codice_nuts_1 = ""
self.codice_nuts_2 = ""
self.codice_nuts_3 = ""
    # data calculated from the other fields
self.nuovi_casi = 0
def fillData(self,item_today, j_yesterday, index):
##########################################################################################
        # fill the object's class attributes
        # 1. fill each attribute from the corresponding key in the JSON dataset; attributes and keys
        #    that share the same name are assigned automatically.
        # 2. attributes that need some kind of extra processing have to be handled separately
##########################################################################################
for key in item_today:
if hasattr(self, key):
setattr(self,key,item_today[key])
        if item_today["codice_provincia"] == j_yesterday[index]["codice_provincia"]:  # check that both JSON records refer to the same province code
n_positivi = item_today["totale_casi"]-j_yesterday[index]["totale_casi"]
setattr(self,"nuovi_casi",n_positivi)
else:
print("Provincia non corrispondente")
|
from typing import List

class Solution:
def nextPermutation(self, nums: List[int]) -> None:
"""
Do not return anything, modify nums in-place instead.
"""
i = len(nums) - 2
while (i >= 0 and nums[i+1] <= nums[i]):
i -= 1
# not the first
if i >= 0:
j = len(nums) - 1
while nums[i] >= nums[j]:
j -= 1
nums[i], nums[j] = nums[j], nums[i]
# reverse
j = len(nums)-1
i += 1
while( i < j ):
nums[i], nums[j] = nums[j], nums[i]
i += 1
j -= 1
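# A small driver sketch (assumed, not part of the original solution): the method
# rearranges the list in place into its next lexicographic permutation.
if __name__ == "__main__":
    nums = [1, 2, 3]
    Solution().nextPermutation(nums)
    print(nums)  # -> [1, 3, 2]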
|
# -*- coding: utf-8 -*-
while True:
try:
hm = list(map(float, input().split()))
h = int((hm[0] / 360) * 12)
m = int((hm[1] / 360) * 60)
if m == 60:
m = 0
print("{:02d}:{:02d}".format(h, m))
except (EOFError, IndexError):
break
|
# -*- coding: utf-8 -*-
__about__ = """
This project comes fully-featured, with everything that Pinax provides enabled
by default. It provides all tabs available, etc. From here you can remove
applications that you do not want to use, and add your own applications as well.
"""
|
# A parameter's default value is set in the function definition as val = default
def func_power(num, power=2):
return num ** power
print("-------- call func_power(2) --------")
print(func_power(2)) # -> 4
print("-------- call func_power(2, 3) --------")
print(func_power(2, 3)) # -> 8
print()
# Variable-length arguments
# A parameter prefixed with a single * receives its values as a tuple
# A parameter prefixed with ** receives its values as a dict
# Example with *args
def func_sum(*num):
ret = 0
for i in num:
ret += i
return ret
print("-------- call func_sum(1, 1) --------")
print(func_sum(1, 1))
print("-------- call func_sum(1, 2, 3, 4, 5, 6, 7, 8, 9, 10) --------")
print(func_sum(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))
print()
# Example with **kwargs (dict)
def func_showPrice(**products):
for name, price in products.items():
print("{}:\t{}".format(name, price))
print("-------- call(func_showPrice(camera = '100000', lens = '25000', film = '6980')) --------")
func_showPrice(camera = '100000', lens = '25000', film = '6980')
print()
# Keyword-argument names must be valid identifiers, so string literals cannot be used as keys
# func_showPrice("カメラ" = '100000', "レンズ" = '25000', "フィルム" = '6980') # <- this raises a SyntaxError
# Because the number of variable-length arguments is not fixed, Python cannot tell where they would begin,
# so they must be declared after the ordinary positional parameters
def func_set_va(arg, *v_arg):
print("arg: {}".format(arg))
print("v_arg: " + str(v_arg))
print("-------- call func_set_va(1, 2, 3) --------")
func_set_va(1, 2, 3)
print()
# By the way, what happens when this is combined with default-valued parameters?
def func_set_va2(arg1 = 0, arg2 = 0, *v_arg):
print("arg1: {}".format(arg1))
print("arg2: {}".format(arg2))
print("v_arg: " + str(v_arg))
print("-------- call func_set_va2(1, 2) --------")
func_set_va2(1, 2, 3, 4,)
print()
# A default value is only a fallback used when the argument is missing, so every parameter
# declared before the variable-length parameter must still be supplied before *v_arg receives anything
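# An extra sketch (added note, not in the original): ordinary parameters, a default,
# *args and **kwargs can all be combined in a single signature.
def func_combined(first, second=0, *rest, **options):
    print("first: {}, second: {}, rest: {}, options: {}".format(first, second, rest, options))
print("-------- call func_combined(1, 2, 3, 4, mode='fast') --------")
func_combined(1, 2, 3, 4, mode='fast')
print()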
|
class Solution:
def rob(self, root):
def f(n):
if not n:
return [0, 0]
l, r = f(n.left), f(n.right)
return [l[1] + r[1], max(l[1] + r[1], n.val + l[0] + r[0])]
return max(f(root))
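# A minimal driver sketch: the snippet assumes a LeetCode-style binary tree node, so a
# hypothetical TreeNode definition is included here only to make the example runnable.
class TreeNode:
    def __init__(self, val=0, left=None, right=None):
        self.val, self.left, self.right = val, left, right

if __name__ == "__main__":
    root = TreeNode(3, TreeNode(2, None, TreeNode(3)), TreeNode(3, None, TreeNode(1)))
    print(Solution().rob(root))  # -> 7 (3 + 3 + 1)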
|
# -*- coding: utf-8 -*-
def create_3dskullstrip_arg_string(shrink_fac, var_shrink_fac,
shrink_fac_bot_lim, avoid_vent, niter,
pushout, touchup, fill_hole, avoid_eyes,
use_edge, exp_frac, smooth_final,
push_to_edge, use_skull, perc_int,
max_inter_iter, blur_fwhm, fac):
"""
Method to return option string for 3dSkullStrip
Parameters
----------
shrink_fac : float
Parameter controlling the brain vs non-brain intensity threshold (tb)
var_shrink_fac : boolean
Vary the shrink factor with the number of iterations
shrink_fac_bot_lim : float
Do not allow the varying SF to go below SFBL
avoid_vent : boolean
Avoid ventricles
niter : float
Number of iterations
pushout : boolean
Consider values above each node in addition to values below the node when deciding on expansion
touchup : boolean
Perform touchup operations at end to include areas not covered by surface expansion
fill_hole : float
Fill small holes that can result from small surface intersections caused by the touchup operation
avoid_eyes : boolean
Avoid eyes
use_edge : boolean
Use edge detection to reduce leakage into meninges and eyes
exp_frac : float
Speed of expansion
smooth_final : float
Perform final surface smoothing after all iterations
push_to_edge : boolean
Perform aggressive push to edge at the end
use_skull : boolean
Use outer skull to limit expansion of surface into the skull due to very strong shading artifacts
perc_int : float
Percentage of segments allowed to intersect surface
max_inter_iter : float
Number of iteration to remove intersection problems
blur_fwhm : float
Blur dset after spatial normalization
fac : float
Multiply input dataset by FAC if range of values is too small
Returns
-------
opt_str : string
Command args
"""
expr = ''
defaults = dict(
fill_hole=10 if touchup else 0,
shrink_fac=0.6,
shrink_fac_bot_lim=0.4 if use_edge else 0.65,
niter=250,
exp_frac=0.1,
smooth_final=20,
perc_int=0,
max_inter_iter=4,
blur_fwhm=0,
fac=1.0
)
if float(shrink_fac) != defaults['shrink_fac']:
expr += ' -shrink_fac {0}'.format(shrink_fac)
if not var_shrink_fac:
expr += ' -no_var_shrink_fac'
if float(shrink_fac_bot_lim) != defaults['shrink_fac_bot_lim']:
expr += ' -shrink_fac_bot_lim {0}'.format(shrink_fac_bot_lim)
if not use_edge:
expr += ' -no_use_edge'
if not avoid_vent:
expr += ' -no_avoid_vent'
if int(niter) != defaults['niter']:
expr += ' -niter {0}'.format(niter)
if not pushout:
expr += ' -no_pushout'
if not touchup:
expr += ' -no_touchup'
if int(fill_hole) != defaults['fill_hole']:
expr += ' -fill_hole {0}'.format(fill_hole)
if not avoid_eyes:
expr += ' -no_avoid_eyes'
if float(exp_frac) != defaults['exp_frac']:
expr += ' -exp_frac {0}'.format(exp_frac)
if int(smooth_final) != defaults['smooth_final']:
expr += ' -smooth_final {0}'.format(smooth_final)
if push_to_edge:
expr += ' -push_to_edge'
if use_skull:
expr += ' -use_skull'
if float(perc_int) != defaults['perc_int']:
expr += ' -perc_int {0}'.format(perc_int)
if int(max_inter_iter) != defaults['max_inter_iter']:
expr += ' -max_inter_iter {0}'.format(max_inter_iter)
if float(blur_fwhm) != defaults['blur_fwhm']:
expr += ' -blur_fwhm {0}'.format(blur_fwhm)
if float(fac) != defaults['fac']:
expr += ' -fac {0}'.format(fac)
return expr
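# A hedged usage sketch (values chosen only for illustration; they do not come from the
# original pipeline): with mostly default-like inputs, only non-default flags appear in
# the generated option string.
if __name__ == "__main__":
    opts = create_3dskullstrip_arg_string(
        shrink_fac=0.5, var_shrink_fac=True, shrink_fac_bot_lim=0.4,
        avoid_vent=True, niter=250, pushout=True, touchup=True, fill_hole=10,
        avoid_eyes=True, use_edge=True, exp_frac=0.1, smooth_final=20,
        push_to_edge=False, use_skull=False, perc_int=0, max_inter_iter=4,
        blur_fwhm=0, fac=1.0)
    print(opts)  # -> ' -shrink_fac 0.5'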
|
#!/usr/bin/env python3.7
class Proposal:
"""
Sequence proposed by the player while trying to guess the secret
The problem will add hint information, by setting the value of whites and reds
"""
"""proposed secret sequence"""
sequence = str
"""number of right colours in a wrong position"""
whites = int
"""number of right colours in the correct position"""
reds = int
def __init__(self, sequence):
"""
Create a proposal
:param sequence: secret sequence proposed by the player
"""
self.sequence = sequence
|
# Copyright (c) 2014 The Johns Hopkins University/Applied Physics Laboratory
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class OperationResult(object):
def __init__(self,
result_status,
result_reason=None,
result_message=None):
self.result_status = result_status
if result_reason is not None:
self.result_reason = result_reason
else:
self.result_reason = None
if result_message is not None:
self.result_message = result_message
else:
self.result_message = None
class CreateResult(OperationResult):
def __init__(self,
result_status,
result_reason=None,
result_message=None,
object_type=None,
uuid=None,
template_attribute=None):
super(CreateResult, self).__init__(
result_status, result_reason, result_message)
if object_type is not None:
self.object_type = object_type
else:
self.object_type = None
if uuid is not None:
self.uuid = uuid
else:
self.uuid = None
if template_attribute is not None:
self.template_attribute = template_attribute
else:
self.template_attribute = None
class CreateKeyPairResult(OperationResult):
def __init__(self,
result_status,
result_reason=None,
result_message=None,
private_key_uuid=None,
public_key_uuid=None,
private_key_template_attribute=None,
public_key_template_attribute=None):
super(CreateKeyPairResult, self).__init__(
result_status, result_reason, result_message)
self.private_key_uuid = private_key_uuid
self.public_key_uuid = public_key_uuid
self.private_key_template_attribute = private_key_template_attribute
self.public_key_template_attribute = public_key_template_attribute
class ActivateResult(OperationResult):
def __init__(self,
result_status,
result_reason=None,
result_message=None,
uuid=None):
super(ActivateResult, self).__init__(
result_status, result_reason, result_message)
if uuid is not None:
self.uuid = uuid
else:
self.uuid = None
class RegisterResult(OperationResult):
def __init__(self,
result_status,
result_reason=None,
result_message=None,
uuid=None,
template_attribute=None):
super(RegisterResult, self).__init__(
result_status, result_reason, result_message)
if uuid is not None:
self.uuid = uuid
else:
self.uuid = None
if template_attribute is not None:
self.template_attribute = template_attribute
else:
self.template_attribute = None
class RekeyKeyPairResult(CreateKeyPairResult):
def __init__(self,
result_status,
result_reason=None,
result_message=None,
private_key_uuid=None,
public_key_uuid=None,
private_key_template_attribute=None,
public_key_template_attribute=None):
super(RekeyKeyPairResult, self).__init__(
result_status, result_reason, result_message, private_key_uuid,
public_key_uuid, private_key_template_attribute,
public_key_template_attribute)
class GetResult(OperationResult):
def __init__(self,
result_status,
result_reason=None,
result_message=None,
object_type=None,
uuid=None,
secret=None):
super(GetResult, self).__init__(
result_status, result_reason, result_message)
if object_type is not None:
self.object_type = object_type
else:
self.object_type = None
if uuid is not None:
self.uuid = uuid
else:
self.uuid = None
if secret is not None:
self.secret = secret
else:
self.secret = None
class GetAttributesResult(OperationResult):
def __init__(
self,
result_status,
result_reason=None,
result_message=None,
uuid=None,
attributes=None
):
super(GetAttributesResult, self).__init__(
result_status,
result_reason,
result_message
)
self.uuid = uuid
self.attributes = attributes
class GetAttributeListResult(OperationResult):
def __init__(
self,
result_status,
result_reason=None,
result_message=None,
uid=None,
names=None):
super(GetAttributeListResult, self).__init__(
result_status, result_reason, result_message)
self.uid = uid
self.names = names
class DestroyResult(OperationResult):
def __init__(self,
result_status,
result_reason=None,
result_message=None,
uuid=None):
super(DestroyResult, self).__init__(
result_status, result_reason, result_message)
if uuid is not None:
self.uuid = uuid
else:
self.uuid = None
class LocateResult(OperationResult):
def __init__(self,
result_status,
result_reason=None,
result_message=None,
uuids=None):
super(LocateResult, self).__init__(
result_status, result_reason, result_message)
self.uuids = uuids
class QueryResult(OperationResult):
"""
A container for the results of a Query operation.
Attributes:
result_status: The status of the Query operation (e.g., success or
failure).
result_reason: The reason for the operation status.
result_message: Extra information pertaining to the status reason.
operations: A list of Operations supported by the server.
object_types: A list of Object Types supported by the server.
        vendor_identification: Identification of the server vendor.
        server_information: Additional information supplied by the server.
application_namespaces: A list of namespaces supported by the server.
extension_information: A list of extensions supported by the server.
"""
def __init__(self,
result_status,
result_reason=None,
result_message=None,
operations=None,
object_types=None,
vendor_identification=None,
server_information=None,
application_namespaces=None,
extension_information=None):
super(QueryResult, self).__init__(
result_status, result_reason, result_message)
if operations is None:
self.operations = list()
else:
self.operations = operations
if object_types is None:
self.object_types = list()
else:
self.object_types = object_types
self.vendor_identification = vendor_identification
self.server_information = server_information
if application_namespaces is None:
self.application_namespaces = list()
else:
self.application_namespaces = application_namespaces
if extension_information is None:
self.extension_information = list()
else:
self.extension_information = extension_information
class DiscoverVersionsResult(OperationResult):
def __init__(self,
result_status,
result_reason=None,
result_message=None,
protocol_versions=None):
super(DiscoverVersionsResult, self).__init__(
result_status, result_reason, result_message)
self.protocol_versions = protocol_versions
class RevokeResult(OperationResult):
def __init__(self,
result_status,
result_reason=None,
result_message=None,
unique_identifier=None):
super(RevokeResult, self).__init__(
result_status, result_reason, result_message)
self.unique_identifier = unique_identifier
class MACResult(OperationResult):
def __init__(self,
result_status,
result_reason=None,
result_message=None,
uuid=None,
mac_data=None):
super(MACResult, self).__init__(
result_status,
result_reason,
result_message
)
self.uuid = uuid
self.mac_data = mac_data
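# A hypothetical usage sketch (the status below is a plain string placeholder; the real
# code passes KMIP ResultStatus enumeration objects rather than strings).
if __name__ == "__main__":
    result = CreateResult("SUCCESS", uuid="00000000-1111-2222-3333-444444444444")
    print(result.result_status, result.uuid, result.object_type)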
|
TEMPLATE = {
'schemes': [
'http'
],
'tags': [
{
'name': '계정',
'description': '계정 관련 API'
},
{
'name': '도서관',
'description': '도서관 가입과 탈퇴에 관한 API'
},
{
'name': '책',
'description': '도서관의 책 관리에 관한 API'
},
{
'name': '대출',
'description': '도서관의 책 대출/반납에 관한 API'
}
]
}
|
class BookViewModel(object):
def __init__(self, data):
self.title = data['title']
self.author = '、'.join(data['author'])
self.binding = data['binding']
self.publisher = data['publisher']
self.image = data['image']
self.image = self.image.replace('/view/subject/m/public/','//view//subject//m//public/')
self.price = '¥' + data['price'] if data['price'] else data['price']
self.isbn = data['isbn13']
self.pubdate = data['pubdate']
self.summary = data['summary']
self.pages = data['pages']
@property
def intro(self):
        intros = filter(None, [self.author, self.publisher, self.price])
return '/'.join(intros)
class BookCollection(object):
    def __init__(self):
self.total = 0
self.books = []
self.keyword = ''
def fill(self, shupiao_book, keyword):
self.total = shupiao_book.total
self.keyword = keyword
self.books = [BookViewModel(book) for book in shupiao_book.books]
class _BookViewModel(object):
@classmethod
def package_single(cls, data, keyword):
returned = {
'books':[],
'total':0,
'keyword': keyword
}
if data:
returned['total'] = 1
returned['books'] = [cls.__cut_book_data(data)]
return returned
@classmethod
def package_collection(cls, data, keyword):
returned = {
'books':[],
'total': 0,
'keyword': keyword
}
if data:
returned['total'] = data['total']
            returned['books'] = [cls.__cut_book_data(book) for book in data['books']]
return returned
@classmethod
def __cut_book_data(cls, data):
book = {
'title':data['title'],
'publisher':data['publisher'],
'pages':data['pages'] or '',
'author':'、'.join(data['author']),
'price':data['price'],
'summary':data['summary'] or '',
'image':data['images']['medium']
}
return book
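# A small illustration with made-up book data (not from the real API); it only shows
# the keys BookViewModel expects and what the derived intro looks like.
if __name__ == "__main__":
    sample = {
        'title': 'Sample Book', 'author': ['A. Author', 'B. Writer'],
        'binding': 'Paperback', 'publisher': 'Example Press',
        'image': 'https://example.com/view/subject/m/public/cover.jpg',
        'price': '59.00', 'isbn13': '9780000000000', 'pubdate': '2020-01',
        'summary': 'A made-up summary.', 'pages': '300',
    }
    book = BookViewModel(sample)
    print(book.intro)  # -> A. Author、B. Writer/Example Press/¥59.00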
|
'''
If the parameter to the make payment method of the CreditCard class
were a negative number, that would have the effect of raising the balance
on the account. Revise the implementation so that it raises a ValueError if
a negative value is sent.
'''
def make_payment(self, amount):
    """Reduce the outstanding balance by a non-negative payment amount."""
    if amount < 0:
        raise ValueError("Payment amount cannot be negative")
    self._balance -= amount
    return '$' + str(amount) + ' payment received. New balance is $' + str(self._balance)
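# A minimal context sketch: the surrounding CreditCard class is not part of the snippet,
# so a bare-bones stand-in is assumed here purely to exercise the revised method.
class CreditCard:
    def __init__(self, balance=0):
        self._balance = balance

CreditCard.make_payment = make_payment

if __name__ == "__main__":
    card = CreditCard(500)
    print(card.make_payment(100))  # balance drops from 500 to 400
    # card.make_payment(-50)       # would raise ValueError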
|
x = 3
def foo():
y = "String"
return y
foo()
|
n = int(input())
for i in range(n):
r,e,c = map(int, input().split())
if((e-c) > r):
print('advertise')
elif((e-c) == r):
print('does not matter')
else:
print('do not advertise')
|
def sum(*args):
total = 0
for arg in args:
total+= arg
return total
a = sum(1200, 300, 500)
print(a)
|
#1)
def isnegative(n):
if n < 0:
return True
else:
return False
isnegative(-6)
#1)
list1 = [1,2,3]
def count_evens(list1):
even_count = 0
for num in list1:
if num % 2 == 0:
even_count += 1
print(even_count)
#1)
def increment_odds(n):
nums = []
for n in range(1, 2*n, 2):
nums.append(n)
return nums
increment_odds(3)
#1)
def average(l):
    return round(sum(l) / len(l), 2)
average([1,2,3])
#1)
name_to_dict = dict()
name_to_dict["frist_name"] = "Ada"
name_to_dict["last_name"] = "Lovelace"
name_to_dict
#1)
def capitalize_names(name):
    return name.capitalize()
capitalize_names("")
#1)
def count_vowels(value):
value = value.lower()
vowel = ['a','e','i','o','u']
count = 0
for a in value:
if a in vowel:
count += 1
return count
count_vowels('abcde')
#1)
def analyze_word(word):
    vowels = ['a','e','i','o','u']
    num_of_vowels = 0
    num_of_char = {}
    for c in word:
        num_of_char[c] = num_of_char.get(c, 0) + 1
        if c in vowels:
            num_of_vowels += 1
    return len(word), num_of_vowels, num_of_char
analyze_word('word')
|
def bio2span(labels):
spans = []
span = [None, -1, -1]
for i, tag in enumerate(labels):
if tag.startswith('B-'):
if span[2] != -1:
spans.append(span)
span = [tag.split('-')[1], i, i]
            if i == len(labels) - 1:  # last tag
spans.append(span)
elif tag.startswith('I-') and span[1] != -1:
type_ = tag.split('-')[1]
if span[0] == type_:
span[2] = i
                if i == len(labels) - 1:  # last tag
spans.append(span)
else: # O
if span[2] != -1:
spans.append(span)
span = [None, -1, -1]
return spans
def span2bio(spans, seq_len, default_tag='O'):
tags = [default_tag] * seq_len
for span in spans:
type_, s, e = span
if s == e:
tags[s] = 'B-' + type_
elif s < e:
tags[s: e+1] = ['B-' + type_] + ['I-' + type_] * (e - s)
else:
raise IndexError
return tags
def bi2bies(bi_tags):
tag_len = len(bi_tags)
for i, t in enumerate(bi_tags):
if t == 'B':
if i + 1 == tag_len or 'I' != bi_tags[i+1]:
bi_tags[i] = 'S'
elif t == 'I':
if i + 1 == tag_len or 'I' != bi_tags[i+1]:
bi_tags[i] = 'E'
return bi_tags
# BIO -> BIOES
def bio2bioes(bio_tags):
tag_len = len(bio_tags)
for i, t in enumerate(bio_tags):
if 'B-' in t and (i+1 == tag_len or 'I-' not in bio_tags[i+1]):
_type = bio_tags[i].split('-')[1]
bio_tags[i] = 'S-' + _type
elif 'I-' in t and (i+1 == tag_len or 'I-' not in bio_tags[i+1]):
_type = bio_tags[i].split('-')[1]
bio_tags[i] = 'E-' + _type
return bio_tags
def extract_ner_bio_span(tag_seq: list):
span_res = []
n = len(tag_seq)
s = 0
type_b = None
start = False
for i, tag in enumerate(tag_seq):
if tag == 'O':
start = False
elif tag.startswith('B-'):
s = i
type_b = tag.split('-')[1]
if i + 1 == n or not tag_seq[i+1].startswith('I-'):
span_res.append((s, i, type_b))
start = False
else:
start = True
elif tag.startswith('I-'):
if start and tag.split('-')[1] == type_b:
if i + 1 == n or not tag_seq[i+1].startswith('I-'):
span_res.append((s, i, type_b))
start = False
return span_res
def extract_ner_biso_span(tag_seq):
spans = []
s = 0
n = len(tag_seq)
start = False
type_b = None
for i, tag in enumerate(tag_seq):
if tag == 'O':
start = False
elif tag.startswith('S-'):
spans.append((i, i, tag.split('-')[1]))
start = False
elif tag.startswith('B-'):
s = i
start = True
type_b = tag.split('-')[1]
elif tag.startswith('I-'):
if start and type_b == tag.split('-')[1]:
if i + 1 == n or not tag_seq[i+1].startswith('I-'):
spans.append((s, i, type_b))
start = False
return spans
def extract_ner_bieso_span(tag_seq):
spans = []
s = 0
start = False
type_b = None
for i, tag in enumerate(tag_seq):
if tag == 'O':
start = False
elif tag.startswith('S-'):
spans.append((i, i, tag.split('-')[1]))
start = False
elif tag.startswith('B-'):
s = i
type_b = tag.split('-')[1]
start = True
elif tag.startswith('E-'):
if start and tag.split('-')[1] == type_b:
spans.append((s, i, type_b))
start = False
else:
if '-' not in tag or tag.split('-')[1] != type_b:
start = False
return spans
def seq_match(main_str, sub_strs: list):
N = len(main_str)
match_spans = set()
for sub_str in sub_strs:
L = len(sub_str)
for i in range(N - L + 1):
if main_str[i: i+L] == sub_str:
match_spans.add((i, i+L-1))
return match_spans
def span2tags(spans, seq_len, default_tag='O'):
'''
:param spans: [(s, e, cls), ...]
:param seq_len: sequence length
:param default_tag: default tag in sequence
:return:
'''
tags = [default_tag] * seq_len
for one_span in spans:
if len(one_span) == 3:
s, e, cls = one_span
cls = '-'+cls
elif len(one_span) == 2:
s, e = one_span
cls = ''
else:
raise ValueError
tags[s] = 'B' + cls
tags[e] = 'E' + cls
if s == e:
tags[s] = 'S' + cls
elif s < e:
tags[s+1: e] = ['I' + cls] * (e - s - 1)
else:
raise IndexError
return tags
def test_():
# x = 'I I S B S I B X I S B I I S S B I S S B I I'.split()
# x = 'I I B B X I B B I I B I B I B B I B I B I I'.split()
# x = 'B I E S B E S S B I I E B I S B E S E'.split()
# y = extract_cws_bi_span(x)
# y = extract_cws_bis_span(x)
# y = extract_cws_bies_span(x)
# x = 'O I-per S-org S-org I-per S-org B-loc I-loc E-loc O B-org E-org O B-per E-per B-loc E-per O S-LOC E-LOC'.split()
x = 'O B-PER I-PER I-LOC B-PER I-PER B-LOC B-ORG I-ORG O B-LOC I-LOC I-PER'.split()
y = extract_ner_bio_span(x)
# y = extract_ner_biso_span(x)
# y = extract_ner_bieso_span(x)
print(y)
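# A quick round-trip sketch (added for illustration, not part of the original module):
# spans extracted with bio2span can be fed back into span2bio to recover the tags.
if __name__ == "__main__":
    labels = ['B-PER', 'I-PER', 'O', 'B-LOC']
    spans = bio2span(labels)             # -> [['PER', 0, 1], ['LOC', 3, 3]]
    print(span2bio(spans, len(labels)))  # -> ['B-PER', 'I-PER', 'O', 'B-LOC']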
|
N = int(input())
while N > 0:
texto = input().lower().replace(' ', '')
alfabeto = 'abcdefghijklmnopqrstuvwxyz'
    contador = [0] * 26
result = ''
a, i, count, maior = 0, 0, 0, 0
break_ = True
while count < 52:
if break_ == True:
contador[i] = texto.count(alfabeto[i])
if contador[i] > maior:
maior = contador[i]
i += 1
if i == 26:
break_ = False
else:
if maior == texto.count(alfabeto[a]):
result += alfabeto[a]
a += 1
count += 1
print(result)
N -= 1
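# An alternative sketch (added for comparison, not part of the original submission):
# collections.Counter can replace the manual frequency array used above.
from collections import Counter

def most_frequent_letters(text):
    counts = Counter(c for c in text.lower() if c in 'abcdefghijklmnopqrstuvwxyz')
    if not counts:
        return ''
    top = max(counts.values())
    return ''.join(sorted(c for c, v in counts.items() if v == top))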
|
"""
Singleton objects that serve as placeholders in pyll graphs.
These are used by e.g. ./nips2011.py
"""
class train_task(object):
"""`train` argument to skdata.LearningAlgo's best_model method
"""
class valid_task(object):
"""`valid` argument to skdata.LearningAlgo's best_model method
"""
class ctrl(object):
"""Hyperopt Ctrl object passed to worker eval_fn.
"""
|
# Schema used for pre-2013-2014 TAPR data
SCHEMA = {
'staff-and-student-information': {
'all_students_count': 'PETALLC',
'african_american_count': 'PETBLAC',
'african_american_percent': 'PETBLAP',
'american_indian_count': 'PETINDC',
'american_indian_percent': 'PETINDP',
'asian_count': 'PETASIC',
'asian_percent': 'PETASIP',
'hispanic_count': 'PETHISC',
'hispanic_percent': 'PETHISP',
'pacific_islander_count': 'PETPCIC',
'pacific_islander_percent': 'PETPCIP',
'two_or_more_races_count': 'PETTWOC',
'two_or_more_races_percent': 'PETTWOP',
'white_count': 'PETWHIC',
'white_percent': 'PETWHIP',
'early_childhood_education_count': 'PETGEEC',
'early_childhood_education_percent': 'PETGEEP',
'prek_count': 'PETGPKC',
'prek_percent': 'PETGPKP',
'kindergarten_count': 'PETGKNC',
'kindergarten_percent': 'PETGKNP',
'first_count': 'PETG01C',
'first_percent': 'PETG01P',
'second_count': 'PETG02C',
'second_percent': 'PETG02P',
'third_count': 'PETG03C',
'third_percent': 'PETG03P',
'fourth_count': 'PETG04C',
'fourth_percent': 'PETG04P',
'fifth_count': 'PETG05C',
'fifth_percent': 'PETG05P',
'sixth_count': 'PETG06C',
'sixth_percent': 'PETG06P',
'seventh_count': 'PETG07C',
'seventh_percent': 'PETG07P',
'eighth_count': 'PETG08C',
'eighth_percent': 'PETG08P',
'ninth_count': 'PETG09C',
'ninth_percent': 'PETG09P',
'tenth_count': 'PETG10C',
'tenth_percent': 'PETG10P',
'eleventh_count': 'PETG11C',
'eleventh_percent': 'PETG11P',
'twelfth_count': 'PETG12C',
'twelfth_percent': 'PETG12P',
'at_risk_count': 'PETRSKC',
'at_risk_percent': 'PETRSKP',
'economically_disadvantaged_count': 'PETECOC',
'economically_disadvantaged_percent': 'PETECOP',
'limited_english_proficient_count': 'PETLEPC',
'limited_english_proficient_percent': 'PETLEPP',
'bilingual_esl_count': 'PETBILC',
'bilingual_esl_percent': 'PETBILP',
'career_technical_education_count': 'PETVOCC',
'career_technical_education_percent': 'PETVOCP',
'gifted_and_talented_count': 'PETGIFC',
'gifted_and_talented_percent': 'PETGIFP',
'special_education_count': 'PETSPEC',
'special_education_percent': 'PETSPEP',
'class_size_avg_kindergarten': 'PCTGKGA',
'class_size_avg_first': 'PCTG01A',
'class_size_avg_second': 'PCTG02A',
'class_size_avg_third': 'PCTG03A',
'class_size_avg_fourth': 'PCTG04A',
'class_size_avg_fifth': 'PCTG05A',
'class_size_avg_sixth': 'PCTG06A',
'class_size_avg_mixed_elementary': 'PCTGMEA',
'class_size_avg_secondary_english': 'PCTENGA',
'class_size_avg_secondary_foreign_language': 'PCTFLAA',
'class_size_avg_secondary_math': 'PCTMATA',
'class_size_avg_secondary_science': 'PCTSCIA',
'class_size_avg_secondary_social_studies': 'PCTSOCA',
'students_per_teacher': 'PSTKIDR',
# teacher_avg_tenure is Average Years Experience of Teachers with District
'teacher_avg_tenure': 'PSTTENA',
# teacher_avg_experience is Average Years Experience of Teachers
'teacher_avg_experience': 'PSTEXPA',
'teacher_avg_base_salary': 'PSTTOSA',
'teacher_avg_beginning_salary': 'PST00SA',
'teacher_avg_1_to_5_year_salary': 'PST01SA',
'teacher_avg_6_to_10_year_salary': 'PST06SA',
'teacher_avg_11_to_20_year_salary': 'PST11SA',
'teacher_avg_20_plus_year_salary': 'PST20SA',
'teacher_total_fte_count': 'PSTTOFC',
'teacher_african_american_fte_count': 'PSTBLFC',
'teacher_american_indian_fte_count': 'PSTINFC',
'teacher_asian_fte_count': 'PSTASFC',
'teacher_hispanic_fte_count': 'PSTHIFC',
'teacher_pacific_islander_fte_count': 'PSTPIFC',
'teacher_two_or_more_races_fte_count': 'PSTTWFC',
'teacher_white_fte_count': 'PSTWHFC',
'teacher_total_fte_percent': 'PSTTOFC',
'teacher_african_american_fte_percent': 'PSTBLFP',
'teacher_american_indian_fte_percent': 'PSTINFP',
'teacher_asian_fte_percent': 'PSTASFP',
'teacher_hispanic_fte_percent': 'PSTHIFP',
'teacher_pacific_islander_fte_percent': 'PSTPIFP',
'teacher_two_or_more_races_fte_percent': 'PSTTWFP',
'teacher_white_fte_percent': 'PSTWHFP',
# 'teacher_no_degree_count': 'PSTNOFC',
# 'teacher_bachelors_count': 'PSTBAFC',
# 'teacher_masters_count': 'PSTMSFC',
# 'teacher_doctorate_count': 'PSTPHFC',
# 'teacher_no_degree_percent': 'PSTNOFP',
# 'teacher_bachelors_percent': 'PSTBAFP',
# 'teacher_masters_percent': 'PSTMSFP',
# 'teacher_doctorate_percent': 'PSTPHFP',
},
'postsecondary-readiness-and-non-staar-performance-indicators': {
# 'college_ready_graduates_english_all_students_count': 'ACRR',
'college_ready_graduates_english_all_students_percent': 'ACRR',
# 'college_ready_graduates_english_african_american_count': 'BCRR',
'college_ready_graduates_english_african_american_percent': 'BCRR',
# 'college_ready_graduates_english_american_indian_count': 'ICRR',
'college_ready_graduates_english_american_indian_percent': 'ICRR',
# 'college_ready_graduates_english_asian_count': '3CRR',
'college_ready_graduates_english_asian_percent': '3CRR',
# 'college_ready_graduates_english_hispanic_count': 'HCRR',
'college_ready_graduates_english_hispanic_percent': 'HCRR',
# 'college_ready_graduates_english_pacific_islander_count': '4CRR',
'college_ready_graduates_english_pacific_islander_percent': '4CRR',
# 'college_ready_graduates_english_two_or_more_races_count': '2CRR',
'college_ready_graduates_english_two_or_more_races_percent': '2CRR',
# 'college_ready_graduates_english_white_count': 'WCRR',
'college_ready_graduates_english_white_percent': 'WCRR',
# 'college_ready_graduates_english_economically_disadvantaged_count': 'ECRR',
'college_ready_graduates_english_economically_disadvantaged_percent': 'ECRR',
# 'college_ready_graduates_english_limited_english_proficient_count': 'LCRR',
'college_ready_graduates_english_limited_english_proficient_percent': 'LCRR',
# 'college_ready_graduates_english_at_risk_count': 'RCRR',
'college_ready_graduates_english_at_risk_percent': 'RCRR',
# 'college_ready_graduates_math_all_students_count': 'ACRM',
'college_ready_graduates_math_all_students_percent': 'ACRM',
# 'college_ready_graduates_math_african_american_count': 'BCRM',
'college_ready_graduates_math_african_american_percent': 'BCRM',
# 'college_ready_graduates_math_american_indian_count': 'ICRM',
'college_ready_graduates_math_american_indian_percent': 'ICRM',
# 'college_ready_graduates_math_asian_count': '3CRM',
'college_ready_graduates_math_asian_percent': '3CRM',
# 'college_ready_graduates_math_hispanic_count': 'HCRM',
'college_ready_graduates_math_hispanic_percent': 'HCRM',
# 'college_ready_graduates_math_pacific_islander_count': '4CRM',
'college_ready_graduates_math_pacific_islander_percent': '4CRM',
# 'college_ready_graduates_math_two_or_more_races_count': '2CRM',
'college_ready_graduates_math_two_or_more_races_percent': '2CRM',
# 'college_ready_graduates_math_white_count': 'WCRM',
'college_ready_graduates_math_white_percent': 'WCRM',
# 'college_ready_graduates_math_economically_disadvantaged_count': 'ECRM',
'college_ready_graduates_math_economically_disadvantaged_percent': 'ECRM',
# 'college_ready_graduates_math_limited_english_proficient_count': 'LCRM',
'college_ready_graduates_math_limited_english_proficient_percent': 'LCRM',
# 'college_ready_graduates_math_at_risk_count': 'RCRM',
'college_ready_graduates_math_at_risk_percent': 'RCRM',
# 'college_ready_graduates_both_all_students_count': 'ACRB',
'college_ready_graduates_both_all_students_percent': 'ACRB',
# 'college_ready_graduates_both_african_american_count': 'BCRB',
'college_ready_graduates_both_african_american_percent': 'BCRB',
# 'college_ready_graduates_both_asian_count': '3CRB',
'college_ready_graduates_both_asian_percent': '3CRB',
# 'college_ready_graduates_both_hispanic_count': 'HCRB',
'college_ready_graduates_both_hispanic_percent': 'HCRB',
# 'college_ready_graduates_both_american_indian_count': 'ICRB',
'college_ready_graduates_both_american_indian_percent': 'ICRB',
# 'college_ready_graduates_both_pacific_islander_count': '4CRB',
'college_ready_graduates_both_pacific_islander_percent': '4CRB',
# 'college_ready_graduates_both_two_or_more_races_count': '2CRB',
'college_ready_graduates_both_two_or_more_races_percent': '2CRB',
# 'college_ready_graduates_both_white_count': 'WCRB',
'college_ready_graduates_both_white_percent': 'WCRB',
# 'college_ready_graduates_both_economically_disadvantaged_count': 'ECRB',
'college_ready_graduates_both_economically_disadvantaged_percent': 'ECRB',
# 'college_ready_graduates_both_limited_english_proficient_count': 'LCRB',
'college_ready_graduates_both_limited_english_proficient_percent': 'LCRB',
# 'college_ready_graduates_both_at_risk_count': 'RCRB',
'college_ready_graduates_both_at_risk_percent': 'RCRB',
'avg_sat_score_all_students': 'A0CSA',
'avg_sat_score_african_american': 'B0CSA',
'avg_sat_score_american_indian': 'I0CSA',
'avg_sat_score_asian': '30CSA',
'avg_sat_score_hispanic': 'H0CSA',
'avg_sat_score_pacific_islander': '40CSA',
'avg_sat_score_two_or_more_races': '20CSA',
'avg_sat_score_white': 'W0CSA',
'avg_sat_score_economically_disadvantaged': 'E0CSA',
'avg_act_score_all_students': 'A0CAA',
'avg_act_score_african_american': 'B0CAA',
'avg_act_score_american_indian': 'I0CAA',
'avg_act_score_asian': '30CAA',
'avg_act_score_hispanic': 'H0CAA',
'avg_act_score_pacific_islander': '40CAA',
'avg_act_score_two_or_more_races': '20CAA',
'avg_act_score_white': 'W0CAA',
'avg_act_score_economically_disadvantaged': 'E0CAA',
# 'ap_ib_all_students_count_above_criterion': 'A0BKA',
'ap_ib_all_students_percent_above_criterion': 'A0BKA',
# 'ap_ib_african_american_count_above_criterion': 'B0BKA',
'ap_ib_african_american_percent_above_criterion': 'B0BKA',
# 'ap_ib_asian_count_above_criterion': '30BKA',
'ap_ib_asian_percent_above_criterion': '30BKA',
# 'ap_ib_hispanic_count_above_criterion': 'H0BKA',
'ap_ib_hispanic_percent_above_criterion': 'H0BKA',
# 'ap_ib_american_indian_count_above_criterion': 'I0BKA',
'ap_ib_american_indian_percent_above_criterion': 'I0BKA',
# 'ap_ib_pacific_islander_count_above_criterion': '40BKA',
'ap_ib_pacific_islander_percent_above_criterion': '40BKA',
# 'ap_ib_two_or_more_races_count_above_criterion': '20BKA',
'ap_ib_two_or_more_races_percent_above_criterion': '20BKA',
# 'ap_ib_white_count_above_criterion': 'W0BKA',
'ap_ib_white_percent_above_criterion': 'W0BKA',
# 'ap_ib_economically_disadvantaged_count_above_criterion': 'E0BKA',
'ap_ib_economically_disadvantaged_percent_above_criterion': 'E0BKA',
'ap_ib_all_students_percent_taking': 'A0BTA',
'ap_ib_african_american_percent_taking': 'B0BTA',
'ap_ib_asian_percent_taking': '30BTA',
'ap_ib_hispanic_percent_taking': 'H0BTA',
'ap_ib_american_indian_percent_taking': 'I0BTA',
'ap_ib_pacific_islander_percent_taking': '40BTA',
'ap_ib_two_or_more_races_percent_taking': '20BTA',
'ap_ib_white_percent_taking': 'W0BTA',
'ap_ib_economically_disadvantaged_percent_taking': 'E0BTA',
# 'dropout_all_students_count': 'A0912DR',
'dropout_all_students_percent': 'A0912DR',
# 'dropout_african_american_count': 'B0912DR',
'dropout_african_american_percent': 'B0912DR',
# 'dropout_asian_count': '30912DR',
'dropout_asian_percent': '30912DR',
# 'dropout_hispanic_count': 'H0912DR',
'dropout_hispanic_percent': 'H0912DR',
# 'dropout_american_indian_count': 'I0912DR',
'dropout_american_indian_percent': 'I0912DR',
# 'dropout_pacific_islander_count': '40912DR',
'dropout_pacific_islander_percent': '40912DR',
# 'dropout_two_or_more_races_count': '20912DR',
'dropout_two_or_more_races_percent': '20912DR',
# 'dropout_white_count': 'W0912DR',
'dropout_white_percent': 'W0912DR',
# 'dropout_at_risk_count': 'R0912DR',
'dropout_at_risk_percent': 'R0912DR',
# 'dropout_economically_disadvantaged_count': 'E0912DR',
'dropout_economically_disadvantaged_percent': 'E0912DR',
# 'dropout_limited_english_proficient_count': 'E0912DR',
'dropout_limited_english_proficient_percent': 'E0912DR',
# 'four_year_graduate_all_students_count': 'AGC4X',
'four_year_graduate_all_students_percent': 'AGC4X',
# 'four_year_graduate_african_american_count': 'BGC4X',
'four_year_graduate_african_american_percent': 'BGC4X',
# 'four_year_graduate_american_indian_count': 'IGC4X',
'four_year_graduate_american_indian_percent': 'IGC4X',
# 'four_year_graduate_asian_count': '3GC4X',
'four_year_graduate_asian_percent': '3GC4X',
# 'four_year_graduate_hispanic_count': 'HGC4X',
'four_year_graduate_hispanic_percent': 'HGC4X',
# 'four_year_graduate_pacific_islander_count': '4GC4X',
'four_year_graduate_pacific_islander_percent': '4GC4X',
# 'four_year_graduate_two_or_more_races_count': '2GC4X',
'four_year_graduate_two_or_more_races_percent': '2GC4X',
# 'four_year_graduate_white_count': 'WGC4X',
'four_year_graduate_white_percent': 'WGC4X',
# 'four_year_graduate_at_risk_count': 'RGC4X',
'four_year_graduate_at_risk_percent': 'RGC4X',
# 'four_year_graduate_economically_disadvantaged_count': 'EGC4X',
'four_year_graduate_economically_disadvantaged_percent': 'EGC4X',
# 'four_year_graduate_limited_english_proficient_count': 'L3C4X',
'four_year_graduate_limited_english_proficient_percent': 'L3C4X',
'attendance_rate': 'A0AT',
},
'reference': {
'accountability_rating': '_RATING',
},
}
|
class ClientError(Exception):
"""Common base class for all client errors."""
class NotFoundError(ClientError):
"""URL was not found."""
class InvalidRequestError(ClientError):
"""The API request was invalid."""
class TimeoutError(ClientError): # pylint: disable=redefined-builtin
"""The API server did not respond before the timeout expired."""
|
# Leo colorizer control file for bbj mode.
# This file is in the public domain.
# Properties for bbj mode.
properties = {
"commentEnd": "*/",
"commentStart": "/*",
"wordBreakChars": ",+-=<>/?^&*",
}
# Attributes dict for bbj_main ruleset.
bbj_main_attributes_dict = {
"default": "null",
"digit_re": "",
"escape": "\\",
"highlight_digits": "true",
"ignore_case": "true",
"no_word_sep": "",
}
# Dictionary of attributes dictionaries for bbj mode.
attributesDictDict = {
"bbj_main": bbj_main_attributes_dict,
}
# Keywords dict for bbj_main ruleset.
bbj_main_keywords_dict = {
"abs": "keyword1",
"addr": "keyword3",
"adjn": "keyword1",
"all": "keyword3",
"argc": "keyword1",
"argv": "keyword1",
"asc": "keyword1",
"ath": "keyword1",
"atn": "keyword1",
"auto": "keyword3",
"background": "keyword1",
"begin": "keyword3",
"bin": "keyword1",
"break": "keyword3",
"bsz": "keyword1",
"call": "keyword3",
"callback": "keyword1",
"case": "keyword3",
"chanopt": "keyword1",
"chdir": "keyword2",
"chn": "keyword3",
"chr": "keyword1",
"cisam": "keyword2",
"clear": "keyword3",
"clipclear": "keyword1",
"clipfromfile": "keyword1",
"clipfromstr": "keyword1",
"clipisformat": "keyword1",
"cliplock": "keyword1",
"clipregformat": "keyword1",
"cliptofile": "keyword1",
"cliptostr": "keyword1",
"clipunlock": "keyword1",
"close": "keyword2",
"continue": "keyword2",
"cos": "keyword1",
"cpl": "keyword1",
"crc": "keyword1",
"crc16": "keyword1",
"ctl": "keyword3",
"ctrl": "keyword1",
"cvs": "keyword1",
"cvt": "keyword1",
"data": "keyword3",
"date": "keyword1",
"day": "keyword3",
"dec": "keyword1",
"def": "keyword3",
"default": "keyword3",
"defend": "keyword3",
"delete": "keyword3",
"dim": "keyword3",
"dims": "keyword1",
"dir": "keyword2",
"direct": "keyword2",
"disable": "keyword2",
"dom": "keyword2",
"dread": "keyword3",
"drop": "keyword3",
"dsk": "keyword1",
"dsz": "keyword1",
"dump": "keyword2",
"edit": "keyword3",
"else": "keyword3",
"enable": "keyword2",
"end": "keyword2",
"endif": "keyword3",
"endtrace": "keyword2",
"enter": "keyword3",
"ept": "keyword1",
"erase": "keyword2",
"err": "keyword3",
"errmes": "keyword1",
"escape": "keyword3",
"escoff": "keyword3",
"escon": "keyword3",
"execute": "keyword3",
"exit": "keyword3",
"exitto": "keyword3",
"extract": "keyword2",
"fattr": "keyword1",
"fbin": "keyword1",
"fdec": "keyword1",
"fi": "keyword3",
"fid": "keyword2",
"field": "keyword1",
"file": "keyword2",
"fileopt": "keyword1",
"fill": "keyword1",
"fin": "keyword2",
"find": "keyword2",
"floatingpoint": "keyword1",
"for": "keyword3",
"fpt": "keyword1",
"from": "keyword2",
"gap": "keyword1",
"gosub": "keyword3",
"goto": "keyword3",
"hsa": "keyword1",
"hsh": "keyword1",
"hta": "keyword1",
"if": "keyword3",
"iff": "keyword3",
"imp": "keyword1",
"ind": "keyword2",
"indexed": "keyword2",
"info": "keyword1",
"initfile": "keyword3",
"input": "keyword2",
"inpute": "keyword2",
"inputn": "keyword2",
"int": "keyword1",
"iol": "keyword2",
"iolist": "keyword2",
"ior": "keyword3",
"jul": "keyword1",
"key": "keyword2",
"keyf": "keyword2",
"keyl": "keyword2",
"keyn": "keyword2",
"keyp": "keyword2",
"kgen": "keyword2",
"knum": "keyword2",
"lcheckin": "keyword1",
"lcheckout": "keyword1",
"len": "keyword1",
"let": "keyword3",
"linfo": "keyword1",
"list": "keyword2",
"load": "keyword2",
"lock": "keyword2",
"log": "keyword1",
"lrc": "keyword1",
"lst": "keyword1",
"mask": "keyword1",
"max": "keyword1",
"menuinfo": "keyword1",
"merge": "keyword2",
"min": "keyword1",
"mkdir": "keyword2",
"mkeyed": "keyword2",
"mod": "keyword1",
"msgbox": "keyword1",
"neval": "keyword1",
"next": "keyword3",
"nfield": "keyword1",
"not": "keyword3",
"notice": "keyword1",
"noticetpl": "keyword1",
"num": "keyword1",
"on": "keyword3",
"open": "keyword2",
"opts": "keyword3",
"or": "keyword3",
"pad": "keyword1",
"pck": "keyword1",
"pfx": "keyword3",
"pgm": "keyword1",
"pos": "keyword1",
"precision": "keyword3",
"prefix": "keyword2",
"print": "keyword2",
"process_events": "keyword1",
"program": "keyword1",
"psz": "keyword1",
"pub": "keyword1",
"read": "keyword2",
"read_resource": "keyword2",
"record": "keyword2",
"release": "keyword3",
"remove": "keyword2",
"remove_callback": "keyword1",
"rename": "keyword2",
"renum": "keyword3",
"repeat": "keyword3",
"resclose": "keyword2",
"reserve": "keyword1",
"reset": "keyword3",
"resfirst": "keyword2",
"resget": "keyword2",
"resinfo": "keyword2",
"resnext": "keyword2",
"resopen": "keyword2",
"restore": "keyword3",
"retry": "keyword3",
"return": "keyword3",
"rev": "keyword2",
"rmdir": "keyword2",
"rnd": "keyword1",
"round": "keyword1",
"run": "keyword3",
"save": "keyword2",
"scall": "keyword1",
"select": "keyword2",
"sendmsg": "keyword1",
"serial": "keyword2",
"set_case_sensitive_off": "keyword3",
"set_case_sensitive_on": "keyword3",
"setday": "keyword2",
"setdrive": "keyword2",
"seterr": "keyword3",
"setesc": "keyword3",
"setopts": "keyword3",
"settime": "keyword3",
"settrace": "keyword2",
"seval": "keyword1",
"sgn": "keyword1",
"sin": "keyword1",
"siz": "keyword2",
"sort": "keyword2",
"sqlchn": "keyword2",
"sqlclose": "keyword2",
"sqlerr": "keyword2",
"sqlexec": "keyword2",
"sqlfetch": "keyword2",
"sqllist": "keyword2",
"sqlopen": "keyword2",
"sqlprep": "keyword2",
"sqlset": "keyword2",
"sqltables": "keyword2",
"sqltmpl": "keyword2",
"sqlunt": "keyword2",
"sqr": "keyword1",
"ssn": "keyword3",
"ssort": "keyword1",
"ssz": "keyword1",
"start": "keyword3",
"stbl": "keyword1",
"step": "keyword3",
"stop": "keyword3",
"str": "keyword1",
"string": "keyword2",
"swap": "keyword1",
"swend": "keyword3",
"switch": "keyword3",
"sys": "keyword1",
"table": "keyword2",
"tbl": "keyword2",
"tcb": "keyword1",
"then": "keyword3",
"tim": "keyword2",
"tmpl": "keyword1",
"to": "keyword3",
"tsk": "keyword1",
"unlock": "keyword2",
"unt": "keyword3",
"until": "keyword3",
"upk": "keyword1",
"wait": "keyword3",
"wend": "keyword3",
"where": "keyword2",
"while": "keyword3",
"winfirst": "keyword1",
"wininfo": "keyword1",
"winnext": "keyword1",
"write": "keyword2",
"xfid": "keyword2",
"xfile": "keyword2",
"xfin": "keyword2",
"xor": "keyword3",
}
# Dictionary of keywords dictionaries for bbj mode.
keywordsDictDict = {
"bbj_main": bbj_main_keywords_dict,
}
# Rules for bbj_main ruleset.
def bbj_rule0(colorer, s, i):
return colorer.match_span(s, i, kind="comment1", begin="/*", end="*/",
at_line_start=False, at_whitespace_end=False, at_word_start=False,
delegate="",exclude_match=False,
no_escape=False, no_line_break=False, no_word_break=False)
def bbj_rule1(colorer, s, i):
return colorer.match_span(s, i, kind="literal1", begin="\"", end="\"",
at_line_start=False, at_whitespace_end=False, at_word_start=False,
delegate="",exclude_match=False,
no_escape=False, no_line_break=True, no_word_break=False)
def bbj_rule2(colorer, s, i):
return colorer.match_eol_span(s, i, kind="comment2", seq="//",
at_line_start=False, at_whitespace_end=False, at_word_start=False,
delegate="", exclude_match=False)
def bbj_rule3(colorer, s, i):
return colorer.match_eol_span(s, i, kind="comment2", seq="REM",
at_line_start=False, at_whitespace_end=False, at_word_start=False,
delegate="", exclude_match=False)
def bbj_rule4(colorer, s, i):
return colorer.match_seq(s, i, kind="operator", seq="=",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def bbj_rule5(colorer, s, i):
return colorer.match_seq(s, i, kind="operator", seq=">=",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def bbj_rule6(colorer, s, i):
return colorer.match_seq(s, i, kind="operator", seq="<=",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def bbj_rule7(colorer, s, i):
return colorer.match_seq(s, i, kind="operator", seq="+",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def bbj_rule8(colorer, s, i):
return colorer.match_seq(s, i, kind="operator", seq="-",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def bbj_rule9(colorer, s, i):
return colorer.match_seq(s, i, kind="operator", seq="/",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def bbj_rule10(colorer, s, i):
return colorer.match_seq(s, i, kind="operator", seq="*",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def bbj_rule11(colorer, s, i):
return colorer.match_seq(s, i, kind="operator", seq=">",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def bbj_rule12(colorer, s, i):
return colorer.match_seq(s, i, kind="operator", seq="<",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def bbj_rule13(colorer, s, i):
return colorer.match_seq(s, i, kind="operator", seq="<>",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def bbj_rule14(colorer, s, i):
return colorer.match_seq(s, i, kind="operator", seq="^",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def bbj_rule15(colorer, s, i):
return colorer.match_seq(s, i, kind="operator", seq="and",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def bbj_rule16(colorer, s, i):
return colorer.match_seq(s, i, kind="operator", seq="or",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def bbj_rule17(colorer, s, i):
return colorer.match_mark_previous(s, i, kind="label", pattern=":",
at_line_start=True, at_whitespace_end=False, at_word_start=False, exclude_match=True)
def bbj_rule18(colorer, s, i):
return colorer.match_mark_previous(s, i, kind="function", pattern="(",
at_line_start=False, at_whitespace_end=False, at_word_start=False, exclude_match=True)
def bbj_rule19(colorer, s, i):
return colorer.match_keywords(s, i)
# Rules dict for bbj_main ruleset.
rulesDict1 = {
"\"": [bbj_rule1,],
"(": [bbj_rule18,],
"*": [bbj_rule10,],
"+": [bbj_rule7,],
"-": [bbj_rule8,],
"/": [bbj_rule0,bbj_rule2,bbj_rule9,],
"0": [bbj_rule19,],
"1": [bbj_rule19,],
"2": [bbj_rule19,],
"3": [bbj_rule19,],
"4": [bbj_rule19,],
"5": [bbj_rule19,],
"6": [bbj_rule19,],
"7": [bbj_rule19,],
"8": [bbj_rule19,],
"9": [bbj_rule19,],
":": [bbj_rule17,],
"<": [bbj_rule6,bbj_rule12,bbj_rule13,],
"=": [bbj_rule4,],
">": [bbj_rule5,bbj_rule11,],
"@": [bbj_rule19,],
"A": [bbj_rule19,],
"B": [bbj_rule19,],
"C": [bbj_rule19,],
"D": [bbj_rule19,],
"E": [bbj_rule19,],
"F": [bbj_rule19,],
"G": [bbj_rule19,],
"H": [bbj_rule19,],
"I": [bbj_rule19,],
"J": [bbj_rule19,],
"K": [bbj_rule19,],
"L": [bbj_rule19,],
"M": [bbj_rule19,],
"N": [bbj_rule19,],
"O": [bbj_rule19,],
"P": [bbj_rule19,],
"Q": [bbj_rule19,],
"R": [bbj_rule3,bbj_rule19,],
"S": [bbj_rule19,],
"T": [bbj_rule19,],
"U": [bbj_rule19,],
"V": [bbj_rule19,],
"W": [bbj_rule19,],
"X": [bbj_rule19,],
"Y": [bbj_rule19,],
"Z": [bbj_rule19,],
"^": [bbj_rule14,],
"_": [bbj_rule19,],
"a": [bbj_rule15,bbj_rule19,],
"b": [bbj_rule19,],
"c": [bbj_rule19,],
"d": [bbj_rule19,],
"e": [bbj_rule19,],
"f": [bbj_rule19,],
"g": [bbj_rule19,],
"h": [bbj_rule19,],
"i": [bbj_rule19,],
"j": [bbj_rule19,],
"k": [bbj_rule19,],
"l": [bbj_rule19,],
"m": [bbj_rule19,],
"n": [bbj_rule19,],
"o": [bbj_rule16,bbj_rule19,],
"p": [bbj_rule19,],
"q": [bbj_rule19,],
"r": [bbj_rule19,],
"s": [bbj_rule19,],
"t": [bbj_rule19,],
"u": [bbj_rule19,],
"v": [bbj_rule19,],
"w": [bbj_rule19,],
"x": [bbj_rule19,],
"y": [bbj_rule19,],
"z": [bbj_rule19,],
}
# x.rulesDictDict for bbj mode.
rulesDictDict = {
"bbj_main": rulesDict1,
}
# Import dict for bbj mode.
importDict = {}
|
#
# Time complexity:
#   O(lines*columns) (worst case, where all the neighbours have the same color)
#   O(1) (best case, where no neighbour has the same color)
#
# Space complexity:
#   O(lines*columns) (the stack and the visited set can grow to hold every cell,
#   even though color changes are applied in place)
#
def flood_fill(screen, lines, columns, line, column, color):
def inbound(l, c):
return (l >= 0 and l < lines) and (c >= 0 and c < columns)
def key(l, c):
return "{},{}".format(l, c)
stack = [[line, column]]
visited = set()
while stack:
l, c = stack.pop()
# Mark the cell as visited
visited.add(key(l, c))
        # Schedule a visit to all non-diagonal neighbours
        # that haven't been visited yet and have the same color
neighbours = [
[l-1, c ],
[l+1, c ],
[l , c-1],
[l , c+1]
]
for nl, nc in neighbours:
if inbound(nl, nc) and key(nl, nc) not in visited and screen[nl][nc] == screen[l][c]:
stack.append([nl, nc])
# Paint the current cell
screen[l][c] = color
return screen
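# A small driver sketch (the grid values are made up): the fill replaces the connected
# region of 1s containing cell (0, 0) with color 9.
if __name__ == "__main__":
    grid = [
        [1, 1, 0],
        [1, 0, 0],
        [0, 0, 1],
    ]
    print(flood_fill(grid, 3, 3, 0, 0, 9))  # -> [[9, 9, 0], [9, 0, 0], [0, 0, 1]]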
|
def __residuumSign(self):
if self.outcome == 0:
return -1
else: return 1
|
# -*- coding: utf-8 -*-
class RedisServiceException(Exception):
pass
|
def main(request, response):
headers = [("Content-Type", "text/javascript")]
values = []
for key in request.cookies:
for cookie in request.cookies.get_list(key):
values.append('"%s": "%s"' % (key, cookie.value))
# Update the counter to change the script body for every request to trigger
# update of the service worker.
key = request.GET['key']
counter = request.server.stash.take(key)
if counter is None:
counter = 0
counter += 1
request.server.stash.put(key, counter)
body = """
// %d
self.addEventListener('message', e => {
e.source.postMessage({%s})
});""" % (counter, ','.join(values))
return headers, body
|
import math
from typing import List

class Solution:
def minStickers(self, stickers: List[str], target: str) -> int:
n = len(target)
maxMask = 1 << n
# dp[i] := min # of stickers to spell out i,
# where i is the bit representation of target
dp = [math.inf] * maxMask
dp[0] = 0
for mask in range(maxMask):
if dp[mask] == math.inf:
continue
# try to expand from `mask` by using each sticker
for sticker in stickers:
superMask = mask
for c in sticker:
for i, t in enumerate(target):
# try to apply it on a missing char
if c == t and not (superMask >> i & 1):
superMask |= 1 << i
break
dp[superMask] = min(dp[superMask], dp[mask] + 1)
return -1 if dp[-1] == math.inf else dp[-1]
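# A short driver sketch; the sample follows the widely used "stickers to spell word"
# example, and the expected output of 3 is an assumption of this sketch.
if __name__ == "__main__":
    print(Solution().minStickers(["with", "example", "science"], "thehat"))  # expected 3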
|
# -----------------------------------------------------------
# Copyright (c) 2021. Danil Smirnov
# A positive real number is given. Print its fractional part.
# -----------------------------------------------------------
def get_fractional_part(number: float) -> float:
    return number - int(number)  # fractional part of a positive real
print(get_fractional_part(float(input())))
|
# Given N integers, for each one find the largest palindrome made from the product of two 3-digit numbers that is less than the given integer
palindrome = set()
for i in range(999, 99, -1):
for j in range(999, 99, -1):
if (i*j) % 11 == 0:
s = str(i*j)
if s == s[::-1]:
palindrome.add(i*j)
palindrome = sorted(palindrome)
for _ in range(int(input())):
n = int(input())
begin, end = 0, len(palindrome)-1
while begin <= end:
mid = (begin+end)//2
if palindrome[mid] >= n:
end = mid - 1
else:
begin = mid + 1
print(palindrome[(begin+end)//2])
|
"""
File: booleans.py
Copyright (c) 2016 Callie Enfield
License: MIT
This code was used to simply gain a better understanding of what different boolean expressions will do.
"""
C = 41 #There will be no output. This expression is setting the variable 'C' equal to 41.
C == 40 #The output will be 'False'. C (which is 41) is being compared with 40 for equality.
C != 40 and C < 41 #The output will be 'False', since the second condition (C < 41) is false.
C != 40 or C < 41 #The output will be 'True', since the first condition is true, despite the second condition being false.
not C == 40 #The output will be 'True'. Because of the 'not' at the beginning, the output is reversed. The condition is False, but because of the 'not' the output will be 'True'.
not C > 40 #The output will be 'False'. The condition is true, but because of the 'not' at the beginning of the expression, the output will be reversed.
C <= 41 #The output will be 'True'.
not False #The output will be 'True'.
True and False #The output will be 'False'.
False or True #The output will be 'True'.
False or False or False #The output will be 'False'.
True and True and False #The output will be 'False'.
False == 0 #The output will be 'True'.
True == 0 #The output will be 'False'.
True == 1 #The output will be 'True'.
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Solutions to chapter 5 exercises.
###############################################################################
# chapter5_exercises.py
#
# Revision: 1.00
# Date: 6/29/2021
# Author: Alex
#
# Purpose: Solutions to chapter 5 exercises from "Data Structures and
# Algorithms in Python" by Goodrich et al.
#
###############################################################################
"""
# %% Imports
# Standard system imports
# Related third party imports
# Local application/library specific imports
# %% Reinforcement Exercises
def sum_matrix(list2d):
"""Solution to exercise R-5.11.
Use standard control structures to compute the sum of all numbers in an
n × n data set, represented as a list of lists.
--------------------------------------------------------------------------
Solution:
--------------------------------------------------------------------------
I will create an nxn array, the values of which range from 1 to n^2. The
sum of this matrix then is the sum of the first n^2 integers. The formula
to calculate this sum is then: n^2 * (n^2 + 1) / 2.
This will allow me to verify that my solution works as expected.
"""
total = 0
for row in list2d:
for element in row:
total += element
return total
def sum_matrix2(list2d):
"""Solution to exercise R-5.12.
Describe how the built-in sum function can be combined with Python’s
comprehension syntax to compute the sum of all numbers in an n × n data
set, represented as a list of lists.
--------------------------------------------------------------------------
Solution:
--------------------------------------------------------------------------
I use a generator comprehension to save memory, as I am only interested in
the resulting sum after iterating through all of the values in the matrix.
The inner sum() function sums each list representing a row in the matrix,
and the outer sum() function sums these sums, giving the final sum.
"""
return sum(sum(row) for row in list2d)
# %% Project Exercises
class Matrix:
"""Solution to exercise P-5.33.
Write a Python program for a matrix class that can add and multiply two-
dimensional arrays of numbers, assuming the dimensions agree
appropriately for the operation.
--------------------------------------------------------------------------
Solution:
--------------------------------------------------------------------------
I used Python's dunder methods to overload the '+' and '@' operators for
addition and matrix multiplication, respectively. I also used the
__getitem__ and __setitem__ dunder methods to allow the user to access the
matrix values via 2D index. Finally, I created representations of the
Matrix() object using the __repr__ and __str__ dunder methods.
"""
def __init__(self, matrix):
"""Accept a 2D list of lists and calculates its shape."""
self._matrix = matrix
self._rows = len(matrix)
self._cols = len(matrix[0])
def __add__(self, other):
"""Perform matrix addition if matrices have compatible dimensions."""
assert self._rows == other._rows, 'Matrix dimensions not compatible'
assert self._cols == other._cols, 'Matrix dimensions not compatible'
new_mat = [[0] * self._cols for j in range(self._rows)]
for i, row in enumerate(self._matrix):
for j, element in enumerate(row):
new_mat[i][j] = element + other._matrix[i][j]
return Matrix(new_mat)
def __matmul__(self, other):
"""Perform matrix multiplication if dimensions are compatible."""
assert self._cols == other._rows, 'Matrix dimensions not compatible'
new_mat = [[0] * other._cols for j in range(self._rows)]
for row in range(self._rows):
for col in range(other._cols):
total = 0
for k in range(self._cols):
total += self._matrix[row][k] * other._matrix[k][col]
new_mat[row][col] = total
return Matrix(new_mat)
def __getitem__(self, idx_tup):
"""Return data located at (row, column) indices."""
row_idx, col_idx = idx_tup
return self._matrix[row_idx][col_idx]
def __setitem__(self, idx_tup, value):
"""Set data located at (row, column) indices to supplied value."""
row_idx, col_idx = idx_tup
self._matrix[row_idx][col_idx] = value
def __repr__(self):
"""Matrix representation defined as class and instance variables."""
return str(self.__class__) + ", " + str(self.__dict__)
def __str__(self):
"""Representation of 2D list formatted for pretty printing."""
output = ['[']
for row in self._matrix:
output.append(str(row))
output.append(',\n ')
output.pop() # Remove last comma and newline
output.append(']')
return ''.join(output)
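# A brief usage sketch of the Matrix class above (the values are hypothetical,
# not from the textbook):
def _demo_matrix():
    """Demonstrate the overloaded operators; not one of the exercises."""
    a = Matrix([[1, 2], [3, 4]])
    b = Matrix([[5, 6], [7, 8]])
    print(a + b)    # element-wise sum: [[6, 8], [10, 12]]
    print(a @ b)    # matrix product:   [[19, 22], [43, 50]]
    print(a[1, 0])  # __getitem__ with a (row, col) tuple: 3
    a[1, 0] = 9     # __setitem__ with a (row, col) tuple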
|
def hello():
print(f"Hello, world!")
if __name__ == '__main__':
hello()
|
class IAuthenticationModule:
""" Provides the base authentication interface for Web client authentication modules. """
def Authenticate(self, challenge, request, credentials):
"""
Authenticate(self: IAuthenticationModule,challenge: str,request: WebRequest,credentials: ICredentials) -> Authorization
        Returns an instance of the System.Net.Authorization class in response to an authentication
challenge from a server.
challenge: The authentication challenge sent by the server.
request: The System.Net.WebRequest instance associated with the challenge.
credentials: The credentials associated with the challenge.
        Returns: A System.Net.Authorization instance containing the authorization message for the request, or
null if the challenge cannot be handled.
"""
pass
def PreAuthenticate(self, request, credentials):
"""
PreAuthenticate(self: IAuthenticationModule,request: WebRequest,credentials: ICredentials) -> Authorization
Returns an instance of the System.Net.Authorization class for an authentication request to a
server.
request: The System.Net.WebRequest instance associated with the authentication request.
credentials: The credentials associated with the authentication request.
        Returns: A System.Net.Authorization instance containing the authorization message for the request.
"""
pass
def __init__(self, *args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
AuthenticationType = property(
lambda self: object(), lambda self, v: None, lambda self: None
)
"""Gets the authentication type provided by this authentication module.
Get: AuthenticationType(self: IAuthenticationModule) -> str
"""
CanPreAuthenticate = property(
lambda self: object(), lambda self, v: None, lambda self: None
)
"""Gets a value indicating whether the authentication module supports preauthentication.
Get: CanPreAuthenticate(self: IAuthenticationModule) -> bool
"""
|
js = """
const quote = String.fromCharCode(34);
const newline = String.fromCharCode(10);
const marker = quote + quote + quote;
const quine = 'js = ' + marker + js + marker + newline + 'py = ' + marker + py + marker + py;
exports.handler = async (body, ctx) => {
return new ctx.HTTPResponse({
body: Buffer.from(quine),
});
};
"""
py = """
backtick = chr(96)
newline = chr(10)
quine = 'const js = ' + backtick + js + backtick + ';' + newline + 'const py = ' + backtick + py + backtick + ';' + js
def handler(body, ctx):
return ctx.HTTPResponse(body='{}'.format(quine))
"""
backtick = chr(96)
newline = chr(10)
quine = 'const js = ' + backtick + js + backtick + ';' + newline + 'const py = ' + backtick + py + backtick + ';' + js
def handler(body, ctx):
return ctx.HTTPResponse(body='{}'.format(quine))
|
# 8-3
def make_shirt(size, string):
"""Make shirt"""
print('Size: ' + size + ', String: ' + string)
make_shirt('M', 'Hello, World')
make_shirt(size='M', string='Hello, World again')
# 8-4
def make_shirt(size='L', string='I love Python'):
"""Make python shirt"""
print('Size: ' + size + ', String: ' + string)
make_shirt()
make_shirt(size='M')
make_shirt(string='Hello, World')
# 8-5
def describe_city(name='guangzhou', country='china'):
"""Describe a city you lived in"""
print(name.title() + ' is in ' + country.title())
describe_city('Nanjing')
describe_city(country='united states')
describe_city('palo alto', 'united states')
|
variant = dict(
mlflow_uri="http://128.2.210.74:8080",
gpu=False,
algorithm="PPO",
version="normal",
actor_width=64, # Need to tune
critic_width=256,
replay_buffer_size=int(3E3),
algorithm_kwargs=dict(
min_num_steps_before_training=0,
num_epochs=150,
num_eval_steps_per_epoch=1000,
num_train_loops_per_epoch=10,
num_expl_steps_per_train_loop=2048,
num_trains_per_train_loop=100,
batch_size=256,
max_path_length=1000,
clear_buffer_every_train_loop=True,
),
trainer_kwargs=dict(
epsilon=0.2, # Need to tune
discount=.99, # Need to tune
intrinsic_discount=.9999,
policy_lr=3E-4, # Need to tune
        val_lr=3E-4,  # No need to use a different learning rate
use_rnd=False,
rnd_coef=5,
predictor_update_proportion=0.05,
),
rnd_kwargs=dict(
rnd_output_size=2,
rnd_lr=3E-4,
rnd_latent_size=2,
use_normaliser=True, # Specifies whether to use observation normalisation for actor & critic
),
target_kwargs=dict(
tdlambda=0.95,
target_lookahead=15,
use_dones_for_rnd_critic=False,
),
policy_kwargs=dict(
std=0.1, # This is a non-learnable constant if set to a scalar value
),
)
# env_variant = dict(
# env_str='Swimmer-v2',
# )
env_variant = dict(
env_str='agnosticmaas-v0',
lam=1,
sigma2=1,
agent_lambda=2,
num_hypotheses=30,
num_timesteps=150,
num_EA_iterations=10,
EA_tolerance=0.0001,
cost_iterations=10,
upper_limit_N=10,
log_space_resolution=100,
MLE_regularizer=.1,
WASSERSTEIN_ITERS=100,
verbose=False,
adaptive_grid=False,
direct_wasserstein=True,
fisher_in_state=True,
reward_shaping=False,
)
|
#
# PySNMP MIB module ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 18:50:07 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, ValueSizeConstraint, ConstraintsUnion, SingleValueConstraint, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ValueSizeConstraint", "ConstraintsUnion", "SingleValueConstraint", "ValueRangeConstraint")
etsysModules, = mibBuilder.importSymbols("ENTERASYS-MIB-NAMES", "etsysModules")
InetAddress, InetAddressType = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddress", "InetAddressType")
ObjectGroup, ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ObjectGroup", "ModuleCompliance", "NotificationGroup")
IpAddress, Counter32, iso, MibScalar, MibTable, MibTableRow, MibTableColumn, Integer32, Bits, ModuleIdentity, TimeTicks, ObjectIdentity, MibIdentifier, Counter64, Unsigned32, NotificationType, Gauge32 = mibBuilder.importSymbols("SNMPv2-SMI", "IpAddress", "Counter32", "iso", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Integer32", "Bits", "ModuleIdentity", "TimeTicks", "ObjectIdentity", "MibIdentifier", "Counter64", "Unsigned32", "NotificationType", "Gauge32")
RowStatus, TextualConvention, DisplayString, TruthValue = mibBuilder.importSymbols("SNMPv2-TC", "RowStatus", "TextualConvention", "DisplayString", "TruthValue")
etsysRadiusAcctClientMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27))
etsysRadiusAcctClientMIB.setRevisions(('2009-08-07 15:48', '2004-11-12 15:23', '2004-09-09 14:37', '2004-08-30 15:55', '2004-08-25 15:03', '2002-09-13 19:30',))
if mibBuilder.loadTexts: etsysRadiusAcctClientMIB.setLastUpdated('200908071548Z')
if mibBuilder.loadTexts: etsysRadiusAcctClientMIB.setOrganization('Enterasys Networks')
etsysRadiusAcctClientMIBObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 1))
etsysRadiusAcctClientEnable = MibScalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('disable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysRadiusAcctClientEnable.setStatus('current')
etsysRadiusAcctClientUpdateInterval = MibScalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)).clone(1800)).setUnits('seconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysRadiusAcctClientUpdateInterval.setStatus('current')
etsysRadiusAcctClientIntervalMinimum = MibScalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(60, 2147483647)).clone(600)).setUnits('seconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysRadiusAcctClientIntervalMinimum.setStatus('current')
etsysRadiusAcctClientServerTable = MibTable((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 1, 4), )
if mibBuilder.loadTexts: etsysRadiusAcctClientServerTable.setStatus('current')
etsysRadiusAcctClientServerEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 1, 4, 1), ).setIndexNames((0, "ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerIndex"))
if mibBuilder.loadTexts: etsysRadiusAcctClientServerEntry.setStatus('current')
etsysRadiusAcctClientServerIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 1, 4, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)))
if mibBuilder.loadTexts: etsysRadiusAcctClientServerIndex.setStatus('current')
etsysRadiusAcctClientServerAddressType = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 1, 4, 1, 2), InetAddressType().clone('ipv4')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysRadiusAcctClientServerAddressType.setStatus('current')
etsysRadiusAcctClientServerAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 1, 4, 1, 3), InetAddress().subtype(subtypeSpec=ValueSizeConstraint(1, 64))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysRadiusAcctClientServerAddress.setStatus('current')
etsysRadiusAcctClientServerPortNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 1, 4, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(1813)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysRadiusAcctClientServerPortNumber.setStatus('current')
etsysRadiusAcctClientServerSecret = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 1, 4, 1, 5), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysRadiusAcctClientServerSecret.setStatus('current')
etsysRadiusAcctClientServerSecretEntered = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 1, 4, 1, 6), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etsysRadiusAcctClientServerSecretEntered.setStatus('current')
etsysRadiusAcctClientServerRetryTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 1, 4, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 10)).clone(5)).setUnits('seconds').setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysRadiusAcctClientServerRetryTimeout.setStatus('current')
etsysRadiusAcctClientServerRetries = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 1, 4, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 20)).clone(2)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysRadiusAcctClientServerRetries.setStatus('current')
etsysRadiusAcctClientServerClearTime = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 1, 4, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysRadiusAcctClientServerClearTime.setStatus('deprecated')
etsysRadiusAcctClientServerStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 1, 4, 1, 10), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysRadiusAcctClientServerStatus.setStatus('current')
etsysRadiusAcctClientServerUpdateInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 1, 4, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(-1, -1), ValueRangeConstraint(0, 2147483647), )).clone(-1)).setUnits('seconds').setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysRadiusAcctClientServerUpdateInterval.setStatus('current')
etsysRadiusAcctClientServerIntervalMinimum = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 1, 4, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(-1, -1), ValueRangeConstraint(60, 2147483647), )).clone(-1)).setUnits('seconds').setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysRadiusAcctClientServerIntervalMinimum.setStatus('current')
etsysRadiusAcctClientMIBConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 2))
etsysRadiusAcctClientMIBCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 2, 1))
etsysRadiusAcctClientMIBGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 2, 2))
etsysRadiusAcctClientMIBGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 2, 2, 1)).setObjects(("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientEnable"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientUpdateInterval"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientIntervalMinimum"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerAddressType"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerAddress"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerPortNumber"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerSecret"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerSecretEntered"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerRetryTimeout"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerRetries"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerClearTime"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
etsysRadiusAcctClientMIBGroup = etsysRadiusAcctClientMIBGroup.setStatus('deprecated')
etsysRadiusAcctClientMIBGroupV2 = ObjectGroup((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 2, 2, 2)).setObjects(("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientEnable"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientUpdateInterval"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientIntervalMinimum"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerAddressType"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerAddress"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerPortNumber"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerSecret"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerSecretEntered"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerRetryTimeout"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerRetries"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
etsysRadiusAcctClientMIBGroupV2 = etsysRadiusAcctClientMIBGroupV2.setStatus('deprecated')
etsysRadiusAcctClientMIBGroupV3 = ObjectGroup((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 2, 2, 3)).setObjects(("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientEnable"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientUpdateInterval"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientIntervalMinimum"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerAddressType"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerAddress"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerPortNumber"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerSecret"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerSecretEntered"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerRetryTimeout"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerRetries"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerStatus"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerIntervalMinimum"), ("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientServerUpdateInterval"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
etsysRadiusAcctClientMIBGroupV3 = etsysRadiusAcctClientMIBGroupV3.setStatus('current')
etsysRadiusAcctClientMIBCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 2, 1, 2)).setObjects(("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientMIBGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
etsysRadiusAcctClientMIBCompliance = etsysRadiusAcctClientMIBCompliance.setStatus('deprecated')
etsysRadiusAcctClientMIBComplianceV2 = ModuleCompliance((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 2, 1, 3)).setObjects(("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientMIBGroupV2"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
etsysRadiusAcctClientMIBComplianceV2 = etsysRadiusAcctClientMIBComplianceV2.setStatus('deprecated')
etsysRadiusAcctClientMIBComplianceV3 = ModuleCompliance((1, 3, 6, 1, 4, 1, 5624, 1, 2, 27, 2, 1, 4)).setObjects(("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", "etsysRadiusAcctClientMIBGroupV3"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
etsysRadiusAcctClientMIBComplianceV3 = etsysRadiusAcctClientMIBComplianceV3.setStatus('current')
mibBuilder.exportSymbols("ENTERASYS-RADIUS-ACCT-CLIENT-EXT-MIB", etsysRadiusAcctClientMIBComplianceV2=etsysRadiusAcctClientMIBComplianceV2, etsysRadiusAcctClientServerClearTime=etsysRadiusAcctClientServerClearTime, etsysRadiusAcctClientServerPortNumber=etsysRadiusAcctClientServerPortNumber, etsysRadiusAcctClientServerAddressType=etsysRadiusAcctClientServerAddressType, etsysRadiusAcctClientIntervalMinimum=etsysRadiusAcctClientIntervalMinimum, etsysRadiusAcctClientServerAddress=etsysRadiusAcctClientServerAddress, etsysRadiusAcctClientServerSecret=etsysRadiusAcctClientServerSecret, etsysRadiusAcctClientMIBCompliances=etsysRadiusAcctClientMIBCompliances, etsysRadiusAcctClientServerIndex=etsysRadiusAcctClientServerIndex, etsysRadiusAcctClientServerRetryTimeout=etsysRadiusAcctClientServerRetryTimeout, etsysRadiusAcctClientMIB=etsysRadiusAcctClientMIB, etsysRadiusAcctClientServerUpdateInterval=etsysRadiusAcctClientServerUpdateInterval, PYSNMP_MODULE_ID=etsysRadiusAcctClientMIB, etsysRadiusAcctClientMIBObjects=etsysRadiusAcctClientMIBObjects, etsysRadiusAcctClientMIBGroupV2=etsysRadiusAcctClientMIBGroupV2, etsysRadiusAcctClientMIBGroup=etsysRadiusAcctClientMIBGroup, etsysRadiusAcctClientServerSecretEntered=etsysRadiusAcctClientServerSecretEntered, etsysRadiusAcctClientServerStatus=etsysRadiusAcctClientServerStatus, etsysRadiusAcctClientServerTable=etsysRadiusAcctClientServerTable, etsysRadiusAcctClientMIBCompliance=etsysRadiusAcctClientMIBCompliance, etsysRadiusAcctClientMIBGroupV3=etsysRadiusAcctClientMIBGroupV3, etsysRadiusAcctClientMIBGroups=etsysRadiusAcctClientMIBGroups, etsysRadiusAcctClientEnable=etsysRadiusAcctClientEnable, etsysRadiusAcctClientServerRetries=etsysRadiusAcctClientServerRetries, etsysRadiusAcctClientUpdateInterval=etsysRadiusAcctClientUpdateInterval, etsysRadiusAcctClientServerEntry=etsysRadiusAcctClientServerEntry, etsysRadiusAcctClientServerIntervalMinimum=etsysRadiusAcctClientServerIntervalMinimum, etsysRadiusAcctClientMIBConformance=etsysRadiusAcctClientMIBConformance, etsysRadiusAcctClientMIBComplianceV3=etsysRadiusAcctClientMIBComplianceV3)
|
par = []
impar = []
print('Para o inicio da contagem, inicie com 1 para mostrar os impares e 2 para os pares.')
n1 = int(input('Digite o inico da contagem [ 1 ] [ 2 ]:'))
n2 = int(input('Digite o final da contagem:'))
for c in range(n1, n2+1, 2):
if n1 == 2:
par.append(c)
elif n1 == 1:
impar.append(c)
print(f'Com a contagem de {n1} a {n2}.')
if n1 == 2:
print(f'Temos um total de {par} pares.')
elif n1 == 1:
print(f'E temos o total de {impar} impares.')
|
""" Data structures to store CommCareHQ reports """
class Report(object):
""" This class is a generic object for representing data
intended for specific reports. It is mostly useful so that
we can transform these structures arbitrarily into xml,
csv, json, etc. without changing our report generators
"""
def __init__(self, title=''):
self.title = title
self.generating_url = ''
# should be a list of DataSets
self.datasets = []
def __unicode__(self):
string = "Report: " + unicode(self.title) + "\n"
for dataset in self.datasets:
string = string + unicode(dataset)
return string + "\n\n"
def __str__(self):
return unicode(self)
class DataSet(object):
""" represents a set or multiple sets of data
with a common index (x-axis). So, for example, one dataset
could be composed of registrations per x, visits per x,
closures per x, etc. (x being the same for all sets)
"""
def __init__(self, name=''):
self.name = name
self.params = {}
# should be a list of valuesets
self.valuesets = []
self.indices = ''
def __unicode__(self):
string = "DataSet: " + unicode(self.name) + "\n"
for valueset in self.valuesets:
for value in valueset:
string = string + " " + unicode(value) + "\n"
string = string + "\n\n"
return string
class Values(list):
""" represents a set of index/value pairs """
def __init__(self, name=''):
self.stats = {}
# indices are determined on a per-dataset basis
self.name = name
def run_stats(self, stats):
""" calculates statistics
stats: specifies the statistics to return
Given a list of requested statistics, this function populates
self.stats with the computed values. Currently we only support 'sum',
but one can imagine supporting std dev, mean, variance, etc.
"""
if not stats: return
for stat in stats:
if stat == 'sum':
sum = 0
for v in self:
sum = sum + long(v[-1])
self.stats[stat] = sum
|
# Create a program in which the user types in any expression that uses
# parentheses. Your application should check whether the parentheses in the
# given expression are opened and closed in the correct order.
expr = str(input('Digite a expressao: '))
pilha = list()
for simb in expr:
if simb == '(':
pilha.append('(')
elif simb == ')':
if len(pilha) > 0:
pilha.pop()
else:
pilha.append(')')
break
if len(pilha) == 0:
print('Sua expressao esta valida!')
else:
print('Sua expressao esta errada!')
|
# Python Program To Sort The Elements Of A Dictionary Based On A Key Or Value
'''
Function Name : Sort Elements Of Dictionary Based On Key, Value.
Function Date : 13 Sep 2020
Function Author : Prasad Dangare
Input : String
Output : String
'''
colors = {10: "Red", 35: "Green", 15: "Blue", 25: "White"}
# Sort The Dictionary By Keys i.e. 0th Elements
c1 = sorted(colors.items(), key = lambda t: t[0])
print(c1)
# Sort The Dictionary By Values , i.e. 1st Elements
c2 = sorted(colors.items(), key = lambda t: t[1])
print(c2)
|
# Edit this file to change settings
CONFIG = {
# Uncomment the following two lines to set your username and password.
# 'userName': '',
# 'passWord': '',
# The beginDate of the term in %Y-%m-%d
# Should be a Monday
'beginDate': '2019-09-02',
# 'default' sets whether you want to use the newest classTable or not
# if default is False, you need to select which term will you use
'default': True,
    # Due to the dumb programmers of the jiaowu system, the class table comes
    # in several different formats. The application has not implemented auto
    # mode detection yet, so if one mode doesn't work, try the other modes.
# Modes available:
# 1. Used in 2019 spring for 187324
# 2. Used in 2019 autumn for 183911
# 0. Custom mode, your custom regexp will be used.
'regexMode': 2,
# Only available when regexMode is set to custom mode.
# You need these named groups to make things work properly:
# tachername, begindate, enddate, location
# These groups are optional:
# classname, classtime
# In most occasions, classname group is optional
# If you encounter exceptions, add a classname group.
    # classtime is a number group split by ','
    # It's for capturing the precise time slots in classtime (for example: 第1,2节)
# A valid classtime group will enable precise time calculation
# (Useful for PE lessons!).
# See main.py for some samples.
# You are welcome to make PRs to commit your own working modes.
'customRegex': r''
}
|
# Mergesort is the best sorting algorithm for use with a linked-list
# Time Complexity O(nlogn)
# Merging will require log(n) doublings from subarrays of size (1) to a single array of size length(n),
# where each pass will require (n) iterations to compare and sort each element
# Space Complexity O(n) arrays
# O(1) linked-list
def mergesort(a):
if len(a) > 1: # divide array into subarrays of length one
m = len(a)//2 # floor division returns a truncated integer by rounding towards negative infinity
l = a[:m]
r = a[m:]
        mergesort(l) # create auxiliary arrays requiring O(n) memory in addition to the call stack overhead
mergesort(r)
i=0 # left half index
j=0 # right half index
k=0 # merge array index
while i < len(l) and j < len(r): # WHILE left and right halves both contain elements
if l[i] < r[j]:
a[k]=l[i]
i += 1
else:
a[k]=r[j]
j += 1
k += 1
while i < len(l): # WHILE only left half contains elements
a[k]=l[i]
i += 1
k += 1
while j < len(r): # WHILE only right half contains elements
a[k]=r[j]
j += 1
k += 1
x = [68,99,49,54,26,93,17,1,0,33,77]
print("Sorting {}" .format(x))
mergesort(x)
print("Result is {}" .format(x))
|
"""VTK/FURY Tools
This module implements a set of tools to enhance VTK with new functionalities.
"""
class Uniform:
"""This creates a uniform shader variable
It's responsible to store the value of a given uniform
variable and call the related vtk_program
"""
def __init__(self, name, uniform_type, value):
"""
Parameters
----------
name: str
name of the uniform variable
uniform_type: str
Uniform variable type which will be used inside the shader.
Any of this are valid: 1fv, 1iv, 2f, 2fv, 2i, 3f, 3fv,
3uc, 4f, 4fv, 4uc, GroupUpdateTime, Matrix,
Matrix3x3, Matrix4x4, Matrix4x4v, f, i
        value: float or ndarray
            The value of the shader uniform variable; its shape must match
            uniform_type. For example, if uniform_type is 'f' then value
            should be a float; if uniform_type is '3f' then value should be
            a 1x3 array.
"""
self.name = name
self.value = value
self.uniform_type = uniform_type
self.valid_types = [
'1fv', '1iv', '2f', '2fv', '2i', '3f', '3fv',
'3uc', '4f', '4fv', '4uc', 'GroupUpdateTime', 'Matrix',
'Matrix3x3', 'Matrix4x4', 'Matrix4x4v', 'f', 'i']
if self.uniform_type not in self.valid_types:
raise ValueError(
f"""Uniform type {self.uniform_type} not valid.
                Choose one of these values: {self.valid_types}""")
self.vtk_func_uniform = f'SetUniform{self.uniform_type}'
def execute_program(self, program):
""" Given a shader program, this method
will update the value with the associated uniform variable
in a draw call
Parameters
----------
program: vtkmodules.vtkRenderingOpenGL2.vtkShaderProgram
A shader program which will be used to update the uniform
"""
program.__getattribute__(self.vtk_func_uniform)(
self.name, self.value)
def __repr__(self):
return f'Uniform(name={self.name}, value={self.value})'
class Uniforms:
def __init__(self, uniforms):
"""Creates an object which store and execute an uniform variable.
Parameters
-----------
uniforms: list
List of Uniform objects.
Examples
--------
.. highlight:: python
.. code-block:: python
uniforms = [
Uniform(name='edgeWidth', uniform_type='f', value=edgeWidth)...
]
            CustomUniforms = Uniforms(uniforms)
add_shader_callback(
sq_actor, CustomUniforms)
sq_actor.CustomUniforms = CustomUniforms
sq_actor.CustomUniforms.edgeWidth = 0.5
"""
self.uniforms = uniforms
for obj in self.uniforms:
# if isinstance(obj, Uniform) is False:
# raise ValueError(f"""{obj} it's not an Uniform object""")
setattr(self, obj.name, obj)
def __call__(self, _caller, _event, calldata=None,):
"""This method should be used as a callback for a vtk Observer
Execute the shader program with the given uniform variables.
"""
program = calldata
if program is None:
return None
for uniform in self.uniforms:
uniform.execute_program(program)
def __repr__(self):
return f'Uniforms({[obj.name for obj in self.uniforms]})'
|
def solution(inp):
data = [row.split() for row in inp.splitlines()]
count = 0
for passphrase in data:
if len(passphrase) == len(set(passphrase)):
count = count + 1
return count
def main():
with open('input.txt', 'r') as f:
inp = f.read()
print('[*] Reading input from input.txt...')
print('[*] The solution is: ')
print(solution(inp))
if __name__=='__main__':
main()
|
# -*- coding: utf-8 -*-
__author__ = "venkat"
__author_email__ = "venkatram0273@gmail.com"
|
# version code 80e56511a793+
# Please fill out this stencil and submit using the provided submission script.
# Be sure that the file voting_record_dump109.txt is in the matrix/ directory.
## 1: (Task 2.12.1) Create Voting Dict
def create_voting_dict(strlist):
"""
Input: a list of strings. Each string represents the voting record of a senator.
The string consists of
- the senator's last name,
- a letter indicating the senator's party,
- a couple of letters indicating the senator's home state, and
- a sequence of numbers (0's, 1's, and negative 1's) indicating the senator's
votes on bills
all separated by spaces.
Output: A dictionary that maps the last name of a senator
to a list of numbers representing the senator's voting record.
Example:
>>> vd = create_voting_dict(['Kennedy D MA -1 -1 1 1', 'Snowe R ME 1 1 1 1'])
>>> vd == {'Snowe': [1, 1, 1, 1], 'Kennedy': [-1, -1, 1, 1]}
True
You can use the .split() method to split each string in the
strlist into a list; the first element of the list will be the senator's
name, the second will be his/her party affiliation (R or D), the
third will be his/her home state, and the remaining elements of
the list will be that senator's voting record on a collection of bills.
You can use the built-in procedure int() to convert a string
representation of an integer (e.g. '1') to the actual integer
(e.g. 1).
The lists for each senator should preserve the order listed in voting data.
In case you're feeling clever, this can be done in one line.
"""
pass
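    # One possible one-line implementation, consistent with the docstring
    # example above (left as a comment so the stencil stays unfilled):
    # return {s.split()[0]: [int(v) for v in s.split()[3:]] for s in strlist}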
## 2: (Task 2.12.2) Policy Compare
def policy_compare(sen_a, sen_b, voting_dict):
"""
Input: last names of sen_a and sen_b, and a voting dictionary mapping senator
names to lists representing their voting records.
Output: the dot-product (as a number) representing the degree of similarity
between two senators' voting policies
Example:
>>> voting_dict = {'Fox-Epstein':[-1,-1,-1,1],'Ravella':[1,1,1,1]}
>>> policy_compare('Fox-Epstein','Ravella', voting_dict)
-2
    The code should correctly compute the dot-product even if the numbers are not all in {0,1,-1}.
>>> policy_compare('A', 'B', {'A':[100,10,1], 'B':[2,5,3]})
253
You should definitely try to write this in one line.
"""
pass
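    # A possible one-liner for the dot-product described above (again left as
    # a comment so the stencil stays unfilled):
    # return sum(a * b for a, b in zip(voting_dict[sen_a], voting_dict[sen_b]))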
## 3: (Task 2.12.3) Most Similar
def most_similar(sen, voting_dict):
"""
Input: the last name of a senator, and a dictionary mapping senator names
to lists representing their voting records.
Output: the last name of the senator whose political mindset is most
like the input senator (excluding, of course, the input senator
him/herself). Resolve ties arbitrarily.
Example:
>>> vd = {'Klein': [1,1,1], 'Fox-Epstein': [1,-1,0], 'Ravella': [-1,0,0]}
>>> most_similar('Klein', vd)
'Fox-Epstein'
>>> vd == {'Klein': [1,1,1], 'Fox-Epstein': [1,-1,0], 'Ravella': [-1,0,0]}
True
>>> vd = {'a': [1,1,1,0], 'b': [1,-1,0,0], 'c': [-1,0,0,0], 'd': [-1,0,0,1], 'e': [1, 0, 0,0]}
>>> most_similar('c', vd)
'd'
Note that you can (and are encouraged to) re-use your policy_compare procedure.
"""
return ""
## 4: (Task 2.12.4) Least Similar
def least_similar(sen, voting_dict):
"""
Input: the last name of a senator, and a dictionary mapping senator names
to lists representing their voting records.
Output: the last name of the senator whose political mindset is least like the input
senator.
Example:
>>> vd = {'a': [1,1,1], 'b': [1,-1,0], 'c': [-1,0,0]}
>>> least_similar('a', vd)
'c'
>>> vd == {'a': [1,1,1], 'b': [1,-1,0], 'c': [-1,0,0]}
True
>>> vd = {'a': [-1,0,0], 'b': [1,0,0], 'c': [-1,1,0], 'd': [-1,1,1]}
>>> least_similar('c', vd)
'b'
"""
pass
## 5: (Task 2.12.5) Chafee, Santorum
most_like_chafee = ''
least_like_santorum = ''
## 6: (Task 2.12.7) Most Average Democrat
def find_average_similarity(sen, sen_set, voting_dict):
"""
Input: the name of a senator, a set of senator names, and a voting dictionary.
Output: the average dot-product between sen and those in sen_set.
Example:
>>> vd = {'Klein':[1,1,1], 'Fox-Epstein':[1,-1,0], 'Ravella':[-1,0,0], 'Oyakawa':[-1,-1,-1], 'Loery':[0,1,1]}
>>> sens = {'Fox-Epstein','Ravella','Oyakawa','Loery'}
>>> find_average_similarity('Klein', sens, vd)
-0.5
>>> sens == {'Fox-Epstein','Ravella', 'Oyakawa', 'Loery'}
True
>>> vd == {'Klein':[1,1,1], 'Fox-Epstein':[1,-1,0], 'Ravella':[-1,0,0], 'Oyakawa':[-1,-1,-1], 'Loery':[0,1,1]}
True
"""
return ...
most_average_Democrat = ... # give the last name (or code that computes the last name)
## 7: (Task 2.12.8) Average Record
def find_average_record(sen_set, voting_dict):
"""
Input: a set of last names, a voting dictionary
Output: a vector containing the average components of the voting records
of the senators in the input set
Example:
>>> voting_dict = {'Klein': [-1,0,1], 'Fox-Epstein': [-1,-1,-1], 'Ravella': [0,0,1]}
>>> senators = {'Fox-Epstein','Ravella'}
>>> find_average_record(senators, voting_dict)
[-0.5, -0.5, 0.0]
>>> voting_dict == {'Klein': [-1,0,1], 'Fox-Epstein': [-1,-1,-1], 'Ravella': [0,0,1]}
True
>>> senators
{'Fox-Epstein','Ravella'}
>>> d = {'c': [-1,-1,0], 'b': [0,1,1], 'a': [0,1,1], 'e': [-1,-1,1], 'd': [-1,1,1]}
>>> find_average_record({'a','c','e'}, d)
[-0.6666666666666666, -0.3333333333333333, 0.6666666666666666]
>>> find_average_record({'a','c','e','b'}, d)
[-0.5, 0.0, 0.75]
>>> find_average_record({'a'}, d)
[0.0, 1.0, 1.0]
"""
return ...
average_Democrat_record = ... # give the vector as a list
## 8: (Task 2.12.9) Bitter Rivals
def bitter_rivals(voting_dict):
"""
Input: a dictionary mapping senator names to lists representing
their voting records
Output: a tuple containing the two senators who most strongly
disagree with one another.
Example:
>>> voting_dict = {'Klein':[-1,0,1], 'Fox-Epstein':[-1,-1,-1], 'Ravella':[0,0,1], 'Oyakawa':[1,1,1], 'Loery':[1,1,0]}
>>> br = bitter_rivals(voting_dict)
>>> br == ('Fox-Epstein', 'Oyakawa') or br == ('Oyakawa', 'Fox-Epstein')
True
"""
return (..., ...)
|
valores = list()
maior = menor = 0
for index in range(0, 5):
valores.append(int(input(f'Digite um valor para a posição {index}: ')))
print('-=-' * 30)
print(f'Você digitou os valores {valores}')
print(f'O maior valor digitado foi {max(valores)} nas posições ', end='')
for index, valor in enumerate(valores):
maior = max(valores)
if valor == maior:
print(f'{index}... ', end='')
print(f'\nO menor valor digitado foi {min(valores)} nas posições', end='')
for index, valor in enumerate(valores):
menor = min(valores)
if valor == menor:
print(f' {index}... ', end='')
|
while True:
n1 = float(input("\n Escreva o lado 1 do triângulo: "))
n2 = float(input(" Escreva o lado 2 do triângulo: "))
n3 = float(input(" Escreva o lado 3 do triângulo: "))
if(n1 > 0 and n2 > 0 and n3 > 0 and n1 <= n2+n3 and n2 <= n1+n3 and n3 <= n1+n2):
if(n1 == n2 and n1 != n3 or n1 == n3 and n1 != n2 or n2 == n3 and n1 != n2):
print("\n - O Triângulo é Isósceles !")
elif(n1 == n2 and n2 == n3):
print("\n - O Triângulo é Equilátero !")
else:
print("\n - O Triângulo é Escaleno !")
print("-=-"*31)
else:
print(" - Não pode formar um triângulo")
|
s1=set([1,3,7,94])
s2=set([2,3])
print(s1)
print(s2)
print(s1.intersection(s2))
print(s1.difference(s2))
print(s2.difference(s1))
print(s1.symmetric_difference(s2))
print(s1.union(s2))
s1.difference_update(s2) #S1 becomes equal to the difference
print(s1)
s1=set([1,3])
s1.discard(1)
s1.remove(3)
print(s1)
s1.add(5)
print(s1)
t=([6,7])
s2.update(t)
print(s2)
x=s2.pop()
print(x)
|
def convert_to_alt_caps(message):
lower = message.lower()
upper = message.upper()
data = []
space_offset = 0
for i in range(len(lower)):
if not lower[i].isalpha():
space_offset += 1
if (i + space_offset) % 2 == 0:
data.append(lower[i])
else:
data.append(upper[i])
return ''.join(data)
def main():
input_str = "Pikachu"
print(input_str)
print(convert_to_alt_caps(input_str))
input_str = "section is AWESOME"
print(input_str)
print(convert_to_alt_caps(input_str))
if __name__ == '__main__':
main()
|
TIME_FORMAT = ('day', 'hour')
OPERATORS = {
'==': lambda x, y: x == y,
'<=': lambda x, y: x <= y,
'>=': lambda x, y: x >= y,
'>': lambda x, y: x > y,
'<': lambda x, y: x < y,
}
class TimeDelta(object):
def __init__(self, amount, fmt, operator):
if int(amount) < 0:
            raise ValueError('amount must not be negative')
if fmt not in TIME_FORMAT:
raise ValueError(
'Time format must be one of {}'
''.format(', '.join(TIME_FORMAT))
)
if operator not in OPERATORS.keys():
raise ValueError(
                'Comparison type must be one of {}'
''.format(', '.join(OPERATORS.keys()))
)
if int(amount) > 1:
fmt += 's'
self.amount = amount
self.format = fmt
self.operator = operator
def parse(self, results):
d = []
for content in results:
# O(N) - hacky as fuck! :(
val, fmt = content['age'].split(' ')
if fmt == self.format and \
OPERATORS[self.operator](int(val), int(self.amount)):
d.append(content)
return d
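# A brief usage sketch (assumes each result carries an 'age' string such as
# "3 days", which is what parse() splits on; the sample data is hypothetical):
if __name__ == '__main__':
    results = [{'age': '3 days'}, {'age': '1 day'}, {'age': '5 hours'}]
    older_than_two_days = TimeDelta(2, 'day', '>')
    print(older_than_two_days.parse(results))  # -> [{'age': '3 days'}]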
|
# -*- coding: utf-8 -*-
"""
"""
#make noarg a VERY unique value. Was using empty tuple, but python caches it, this should be quite unique
def unique_generator():
def UNIQUE_CLOSURE():
return UNIQUE_CLOSURE
return ("<UNIQUE VALUE>", UNIQUE_CLOSURE,)
NOARG = unique_generator()
|
print('-='*30)
print('Olá, seja bem vindo!')
nome = str(input('Digite o seu nome completo: ')).strip()
print('')
print('Seu nome em letras maiúsculas: {}'.format(nome.upper()))
print('Seu nome em letras minúsculas: {}'.format(nome.lower()))
print('Total de letras do seu nome: {}'.format(len(nome.replace(' ', ''))))
dividido = nome.split()
print('E por fim, quantas letras tem seu primeiro nome: {}!'.format(len(dividido[0])))
print('')
print('Até logo {}!'.format(nome))
print('-='*30)
|
class Solution:
def mostCommonWord(self, paragraph: str, banned: List[str]) -> str:
banset = set(banned)
for c in "!?',;.":
paragraph = paragraph.replace(c, ' ')
cnt = Counter(word for word in paragraph.lower().split())
ans, best = '', 0
for word in cnt:
if cnt[word] > best and word not in banset:
ans, best = word, cnt[word]
return ans
|
#program to input a number, if it is not a number generate an error message.
while True:
try:
a = int(input("Input a number: "))
break
except ValueError:
print("\nThis is not a number. Try again...")
print()
|
# Introduction to deep learning with Python
# A. Forward propagation
# The process of working from the input layer through the hidden layer(s) to the final output layer.
# Values in the input layer are multiplied by the weights that connect them to the nodes of the hidden layer.
# The values of these hidden-layer nodes are then multiplied by the weights that connect them to the output layer to create the prediction.
# The first two chapters aim to predict the number of bank transactions that a customer will make.
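# The snippets below come from an exercise environment where numpy, input_data and weights are pre-loaded.
# A minimal stand-in so sections A and B run as written (the numbers are hypothetical, not the course data;
# section C expects a different weights dictionary with keys such as 'node_0_0'):
import numpy as np
input_data = np.array([3, 5])
weights = {'node_0': np.array([2, 4]),
           'node_1': np.array([4, -5]),
           'output': np.array([2, 7])}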
# Calculate node 0 value: node_0_value
node_0_value = (input_data * weights['node_0']).sum()
# Calculate node 1 value: node_1_value
node_1_value = (input_data * weights['node_1']).sum()
# Put node values into array: hidden_layer_outputs
hidden_layer_outputs = np.array([node_0_value, node_1_value])
# Calculate output: output
output = (hidden_layer_outputs * weights['output']).sum()
# Print output
print(output)
# B. Activation functions
# Used to introduce the non-linear interactions that occur in real data, moving beyond simple linear relationships.
# These functions are applied at each node to convert the inputs into an output value for that node.
# 1. The Rectified Linear Activation Function (ReLU) takes the maximum of the input and zero, so nodes only output
# non-negative values; we don't want to predict negative transaction counts.
def relu(input):
'''Define your relu activation function here'''
# Calculate the value for the output of the relu function: output
output = max(input, 0)
# Return the value just calculated
return(output)
# Calculate node 0 value: node_0_output
node_0_input = (input_data * weights['node_0']).sum()
node_0_output = relu(node_0_input)
# Calculate node 1 value: node_1_output
node_1_input = (input_data * weights['node_1']).sum()
node_1_output = relu(node_1_input)
# Put node values into array: hidden_layer_outputs
hidden_layer_outputs = np.array([node_0_output, node_1_output])
# Calculate model output (do not apply relu)
model_output = (hidden_layer_outputs * weights['output']).sum()
# Print model output
print(model_output)
# 2. Applying the network to many observations
# Define predict_with_network()
def predict_with_network(input_data_row, weights):
# Calculate node 0 value
node_0_input = (input_data_row * weights['node_0']).sum()
node_0_output = relu(node_0_input)
# Calculate node 1 value
node_1_input = (input_data_row * weights['node_1']).sum()
node_1_output = relu(node_1_input)
# Put node values into array: hidden_layer_outputs
hidden_layer_outputs = np.array([node_0_output, node_1_output])
# Calculate model output
input_to_final_layer = (hidden_layer_outputs * weights['output']).sum()
model_output = relu(input_to_final_layer)
# Return model output
return(model_output)
# Create empty list to store prediction results
results = []
for input_data_row in input_data:
# Append prediction to results
results.append(predict_with_network(input_data_row, weights))
# Print results
print(results)
# C. Deeper networks
# 1. Multi-layer neural network. The hidden layers build on top of each other. In this example there are four layers: input, hidden_1, hidden_2 and output.
# The model calculates predictions by working through each layer in turn until the prediction reaches the output layer.
def predict_with_network(input_data):
# Calculate node 0 in the first hidden layer
node_0_0_input = (input_data * weights['node_0_0']).sum()
node_0_0_output = relu(node_0_0_input)
# Calculate node 1 in the first hidden layer
node_0_1_input = (input_data * weights['node_0_1']).sum()
node_0_1_output = relu(node_0_1_input)
# Put node values into array: hidden_0_outputs
hidden_0_outputs = np.array([node_0_0_output, node_0_1_output])
# Calculate node 0 in the second hidden layer
node_1_0_input = (hidden_0_outputs * weights['node_1_0']).sum()
node_1_0_output = relu(node_1_0_input)
# Calculate node 1 in the second hidden layer
node_1_1_input = (hidden_0_outputs * weights['node_1_1']).sum()
node_1_1_output = relu(node_1_1_input)
# Put node values into array: hidden_1_outputs
hidden_1_outputs = np.array([node_1_0_output, node_1_1_output])
# Calculate model output: model_output
model_output = (hidden_1_outputs * weights['output']).sum()
# Return model_output
return(model_output)
output = predict_with_network(input_data)
print(output)
|
class Destiny2APIError(Exception):
pass
class Destiny2InvalidParameters(Destiny2APIError):
pass
class Destiny2APICooldown(Destiny2APIError):
pass
class Destiny2RefreshTokenError(Destiny2APIError):
pass
class Destiny2MissingAPITokens(Destiny2APIError):
pass
class Destiny2MissingManifest(Destiny2APIError):
pass
|
class Solution:
def twoSum(self, numbers: List[int], target: int) -> List[int]:
dic = {}
for i, num in enumerate(numbers):
if target - num in dic:
return [dic[target - num] + 1, i + 1]
dic[num] = i
|
"""
Представлен список чисел.
Определить элементы списка, не имеющие повторений.
Сформировать итоговый массив чисел, соответствующих требованию.
Элементы вывести в порядке их следования в исходном списке.
Для выполнения задания обязательно использовать генератор.
Пример исходного списка: [2, 2, 2, 7, 23, 1, 44, 44, 3, 2, 10, 7, 4, 11].
Результат: [23, 1, 3, 10, 4, 11]
"""
input_list = [2, 2, 2, 7, 23, 1, 44, 44, 3, 2, 10, 7, 4, 11]
G = (input_list[i]
     for i in range(len(input_list))
     if input_list[i] not in input_list[:i] + input_list[i + 1:])
# check
[print(i) for i in G]
|
# DROP TABLES
songplay_table_drop = "DROP TABLE IF EXISTS songplays;"
user_table_drop = "DROP TABLE IF EXISTS users;"
song_table_drop = "DROP TABLE IF EXISTS songs;"
artist_table_drop = "DROP TABLE IF EXISTS artists;"
time_table_drop = "DROP TABLE IF EXISTS time;"
# CREATE TABLES
songplay_table_create = ("""CREATE TABLE IF NOT EXISTS songplays (
songplay_id SERIAL PRIMARY KEY,
start_time TIMESTAMP NOT NULL,
user_id INT NOT NULL,
level VARCHAR(4),
song_id VARCHAR,
artist_id VARCHAR,
session_id INT NOT NULL,
location TEXT,
user_agent TEXT
)
""")
user_table_create = ("""CREATE TABLE IF NOT EXISTS users (
user_id INT UNIQUE NOT NULL PRIMARY KEY,
first_name TEXT,
last_name TEXT,
gender VARCHAR(1),
level VARCHAR(4)
)
""")
song_table_create = ("""CREATE TABLE IF NOT EXISTS songs (
song_id VARCHAR UNIQUE NOT NULL PRIMARY KEY,
title TEXT,
artist_id VARCHAR,
year INT,
duration NUMERIC
)
""")
artist_table_create = ("""CREATE TABLE IF NOT EXISTS artists (
artist_id VARCHAR UNIQUE NOT NULL PRIMARY KEY,
name TEXT,
location TEXT,
latitude NUMERIC,
longitude NUMERIC
)
""")
time_table_create = ("""CREATE TABLE IF NOT EXISTS time (
start_time TIME UNIQUE NOT NULL,
hour INT,
day INT,
week INT,
month VARCHAR(10),
year INT,
weekday VARCHAR(10)
)
""")
# INSERT RECORDS
songplay_table_insert = ("""INSERT INTO songplays (
start_time,
user_id,
level,
song_id,
artist_id,
session_id,
location,
user_agent
)
VALUES (to_timestamp(%s), %s, %s, %s, %s, %s, %s, %s)
""")
user_table_insert = ("""INSERT INTO users (
user_id,
first_name,
last_name,
gender,
level
)
VALUES (%s, %s, %s, %s, %s)
ON CONFLICT (user_id)
DO UPDATE SET level = EXCLUDED.level
""")
song_table_insert = ("""INSERT INTO songs (
song_id,
title,
artist_id,
year,
duration
)
VALUES (%s, %s, %s, %s, %s)
ON CONFLICT (song_id)
DO NOTHING
""")
artist_table_insert = ("""INSERT INTO artists (
artist_id,
name,
location,
latitude,
longitude
)
VALUES (%s, %s, %s, %s, %s)
ON CONFLICT (artist_id)
DO NOTHING
""")
time_table_insert = ("""INSERT INTO time (
start_time,
hour,
day,
week,
month,
year,
weekday
)
VALUES (%s, %s, %s, %s, %s, %s, %s)
ON CONFLICT (start_time)
DO NOTHING
""")
# FIND SONGS
song_select = ("""SELECT songs.song_id, songs.artist_id
FROM songs JOIN artists ON songs.artist_id = artists.artist_id
WHERE songs.title = (%s) AND artists.name = (%s) AND songs.duration = (%s);
""")
# QUERY LISTS
create_table_queries = [songplay_table_create, user_table_create, song_table_create, artist_table_create, time_table_create]
drop_table_queries = [songplay_table_drop, user_table_drop, song_table_drop, artist_table_drop, time_table_drop]
|
# OpenWeatherMap API Key
weather_api_key = "601b4c14f4ddb46a0080bbfb5ca51d3e"
# Google API Key
g_key = "AIzaSyDNUFB01N6sBwZfPznGBiHayHJrON12pYw"
|
class BaseModel:
def __init__(self):
self.ops = {}
|
# user.py
__all__ = ['User']
class User:
pass
def user_helper_1():
pass
|
description = 'IPC Motor bus device configuration'
group = 'lowlevel'
instrument_values = configdata('instrument.values')
tango_base = instrument_values['tango_base']
# data from instrument.inf
# used for:
# - shutter_gamma (addr 0x31)
# - nok2 (addr 0x32, 0x33)
# - nok3 (addr 0x34, 0x35)
# - nok4 reactor side (addr 0x36)
# - zb0 (addr 0x37)
# - zb1 (addr 0x38)
#
devices = dict(
nokbus1 = device('nicos.devices.vendor.ipc.IPCModBusTango',
tangodevice = tango_base + 'test/ipcsms_a/bio',
lowlevel = True,
),
)
|
def extractMichilunWordpressCom(item):
'''
Parser for 'michilun.wordpress.com'
'''
bad = [
'Recommendations and Reviews',
]
if any([tmp in item['tags'] for tmp in bad]):
return None
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None
tagmap = [
('Side Projects - Scheme of the Official Descendant', 'Scheme of the Official Descendant', 'translated'),
('Song in the Peach Blossoms', 'Song in the Peach Blossoms', 'translated'),
('Onrain (Online - The Novel)', 'Onrain (Online - The Novel)', 'translated'),
('At the End of the Wish', 'At the End of the Wish', 'translated'),
('Bringing Calamity to the Nation', 'Bringing Calamity to the Nation', 'translated'),
('Side Projects - The Flame\'s Daughter', 'The Flame\'s Daughter', 'translated'),
('PRC', 'PRC', 'translated'),
('Loiterous', 'Loiterous', 'oel'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False
|
class Solution:
def twoSum(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
"""
lookup = dict(((v, i) for i, v in enumerate(nums)))
return next(( (i+1, lookup.get(target-v)+1)
for i, v in enumerate(nums)
if lookup.get(target-v, i) != i), None)
a = Solution()
print(a.twoSum([2, 11, 7, 15],9))
# The simpler the problem, the more careful you need to be
|
# -*- coding: utf-8 -*-
check_state = 0
d = {}
p = []
e = []
m = []
n = int(input())
for _ in range(n):
ln = input().split()
d[ln[0]] = (int(ln[1]), int(ln[2]), int(ln[3]))
p.append(int(ln[1]))
e.append(int(ln[2]))
m.append(int(ln[3]))
while True:
if check_state == 0:
if p.count(max(p)) == 1:
for k in d:
if d[k][0] == max(p):
print(k)
break
break
else:
del_list = []
for k in d:
if d[k][0] != max(p):
p.remove(d[k][0])
e.remove(d[k][1])
m.remove(d[k][2])
del_list.append(k)
for k in del_list:
del d[k]
if check_state == 1:
if e.count(max(e)) == 1:
for k in d:
if d[k][1] == max(e):
print(k)
break
break
else:
del_list = []
for k in d:
if d[k][1] != max(e):
p.remove(d[k][0])
e.remove(d[k][1])
m.remove(d[k][2])
del_list.append(k)
for k in del_list:
del d[k]
if check_state == 2:
if m.count(min(m)) == 1:
for k in d:
if d[k][2] == min(m):
print(k)
break
break
else:
del_list = []
for k in d:
if d[k][2] != min(m):
p.remove(d[k][0])
e.remove(d[k][1])
m.remove(d[k][2])
del_list.append(k)
for k in del_list:
del d[k]
            # Lexicographic order is the same thing as alphabetical order in this case
keys = sorted(d.keys())
print(keys[0])
break
check_state += 1
|
"""
Definition of TreeNode:
class TreeNode:
def __init__(self, val):
self.val = val
self.left, self.right = None, None
"""
class Solution:
"""
@param: root: The root of the binary search tree.
    @param: A: A TreeNode in a Binary Tree.
    @param: B: A TreeNode in a Binary Tree.
@return: Return the least common ancestor(LCA) of the two nodes.
"""
def lowestCommonAncestor(self, root, A, B):
if root is None or root is A or root is B:
return root
left = self.lowestCommonAncestor(root.left, A, B)
right = self.lowestCommonAncestor(root.right, A, B)
if left and right:
return root
if left:
return left
return right
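# A minimal local check (a sketch: TreeNode below mirrors the commented
# definition above, and the tree values are hypothetical):
if __name__ == '__main__':
    class TreeNode:
        def __init__(self, val):
            self.val = val
            self.left, self.right = None, None
    root, a, b = TreeNode(4), TreeNode(3), TreeNode(7)
    root.left, root.right = a, b
    print(Solution().lowestCommonAncestor(root, a, b).val)  # -> 4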
|
def main():
with open('inputs/01.in') as f:
data = [int(line) for line in f]
print(sum(data))
result = 0
seen = {0}
while True:
for item in data:
result += item
if result in seen:
print(result)
return
seen.add(result)
if __name__ == '__main__':
main()
|
strikeout = 'K'
className = "This is CS50."
age = 30
anotherAge = 63
pi = 3.14
morePi = 3.1415962
fun = True
print("strikeout: {}".format(strikeout))
print("className: {}".format(className))
print("age: {}".format(age))
print("anotherAge: {}".format(anotherAge))
print("pi: {}".format(pi))
print("morePi: {}".format(morePi))
print("fun: {}".format(fun))
|
def is_all_strings(iterable):
return all(isinstance(string, str) for string in iterable)
print(is_all_strings(['a', 'b', 'c']))
print(is_all_strings([2, 'a', 'b', 'c']))
|
# -*- coding: utf-8 -*-
class InsufficientInputError(Exception):
"""入力が不足していることを知らせる例外クラス"""
pass
class InvalidInputError(Exception):
"""入力が相応しくないことを知らせる例外クラス"""
pass
|
"""
You are playing the following Bulls and Cows game with your friend: You
write down a number and ask your friend to guess what the number is. Each
time your friend makes a guess, you provide a hint that indicates how many
digits in said guess match your secret number exactly in both digit and
position (called "bulls") and how many digits match the secret number but
locate in the wrong position (called "cows"). Your friend will use
successive guesses and hints to eventually derive the secret number.
Write a function to return a hint according to the secret number and
friend's guess, use A to indicate the bulls and B to indicate the cows.
Please note that both secret number and friend's guess may contain
duplicate digits.
Example:
Input: secret = "1807", guess = "7810"
Output: "1A3B"
Explanation: 1 bull and 3 cows. The bull is 8, the cows are 0, 1 and 7.
Example:
Input: secret = "1123", guess = "0111"
Output: "1A1B"
Explanation: The 1st 1 in friend's guess is a bull, the 2nd or 3rd 1 is
a cow.
Note: You may assume that the secret number and your friend's guess only
contain digits, and their lengths are always equal.
"""
#Difficulty: Easy
#152 / 152 test cases passed.
#Runtime: 84 ms
#Memory Usage: 13.7 MB
#Runtime: 84 ms, faster than 12.39% of Python3 online submissions for Bulls and Cows.
#Memory Usage: 13.7 MB, less than 83.60% of Python3 online submissions for Bulls and Cows.
class Solution:
def getHint(self, secret: str, guess: str) -> str:
secret = list(secret)
guess = list(guess)
i, bulls, cows = 0, 0, 0
l = len(secret)
while i < l:
if guess[i] == secret[i]:
bulls += 1
guess.pop(i)
secret.pop(i)
l = len(secret)
continue
i += 1
i = 0
while i < l:
if guess[i] in secret:
cows += 1
secret.remove(guess[i])
guess.pop(i)
l = len(secret)
continue
i += 1
return str(bulls) + 'A' + str(cows) + 'B'
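# A quick check against the docstring examples above (a sketch; on LeetCode the
# judge calls getHint directly):
if __name__ == '__main__':
    print(Solution().getHint("1807", "7810"))  # -> 1A3B
    print(Solution().getHint("1123", "0111"))  # -> 1A1B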
|
def concat_multiples(num, multiples):
return int("".join([str(num*multiple) for multiple in range(1,multiples+1)]))
def is_pandigital(num):
return sorted([int(digit) for digit in str(num)]) == list(range(1,10))
def solve_p038():
    # retrieve only 9-digit concatenations of multiples where n = (1,2,..,n)
n6 = [concat_multiples(num, 6) for num in [3]]
n5 = [concat_multiples(num, 5) for num in range(5,10)]
n4 = [concat_multiples(num, 4) for num in range(25,33)]
n3 = [concat_multiples(num, 3) for num in range(100,333)]
n2 = [concat_multiples(num, 2) for num in range(5000,9999)]
all_concats = set(n2 + n3 + n4 + n5 + n6)
return max([num for num in all_concats if is_pandigital(num)])
if __name__ == '__main__':
print((solve_p038()))
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
'chromium_code': 1,
},
'targets': [
{
# GN version: //ui/aura_extra
'target_name': 'aura_extra',
'type': '<(component)',
'dependencies': [
'../../base/base.gyp:base',
'../../skia/skia.gyp:skia',
'../aura/aura.gyp:aura',
'../base/ui_base.gyp:ui_base',
'../events/events.gyp:events',
'../gfx/gfx.gyp:gfx',
'../gfx/gfx.gyp:gfx_geometry',
],
'defines': [
'AURA_EXTRA_IMPLEMENTATION',
],
'sources': [
'aura_extra_export.h',
'image_window_delegate.cc',
'image_window_delegate.h',
],
},
],
}
|
class Env:
__table = None
_prev = None
def __init__(self, n):
self.__table = {}
self._prev = n
def put(self, w, i):
self.__table[w] = i
def get(self, w):
e = self
while e is not None:
found = e.__table.get(w)
if found is not None:
return found
e = e._prev
return None
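# Minimal usage sketch (illustrative only): an inner Env shadows its parent,
# and get() walks the _prev chain when a symbol is missing locally.
if __name__ == '__main__':
    outer = Env(None)
    outer.put('x', 1)
    inner = Env(outer)
    inner.put('y', 2)
    assert inner.get('x') == 1   # resolved through the enclosing scope
    assert inner.get('z') is None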
|
def foo(*args, **kwargs):
pass
fo<caret>o(1, 2, 3, x = 4)
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Copyright (C) 2017, Kai Raphahn <kai.raphahn@laburec.de>
#
__all__ = [
"logging",
"lang",
"data",
"utils"
]
#: package name
__name__ = "bbutils"
#: package author
__author__ = "Kai Raphahn"
#: email of package maintainer
__email__ = "kai.raphahn@laburec.de"
#: copyright year
__year__ = 2020
#: package copyright
__copyright__ = "Copyright (C) {0:d}, {1:s} <{2:s}>".format(__year__, __author__, __email__)
#: package description
__description__ = "Small collection of stuff for all my other python projects (including logging)."
#: package license
__license__ = "Apache License, Version 2.0"
#: package credits
__credits__ = [__author__]
#: version milestone
__milestone__ = 0
#: version major
__major__ = 4
#: version minor
__minor__ = 0
#: version patch
__patch__ = 6
#: package version
__version__ = "{0:d}.{1:d}.{2:d}.{3:d}".format(__milestone__, __major__, __minor__, __patch__)
#: package maintainer
__maintainer__ = __author__
|
class physics:#universal physics (excluding projectiles because I hate them)
def __init__(self,world,gravity = 2):
self.gravity = gravity
self.world = world
def isAtScreenBottom(self,obj):#unused
if obj.y + obj.size[1] <= screenSize[1]:
return True
else:
return False
def colliding(self,xy1, size1, xy2, size2):#returns true if two rectangles are touching
if xy1[0] + size1[0] > xy2[0] and xy1[0] < xy2[0] + size2[0] and xy1[1] + size1[1] > xy2[1] and xy1[1] < xy2[1] + size2[1]:
return True
def touchingTile(self,xy,z=1,filtered = True):
try:
if self.world.tiles[xy[0]//32][xy[1]//32][z] != None:
if self.world.tiles[xy[0]//32][xy[1]//32][z].tileType != 24:
if filtered == True:
return self.world.tiles[xy[0]//32][xy[1]//32][z].physical
else:
return True
return False
except:
return False
def applyPhys(self,obj):#applies physics to an object
below = False
beside = False
above = False
if self.touchingTile((obj.x + (obj.legLeft.size[0] * 2),(obj.y + obj.size[1]) + obj.yVel)) == True or self.touchingTile((obj.x + obj.armLeft.size[0],(obj.y + obj.size[1]) + obj.yVel)) == True:
obj.yVel = 0
below = True
if self.touchingTile(((obj.x + obj.size[0] + obj.xVel),obj.y)) == True or self.touchingTile(((obj.x + obj.size[0] + obj.xVel),obj.y + obj.legLeft.size[1])) == True or self.touchingTile(((obj.x + obj.size[0] + obj.xVel),obj.y + obj.size[1] - 2)) == True :
obj.xVel = 0
beside = True
if self.touchingTile(((obj.x + obj.armLeft.size[0]) + obj.xVel,obj.y)) == True or self.touchingTile(((obj.x + obj.armLeft.size[0]) + obj.xVel,obj.y + obj.legLeft.size[1])) == True or self.touchingTile(((obj.x + obj.armLeft.size[0]) + obj.xVel,obj.y + obj.size[1] - 2)) == True :
obj.xVel = 0
beside = True
if self.touchingTile((obj.x + (obj.legLeft.size[0] * 2), obj.y + obj.yVel)) == True or self.touchingTile((obj.x + obj.armLeft.size[0], obj.y + obj.yVel)) == True:
obj.yVel = 0
above = True
if below == False:
obj.addSpeed(0,self.gravity)
else:
if self.touchingTile((obj.x + (obj.legLeft.size[0] * 2), obj.y + obj.size[1] - 1)) == True or self.touchingTile((obj.x + obj.armLeft.size[0],obj.y + obj.size[1] - 1)) == True:
obj.changeXY(0,-1)
if beside == True:
obj.faceTile = True
else:
obj.faceTile = False
if obj.xVel != 0:
obj.changeXY(obj.xVel)
obj.walkAnim()
if obj.xVel > 0:
obj.xVel -= 1
if obj.faceRight == False:
obj.flip()
else:
obj.xVel += 1
if obj.faceRight == True:
obj.flip()
if obj.yVel != 0:
obj.changeXY(0,obj.yVel)
objType = str(type(obj))#find the object type
objType = objType.split("'")
objType = objType[1]
objType = objType.split('.')[1]
if objType == 'character':
if obj.isJump == True:
                if below == True:#landed on a tile below
obj.isJump = False#no longer jumping
obj.y -= 1#lower the y coordinate to prevent some bad stuff probably
obj.limbReset()#reset limbs
obj.attacking = False#no attack
else:
obj.fallAnim()#fall
|
# -*- coding: utf-8 -*-
# Copyright 2021 Cohesity Inc.
class EnvironmentListProtectionSourcesEnum(object):
"""Implementation of the 'environment_ListProtectionSources' enum.
TODO: type enum description here.
Attributes:
K_VMWARE: TODO: type description here.
KSQL: TODO: type description here.
KVIEW: TODO: type description here.
KPUPPETEER: TODO: type description here.
KPHYSICAL: TODO: type description here.
KPURE: TODO: type description here.
KNETAPP: TODO: type description here.
KGENERICNAS: TODO: type description here.
K_HYPERV: TODO: type description here.
KACROPOLIS: TODO: type description here.
KAZURE: TODO: type description here.
KKUBERNETES: TODO: type description here.
KCASSANDRA: TODO: type description here.
KMONGODB: TODO: type description here.
KCOUCHBASE: TODO: type description here.
KHDFS: TODO: type description here.
KHIVE: TODO: type description here.
KHBASE: TODO: type description here.
KUDA: TODO: type description here.
"""
K_VMWARE = 'kVMware'
KSQL = 'kSQL'
KVIEW = 'kView'
KPUPPETEER = 'kPuppeteer'
KPHYSICAL = 'kPhysical'
KPURE = 'kPure'
KNETAPP = 'kNetapp'
KGENERICNAS = 'kGenericNas'
K_HYPERV = 'kHyperV'
KACROPOLIS = 'kAcropolis'
KAZURE = 'kAzure'
KKUBERNETES = 'kKubernetes'
KCASSANDRA = 'kCassandra'
KMONGODB = 'kMongoDB'
KCOUCHBASE = 'kCouchbase'
KHDFS = 'kHdfs'
KHIVE = 'kHive'
KHBASE = 'kHBase'
KUDA = 'kUDA'
|
class DoublyNode:
def __init__(self, data):
self.data = data
self.leftlink = None
self.rightlink = None
def __str__(self):
return '| {0} |'.format(self.data)
def __repr__(self):
return "Node('{0}')".format(self.data)
def getdata(self):
return self.data
class DoublyLinkedList:
def __init__(self, list_of_values):
if not list_of_values:
raise IndexError
self.first_node = self.last_node = DoublyNode(list_of_values[0])
for val in list_of_values[1:]:
self.insert_to_the_bottom(val)
def __str__(self):
node, i = self.first_node, 0
repres = list()
repres.append('None <-> ')
while node:
repres.append(str(node))
repres.append(' <-> ')
node, i = node.rightlink, i+1
repres.append('None')
return ''.join(repres)
def __repr__(self):
node, i = self.first_node, 0
values = list()
while node:
values.append(node.data)
node, i = node.rightlink, i+1
return "DoublyLinkedList({0})".format(values)
def get_node_by_index_from_top(self, index):
node, i = self.first_node, 0
while i < index:
node, i = node.rightlink, i+1
return node
def get_node_by_index_from_bottom(self, index):
node, i = self.last_node, 0
while i < index:
node, i = node.leftlink, i+1
return node
def get_node_by_value_from_top(self, value):
node = self.first_node
while node:
if node.data == value:
return node
node = node.rightlink
return None
def get_node_by_value_from_bottom(self, value):
node = self.last_node
while node:
if node.getdata() == value:
return node
node = node.leftlink
return None
def get_index_by_value_from_top(self, value):
node, i = self.first_node, 0
while node:
if node.data == value:
return i
node, i = node.rightlink, i+1
return None
def get_index_by_value_from_bottom(self, value):
node, i = self.last_node, 0
while node:
if node.data == value:
return i
node, i = node.leftlink, i+1
return None
def insert_to_the_top(self, value):
new_node = DoublyNode(value)
new_node.rightlink = self.first_node
self.first_node.leftlink = new_node
self.first_node = new_node
def insert_to_the_bottom(self, value):
new_node = DoublyNode(value)
new_node.leftlink = self.last_node
self.last_node.rightlink = new_node
self.last_node = new_node
    def insert_at_index(self, index, value):
        if index == 0:
            self.insert_to_the_top(value)
            return index
        new_node = DoublyNode(value)
node, i = self.first_node, 0
while node:
if index == i+1 and node == self.last_node:
self.insert_to_the_bottom(value)
                return index
if index == i:
new_node.leftlink = node.leftlink
new_node.rightlink = node
node.leftlink.rightlink = new_node
node.leftlink = new_node
return index
node, i = node.rightlink, i+1
return None
def delete_from_the_top(self):
if self.last_node == self.first_node:
raise IndexError
self.first_node.rightlink.leftlink = None
self.first_node = self.first_node.rightlink
def delete_from_the_bottom(self):
if self.last_node == self.first_node:
raise IndexError
self.last_node.leftlink.rightlink = None
self.last_node = self.last_node.leftlink
def delete_at_index(self, index):
if index == 0:
            self.delete_from_the_top()
            return None
node, i = self.first_node.rightlink, 1
while i <= index:
if not node:
raise IndexError
if i == index:
if node == self.last_node:
self.delete_from_the_bottom()
break
node.leftlink.rightlink, node.rightlink.leftlink = node.rightlink, node.leftlink
node, i = node.rightlink, i+1
return None
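# Minimal usage sketch (illustrative only).
if __name__ == '__main__':
    dll = DoublyLinkedList([1, 2, 3])
    dll.insert_to_the_top(0)
    dll.insert_at_index(2, 99)
    print(dll)  # None <-> | 0 | <-> | 1 | <-> | 99 | <-> | 2 | <-> | 3 | <-> None
    print(dll.get_index_by_value_from_top(99))  # 2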
|
class Solution:
def twoSum(self, nums: List[int], target: int) -> List[int]:
"""Hash table.
Running time: O(n) where n == len(nums).
"""
d = {nums[0]: 0}
for i in range(1, len(nums)):
if target - nums[i] in d:
return [i, d[target - nums[i]]]
d[nums[i]] = i
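# Illustrative behaviour (not part of the submission): for nums = [2, 7, 11, 15]
# and target = 9 the method returns [1, 0] -- the later index comes first because
# the dict maps previously seen values to their indices.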
|
def distinct_values_bt(bin_tree):
"""Find distinct values in a binary tree."""
distinct = {}
result = []
def _walk(node=None):
if node is None:
return
if node.left is not None:
_walk(node.left)
if distinct.get(node.val):
distinct[node.val] = distinct[node.val] + 1
else:
distinct[node.val] = 1
if node.right is not None:
_walk(node.right)
_walk(bin_tree.root)
# for key in list(distinct):
# if distinct[key] != 1:
# del distinct[key]
# return list(distinct.keys())
# lesser of 2 evils:
for key in distinct:
if distinct[key] == 1:
result.append(key)
return result
# or cut 5 lines of code into 1 using list comprehension:
# return [key for key, val in distinct.items() if val == 1]
# Big O:
# time: O(N)
# space: O(2N) = O(N)
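# Illustrative behaviour (assuming a tree whose nodes expose .val, .left and
# .right): for stored values [2, 1, 2, 3] the function returns [1, 3] -- the
# values that occur exactly once, in the order of the in-order walk.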
|
{
'variables': {
'SMRF_LIB_DIR': '/usr/local/lib',
'SMRF_INCLUDE_DIR': '/usr/local/include'
},
'targets': [
{
'target_name': 'smrf-native-cpp',
'sources': [ 'src/smrf.cpp' ],
'cflags_cc': [ '-std=c++14' ],
'cflags!': [ '-fno-exceptions'],
'cflags_cc!': [ '-fno-exceptions'],
'include_dirs': [
"<!(node -e \"require('nan')\")",
'<@(SMRF_INCLUDE_DIR)'
],
'libraries': ['-Wl,-rpath,<@(SMRF_LIB_DIR) -L<@(SMRF_LIB_DIR) -lsmrf'],
'conditions': [
['OS=="mac"',
{
'xcode_settings': {
'GCC_ENABLE_CPP_EXCEPTIONS': 'YES',
'OTHER_CPLUSPLUSFLAGS': ['-std=c++14']
},
}
]
],
}
]
}
|
# Definition for a undirected graph node
# class UndirectedGraphNode:
# def __init__(self, x):
# self.label = x
# self.neighbors = []
class Solution:
# @param node, a undirected graph node
# @return a undirected graph node
def cloneGraph(self, node):
        # Tag each original node with a 'clone' attribute so already-copied
        # nodes are reused instead of duplicated.
        root = UndirectedGraphNode(node.label)
        node.clone = root
        # Stack of (clone that still needs a neighbor, original neighbor) pairs.
        stack = [(root, x) for x in node.neighbors]
while stack:
connectee, node = stack.pop()
if hasattr(node, 'clone'):
connectee.neighbors.append(node.clone)
else:
node.clone = UndirectedGraphNode(node.label)
connectee.neighbors.append(node.clone)
stack.extend([(node.clone, x) for x in node.neighbors])
return root
|
class ApplicationId(object):
"""
Contains information used to uniquely identify a manifest-based application. This class cannot be inherited.
ApplicationId(publicKeyToken: Array[Byte],name: str,version: Version,processorArchitecture: str,culture: str)
"""
def ZZZ(self):
"""hardcoded/mock instance of the class"""
return ApplicationId()
instance=ZZZ()
"""hardcoded/returns an instance of the class"""
def Copy(self):
"""
Copy(self: ApplicationId) -> ApplicationId
Creates and returns an identical copy of the current application identity.
Returns: An System.ApplicationId object that represents an exact copy of the original.
"""
pass
def Equals(self,o):
"""
Equals(self: ApplicationId,o: object) -> bool
Determines whether the specified System.ApplicationId object is equivalent to the current System.ApplicationId.
o: The System.ApplicationId object to compare to the current System.ApplicationId.
Returns: true if the specified System.ApplicationId object is equivalent to the current System.ApplicationId; otherwise,false.
"""
pass
def GetHashCode(self):
"""
GetHashCode(self: ApplicationId) -> int
Gets the hash code for the current application identity.
Returns: The hash code for the current application identity.
"""
pass
def ToString(self):
"""
ToString(self: ApplicationId) -> str
Creates and returns a string representation of the application identity.
Returns: A string representation of the application identity.
"""
pass
def __eq__(self,*args):
""" x.__eq__(y) <==> x==y """
pass
@staticmethod
def __new__(self,publicKeyToken,name,version,processorArchitecture,culture):
""" __new__(cls: type,publicKeyToken: Array[Byte],name: str,version: Version,processorArchitecture: str,culture: str) """
pass
def __ne__(self,*args):
pass
Culture=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a string representing the culture information for the application.
Get: Culture(self: ApplicationId) -> str
"""
Name=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the name of the application.
Get: Name(self: ApplicationId) -> str
"""
ProcessorArchitecture=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the target processor architecture for the application.
Get: ProcessorArchitecture(self: ApplicationId) -> str
"""
PublicKeyToken=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the public key token for the application.
Get: PublicKeyToken(self: ApplicationId) -> Array[Byte]
"""
Version=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the version of the application.
Get: Version(self: ApplicationId) -> Version
"""
|
#!/usr/bin/env python
# encoding: utf-8
# This file is made available under Elastic License 2.0
# This file is based on code available under the Apache license here:
# https://github.com/apache/incubator-doris/blob/master/gensrc/script/doris_builtins_functions.py
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# This is a list of all the functions that are not auto-generated.
# It contains all the meta data that describes the function.
# The format is:
# [sql aliases], <return_type>, [<args>], <backend symbol>,
# With an optional
# <prepare symbol>, <close symbol>
#
# 'sql aliases' are the function names that can be used from sql. There must be at least
# one per function.
#
# The symbol can be empty for functions that are not yet implemented or are special-cased
# in Expr::CreateExpr() (i.e., functions that are implemented via a custom Expr class
# rather than a single function).
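# For example, the first entry below reads: SQL name 'bitand', return type
# 'TINYINT', argument types ['TINYINT', 'TINYINT'], and the mangled C++ symbol
# of the backend implementation.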
visible_functions = [
# Bit and Byte functions
# For functions corresponding to builtin operators, we can reuse the implementations
[['bitand'], 'TINYINT', ['TINYINT', 'TINYINT'],
'_ZN9starrocks9Operators32bitand_tiny_int_val_tiny_int_valEPN13starrocks_udf'
'15FunctionContextERKNS1_10TinyIntValES6_'],
[['bitand'], 'SMALLINT', ['SMALLINT', 'SMALLINT'],
'_ZN9starrocks9Operators34bitand_small_int_val_small_int_valEPN13starrocks_udf'
'15FunctionContextERKNS1_11SmallIntValES6_'],
[['bitand'], 'INT', ['INT', 'INT'],
'_ZN9starrocks9Operators22bitand_int_val_int_valEPN13starrocks_udf'
'15FunctionContextERKNS1_6IntValES6_'],
[['bitand'], 'BIGINT', ['BIGINT', 'BIGINT'],
'_ZN9starrocks9Operators30bitand_big_int_val_big_int_valEPN13starrocks_udf'
'15FunctionContextERKNS1_9BigIntValES6_'],
[['bitand'], 'LARGEINT', ['LARGEINT', 'LARGEINT'],
'_ZN9starrocks9Operators34bitand_large_int_val_large_int_valEPN13starrocks_udf'
'15FunctionContextERKNS1_11LargeIntValES6_'],
[['bitor'], 'TINYINT', ['TINYINT', 'TINYINT'],
'_ZN9starrocks9Operators31bitor_tiny_int_val_tiny_int_valEPN13starrocks_udf'
'15FunctionContextERKNS1_10TinyIntValES6_'],
[['bitor'], 'SMALLINT', ['SMALLINT', 'SMALLINT'],
'_ZN9starrocks9Operators33bitor_small_int_val_small_int_valEPN13starrocks_udf'
'15FunctionContextERKNS1_11SmallIntValES6_'],
[['bitor'], 'INT', ['INT', 'INT'],
'_ZN9starrocks9Operators21bitor_int_val_int_valEPN13starrocks_udf'
'15FunctionContextERKNS1_6IntValES6_'],
[['bitor'], 'BIGINT', ['BIGINT', 'BIGINT'],
'_ZN9starrocks9Operators29bitor_big_int_val_big_int_valEPN13starrocks_udf'
'15FunctionContextERKNS1_9BigIntValES6_'],
[['bitor'], 'LARGEINT', ['LARGEINT', 'LARGEINT'],
'_ZN9starrocks9Operators33bitor_large_int_val_large_int_valEPN13starrocks_udf'
'15FunctionContextERKNS1_11LargeIntValES6_'],
[['bitxor'], 'TINYINT', ['TINYINT', 'TINYINT'],
'_ZN9starrocks9Operators32bitxor_tiny_int_val_tiny_int_valEPN13starrocks_udf'
'15FunctionContextERKNS1_10TinyIntValES6_'],
[['bitxor'], 'SMALLINT', ['SMALLINT', 'SMALLINT'],
'_ZN9starrocks9Operators34bitxor_small_int_val_small_int_valEPN13starrocks_udf'
'15FunctionContextERKNS1_11SmallIntValES6_'],
[['bitxor'], 'INT', ['INT', 'INT'],
'_ZN9starrocks9Operators22bitxor_int_val_int_valEPN13starrocks_udf'
'15FunctionContextERKNS1_6IntValES6_'],
[['bitxor'], 'BIGINT', ['BIGINT', 'BIGINT'],
'_ZN9starrocks9Operators30bitxor_big_int_val_big_int_valEPN13starrocks_udf'
'15FunctionContextERKNS1_9BigIntValES6_'],
[['bitxor'], 'LARGEINT', ['LARGEINT', 'LARGEINT'],
'_ZN9starrocks9Operators34bitxor_large_int_val_large_int_valEPN13starrocks_udf'
'15FunctionContextERKNS1_11LargeIntValES6_'],
[['bitnot'], 'TINYINT', ['TINYINT'],
'_ZN9starrocks9Operators19bitnot_tiny_int_valEPN13starrocks_udf'
'15FunctionContextERKNS1_10TinyIntValE'],
[['bitnot'], 'SMALLINT', ['SMALLINT'],
'_ZN9starrocks9Operators20bitnot_small_int_valEPN13starrocks_udf'
'15FunctionContextERKNS1_11SmallIntValE'],
[['bitnot'], 'INT', ['INT'],
'_ZN9starrocks9Operators14bitnot_int_valEPN13starrocks_udf'
'15FunctionContextERKNS1_6IntValE'],
[['bitnot'], 'BIGINT', ['BIGINT'],
'_ZN9starrocks9Operators18bitnot_big_int_valEPN13starrocks_udf'
'15FunctionContextERKNS1_9BigIntValE'],
[['bitnot'], 'LARGEINT', ['LARGEINT'],
'_ZN9starrocks9Operators20bitnot_large_int_valEPN13starrocks_udf'
'15FunctionContextERKNS1_11LargeIntValE'],
# Timestamp functions
[['unix_timestamp'], 'INT', [],
'_ZN9starrocks18TimestampFunctions7to_unixEPN13starrocks_udf15FunctionContextE'],
[['unix_timestamp'], 'INT', ['DATETIME'],
'_ZN9starrocks18TimestampFunctions7to_unixEPN13starrocks_udf15FunctionContextERKNS1_11DateTimeValE'],
[['unix_timestamp'], 'INT', ['DATE'],
'_ZN9starrocks18TimestampFunctions7to_unixEPN13starrocks_udf15FunctionContextERKNS1_11DateTimeValE'],
[['unix_timestamp'], 'INT', ['VARCHAR', 'VARCHAR'],
'_ZN9starrocks18TimestampFunctions7to_unixEPN13starrocks_udf15FunctionContextERKNS1_9StringValES6_'],
[['from_unixtime'], 'VARCHAR', ['INT'],
'_ZN9starrocks18TimestampFunctions9from_unixEPN13starrocks_udf15FunctionContextERKNS1_6IntValE'],
[['from_unixtime'], 'VARCHAR', ['INT', 'VARCHAR'],
'_ZN9starrocks18TimestampFunctions9from_unixEPN13starrocks_udf'
'15FunctionContextERKNS1_6IntValERKNS1_9StringValE',
'_ZN9starrocks18TimestampFunctions14format_prepareEPN13starrocks_udf'
'15FunctionContextENS2_18FunctionStateScopeE',
'_ZN9starrocks18TimestampFunctions12format_closeEPN13starrocks_udf'
'15FunctionContextENS2_18FunctionStateScopeE'],
[['now', 'current_timestamp', 'localtime', 'localtimestamp'], 'DATETIME', [],
'_ZN9starrocks18TimestampFunctions3nowEPN13starrocks_udf15FunctionContextE'],
[['curtime', 'current_time'], 'TIME', [],
'_ZN9starrocks18TimestampFunctions7curtimeEPN13starrocks_udf15FunctionContextE'],
[['curdate', 'current_date'], 'DATE', [],
'_ZN9starrocks18TimestampFunctions7curdateEPN13starrocks_udf15FunctionContextE'],
[['utc_timestamp'], 'DATETIME', [],
'_ZN9starrocks18TimestampFunctions13utc_timestampEPN13starrocks_udf15FunctionContextE'],
[['timestamp'], 'DATETIME', ['DATETIME'],
'_ZN9starrocks18TimestampFunctions9timestampEPN13starrocks_udf15FunctionContextERKNS1_11DateTimeValE'],
[['from_days'], 'DATE', ['INT'],
'_ZN9starrocks18TimestampFunctions9from_daysEPN13starrocks_udf15FunctionContextERKNS1_6IntValE'],
[['to_days'], 'INT', ['DATE'],
'_ZN9starrocks18TimestampFunctions7to_daysEPN13starrocks_udf15FunctionContextERKNS1_11DateTimeValE'],
[['year'], 'INT', ['DATETIME'],
'_ZN9starrocks18TimestampFunctions4yearEPN13starrocks_udf15FunctionContextERKNS1_11DateTimeValE'],
[['month'], 'INT', ['DATETIME'],
'_ZN9starrocks18TimestampFunctions5monthEPN13starrocks_udf15FunctionContextERKNS1_11DateTimeValE'],
[['quarter'], 'INT', ['DATETIME'],
'_ZN9starrocks18TimestampFunctions7quarterEPN13starrocks_udf15FunctionContextERKNS1_11DateTimeValE'],
[['dayofweek'], 'INT', ['DATETIME'],
'_ZN9starrocks18TimestampFunctions11day_of_weekEPN13starrocks_udf15FunctionContextERKNS1_11DateTimeValE'],
[['day', 'dayofmonth'], 'INT', ['DATETIME'],
'_ZN9starrocks18TimestampFunctions12day_of_monthEPN13starrocks_udf'
'15FunctionContextERKNS1_11DateTimeValE'],
[['dayofyear'], 'INT', ['DATETIME'],
'_ZN9starrocks18TimestampFunctions11day_of_yearEPN13starrocks_udf'
'15FunctionContextERKNS1_11DateTimeValE'],
[['weekofyear'], 'INT', ['DATETIME'],
'_ZN9starrocks18TimestampFunctions12week_of_yearEPN13starrocks_udf'
'15FunctionContextERKNS1_11DateTimeValE'],
[['hour'], 'INT', ['DATETIME'],
'_ZN9starrocks18TimestampFunctions4hourEPN13starrocks_udf15FunctionContextERKNS1_11DateTimeValE'],
[['minute'], 'INT', ['DATETIME'],
'_ZN9starrocks18TimestampFunctions6minuteEPN13starrocks_udf15FunctionContextERKNS1_11DateTimeValE'],
[['second'], 'INT', ['DATETIME'],
'_ZN9starrocks18TimestampFunctions6secondEPN13starrocks_udf15FunctionContextERKNS1_11DateTimeValE'],
[['years_add'], 'DATETIME', ['DATETIME', 'INT'],
'_ZN9starrocks18TimestampFunctions9years_addEPN13starrocks_udf'
'15FunctionContextERKNS1_11DateTimeValERKNS1_6IntValE'],
[['years_sub'], 'DATETIME', ['DATETIME', 'INT'],
'_ZN9starrocks18TimestampFunctions9years_subEPN13starrocks_udf'
'15FunctionContextERKNS1_11DateTimeValERKNS1_6IntValE'],
[['months_add', 'add_months'], 'DATETIME', ['DATETIME', 'INT'],
'_ZN9starrocks18TimestampFunctions10months_addEPN13starrocks_udf'
'15FunctionContextERKNS1_11DateTimeValERKNS1_6IntValE'],
[['months_sub'], 'DATETIME', ['DATETIME', 'INT'],
'_ZN9starrocks18TimestampFunctions10months_subEPN13starrocks_udf'
'15FunctionContextERKNS1_11DateTimeValERKNS1_6IntValE'],
[['weeks_add'], 'DATETIME', ['DATETIME', 'INT'],
'_ZN9starrocks18TimestampFunctions9weeks_addEPN13starrocks_udf'
'15FunctionContextERKNS1_11DateTimeValERKNS1_6IntValE'],
[['weeks_sub'], 'DATETIME', ['DATETIME', 'INT'],
'_ZN9starrocks18TimestampFunctions9weeks_subEPN13starrocks_udf'
'15FunctionContextERKNS1_11DateTimeValERKNS1_6IntValE'],
[['days_add', 'date_add', 'adddate'], 'DATETIME', ['DATETIME', 'INT'],
'_ZN9starrocks18TimestampFunctions8days_addEPN13starrocks_udf'
'15FunctionContextERKNS1_11DateTimeValERKNS1_6IntValE'],
[['days_sub', 'date_sub', 'subdate'], 'DATETIME', ['DATETIME', 'INT'],
'_ZN9starrocks18TimestampFunctions8days_subEPN13starrocks_udf'
'15FunctionContextERKNS1_11DateTimeValERKNS1_6IntValE'],
[['hours_add'], 'DATETIME', ['DATETIME', 'INT'],
'_ZN9starrocks18TimestampFunctions9hours_addEPN13starrocks_udf'
'15FunctionContextERKNS1_11DateTimeValERKNS1_6IntValE'],
[['hours_sub'], 'DATETIME', ['DATETIME', 'INT'],
'_ZN9starrocks18TimestampFunctions9hours_subEPN13starrocks_udf'
'15FunctionContextERKNS1_11DateTimeValERKNS1_6IntValE'],
[['minutes_add'], 'DATETIME', ['DATETIME', 'INT'],
'_ZN9starrocks18TimestampFunctions11minutes_addEPN13starrocks_udf'
'15FunctionContextERKNS1_11DateTimeValERKNS1_6IntValE'],
[['minutes_sub'], 'DATETIME', ['DATETIME', 'INT'],
'_ZN9starrocks18TimestampFunctions11minutes_subEPN13starrocks_udf'
'15FunctionContextERKNS1_11DateTimeValERKNS1_6IntValE'],
[['seconds_add'], 'DATETIME', ['DATETIME', 'INT'],
'_ZN9starrocks18TimestampFunctions11seconds_addEPN13starrocks_udf'
'15FunctionContextERKNS1_11DateTimeValERKNS1_6IntValE'],
[['seconds_sub'], 'DATETIME', ['DATETIME', 'INT'],
'_ZN9starrocks18TimestampFunctions11seconds_subEPN13starrocks_udf'
'15FunctionContextERKNS1_11DateTimeValERKNS1_6IntValE'],
[['microseconds_add'], 'DATETIME', ['DATETIME', 'INT'],
'_ZN9starrocks18TimestampFunctions10micros_addEPN13starrocks_udf'
'15FunctionContextERKNS1_11DateTimeValERKNS1_6IntValE'],
[['microseconds_sub'], 'DATETIME', ['DATETIME', 'INT'],
'_ZN9starrocks18TimestampFunctions10micros_subEPN13starrocks_udf'
'15FunctionContextERKNS1_11DateTimeValERKNS1_6IntValE'],
[['datediff'], 'INT', ['DATETIME', 'DATETIME'],
'_ZN9starrocks18TimestampFunctions9date_diffEPN13starrocks_udf'
'15FunctionContextERKNS1_11DateTimeValES6_'],
[['timediff'], 'TIME', ['DATETIME', 'DATETIME'],
'_ZN9starrocks18TimestampFunctions9time_diffEPN13starrocks_udf'
'15FunctionContextERKNS1_11DateTimeValES6_'],
[['date_trunc'], 'DATETIME', ['VARCHAR', 'DATETIME'],
'_ZN9starrocks18TimestampFunctions14datetime_truncEPN13starrocks_udf'
'15FunctionContextERKNS1_9StringValERKNS1_11DateTimeValE',
'_ZN9starrocks18TimestampFunctions22datetime_trunc_prepareEPN13starrocks_udf'
'15FunctionContextENS2_18FunctionStateScopeE',
'_ZN9starrocks18TimestampFunctions20datetime_trunc_closeEPN13starrocks_udf'
'15FunctionContextENS2_18FunctionStateScopeE'],
[['date_trunc'], 'DATE', ['VARCHAR', 'DATE'],
'_ZN9starrocks18TimestampFunctions10date_truncEPN13starrocks_udf'
'15FunctionContextERKNS1_9StringValERKNS1_11DateTimeValE',
'_ZN9starrocks18TimestampFunctions18date_trunc_prepareEPN13starrocks_udf'
'15FunctionContextENS2_18FunctionStateScopeE',
'_ZN9starrocks18TimestampFunctions16date_trunc_closeEPN13starrocks_udf'
'15FunctionContextENS2_18FunctionStateScopeE'],
[['str_to_date'], 'DATETIME', ['VARCHAR', 'VARCHAR'],
'_ZN9starrocks18TimestampFunctions11str_to_dateEPN13starrocks_udf'
'15FunctionContextERKNS1_9StringValES6_'],
[['date_format'], 'VARCHAR', ['DATETIME', 'VARCHAR'],
'_ZN9starrocks18TimestampFunctions11date_formatEPN13starrocks_udf'
'15FunctionContextERKNS1_11DateTimeValERKNS1_9StringValE',
'_ZN9starrocks18TimestampFunctions14format_prepareEPN13starrocks_udf'
'15FunctionContextENS2_18FunctionStateScopeE',
'_ZN9starrocks18TimestampFunctions12format_closeEPN13starrocks_udf'
'15FunctionContextENS2_18FunctionStateScopeE'],
[['date_format'], 'VARCHAR', ['DATE', 'VARCHAR'],
'_ZN9starrocks18TimestampFunctions11date_formatEPN13starrocks_udf'
'15FunctionContextERKNS1_11DateTimeValERKNS1_9StringValE',
'_ZN9starrocks18TimestampFunctions14format_prepareEPN13starrocks_udf'
'15FunctionContextENS2_18FunctionStateScopeE',
'_ZN9starrocks18TimestampFunctions12format_closeEPN13starrocks_udf'
'15FunctionContextENS2_18FunctionStateScopeE'],
[['date', 'to_date'], 'DATE', ['DATETIME'],
'_ZN9starrocks18TimestampFunctions7to_dateEPN13starrocks_udf15FunctionContextERKNS1_11DateTimeValE'],
[['dayname'], 'VARCHAR', ['DATETIME'],
'_ZN9starrocks18TimestampFunctions8day_nameEPN13starrocks_udf'
'15FunctionContextERKNS1_11DateTimeValE'],
[['monthname'], 'VARCHAR', ['DATETIME'],
'_ZN9starrocks18TimestampFunctions10month_nameEPN13starrocks_udf'
'15FunctionContextERKNS1_11DateTimeValE'],
[['convert_tz'], 'DATETIME', ['DATETIME', 'VARCHAR', 'VARCHAR'],
'_ZN9starrocks18TimestampFunctions10convert_tzEPN13starrocks_udf15FunctionContextERKNS1_11DateTimeValERKNS1_9StringValES9_',
'_ZN9starrocks18TimestampFunctions18convert_tz_prepareEPN13starrocks_udf15FunctionContextENS2_18FunctionStateScopeE',
'_ZN9starrocks18TimestampFunctions16convert_tz_closeEPN13starrocks_udf15FunctionContextENS2_18FunctionStateScopeE'],
[['years_diff'], 'BIGINT', ['DATETIME', 'DATETIME'],
'_ZN9starrocks18TimestampFunctions10years_diffEPN13starrocks_udf15FunctionContextERKNS1_11DateTimeValES6_'],
[['months_diff'], 'BIGINT', ['DATETIME', 'DATETIME'],
'_ZN9starrocks18TimestampFunctions11months_diffEPN13starrocks_udf15FunctionContextERKNS1_11DateTimeValES6_'],
[['weeks_diff'], 'BIGINT', ['DATETIME', 'DATETIME'],
'_ZN9starrocks18TimestampFunctions10weeks_diffEPN13starrocks_udf15FunctionContextERKNS1_11DateTimeValES6_'],
[['days_diff'], 'BIGINT', ['DATETIME', 'DATETIME'],
'_ZN9starrocks18TimestampFunctions9days_diffEPN13starrocks_udf15FunctionContextERKNS1_11DateTimeValES6_'],
[['hours_diff'], 'BIGINT', ['DATETIME', 'DATETIME'],
'_ZN9starrocks18TimestampFunctions10hours_diffEPN13starrocks_udf15FunctionContextERKNS1_11DateTimeValES6_'],
[['minutes_diff'], 'BIGINT', ['DATETIME', 'DATETIME'],
'_ZN9starrocks18TimestampFunctions12minutes_diffEPN13starrocks_udf15FunctionContextERKNS1_11DateTimeValES6_'],
[['seconds_diff'], 'BIGINT', ['DATETIME', 'DATETIME'],
'_ZN9starrocks18TimestampFunctions12seconds_diffEPN13starrocks_udf15FunctionContextERKNS1_11DateTimeValES6_'],
# Math builtin functions
[['pi'], 'DOUBLE', [],
'_ZN9starrocks13MathFunctions2piEPN13starrocks_udf15FunctionContextE'],
[['e'], 'DOUBLE', [],
'_ZN9starrocks13MathFunctions1eEPN13starrocks_udf15FunctionContextE'],
[['abs'], 'DOUBLE', ['DOUBLE'],
'_ZN9starrocks13MathFunctions3absEPN13starrocks_udf15FunctionContextERKNS1_9DoubleValE'],
[['abs'], 'FLOAT', ['FLOAT'],
'_ZN9starrocks13MathFunctions3absEPN13starrocks_udf15FunctionContextERKNS1_8FloatValE'],
[['abs'], 'LARGEINT', ['LARGEINT'],
'_ZN9starrocks13MathFunctions3absEPN13starrocks_udf15FunctionContextERKNS1_11LargeIntValE'],
[['abs'], 'LARGEINT', ['BIGINT'],
'_ZN9starrocks13MathFunctions3absEPN13starrocks_udf15FunctionContextERKNS1_9BigIntValE'],
[['abs'], 'INT', ['SMALLINT'],
'_ZN9starrocks13MathFunctions3absEPN13starrocks_udf15FunctionContextERKNS1_11SmallIntValE'],
[['abs'], 'BIGINT', ['INT'],
'_ZN9starrocks13MathFunctions3absEPN13starrocks_udf15FunctionContextERKNS1_6IntValE'],
[['abs'], 'SMALLINT', ['TINYINT'],
'_ZN9starrocks13MathFunctions3absEPN13starrocks_udf15FunctionContextERKNS1_10TinyIntValE'],
[['abs'], 'DECIMALV2', ['DECIMALV2'],
'_ZN9starrocks13MathFunctions3absEPN13starrocks_udf15FunctionContextERKNS1_12DecimalV2ValE'],
[['abs'], 'DECIMAL32', ['DECIMAL32'],
'_ZN9starrocks13MathFunctions3absEPN13starrocks_udf15FunctionContextERKNS1_6IntValE'],
[['abs'], 'DECIMAL64', ['DECIMAL64'],
'_ZN9starrocks13MathFunctions3absEPN13starrocks_udf15FunctionContextERKNS1_9BigIntValE'],
[['abs'], 'DECIMAL128', ['DECIMAL128'],
'_ZN9starrocks13MathFunctions3absEPN13starrocks_udf15FunctionContextERKNS1_11LargeIntValE'],
[['sign'], 'FLOAT', ['DOUBLE'],
'_ZN9starrocks13MathFunctions4signEPN13starrocks_udf15FunctionContextERKNS1_9DoubleValE'],
[['sin'], 'DOUBLE', ['DOUBLE'],
'_ZN9starrocks13MathFunctions3sinEPN13starrocks_udf15FunctionContextERKNS1_9DoubleValE'],
[['asin'], 'DOUBLE', ['DOUBLE'],
'_ZN9starrocks13MathFunctions4asinEPN13starrocks_udf15FunctionContextERKNS1_9DoubleValE'],
[['cos'], 'DOUBLE', ['DOUBLE'],
'_ZN9starrocks13MathFunctions3cosEPN13starrocks_udf15FunctionContextERKNS1_9DoubleValE'],
[['acos'], 'DOUBLE', ['DOUBLE'],
'_ZN9starrocks13MathFunctions4acosEPN13starrocks_udf15FunctionContextERKNS1_9DoubleValE'],
[['tan'], 'DOUBLE', ['DOUBLE'],
'_ZN9starrocks13MathFunctions3tanEPN13starrocks_udf15FunctionContextERKNS1_9DoubleValE'],
[['atan'], 'DOUBLE', ['DOUBLE'],
'_ZN9starrocks13MathFunctions4atanEPN13starrocks_udf15FunctionContextERKNS1_9DoubleValE'],
[['ceil', 'ceiling', 'dceil'], 'BIGINT', ['DOUBLE'],
'_ZN9starrocks13MathFunctions4ceilEPN13starrocks_udf15FunctionContextERKNS1_9DoubleValE'],
[['floor', 'dfloor'], 'BIGINT', ['DOUBLE'],
'_ZN9starrocks13MathFunctions5floorEPN13starrocks_udf15FunctionContextERKNS1_9DoubleValE'],
[['round', 'dround'], 'BIGINT', ['DOUBLE'],
'_ZN9starrocks13MathFunctions5roundEPN13starrocks_udf15FunctionContextERKNS1_9DoubleValE'],
[['round', 'dround'], 'DOUBLE', ['DOUBLE', 'INT'],
'_ZN9starrocks13MathFunctions11round_up_toEPN13starrocks_udf'
'15FunctionContextERKNS1_9DoubleValERKNS1_6IntValE'],
[['truncate'], 'DOUBLE', ['DOUBLE', 'INT'],
'_ZN9starrocks13MathFunctions8truncateEPN13starrocks_udf'
'15FunctionContextERKNS1_9DoubleValERKNS1_6IntValE'],
[['ln', 'dlog1'], 'DOUBLE', ['DOUBLE'],
'_ZN9starrocks13MathFunctions2lnEPN13starrocks_udf15FunctionContextERKNS1_9DoubleValE'],
[['log'], 'DOUBLE', ['DOUBLE', 'DOUBLE'],
'_ZN9starrocks13MathFunctions3logEPN13starrocks_udf15FunctionContextERKNS1_9DoubleValES6_'],
[['log2'], 'DOUBLE', ['DOUBLE'],
'_ZN9starrocks13MathFunctions4log2EPN13starrocks_udf15FunctionContextERKNS1_9DoubleValE'],
[['log10', 'dlog10'], 'DOUBLE', ['DOUBLE'],
'_ZN9starrocks13MathFunctions5log10EPN13starrocks_udf15FunctionContextERKNS1_9DoubleValE'],
[['exp', 'dexp'], 'DOUBLE', ['DOUBLE'],
'_ZN9starrocks13MathFunctions3expEPN13starrocks_udf15FunctionContextERKNS1_9DoubleValE'],
[['radians'], 'DOUBLE', ['DOUBLE'],
'_ZN9starrocks13MathFunctions7radiansEPN13starrocks_udf15FunctionContextERKNS1_9DoubleValE'],
[['degrees'], 'DOUBLE', ['DOUBLE'],
'_ZN9starrocks13MathFunctions7degreesEPN13starrocks_udf15FunctionContextERKNS1_9DoubleValE'],
[['sqrt', 'dsqrt'], 'DOUBLE', ['DOUBLE'],
'_ZN9starrocks13MathFunctions4sqrtEPN13starrocks_udf15FunctionContextERKNS1_9DoubleValE'],
[['pow', 'power', 'dpow', 'fpow'], 'DOUBLE', ['DOUBLE', 'DOUBLE'],
'_ZN9starrocks13MathFunctions3powEPN13starrocks_udf15FunctionContextERKNS1_9DoubleValES6_'],
[['rand', 'random'], 'DOUBLE', [],
'_ZN9starrocks13MathFunctions4randEPN13starrocks_udf15FunctionContextE',
'_ZN9starrocks13MathFunctions12rand_prepareEPN13starrocks_udf'
'15FunctionContextENS2_18FunctionStateScopeE'],
[['rand', 'random'], 'DOUBLE', ['BIGINT'],
'_ZN9starrocks13MathFunctions9rand_seedEPN13starrocks_udf15FunctionContextERKNS1_9BigIntValE',
'_ZN9starrocks13MathFunctions12rand_prepareEPN13starrocks_udf'
'15FunctionContextENS2_18FunctionStateScopeE'],
[['bin'], 'VARCHAR', ['BIGINT'],
'_ZN9starrocks13MathFunctions3binEPN13starrocks_udf15FunctionContextERKNS1_9BigIntValE'],
[['hex'], 'VARCHAR', ['BIGINT'],
'_ZN9starrocks13MathFunctions7hex_intEPN13starrocks_udf15FunctionContextERKNS1_9BigIntValE'],
[['hex'], 'VARCHAR', ['VARCHAR'],
'_ZN9starrocks13MathFunctions10hex_stringEPN13starrocks_udf15FunctionContextERKNS1_9StringValE'],
[['unhex'], 'VARCHAR', ['VARCHAR'],
'_ZN9starrocks13MathFunctions5unhexEPN13starrocks_udf15FunctionContextERKNS1_9StringValE'],
[['conv'], 'VARCHAR', ['BIGINT', 'TINYINT', 'TINYINT'],
'_ZN9starrocks13MathFunctions8conv_intEPN13starrocks_udf'
'15FunctionContextERKNS1_9BigIntValERKNS1_10TinyIntValES9_'],
[['conv'], 'VARCHAR', ['VARCHAR', 'TINYINT', 'TINYINT'],
'_ZN9starrocks13MathFunctions11conv_stringEPN13starrocks_udf'
'15FunctionContextERKNS1_9StringValERKNS1_10TinyIntValES9_'],
[['pmod'], 'BIGINT', ['BIGINT', 'BIGINT'],
'_ZN9starrocks13MathFunctions11pmod_bigintEPN13starrocks_udf'
'15FunctionContextERKNS1_9BigIntValES6_'],
[['pmod'], 'DOUBLE', ['DOUBLE', 'DOUBLE'],
'_ZN9starrocks13MathFunctions11pmod_doubleEPN13starrocks_udf'
'15FunctionContextERKNS1_9DoubleValES6_'],
[['mod'], 'TINYINT', ['TINYINT', 'TINYINT'],
'_ZN9starrocks9Operators29mod_tiny_int_val_tiny_int_valEPN13starrocks_udf'
'15FunctionContextERKNS1_10TinyIntValES6_'],
[['mod'], 'SMALLINT', ['SMALLINT', 'SMALLINT'],
'_ZN9starrocks9Operators31mod_small_int_val_small_int_valEPN13starrocks_udf'
'15FunctionContextERKNS1_11SmallIntValES6_'],
[['mod'], 'INT', ['INT', 'INT'],
'_ZN9starrocks9Operators19mod_int_val_int_valEPN13starrocks_udf'
'15FunctionContextERKNS1_6IntValES6_'],
[['mod'], 'BIGINT', ['BIGINT', 'BIGINT'],
'_ZN9starrocks9Operators27mod_big_int_val_big_int_valEPN13starrocks_udf'
'15FunctionContextERKNS1_9BigIntValES6_'],
[['mod'], 'LARGEINT', ['LARGEINT', 'LARGEINT'],
'_ZN9starrocks9Operators31mod_large_int_val_large_int_valEPN13starrocks_udf'
'15FunctionContextERKNS1_11LargeIntValES6_'],
[['mod'], 'DECIMALV2', ['DECIMALV2', 'DECIMALV2'],
'_ZN9starrocks18DecimalV2Operators31mod_decimalv2_val_decimalv2_valEPN13starrocks_udf'
'15FunctionContextERKNS1_12DecimalV2ValES6_'],
[['mod', 'fmod'], 'DOUBLE', ['DOUBLE', 'DOUBLE'],
'_ZN9starrocks13MathFunctions11fmod_doubleEPN13starrocks_udf15FunctionContextERKNS1_9DoubleValES6_'],
[['mod', 'fmod'], 'FLOAT', ['FLOAT', 'FLOAT'],
'_ZN9starrocks13MathFunctions10fmod_floatEPN13starrocks_udf15FunctionContextERKNS1_8FloatValES6_'],
[['mod'], 'DECIMAL32', ['DECIMAL32', 'DECIMAL32'],
'_ZN9starrocks9Operators19mod_int_val_int_valEPN13starrocks_udf'
'15FunctionContextERKNS1_6IntValES6_'],
[['mod'], 'DECIMAL64', ['DECIMAL64', 'DECIMAL64'],
'_ZN9starrocks9Operators27mod_big_int_val_big_int_valEPN13starrocks_udf'
'15FunctionContextERKNS1_9BigIntValES6_'],
[['mod'], 'DECIMAL128', ['DECIMAL128', 'DECIMAL128'],
'_ZN9starrocks9Operators31mod_large_int_val_large_int_valEPN13starrocks_udf'
'15FunctionContextERKNS1_11LargeIntValES6_'],
[['positive'], 'BIGINT', ['BIGINT'],
'_ZN9starrocks13MathFunctions15positive_bigintEPN13starrocks_udf'
'15FunctionContextERKNS1_9BigIntValE'],
[['positive'], 'DOUBLE', ['DOUBLE'],
'_ZN9starrocks13MathFunctions15positive_doubleEPN13starrocks_udf'
'15FunctionContextERKNS1_9DoubleValE'],
[['positive'], 'DECIMALV2', ['DECIMALV2'],
'_ZN9starrocks13MathFunctions16positive_decimalEPN13starrocks_udf'
'15FunctionContextERKNS1_12DecimalV2ValE'],
[['positive'], 'DECIMAL32', ['DECIMAL32'],
'_ZN9starrocks13MathFunctions15positive_bigintEPN13starrocks_udf'
'15FunctionContextERKNS1_9BigIntValE'],
[['positive'], 'DECIMAL64', ['DECIMAL64'],
'_ZN9starrocks13MathFunctions15positive_bigintEPN13starrocks_udf'
'15FunctionContextERKNS1_9BigIntValE'],
[['positive'], 'DECIMAL128', ['DECIMAL128'],
'_ZN9starrocks13MathFunctions15positive_bigintEPN13starrocks_udf'
'15FunctionContextERKNS1_9BigIntValE'],
[['negative'], 'BIGINT', ['BIGINT'],
'_ZN9starrocks13MathFunctions15negative_bigintEPN13starrocks_udf'
'15FunctionContextERKNS1_9BigIntValE'],
[['negative'], 'DOUBLE', ['DOUBLE'],
'_ZN9starrocks13MathFunctions15negative_doubleEPN13starrocks_udf'
'15FunctionContextERKNS1_9DoubleValE'],
[['negative'], 'DECIMALV2', ['DECIMALV2'],
'_ZN9starrocks13MathFunctions16negative_decimalEPN13starrocks_udf'
'15FunctionContextERKNS1_12DecimalV2ValE'],
[['negative'], 'DECIMAL32', ['DECIMAL32'],
'_ZN9starrocks13MathFunctions15negative_bigintEPN13starrocks_udf'
'15FunctionContextERKNS1_9BigIntValE'],
[['negative'], 'DECIMAL64', ['DECIMAL64'],
'_ZN9starrocks13MathFunctions15negative_bigintEPN13starrocks_udf'
'15FunctionContextERKNS1_9BigIntValE'],
[['negative'], 'DECIMAL128', ['DECIMAL128'],
'_ZN9starrocks13MathFunctions15negative_bigintEPN13starrocks_udf'
'15FunctionContextERKNS1_9BigIntValE'],
[['least'], 'TINYINT', ['TINYINT', '...'],
'_ZN9starrocks13MathFunctions5leastEPN13starrocks_udf15FunctionContextEiPKNS1_10TinyIntValE'],
[['least'], 'SMALLINT', ['SMALLINT', '...'],
'_ZN9starrocks13MathFunctions5leastEPN13starrocks_udf15FunctionContextEiPKNS1_11SmallIntValE'],
[['least'], 'INT', ['INT', '...'],
'_ZN9starrocks13MathFunctions5leastEPN13starrocks_udf15FunctionContextEiPKNS1_6IntValE'],
[['least'], 'BIGINT', ['BIGINT', '...'],
'_ZN9starrocks13MathFunctions5leastEPN13starrocks_udf15FunctionContextEiPKNS1_9BigIntValE'],
[['least'], 'LARGEINT', ['LARGEINT', '...'],
'_ZN9starrocks13MathFunctions5leastEPN13starrocks_udf15FunctionContextEiPKNS1_11LargeIntValE'],
[['least'], 'DECIMAL32', ['DECIMAL32', '...'],
'_ZN9starrocks13MathFunctions5leastEPN13starrocks_udf15FunctionContextEiPKNS1_6IntValE'],
[['least'], 'DECIMAL64', ['DECIMAL64', '...'],
'_ZN9starrocks13MathFunctions5leastEPN13starrocks_udf15FunctionContextEiPKNS1_9BigIntValE'],
[['least'], 'DECIMAL128', ['DECIMAL128', '...'],
'_ZN9starrocks13MathFunctions5leastEPN13starrocks_udf15FunctionContextEiPKNS1_11LargeIntValE'],
[['least'], 'FLOAT', ['FLOAT', '...'],
'_ZN9starrocks13MathFunctions5leastEPN13starrocks_udf15FunctionContextEiPKNS1_8FloatValE'],
[['least'], 'DOUBLE', ['DOUBLE', '...'],
'_ZN9starrocks13MathFunctions5leastEPN13starrocks_udf15FunctionContextEiPKNS1_9DoubleValE'],
[['least'], 'DATETIME', ['DATETIME', '...'],
'_ZN9starrocks13MathFunctions5leastEPN13starrocks_udf15FunctionContextEiPKNS1_11DateTimeValE'],
[['least'], 'DECIMALV2', ['DECIMALV2', '...'],
'_ZN9starrocks13MathFunctions5leastEPN13starrocks_udf15FunctionContextEiPKNS1_12DecimalV2ValE'],
[['least'], 'VARCHAR', ['VARCHAR', '...'],
'_ZN9starrocks13MathFunctions5leastEPN13starrocks_udf15FunctionContextEiPKNS1_9StringValE'],
[['greatest'], 'TINYINT', ['TINYINT', '...'],
'_ZN9starrocks13MathFunctions8greatestEPN13starrocks_udf15FunctionContextEiPKNS1_10TinyIntValE'],
[['greatest'], 'SMALLINT', ['SMALLINT', '...'],
'_ZN9starrocks13MathFunctions8greatestEPN13starrocks_udf15FunctionContextEiPKNS1_11SmallIntValE'],
[['greatest'], 'INT', ['INT', '...'],
'_ZN9starrocks13MathFunctions8greatestEPN13starrocks_udf15FunctionContextEiPKNS1_6IntValE'],
[['greatest'], 'BIGINT', ['BIGINT', '...'],
'_ZN9starrocks13MathFunctions8greatestEPN13starrocks_udf15FunctionContextEiPKNS1_9BigIntValE'],
[['greatest'], 'LARGEINT', ['LARGEINT', '...'],
'_ZN9starrocks13MathFunctions8greatestEPN13starrocks_udf15FunctionContextEiPKNS1_11LargeIntValE'],
[['greatest'], 'DECIMAL32', ['DECIMAL32', '...'],
'_ZN9starrocks13MathFunctions8greatestEPN13starrocks_udf15FunctionContextEiPKNS1_6IntValE'],
[['greatest'], 'DECIMAL64', ['DECIMAL64', '...'],
'_ZN9starrocks13MathFunctions8greatestEPN13starrocks_udf15FunctionContextEiPKNS1_9BigIntValE'],
[['greatest'], 'DECIMAL128', ['DECIMAL128', '...'],
'_ZN9starrocks13MathFunctions8greatestEPN13starrocks_udf15FunctionContextEiPKNS1_11LargeIntValE'],
[['greatest'], 'FLOAT', ['FLOAT', '...'],
'_ZN9starrocks13MathFunctions8greatestEPN13starrocks_udf15FunctionContextEiPKNS1_8FloatValE'],
[['greatest'], 'DOUBLE', ['DOUBLE', '...'],
'_ZN9starrocks13MathFunctions8greatestEPN13starrocks_udf15FunctionContextEiPKNS1_9DoubleValE'],
[['greatest'], 'DECIMALV2', ['DECIMALV2', '...'],
'_ZN9starrocks13MathFunctions8greatestEPN13starrocks_udf15FunctionContextEiPKNS1_12DecimalV2ValE'],
[['greatest'], 'DATETIME', ['DATETIME', '...'],
'_ZN9starrocks13MathFunctions8greatestEPN13starrocks_udf15FunctionContextEiPKNS1_11DateTimeValE'],
[['greatest'], 'VARCHAR', ['VARCHAR', '...'],
'_ZN9starrocks13MathFunctions8greatestEPN13starrocks_udf15FunctionContextEiPKNS1_9StringValE'],
# Conditional Functions
# Some of these have empty symbols because the BE special-cases them based on the
# function name
[['if'], 'BOOLEAN', ['BOOLEAN', 'BOOLEAN', 'BOOLEAN'], ''],
[['if'], 'TINYINT', ['BOOLEAN', 'TINYINT', 'TINYINT'], ''],
[['if'], 'SMALLINT', ['BOOLEAN', 'SMALLINT', 'SMALLINT'], ''],
[['if'], 'INT', ['BOOLEAN', 'INT', 'INT'], ''],
[['if'], 'BIGINT', ['BOOLEAN', 'BIGINT', 'BIGINT'], ''],
[['if'], 'LARGEINT', ['BOOLEAN', 'LARGEINT', 'LARGEINT'], ''],
[['if'], 'FLOAT', ['BOOLEAN', 'FLOAT', 'FLOAT'], ''],
[['if'], 'DOUBLE', ['BOOLEAN', 'DOUBLE', 'DOUBLE'], ''],
[['if'], 'DATETIME', ['BOOLEAN', 'DATETIME', 'DATETIME'], ''],
[['if'], 'DATE', ['BOOLEAN', 'DATE', 'DATE'], ''],
[['if'], 'DECIMALV2', ['BOOLEAN', 'DECIMALV2', 'DECIMALV2'], ''],
# The priority of varchar should be lower than decimal in IS_SUPERTYPE_OF mode.
[['if'], 'VARCHAR', ['BOOLEAN', 'VARCHAR', 'VARCHAR'], ''],
[['if'], 'BITMAP', ['BOOLEAN', 'BITMAP', 'BITMAP'], ''],
[['if'], 'PERCENTILE', ['BOOLEAN', 'PERCENTILE', 'PERCENTILE'], ''],
[['if'], 'HLL', ['BOOLEAN', 'HLL', 'HLL'], ''],
[['if'], 'DECIMAL32', ['BOOLEAN', 'DECIMAL32', 'DECIMAL32'], ''],
[['if'], 'DECIMAL64', ['BOOLEAN', 'DECIMAL64', 'DECIMAL64'], ''],
[['if'], 'DECIMAL128', ['BOOLEAN', 'DECIMAL128', 'DECIMAL128'], ''],
[['nullif'], 'BOOLEAN', ['BOOLEAN', 'BOOLEAN'], ''],
[['nullif'], 'TINYINT', ['TINYINT', 'TINYINT'], ''],
[['nullif'], 'SMALLINT', ['SMALLINT', 'SMALLINT'], ''],
[['nullif'], 'INT', ['INT', 'INT'], ''],
[['nullif'], 'BIGINT', ['BIGINT', 'BIGINT'], ''],
[['nullif'], 'LARGEINT', ['LARGEINT', 'LARGEINT'], ''],
[['nullif'], 'FLOAT', ['FLOAT', 'FLOAT'], ''],
[['nullif'], 'DOUBLE', ['DOUBLE', 'DOUBLE'], ''],
[['nullif'], 'DATETIME', ['DATETIME', 'DATETIME'], ''],
[['nullif'], 'DATE', ['DATE', 'DATE'], ''],
[['nullif'], 'DECIMALV2', ['DECIMALV2', 'DECIMALV2'], ''],
# The priority of varchar should be lower than decimal in IS_SUPERTYPE_OF mode.
[['nullif'], 'VARCHAR', ['VARCHAR', 'VARCHAR'], ''],
[['nullif'], 'BITMAP', ['BITMAP', 'BITMAP'], ''],
[['nullif'], 'PERCENTILE', ['PERCENTILE', 'PERCENTILE'], ''],
[['nullif'], 'HLL', ['HLL', 'HLL'], ''],
[['nullif'], 'DECIMAL32', ['DECIMAL32', 'DECIMAL32'], ''],
[['nullif'], 'DECIMAL64', ['DECIMAL64', 'DECIMAL64'], ''],
[['nullif'], 'DECIMAL128', ['DECIMAL128', 'DECIMAL128'], ''],
[['ifnull'], 'BOOLEAN', ['BOOLEAN', 'BOOLEAN'], ''],
[['ifnull'], 'TINYINT', ['TINYINT', 'TINYINT'], ''],
[['ifnull'], 'SMALLINT', ['SMALLINT', 'SMALLINT'], ''],
[['ifnull'], 'INT', ['INT', 'INT'], ''],
[['ifnull'], 'BIGINT', ['BIGINT', 'BIGINT'], ''],
[['ifnull'], 'LARGEINT', ['LARGEINT', 'LARGEINT'], ''],
[['ifnull'], 'FLOAT', ['FLOAT', 'FLOAT'], ''],
[['ifnull'], 'DOUBLE', ['DOUBLE', 'DOUBLE'], ''],
[['ifnull'], 'DATE', ['DATE', 'DATE'], ''],
[['ifnull'], 'DATETIME', ['DATETIME', 'DATETIME'], ''],
[['ifnull'], 'DECIMALV2', ['DECIMALV2', 'DECIMALV2'], ''],
# The priority of varchar should be lower than decimal in IS_SUPERTYPE_OF mode.
[['ifnull'], 'VARCHAR', ['VARCHAR', 'VARCHAR'], ''],
[['ifnull'], 'BITMAP', ['BITMAP', 'BITMAP'], ''],
[['ifnull'], 'PERCENTILE', ['PERCENTILE', 'PERCENTILE'], ''],
[['ifnull'], 'HLL', ['HLL', 'HLL'], ''],
[['ifnull'], 'DECIMAL32', ['DECIMAL32', 'DECIMAL32'], ''],
[['ifnull'], 'DECIMAL64', ['DECIMAL64', 'DECIMAL64'], ''],
[['ifnull'], 'DECIMAL128', ['DECIMAL128', 'DECIMAL128'], ''],
[['coalesce'], 'BOOLEAN', ['BOOLEAN', '...'], ''],
[['coalesce'], 'TINYINT', ['TINYINT', '...'], ''],
[['coalesce'], 'SMALLINT', ['SMALLINT', '...'], ''],
[['coalesce'], 'INT', ['INT', '...'], ''],
[['coalesce'], 'BIGINT', ['BIGINT', '...'], ''],
[['coalesce'], 'LARGEINT', ['LARGEINT', '...'], ''],
[['coalesce'], 'FLOAT', ['FLOAT', '...'], ''],
[['coalesce'], 'DOUBLE', ['DOUBLE', '...'], ''],
[['coalesce'], 'DATETIME', ['DATETIME', '...'], ''],
[['coalesce'], 'DATE', ['DATE', '...'], ''],
[['coalesce'], 'DECIMALV2', ['DECIMALV2', '...'], ''],
# The priority of varchar should be lower than decimal in IS_SUPERTYPE_OF mode.
[['coalesce'], 'VARCHAR', ['VARCHAR', '...'], ''],
[['coalesce'], 'BITMAP', ['BITMAP', '...'], ''],
[['coalesce'], 'PERCENTILE', ['PERCENTILE', '...'], ''],
[['coalesce'], 'HLL', ['HLL', '...'], ''],
[['coalesce'], 'DECIMAL32', ['DECIMAL32', '...'], ''],
[['coalesce'], 'DECIMAL64', ['DECIMAL64', '...'], ''],
[['coalesce'], 'DECIMAL128', ['DECIMAL128', '...'], ''],
[['esquery'], 'BOOLEAN', ['VARCHAR', 'VARCHAR'],
'_ZN9starrocks11ESFunctions5matchEPN'
'13starrocks_udf15FunctionContextERKNS1_9StringValES6_'],
# String builtin functions
[['substr', 'substring'], 'VARCHAR', ['VARCHAR', 'INT'],
'_ZN9starrocks15StringFunctions9substringEPN'
'13starrocks_udf15FunctionContextERKNS1_9StringValERKNS1_6IntValE'],
[['substr', 'substring'], 'VARCHAR', ['VARCHAR', 'INT', 'INT'],
'_ZN9starrocks15StringFunctions9substringEPN'
'13starrocks_udf15FunctionContextERKNS1_9StringValERKNS1_6IntValES9_'],
[['strleft', 'left'], 'VARCHAR', ['VARCHAR', 'INT'],
'_ZN9starrocks15StringFunctions4leftEPN13starrocks_udf'
'15FunctionContextERKNS1_9StringValERKNS1_6IntValE'],
[['strright', 'right'], 'VARCHAR', ['VARCHAR', 'INT'],
'_ZN9starrocks15StringFunctions5rightEPN13starrocks_udf'
'15FunctionContextERKNS1_9StringValERKNS1_6IntValE'],
[['ends_with'], 'BOOLEAN', ['VARCHAR', 'VARCHAR'],
'_ZN9starrocks15StringFunctions9ends_withEPN13starrocks_udf15FunctionContextERKNS1_9StringValES6_'],
[['starts_with'], 'BOOLEAN', ['VARCHAR', 'VARCHAR'],
'_ZN9starrocks15StringFunctions11starts_withEPN13starrocks_udf15FunctionContextERKNS1_9StringValES6_'],
[['null_or_empty'], 'BOOLEAN', ['VARCHAR'],
'_ZN9starrocks15StringFunctions13null_or_emptyEPN13starrocks_udf15FunctionContextERKNS1_9StringValE'],
[['space'], 'VARCHAR', ['INT'],
'_ZN9starrocks15StringFunctions5spaceEPN13starrocks_udf15FunctionContextERKNS1_6IntValE'],
[['repeat'], 'VARCHAR', ['VARCHAR', 'INT'],
'_ZN9starrocks15StringFunctions6repeatEPN13starrocks_udf'
'15FunctionContextERKNS1_9StringValERKNS1_6IntValE'],
[['lpad'], 'VARCHAR', ['VARCHAR', 'INT', 'VARCHAR'],
'_ZN9starrocks15StringFunctions4lpadEPN13starrocks_udf'
'15FunctionContextERKNS1_9StringValERKNS1_6IntValES6_'],
[['rpad'], 'VARCHAR', ['VARCHAR', 'INT', 'VARCHAR'],
'_ZN9starrocks15StringFunctions4rpadEPN13starrocks_udf'
'15FunctionContextERKNS1_9StringValERKNS1_6IntValES6_'],
[['append_trailing_char_if_absent'], 'VARCHAR', ['VARCHAR', 'VARCHAR'],
'_ZN9starrocks15StringFunctions30append_trailing_char_if_absentEPN13starrocks_udf15FunctionContextERKNS1_9StringValES6_'],
[['length'], 'INT', ['VARCHAR'],
'_ZN9starrocks15StringFunctions6lengthEPN13starrocks_udf15FunctionContextERKNS1_9StringValE'],
[['char_length', 'character_length'], 'INT', ['VARCHAR'],
'_ZN9starrocks15StringFunctions16char_utf8_lengthEPN13starrocks_udf15FunctionContextERKNS1_9StringValE'],
[['lower', 'lcase'], 'VARCHAR', ['VARCHAR'],
'_ZN9starrocks15StringFunctions5lowerEPN13starrocks_udf15FunctionContextERKNS1_9StringValE'],
[['upper', 'ucase'], 'VARCHAR', ['VARCHAR'],
'_ZN9starrocks15StringFunctions5upperEPN13starrocks_udf15FunctionContextERKNS1_9StringValE'],
[['reverse'], 'VARCHAR', ['VARCHAR'],
'_ZN9starrocks15StringFunctions7reverseEPN13starrocks_udf15FunctionContextERKNS1_9StringValE'],
[['trim'], 'VARCHAR', ['VARCHAR'],
'_ZN9starrocks15StringFunctions4trimEPN13starrocks_udf15FunctionContextERKNS1_9StringValE'],
[['ltrim'], 'VARCHAR', ['VARCHAR'],
'_ZN9starrocks15StringFunctions5ltrimEPN13starrocks_udf15FunctionContextERKNS1_9StringValE'],
[['rtrim'], 'VARCHAR', ['VARCHAR'],
'_ZN9starrocks15StringFunctions5rtrimEPN13starrocks_udf15FunctionContextERKNS1_9StringValE'],
[['ascii'], 'INT', ['VARCHAR'],
'_ZN9starrocks15StringFunctions5asciiEPN13starrocks_udf15FunctionContextERKNS1_9StringValE'],
[['instr'], 'INT', ['VARCHAR', 'VARCHAR'],
'_ZN9starrocks15StringFunctions5instrEPN13starrocks_udf15FunctionContextERKNS1_9StringValES6_'],
[['locate'], 'INT', ['VARCHAR', 'VARCHAR'],
'_ZN9starrocks15StringFunctions6locateEPN13starrocks_udf15FunctionContextERKNS1_9StringValES6_'],
[['locate'], 'INT', ['VARCHAR', 'VARCHAR', 'INT'],
'_ZN9starrocks15StringFunctions10locate_posEPN13starrocks_udf'
'15FunctionContextERKNS1_9StringValES6_RKNS1_6IntValE'],
[['regexp_extract'], 'VARCHAR', ['VARCHAR', 'VARCHAR', 'BIGINT'],
'_ZN9starrocks15StringFunctions14regexp_extractEPN13starrocks_udf'
'15FunctionContextERKNS1_9StringValES6_RKNS1_9BigIntValE',
'_ZN9starrocks15StringFunctions14regexp_prepareEPN13starrocks_udf'
'15FunctionContextENS2_18FunctionStateScopeE',
'_ZN9starrocks15StringFunctions12regexp_closeEPN13starrocks_udf'
'15FunctionContextENS2_18FunctionStateScopeE'],
[['regexp_replace'], 'VARCHAR', ['VARCHAR', 'VARCHAR', 'VARCHAR'],
'_ZN9starrocks15StringFunctions14regexp_replaceEPN13starrocks_udf'
'15FunctionContextERKNS1_9StringValES6_S6_',
'_ZN9starrocks15StringFunctions14regexp_prepareEPN13starrocks_udf'
'15FunctionContextENS2_18FunctionStateScopeE',
'_ZN9starrocks15StringFunctions12regexp_closeEPN13starrocks_udf'
'15FunctionContextENS2_18FunctionStateScopeE'],
[['concat'], 'VARCHAR', ['VARCHAR', '...'],
'_ZN9starrocks15StringFunctions6concatEPN13starrocks_udf15FunctionContextEiPKNS1_9StringValE'],
[['concat_ws'], 'VARCHAR', ['VARCHAR', 'VARCHAR', '...'],
'_ZN9starrocks15StringFunctions9concat_wsEPN13starrocks_udf'
'15FunctionContextERKNS1_9StringValEiPS5_'],
[['find_in_set'], 'INT', ['VARCHAR', 'VARCHAR'],
'_ZN9starrocks15StringFunctions11find_in_setEPN13starrocks_udf'
'15FunctionContextERKNS1_9StringValES6_'],
[['parse_url'], 'VARCHAR', ['VARCHAR', 'VARCHAR'],
'_ZN9starrocks15StringFunctions9parse_urlEPN13starrocks_udf'
'15FunctionContextERKNS1_9StringValES6_',
'_ZN9starrocks15StringFunctions17parse_url_prepareEPN13starrocks_udf'
'15FunctionContextENS2_18FunctionStateScopeE',
'_ZN9starrocks15StringFunctions15parse_url_closeEPN13starrocks_udf'
'15FunctionContextENS2_18FunctionStateScopeE'],
[['parse_url'], 'VARCHAR', ['VARCHAR', 'VARCHAR', 'VARCHAR'],
'_ZN9starrocks15StringFunctions13parse_url_keyEPN13starrocks_udf'
'15FunctionContextERKNS1_9StringValES6_S6_',
'_ZN9starrocks15StringFunctions17parse_url_prepareEPN13starrocks_udf'
'15FunctionContextENS2_18FunctionStateScopeE',
'_ZN9starrocks15StringFunctions15parse_url_closeEPN13starrocks_udf'
'15FunctionContextENS2_18FunctionStateScopeE'],
[['money_format'], 'VARCHAR', ['BIGINT'],
'_ZN9starrocks15StringFunctions12money_formatEPN13starrocks_udf15FunctionContextERKNS1_9BigIntValE'],
[['money_format'], 'VARCHAR', ['LARGEINT'],
'_ZN9starrocks15StringFunctions12money_formatEPN13starrocks_udf15FunctionContextERKNS1_11LargeIntValE'],
[['money_format'], 'VARCHAR', ['DOUBLE'],
'_ZN9starrocks15StringFunctions12money_formatEPN13starrocks_udf15FunctionContextERKNS1_9DoubleValE'],
[['money_format'], 'VARCHAR', ['DECIMALV2'],
'_ZN9starrocks15StringFunctions12money_formatEPN13starrocks_udf15FunctionContextERKNS1_12DecimalV2ValE'],
[['split_part'], 'VARCHAR', ['VARCHAR', 'VARCHAR', 'INT'],
'_ZN9starrocks15StringFunctions10split_partEPN13starrocks_udf15FunctionContextERKNS1_9StringValES6_RKNS1_6IntValE'],
[['money_format'], 'VARCHAR', ['DECIMAL32'],
'_ZN9starrocks15StringFunctions12money_formatEPN13starrocks_udf15FunctionContextERKNS1_9BigIntValE'],
[['money_format'], 'VARCHAR', ['DECIMAL64'],
'_ZN9starrocks15StringFunctions12money_formatEPN13starrocks_udf15FunctionContextERKNS1_9BigIntValE'],
[['money_format'], 'VARCHAR', ['DECIMAL128'],
'_ZN9starrocks15StringFunctions12money_formatEPN13starrocks_udf15FunctionContextERKNS1_9BigIntValE'],
# Utility functions
[['sleep'], 'BOOLEAN', ['INT'],
'_ZN9starrocks16UtilityFunctions5sleepEPN13starrocks_udf15FunctionContextERKNS1_6IntValE'],
[['version'], 'VARCHAR', [],
'_ZN9starrocks16UtilityFunctions7versionEPN13starrocks_udf15FunctionContextE'],
[['current_version'], 'VARCHAR', [],
'_ZN9starrocks16UtilityFunctions15current_versionEPN13starrocks_udf15FunctionContextE'],
[['last_query_id'], 'VARCHAR', [],
'_ZN9starrocks16UtilityFunctions13last_query_idEPN13starrocks_udf15FunctionContextE'],
# Json functions
[['get_json_int'], 'INT', ['VARCHAR', 'VARCHAR'],
'_ZN9starrocks13JsonFunctions12get_json_intEPN13starrocks_udf15FunctionContextERKNS1_9StringValES6_',
'_ZN9starrocks13JsonFunctions17json_path_prepareEPN13starrocks_udf15FunctionContextENS2_18FunctionStateScopeE',
'_ZN9starrocks13JsonFunctions15json_path_closeEPN13starrocks_udf15FunctionContextENS2_18FunctionStateScopeE'],
[['get_json_double'], 'DOUBLE', ['VARCHAR', 'VARCHAR'],
'_ZN9starrocks13JsonFunctions15get_json_doubleEPN13starrocks_udf15FunctionContextERKNS1_9StringValES6_',
'_ZN9starrocks13JsonFunctions17json_path_prepareEPN13starrocks_udf15FunctionContextENS2_18FunctionStateScopeE',
'_ZN9starrocks13JsonFunctions15json_path_closeEPN13starrocks_udf15FunctionContextENS2_18FunctionStateScopeE'],
[['get_json_string'], 'VARCHAR', ['VARCHAR', 'VARCHAR'],
'_ZN9starrocks13JsonFunctions15get_json_stringEPN13starrocks_udf15FunctionContextERKNS1_9StringValES6_',
'_ZN9starrocks13JsonFunctions17json_path_prepareEPN13starrocks_udf15FunctionContextENS2_18FunctionStateScopeE',
'_ZN9starrocks13JsonFunctions15json_path_closeEPN13starrocks_udf15FunctionContextENS2_18FunctionStateScopeE'],
#hll function
[['hll_cardinality'], 'BIGINT', ['HLL'],
'_ZN9starrocks12HllFunctions15hll_cardinalityEPN13starrocks_udf15FunctionContextERKNS1_9StringValE'],
[['hll_cardinality'], 'BIGINT', ['VARCHAR'],
'_ZN9starrocks12HllFunctions15hll_cardinalityEPN13starrocks_udf15FunctionContextERKNS1_9StringValE'],
[['hll_hash'], 'HLL', ['VARCHAR'],
'_ZN9starrocks12HllFunctions8hll_hashEPN13starrocks_udf15FunctionContextERKNS1_9StringValE'],
[['hll_empty'], 'HLL', [],
'_ZN9starrocks12HllFunctions9hll_emptyEPN13starrocks_udf15FunctionContextE'],
#bitmap function
[['to_bitmap'], 'BITMAP', ['VARCHAR'],
'_ZN9starrocks15BitmapFunctions9to_bitmapEPN13starrocks_udf15FunctionContextERKNS1_9StringValE'],
[['bitmap_hash'], 'BITMAP', ['VARCHAR'],
'_ZN9starrocks15BitmapFunctions11bitmap_hashEPN13starrocks_udf15FunctionContextERKNS1_9StringValE'],
[['bitmap_count'], 'BIGINT', ['BITMAP'],
'_ZN9starrocks15BitmapFunctions12bitmap_countEPN13starrocks_udf15FunctionContextERKNS1_9StringValE'],
[['bitmap_empty'], 'BITMAP', [],
'_ZN9starrocks15BitmapFunctions12bitmap_emptyEPN13starrocks_udf15FunctionContextE'],
[['bitmap_or'], 'BITMAP', ['BITMAP','BITMAP'],
'_ZN9starrocks15BitmapFunctions9bitmap_orEPN13starrocks_udf15FunctionContextERKNS1_9StringValES6_'],
[['bitmap_and'], 'BITMAP', ['BITMAP','BITMAP'],
'_ZN9starrocks15BitmapFunctions10bitmap_andEPN13starrocks_udf15FunctionContextERKNS1_9StringValES6_'],
[['bitmap_to_string'], 'VARCHAR', ['BITMAP'],
'_ZN9starrocks15BitmapFunctions16bitmap_to_stringEPN13starrocks_udf15FunctionContextERKNS1_9StringValE'],
[['bitmap_from_string'], 'BITMAP', ['VARCHAR'],
'_ZN9starrocks15BitmapFunctions18bitmap_from_stringEPN13starrocks_udf15FunctionContextERKNS1_9StringValE'],
[['bitmap_contains'], 'BOOLEAN', ['BITMAP','BIGINT'],
'_ZN9starrocks15BitmapFunctions15bitmap_containsEPN13starrocks_udf15FunctionContextERKNS1_9StringValERKNS1_9BigIntValE'],
[['bitmap_has_any'], 'BOOLEAN', ['BITMAP','BITMAP'],
'_ZN9starrocks15BitmapFunctions14bitmap_has_anyEPN13starrocks_udf15FunctionContextERKNS1_9StringValES6_'],
    # percentile functions
[['percentile_hash'], 'PERCENTILE', ['DOUBLE'],
'_ZN9starrocks19PercentileFunctions15percentile_hashEPN13starrocks_udf15FunctionContextERKNS1_9DoubleValE'],
[['percentile_empty'], 'PERCENTILE', [],
'_ZN9starrocks19PercentileFunctions16percentile_emptyEPN13starrocks_udf15FunctionContextE'],
[['percentile_approx_raw'], 'DOUBLE', ['PERCENTILE', 'DOUBLE'],
'_ZN9starrocks19PercentileFunctions21percentile_approx_rawEPN13starrocks_udf15FunctionContextERKNS1_9StringValERKNS1_9DoubleValE'],
# hash functions
[['murmur_hash3_32'], 'INT', ['VARCHAR', '...'],
'_ZN9starrocks13HashFunctions15murmur_hash3_32EPN13starrocks_udf15FunctionContextEiPKNS1_9StringValE'],
    # AES and base64 functions
[['aes_encrypt'], 'VARCHAR', ['VARCHAR', 'VARCHAR'],
'_ZN9starrocks19EncryptionFunctions11aes_encryptEPN13starrocks_udf'
'15FunctionContextERKNS1_9StringValES6_'],
[['aes_decrypt'], 'VARCHAR', ['VARCHAR', 'VARCHAR'],
'_ZN9starrocks19EncryptionFunctions11aes_decryptEPN13starrocks_udf'
'15FunctionContextERKNS1_9StringValES6_'],
[['from_base64'], 'VARCHAR', ['VARCHAR'],
'_ZN9starrocks19EncryptionFunctions11from_base64EPN13starrocks_udf'
'15FunctionContextERKNS1_9StringValE'],
[['to_base64'], 'VARCHAR', ['VARCHAR'],
'_ZN9starrocks19EncryptionFunctions9to_base64EPN13starrocks_udf'
'15FunctionContextERKNS1_9StringValE'],
    # for compatibility with MySQL
[['md5'], 'VARCHAR', ['VARCHAR'],
'_ZN9starrocks19EncryptionFunctions3md5EPN13starrocks_udf15FunctionContextERKNS1_9StringValE'],
[['md5sum'], 'VARCHAR', ['VARCHAR', '...'],
'_ZN9starrocks19EncryptionFunctions6md5sumEPN13starrocks_udf15FunctionContextEiPKNS1_9StringValE'],
# geo functions
[['ST_Point'], 'VARCHAR', ['DOUBLE', 'DOUBLE'],
'_ZN9starrocks12GeoFunctions8st_pointEPN13starrocks_udf15FunctionContextERKNS1_9DoubleValES6_'],
[['ST_X'], 'DOUBLE', ['VARCHAR'],
'_ZN9starrocks12GeoFunctions4st_xEPN13starrocks_udf15FunctionContextERKNS1_9StringValE'],
[['ST_Y'], 'DOUBLE', ['VARCHAR'],
'_ZN9starrocks12GeoFunctions4st_yEPN13starrocks_udf15FunctionContextERKNS1_9StringValE'],
[['ST_Distance_Sphere'], 'DOUBLE', ['DOUBLE', 'DOUBLE', 'DOUBLE', 'DOUBLE'],
'_ZN9starrocks12GeoFunctions18st_distance_sphereEPN13starrocks_udf15FunctionContextERKNS1_9DoubleValES6_S6_S6_'],
[['ST_AsText', 'ST_AsWKT'], 'VARCHAR', ['VARCHAR'],
'_ZN9starrocks12GeoFunctions9st_as_wktEPN13starrocks_udf15FunctionContextERKNS1_9StringValE'],
[['ST_GeometryFromText', 'ST_GeomFromText'], 'VARCHAR', ['VARCHAR'],
'_ZN9starrocks12GeoFunctions11st_from_wktEPN13starrocks_udf15FunctionContextERKNS1_9StringValE',
'_ZN9starrocks12GeoFunctions19st_from_wkt_prepareEPN13starrocks_udf15FunctionContextENS2_18FunctionStateScopeE',
'_ZN9starrocks12GeoFunctions17st_from_wkt_closeEPN13starrocks_udf15FunctionContextENS2_18FunctionStateScopeE'],
[['ST_LineFromText', 'ST_LineStringFromText'], 'VARCHAR', ['VARCHAR'],
'_ZN9starrocks12GeoFunctions7st_lineEPN13starrocks_udf15FunctionContextERKNS1_9StringValE',
'_ZN9starrocks12GeoFunctions15st_line_prepareEPN13starrocks_udf15FunctionContextENS2_18FunctionStateScopeE',
'_ZN9starrocks12GeoFunctions17st_from_wkt_closeEPN13starrocks_udf15FunctionContextENS2_18FunctionStateScopeE'],
[['ST_Polygon', 'ST_PolyFromText', 'ST_PolygonFromText'], 'VARCHAR', ['VARCHAR'],
'_ZN9starrocks12GeoFunctions10st_polygonEPN13starrocks_udf15FunctionContextERKNS1_9StringValE',
'_ZN9starrocks12GeoFunctions18st_polygon_prepareEPN13starrocks_udf15FunctionContextENS2_18FunctionStateScopeE',
'_ZN9starrocks12GeoFunctions17st_from_wkt_closeEPN13starrocks_udf15FunctionContextENS2_18FunctionStateScopeE'],
[['ST_Circle'], 'VARCHAR', ['DOUBLE', 'DOUBLE', 'DOUBLE'],
'_ZN9starrocks12GeoFunctions9st_circleEPN13starrocks_udf15FunctionContextERKNS1_9DoubleValES6_S6_',
'_ZN9starrocks12GeoFunctions17st_circle_prepareEPN13starrocks_udf15FunctionContextENS2_18FunctionStateScopeE',
'_ZN9starrocks12GeoFunctions17st_from_wkt_closeEPN13starrocks_udf15FunctionContextENS2_18FunctionStateScopeE'],
[['ST_Contains'], 'BOOLEAN', ['VARCHAR', 'VARCHAR'],
'_ZN9starrocks12GeoFunctions11st_containsEPN13starrocks_udf15FunctionContextERKNS1_9StringValES6_',
'_ZN9starrocks12GeoFunctions19st_contains_prepareEPN13starrocks_udf15FunctionContextENS2_18FunctionStateScopeE',
'_ZN9starrocks12GeoFunctions17st_contains_closeEPN13starrocks_udf15FunctionContextENS2_18FunctionStateScopeE'],
# grouping sets functions
[['grouping_id'], 'BIGINT', ['BIGINT'],
'_ZN9starrocks21GroupingSetsFunctions11grouping_idEPN13starrocks_udf15FunctionContextERKNS1_9BigIntValE'],
[['grouping'], 'BIGINT', ['BIGINT'], '_ZN9starrocks21GroupingSetsFunctions8groupingEPN13starrocks_udf15FunctionContextERKNS1_9BigIntValE'],
[['array_length'], 'INT', ['ANY_ARRAY'], ''],
[['array_append'], 'ANY_ARRAY', ['ANY_ARRAY', 'ANY_ELEMENT'], ''],
[['array_contains'], 'BOOLEAN', ['ANY_ARRAY', 'ANY_ELEMENT'], ''],
]
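
# Illustrative sketch only: each entry in the table above follows the layout
#   [names, return_type, arg_types, symbol, (optional) prepare_symbol, (optional) close_symbol]
# The hypothetical helper below simply unpacks that layout for inspection; it is
# not required by anything else in this file.
def describe_entry(entry):
    names, return_type, arg_types, symbol = entry[0], entry[1], entry[2], entry[3]
    prepare_symbol = entry[4] if len(entry) > 4 else None
    close_symbol = entry[5] if len(entry) > 5 else None
    return {
        'names': names,
        'return_type': return_type,
        'arg_types': arg_types,
        'symbol': symbol,
        'prepare_symbol': prepare_symbol,
        'close_symbol': close_symbol,
    }
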
# Except for the functions listed below, every other function returns null
# directly if any of its parameters is null.
# The functions in this set handle null values themselves instead of simply
# returning null.
#
# This set is only used to replace 'functions with null parameters' with NullLiteral
# when applying the FoldConstantsRule rule on the FE side.
# TODO(cmy): Are these functions only required to handle null values?
non_null_result_with_null_param_functions = [
'if',
'hll_hash',
'concat_ws',
'ifnull',
'nullif',
'null_or_empty',
'coalesce'
]
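
# Illustrative sketch only: one way the FE-side constant-folding pass described
# above could consult this set. The helper name and the convention of using
# None to stand in for a NULL literal are hypothetical.
def may_fold_null_args_to_null(function_name, literal_args):
    """Return True if a call with a NULL literal argument may be folded to NullLiteral."""
    has_null_arg = any(arg is None for arg in literal_args)
    return has_null_arg and function_name not in non_null_result_with_null_param_functions
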
invisible_functions = [
]
|
#!/usr/bin/env python
"""
Sample data for persistence/serialization examples.
This version is flat, for saving in CSV, INI, etc.
"""
AddressBook = [ {'first_name': "Chris",
'last_name': "Barker",
'address_line_1':"835 NE 33rd St",
'address_line_2' : "",
'address_city' : "Seattle",
'address_state': "WA",
'address_zip': "96543",
'email' : "PythonCHB@gmail.com",
'home_phone' : "206-555-1234",
'office_phone' : "123-456-7890",
'cell_phone' : "234-567-8901",
},
{'first_name': "Fred",
'last_name': "Jones",
'address_line_1':"123 SE 13th St",
'address_line_2' : "Apt. 43",
'address_city' : "Tacoma",
'address_state': "WA",
'address_zip': "93465",
'email' : "FredJones@some_company.com",
'home_phone' : "510-555-1234",
'office_phone' : "564-466-7990",
'cell_phone' : "403-561-8911",
},
{'first_name': "Nancy",
'last_name': "Wilson",
'address_line_1':"8654 Walnut St",
'address_line_2' : "Suite 567",
'address_city' : "Pasadena",
'address_state': "CA",
'address_zip': "12345",
'email' : "Wilson.Nancy@gmail.com",
'home_phone' : "423-321-9876",
'office_phone' : "123-765-9877",
'cell_phone' : "432-567-8466",
},
]
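
# A minimal sketch of saving this flat data to CSV with the standard library.
# The output file name "addresses.csv" is arbitrary; all records above share the
# same keys, so the first record's keys can serve as the header row.
import csv


def save_as_csv(book, path="addresses.csv"):
    with open(path, "w", newline="") as outfile:
        writer = csv.DictWriter(outfile, fieldnames=book[0].keys())
        writer.writeheader()
        writer.writerows(book)


# save_as_csv(AddressBook)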
|
def test_get_empty_collection(client):
empty_response = client.get('/data')
assert empty_response.status_code == 200
assert 'json' in empty_response.content_type
assert empty_response.is_json
assert empty_response.json['href'].startswith('http')
assert empty_response.json['href'].endswith('/data')
assert not empty_response.json['collection']
def test_put_to_collection(client):
put_response = client.put('/data',
data={'python': '1'},
)
assert put_response.status_code == 201
assert 'json' in put_response.content_type
assert put_response.is_json
assert put_response.location == put_response.json['href']
assert put_response.json['href'].startswith('http')
assert '/data/' in put_response.json['href']
def test_get_collection(client):
response = client.get('/data')
assert response.status_code == 200
assert 'json' in response.content_type
assert response.is_json
assert response.json['href'].startswith('http')
assert response.json['href'].endswith('/data')
assert response.json['collection']
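
# A minimal conftest-style sketch of the `client` fixture these tests rely on.
# The module name `sample_app` and the factory `create_app()` are hypothetical
# placeholders for wherever the Flask application under test is actually defined.
import pytest


@pytest.fixture
def client():
    from sample_app import create_app  # hypothetical import path
    app = create_app()
    app.config['TESTING'] = True
    with app.test_client() as test_client:
        yield test_client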
|
"""
@file
@brief `c3 <http://c3js.org/gettingstarted.html>`_
"""
def version():
"version"
return "0.4.2"
|
#!/usr/bin/env python3
# Generator duel (Advent of Code 2017, day 15 style): each generator multiplies
# its previous value by a fixed factor modulo 2147483647 (a Mersenne prime), and
# the judge counts the rounds in which the lowest 16 bits of both values match.
def next_nums():
    A = 703  # puzzle-specific starting value for generator A
    B = 516  # puzzle-specific starting value for generator B
    for _ in range(40000000):
        A *= 16807
        A %= 2147483647
        B *= 48271
        B %= 2147483647
        yield (A, B)


count = 0
for newA, newB in next_nums():
    if newA % 65536 == newB % 65536:  # compare the lowest 16 bits (value mod 2**16)
        count += 1
print(count)
|
class ApiConfig:
def __init__(self,
environment: str = None,
name: str = None,
is_debug: bool = None,
port: int = None,
root_directory: str = None):
self.port = port
self.is_debug = is_debug
self.name = name
self.environment = environment
self.root_directory = root_directory
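
# A small usage sketch; every value below is an arbitrary illustration, not a
# required or documented default.
if __name__ == "__main__":
    config = ApiConfig(environment="development",
                       name="sample-api",
                       is_debug=True,
                       port=8080,
                       root_directory="/srv/sample-api")
    print(config.name, config.environment, config.port)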
|
data = (
'Mang ', # 0x00
'Zhu ', # 0x01
'Utsubo ', # 0x02
'Du ', # 0x03
'Ji ', # 0x04
'Xiao ', # 0x05
'Ba ', # 0x06
'Suan ', # 0x07
'Ji ', # 0x08
'Zhen ', # 0x09
'Zhao ', # 0x0a
'Sun ', # 0x0b
'Ya ', # 0x0c
'Zhui ', # 0x0d
'Yuan ', # 0x0e
'Hu ', # 0x0f
'Gang ', # 0x10
'Xiao ', # 0x11
'Cen ', # 0x12
'Pi ', # 0x13
'Bi ', # 0x14
'Jian ', # 0x15
'Yi ', # 0x16
'Dong ', # 0x17
'Shan ', # 0x18
'Sheng ', # 0x19
'Xia ', # 0x1a
'Di ', # 0x1b
'Zhu ', # 0x1c
'Na ', # 0x1d
'Chi ', # 0x1e
'Gu ', # 0x1f
'Li ', # 0x20
'Qie ', # 0x21
'Min ', # 0x22
'Bao ', # 0x23
'Tiao ', # 0x24
'Si ', # 0x25
'Fu ', # 0x26
'Ce ', # 0x27
'Ben ', # 0x28
'Pei ', # 0x29
'Da ', # 0x2a
'Zi ', # 0x2b
'Di ', # 0x2c
'Ling ', # 0x2d
'Ze ', # 0x2e
'Nu ', # 0x2f
'Fu ', # 0x30
'Gou ', # 0x31
'Fan ', # 0x32
'Jia ', # 0x33
'Ge ', # 0x34
'Fan ', # 0x35
'Shi ', # 0x36
'Mao ', # 0x37
'Po ', # 0x38
'Sey ', # 0x39
'Jian ', # 0x3a
'Qiong ', # 0x3b
'Long ', # 0x3c
'Souke ', # 0x3d
'Bian ', # 0x3e
'Luo ', # 0x3f
'Gui ', # 0x40
'Qu ', # 0x41
'Chi ', # 0x42
'Yin ', # 0x43
'Yao ', # 0x44
'Xian ', # 0x45
'Bi ', # 0x46
'Qiong ', # 0x47
'Gua ', # 0x48
'Deng ', # 0x49
'Jiao ', # 0x4a
'Jin ', # 0x4b
'Quan ', # 0x4c
'Sun ', # 0x4d
'Ru ', # 0x4e
'Fa ', # 0x4f
'Kuang ', # 0x50
'Zhu ', # 0x51
'Tong ', # 0x52
'Ji ', # 0x53
'Da ', # 0x54
'Xing ', # 0x55
'Ce ', # 0x56
'Zhong ', # 0x57
'Kou ', # 0x58
'Lai ', # 0x59
'Bi ', # 0x5a
'Shai ', # 0x5b
'Dang ', # 0x5c
'Zheng ', # 0x5d
'Ce ', # 0x5e
'Fu ', # 0x5f
'Yun ', # 0x60
'Tu ', # 0x61
'Pa ', # 0x62
'Li ', # 0x63
'Lang ', # 0x64
'Ju ', # 0x65
'Guan ', # 0x66
'Jian ', # 0x67
'Han ', # 0x68
'Tong ', # 0x69
'Xia ', # 0x6a
'Zhi ', # 0x6b
'Cheng ', # 0x6c
'Suan ', # 0x6d
'Shi ', # 0x6e
'Zhu ', # 0x6f
'Zuo ', # 0x70
'Xiao ', # 0x71
'Shao ', # 0x72
'Ting ', # 0x73
'Ce ', # 0x74
'Yan ', # 0x75
'Gao ', # 0x76
'Kuai ', # 0x77
'Gan ', # 0x78
'Chou ', # 0x79
'Kago ', # 0x7a
'Gang ', # 0x7b
'Yun ', # 0x7c
'O ', # 0x7d
'Qian ', # 0x7e
'Xiao ', # 0x7f
'Jian ', # 0x80
'Pu ', # 0x81
'Lai ', # 0x82
'Zou ', # 0x83
'Bi ', # 0x84
'Bi ', # 0x85
'Bi ', # 0x86
'Ge ', # 0x87
'Chi ', # 0x88
'Guai ', # 0x89
'Yu ', # 0x8a
'Jian ', # 0x8b
'Zhao ', # 0x8c
'Gu ', # 0x8d
'Chi ', # 0x8e
'Zheng ', # 0x8f
'Jing ', # 0x90
'Sha ', # 0x91
'Zhou ', # 0x92
'Lu ', # 0x93
'Bo ', # 0x94
'Ji ', # 0x95
'Lin ', # 0x96
'Suan ', # 0x97
'Jun ', # 0x98
'Fu ', # 0x99
'Zha ', # 0x9a
'Gu ', # 0x9b
'Kong ', # 0x9c
'Qian ', # 0x9d
'Quan ', # 0x9e
'Jun ', # 0x9f
'Chui ', # 0xa0
'Guan ', # 0xa1
'Yuan ', # 0xa2
'Ce ', # 0xa3
'Ju ', # 0xa4
'Bo ', # 0xa5
'Ze ', # 0xa6
'Qie ', # 0xa7
'Tuo ', # 0xa8
'Luo ', # 0xa9
'Dan ', # 0xaa
'Xiao ', # 0xab
'Ruo ', # 0xac
'Jian ', # 0xad
'Xuan ', # 0xae
'Bian ', # 0xaf
'Sun ', # 0xb0
'Xiang ', # 0xb1
'Xian ', # 0xb2
'Ping ', # 0xb3
'Zhen ', # 0xb4
'Sheng ', # 0xb5
'Hu ', # 0xb6
'Shi ', # 0xb7
'Zhu ', # 0xb8
'Yue ', # 0xb9
'Chun ', # 0xba
'Lu ', # 0xbb
'Wu ', # 0xbc
'Dong ', # 0xbd
'Xiao ', # 0xbe
'Ji ', # 0xbf
'Jie ', # 0xc0
'Huang ', # 0xc1
'Xing ', # 0xc2
'Mei ', # 0xc3
'Fan ', # 0xc4
'Chui ', # 0xc5
'Zhuan ', # 0xc6
'Pian ', # 0xc7
'Feng ', # 0xc8
'Zhu ', # 0xc9
'Hong ', # 0xca
'Qie ', # 0xcb
'Hou ', # 0xcc
'Qiu ', # 0xcd
'Miao ', # 0xce
'Qian ', # 0xcf
None, # 0xd0
'Kui ', # 0xd1
'Sik ', # 0xd2
'Lou ', # 0xd3
'Yun ', # 0xd4
'He ', # 0xd5
'Tang ', # 0xd6
'Yue ', # 0xd7
'Chou ', # 0xd8
'Gao ', # 0xd9
'Fei ', # 0xda
'Ruo ', # 0xdb
'Zheng ', # 0xdc
'Gou ', # 0xdd
'Nie ', # 0xde
'Qian ', # 0xdf
'Xiao ', # 0xe0
'Cuan ', # 0xe1
'Gong ', # 0xe2
'Pang ', # 0xe3
'Du ', # 0xe4
'Li ', # 0xe5
'Bi ', # 0xe6
'Zhuo ', # 0xe7
'Chu ', # 0xe8
'Shai ', # 0xe9
'Chi ', # 0xea
'Zhu ', # 0xeb
'Qiang ', # 0xec
'Long ', # 0xed
'Lan ', # 0xee
'Jian ', # 0xef
'Bu ', # 0xf0
'Li ', # 0xf1
'Hui ', # 0xf2
'Bi ', # 0xf3
'Di ', # 0xf4
'Cong ', # 0xf5
'Yan ', # 0xf6
'Peng ', # 0xf7
'Sen ', # 0xf8
'Zhuan ', # 0xf9
'Pai ', # 0xfa
'Piao ', # 0xfb
'Dou ', # 0xfc
'Yu ', # 0xfd
'Mie ', # 0xfe
'Zhuan ', # 0xff
)
|
"""739. Daily Temperatures"""
class Solution(object):
def dailyTemperatures(self, T):
"""
:type T: List[int]
:rtype: List[int]
"""
        ## R2: monotonic stack of indices still waiting for a warmer day
        res = [0 for _ in range(len(T))]
        stack = []
        for pos, tem in enumerate(T):
            # pop every earlier day that is colder than today and record its wait
            while stack and T[stack[-1]] < tem:
                cur = stack.pop()
                res[cur] = pos - cur
            stack.append(pos)
        return res

        ## R1: earlier round, same approach; kept for reference but unreachable
        ## after the return above, so it is commented out.
        # ans = [0 for _ in range(len(T))]
        # stack = []
        # for i, t in enumerate(T):
        #     while stack and T[stack[-1]] < t:
        #         cur = stack.pop()
        #         ans[cur] = i - cur
        #     stack.append(i)
        # return ans
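

# Quick check with the well-known example from the problem statement: for
# temperatures [73, 74, 75, 71, 69, 72, 76, 73] the expected answer is
# [1, 1, 4, 2, 1, 1, 0, 0] days until a warmer temperature.
if __name__ == "__main__":
    print(Solution().dailyTemperatures([73, 74, 75, 71, 69, 72, 76, 73]))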
|
# Python 3.6.1
# Inverse-captcha-style puzzle: sum every digit that matches the next digit in
# the circular sequence (the digit after the last one is the first one again).
with open("input.txt", "r") as f:
    puzzle_input = [int(i) for i in f.read()[0:-1]]  # drop the trailing newline

total = 0
for cur_index in range(len(puzzle_input)):
    next_index = (cur_index + 1) % len(puzzle_input)  # wrap around at the end
    puz_cur = puzzle_input[cur_index]
    pnext = puzzle_input[next_index]
    if puz_cur == pnext:
        total += puz_cur
print(total)
|