content stringlengths 7 1.05M |
|---|
class Const:
    """Project-wide constant URLs for the PythonBox application."""
    GITHUB = "https://github.com/ayvytr/PythonBox"  # project home page
    ISSUE = "https://github.com/Ayvytr/PythonBox/issues"  # issue tracker
    # mailto template for bug reports; the empty {} placeholder is the body,
    # filled in later via str.format.
    MAIL = "mailto:ayvytr@163.com?subject=Bug-Report&body={}"
|
def prediction(image_path):
    """Load an image, show it with matplotlib, and print the model's top class.

    Parameters
    ----------
    image_path : str
        Path to the image file to classify.

    NOTE(review): relies on module-level globals defined elsewhere in the
    file: `tf`, `plt`, `np`, `model`, `labels`, `img_height`, `img_width` --
    confirm they are in scope before calling.
    """
    img = tf.keras.utils.load_img(
        image_path, target_size=(img_height, img_width))
    img = tf.keras.utils.img_to_array(img)
    plt.title('Image')
    plt.axis('off')
    # Scale to [0, 1] purely for display; the unscaled array goes to the model.
    plt.imshow((img/255.0).squeeze())
    # Prepend a batch axis: model.predict expects a batch of images.
    predict = model.predict(img[np.newaxis , ...])
    predicted_class = labels[np.argmax(predict[0] , axis = -1)]
    print('Prediction Value: ' , np.max(predict[0] , axis = -1))
    print("Classified:",predicted_class)
# Demonstration string for slice-based reversal.
target_str = "hello python world"
# "Reverse encrypt": a slice with step -1 walks the string backwards.
print(target_str[::-1])
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# Copyright (c) The Lab of Professor Weiwei Lin (linww@scut.edu.cn),
# School of Computer Science and Engineering, South China University of Technology.
# A-Tune is licensed under the Mulan PSL v2.
# You can use this software according to the terms and conditions of the Mulan PSL v2.
# You may obtain a copy of Mulan PSL v2 at:
# http://license.coscl.org.cn/MulanPSL2
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
# PURPOSE.
# See the Mulan PSL v2 for more details.
# Create: 2020-01-04
# Sample point: fixed values for the 150 decision variables x1..x150
# (each in 1..5) at which the benchmark polynomial below is evaluated.
x1 = 3
x2 = 1
x3 = 5
x4 = 3
x5 = 3
x6 = 3
x7 = 3
x8 = 3
x9 = 3
x10 = 2
x11 = 2
x12 = 4
x13 = 4
x14 = 2
x15 = 2
x16 = 1
x17 = 2
x18 = 5
x19 = 1
x20 = 1
x21 = 1
x22 = 1
x23 = 1
x24 = 2
x25 = 4
x26 = 2
x27 = 3
x28 = 1
x29 = 2
x30 = 4
x31 = 4
x32 = 1
x33 = 4
x34 = 1
x35 = 2
x36 = 1
x37 = 3
x38 = 2
x39 = 1
x40 = 2
x41 = 3
x42 = 3
x43 = 2
x44 = 2
x45 = 2
x46 = 4
x47 = 4
x48 = 2
x49 = 2
x50 = 2
x51 = 2
x52 = 1
x53 = 4
x54 = 3
x55 = 3
x56 = 1
x57 = 2
x58 = 3
x59 = 3
x60 = 3
x61 = 1
x62 = 3
x63 = 3
x64 = 4
x65 = 3
x66 = 2
x67 = 3
x68 = 3
x69 = 3
x70 = 2
x71 = 4
x72 = 1
x73 = 3
x74 = 2
x75 = 3
x76 = 1
x77 = 3
x78 = 1
x79 = 4
x80 = 2
x81 = 1
x82 = 1
x83 = 2
x84 = 4
x85 = 5
x86 = 3
x87 = 4
x88 = 2
x89 = 2
x90 = 1
x91 = 2
x92 = 1
x93 = 2
x94 = 1
x95 = 2
x96 = 3
x97 = 3
x98 = 2
x99 = 2
x100 = 3
x101 = 4
x102 = 3
x103 = 2
x104 = 2
x105 = 3
x106 = 5
x107 = 4
x108 = 2
x109 = 1
x110 = 4
x111 = 3
x112 = 4
x113 = 2
x114 = 2
x115 = 4
x116 = 4
x117 = 2
x118 = 3
x119 = 2
x120 = 4
x121 = 3
x122 = 2
x123 = 4
x124 = 4
x125 = 3
x126 = 4
x127 = 1
x128 = 3
x129 = 3
x130 = 5
x131 = 4
x132 = 3
x133 = 1
x134 = 2
x135 = 1
x136 = 1
x137 = 4
x138 = 4
x139 = 3
x140 = 1
x141 = 4
x142 = 1
x143 = 1
x144 = 4
x145 = 5
x146 = 4
x147 = 1
x148 = 4
x149 = 3
x150 = 3
# Objective: a fixed 150-term polynomial.  Each exponent 1..30 appears in
# five consecutive terms with coefficients 1..5; each variable is used
# exactly once.  (Presumably machine-generated -- the term order is fixed,
# so do not reformat the expression.)
y = 1 * x147 ** 1 + 2 * x80 ** 1 + 3 * x55 ** 1 + 4 * x81 ** 1 + 5 * x87 ** 1 + 1 * x82 ** 2 + 2 * x88 ** 2 + \
    3 * x83 ** 2 + 4 * x144 ** 2 + 5 * x38 ** 2 + 1 * x135 ** 3 + 2 * x125 ** 3 + 3 * x14 ** 3 + 4 * x65 ** 3 + \
    5 * x95 ** 3 + 1 * x73 ** 4 + 2 * x37 ** 4 + 3 * x105 ** 4 + 4 * x28 ** 4 + 5 * x121 ** 4 + 1 * x100 ** 5 + \
    2 * x141 ** 5 + 3 * x69 ** 5 + 4 * x97 ** 5 + 5 * x53 ** 5 + 1 * x126 ** 6 + 2 * x104 ** 6 + 3 * x103 ** 6 + \
    4 * x27 ** 6 + 5 * x10 ** 6 + 1 * x140 ** 7 + 2 * x54 ** 7 + 3 * x5 ** 7 + 4 * x70 ** 7 + 5 * x114 ** 7 + \
    1 * x57 ** 8 + 2 * x74 ** 8 + 3 * x26 ** 8 + 4 * x19 ** 8 + 5 * x111 ** 8 + 1 * x108 ** 9 + 2 * x48 ** 9 + \
    3 * x11 ** 9 + 4 * x59 ** 9 + 5 * x123 ** 9 + 1 * x61 ** 10 + 2 * x6 ** 10 + 3 * x79 ** 10 + 4 * x71 ** 10 + \
    5 * x98 ** 10 + 1 * x34 ** 11 + 2 * x112 ** 11 + 3 * x25 ** 11 + 4 * x93 ** 11 + 5 * x86 ** 11 + 1 * x64 ** 12 + \
    2 * x120 ** 12 + 3 * x20 ** 12 + 4 * x16 ** 12 + 5 * x94 ** 12 + 1 * x76 ** 13 + 2 * x21 ** 13 + 3 * x129 ** 13 + \
    4 * x146 ** 13 + 5 * x77 ** 13 + 1 * x46 ** 14 + 2 * x91 ** 14 + 3 * x31 ** 14 + 4 * x67 ** 14 + 5 * x150 ** 14 + \
    1 * x72 ** 15 + 2 * x84 ** 15 + 3 * x136 ** 15 + 4 * x15 ** 15 + 5 * x149 ** 15 + 1 * x2 ** 16 + 2 * x116 ** 16 + \
    3 * x66 ** 16 + 4 * x42 ** 16 + 5 * x45 ** 16 + 1 * x63 ** 17 + 2 * x85 ** 17 + 3 * x143 ** 17 + 4 * x4 ** 17 + \
    5 * x29 ** 17 + 1 * x113 ** 18 + 2 * x50 ** 18 + 3 * x132 ** 18 + 4 * x127 ** 18 + 5 * x30 ** 18 + 1 * x109 ** 19 +\
    2 * x131 ** 19 + 3 * x36 ** 19 + 4 * x9 ** 19 + 5 * x43 ** 19 + 1 * x119 ** 20 + 2 * x8 ** 20 + 3 * x68 ** 20 + \
    4 * x107 ** 20 + 5 * x12 ** 20 + 1 * x32 ** 21 + 2 * x122 ** 21 + 3 * x115 ** 21 + 4 * x75 ** 21 + 5 * x49 ** 21 + \
    1 * x110 ** 22 + 2 * x40 ** 22 + 3 * x17 ** 22 + 4 * x134 ** 22 + 5 * x128 ** 22 + 1 * x18 ** 23 + 2 * x142 ** 23 +\
    3 * x133 ** 23 + 4 * x24 ** 23 + 5 * x102 ** 23 + 1 * x145 ** 24 + 2 * x33 ** 24 + 3 * x106 ** 24 + 4 * x58 ** 24 +\
    5 * x47 ** 24 + 1 * x22 ** 25 + 2 * x118 ** 25 + 3 * x44 ** 25 + 4 * x35 ** 25 + 5 * x90 ** 25 + 1 * x96 ** 26 + \
    2 * x62 ** 26 + 3 * x78 ** 26 + 4 * x39 ** 26 + 5 * x99 ** 26 + 1 * x117 ** 27 + 2 * x1 ** 27 + 3 * x3 ** 27 + \
    4 * x7 ** 27 + 5 * x52 ** 27 + 1 * x60 ** 28 + 2 * x124 ** 28 + 3 * x139 ** 28 + 4 * x101 ** 28 + 5 * x23 ** 28 + \
    1 * x92 ** 29 + 2 * x148 ** 29 + 3 * x137 ** 29 + 4 * x89 ** 29 + 5 * x51 ** 29 + 1 * x41 ** 30 + 2 * x13 ** 30 + \
    3 * x130 ** 30 + 4 * x138 ** 30 + 5 * x56 ** 30
print("y = %s" % y)
|
def solve():
    """Read n from stdin and print the n x n tridiagonal 0/1 matrix.

    Cell (i, j) is '1' when |i - j| <= 1 (main diagonal and both adjacent
    diagonals), otherwise '0'.  Every cell is followed by a space, rows are
    separated by newlines.
    """
    n = int(input())
    rows = []
    for i in range(n):
        cells = ['1 ' if abs(i - j) <= 1 else '0 ' for j in range(n)]
        rows.append(''.join(cells))
    print('\n'.join(rows))


# One matrix per test case; the first input line is the number of cases.
for _ in range(int(input())):
    solve()
|
# A tuple is defined by the commas, not the parentheses:
tuple_a = 1, 2
tuple_b = (1, 2)
print(tuple_a == tuple_b)  # True -- both are the tuple (1, 2)
print(tuple_a[1])          # tuples support indexing: prints 2
AngkorWat = (13.4125, 103.866667)  # (latitude, longitude)
print(type(AngkorWat))
# <class 'tuple'>
print("AngkorWat is at latitude: {}".format(AngkorWat[0]))
# AngkorWat is at latitude: 13.4125
print("AngkorWat is at longitude: {}".format(AngkorWat[1]))
# AngkorWat is at longitude: 103.866667
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
''' Battery runner classes and Report classes '''
class BatteryRunner(object):
    """Run a battery of checks against an object, optionally fixing it."""

    def __init__(self, checks):
        # `checks` is a sequence of callables check(obj, fix) returning a
        # report object (see Report) with at least an `obj` attribute.
        self._checks = checks

    def check_only(self, obj):
        """Run every check without fixing; return the list of reports."""
        return [check(obj, False) for check in self._checks]

    def check_fix(self, obj):
        """Run every check in fix mode, threading the (possibly fixed)
        object from each report into the next check.

        Returns the final object and the list of reports.
        """
        reports = []
        current = obj
        for check in self._checks:
            report = check(current, True)
            current = report.obj
            reports.append(report)
        return current, reports

    def __len__(self):
        """Number of checks in the battery."""
        return len(self._checks)
class Report(object):
    """Outcome of a single battery check, possibly after an attempted fix.

    Attributes
    ----------
    obj : object
        Object that was tested (and possibly fixed).  Default None.
    error : None or Exception class
        Exception type to raise when the problem is severe enough; ``None``
        means this check never raises.
    problem_level : int
        Severity from 0 (no problem) to 50 (severe problem).  When the
        report comes from a fixing check, this is the severity remaining
        after the fix.  Default 0.
    problem_msg : str
        Description of the detected problem.  Default ''.
    fix_msg : str
        Description of any fix that was applied.  Default ''.
    """

    def __init__(self,
                 obj=None,
                 error=None,
                 problem_level=0,
                 problem_msg='',
                 fix_msg=''):
        self.obj = obj
        self.error = error
        self.problem_level = problem_level
        self.problem_msg = problem_msg
        self.fix_msg = fix_msg

    def __eq__(self, other):
        """Two reports are equal when all their attributes are equal."""
        return vars(self) == vars(other)

    def __ne__(self, other):
        """Inverse of ``__eq__``."""
        return vars(self) != vars(other)

    def __str__(self):
        """Printable form: the attribute dictionary."""
        return str(vars(self))

    @property
    def message(self):
        """Problem message, with the fix message appended when present."""
        if not self.fix_msg:
            return self.problem_msg
        return '; '.join((self.problem_msg, self.fix_msg))

    def log_raise(self, logger, error_level=40):
        """Log this report; raise ``self.error`` if the problem is severe.

        Parameters
        ----------
        logger : log
            Object implementing a ``log(level, msg)`` method.
        error_level : int, optional
            Raise when ``problem_level`` is non-zero and >= this value
            (and ``self.error`` is set).
        """
        logger.log(self.problem_level, self.message)
        severe = self.problem_level and self.problem_level >= error_level
        if severe and self.error:
            raise self.error(self.problem_msg)

    def write_raise(self, stream, error_level=40, log_level=30):
        """Write this report to ``stream`` if at least ``log_level``;
        raise ``self.error`` if the problem is severe (>= ``error_level``).
        """
        if self.problem_level >= log_level:
            stream.write('Level %s: %s\n' %
                         (self.problem_level, self.message))
        severe = self.problem_level and self.problem_level >= error_level
        if severe and self.error:
            raise self.error(self.problem_msg)
|
class Solution:
    def spiralOrder(self, matrix) -> list:
        """Return the elements of `matrix` in clockwise spiral order.

        Walks cell by cell, turning right whenever the next cell would be
        out of bounds or already visited.  Assumes a non-empty rectangular
        matrix (same as the original implementation).
        """
        rows, cols = len(matrix), len(matrix[0])
        visited = [[False] * cols for _ in range(rows)]
        # Clockwise turn order: right, down, left, up.
        directions = ((0, 1), (1, 0), (0, -1), (-1, 0))
        d = 0
        r = c = 0
        out = []
        for _ in range(rows * cols):
            out.append(matrix[r][c])
            visited[r][c] = True
            nr, nc = r + directions[d][0], c + directions[d][1]
            if not (0 <= nr < rows and 0 <= nc < cols and not visited[nr][nc]):
                # Blocked: turn clockwise and step in the new direction.
                d = (d + 1) % 4
                nr, nc = r + directions[d][0], c + directions[d][1]
            r, c = nr, nc
        return out
|
class RequireTwoFactorException(Exception):
    """Login could not complete because a two-factor step is required."""
    pass


class LoginFailedException(Exception):
    """A login attempt failed."""
    pass
|
"""
python-social-auth application, allows OpenId or OAuth user
registration/authentication just adding a few configurations.
"""
version = (0, 1, 16)
extra = ''
__version__ = '.'.join(map(str, version)) + extra
|
### Default pins for the M5Stack, i.e. the mapping onto "IoTKitV3.1 small".
DEFAULT_IOTKIT_LED1 = 27     # no function - use the internal NeoPixel instead
DEFAULT_IOTKIT_BUZZER = 27   # no function - use the internal vibration motor instead
DEFAULT_IOTKIT_BUTTON1 = 39  # push button A below the M5Stack touch screen
# Port A
DEFAULT_IOTKIT_I2C_SDA = 32
DEFAULT_IOTKIT_I2C_SCL = 33
# Port B
DEFAULT_IOTKIT_PORT_B_DAC = 26
DEFAULT_IOTKIT_PORT_B_ADC = 27
# Port C
DEFAULT_IOTKIT_PORT_C_TX = 14
DEFAULT_IOTKIT_PORT_C_RX = 13
# Port D - can be tapped on the shield at Grove A4/A5.
# BUG FIX: these two constants were originally (mis)named
# DEFAULT_IOTKIT_PORT_C_TX/RX again, silently overwriting the Port C pins
# above with 1/3; renamed to PORT_D as the comment intends.
DEFAULT_IOTKIT_PORT_D_TX = 1
DEFAULT_IOTKIT_PORT_D_RX = 3
# L293D with 1 DC motor (the second motor has no function)
DEFAULT_IOTKIT_MOTOR1_FWD = 2
DEFAULT_IOTKIT_MOTOR1_PWM = 19
DEFAULT_IOTKIT_MOTOR1_REV = 0
DEFAULT_IOTKIT_MOTOR2_FWD = -1
DEFAULT_IOTKIT_MOTOR2_PWM = -1
# NOTE(review): "REW" is likely a typo for "REV"; name kept for compatibility.
DEFAULT_IOTKIT_MOTOR2_REW = -1
# SPI bus incl. SS pin for the RFID reader
DEFAULT_IOTKIT_SPI_SCLK = 18
DEFAULT_IOTKIT_SPI_MISO = 38
DEFAULT_IOTKIT_SPI_MOSI = 23
DEFAULT_IOTKIT_SPI_SS = 25
# Servos, same pins as Port B
DEFAULT_IOTKIT_SERVO1 = 26
DEFAULT_IOTKIT_SERVO2 = 27
# ADC - analog pins
DEFAULT_IOTKIT_POTI = 35
DEFAULT_IOTKIT_HALL_SENSOR = 36
|
# Statistic categories as (category_id, is_float, scope, stat_name,
# description) tuples.  `scope` is either "player" or "play".  Per the
# defense_sk description below, defense_sk is the only float-valued field,
# hence its lone True flag.  (The meaning of category_id is not shown here --
# presumably an external stat identifier; confirm against the consumer.)
categories = [
    (82, False, "player", "defense_ast", "Assist to a tackle."),
    (91, False, "player", "defense_ffum", "Defensive player forced a fumble."),
    (88, False, "player", "defense_fgblk", "Defensive player blocked a field goal."),
    (60, False, "player", "defense_frec", "Defensive player recovered a fumble by the opposing team."),
    (62, False, "player", "defense_frec_tds", "Defensive player scored a touchdown after recovering a fumble by the opposing team."),
    (62, False, "player", "defense_frec_yds", "Yards gained by a defensive player after recovering a fumble by the opposing team."),
    (26, False, "player", "defense_int", "An interception."),
    (28, False, "player", "defense_int_tds", "A touchdown scored after an interception."),
    (28, False, "player", "defense_int_yds", "Yards gained after an interception."),
    (64, False, "player", "defense_misc_tds", "A touchdown scored on miscellaneous yardage (e.g., on a missed field goal or a blocked punt)."),
    (64, False, "player", "defense_misc_yds", "Miscellaneous yards gained by a defensive player (e.g., yardage on a missed field goal or blocked punt)."),
    (85, False, "player", "defense_pass_def", "Incomplete pass was due primarily to a defensive player's action."),
    (86, False, "player", "defense_puntblk", "Defensive player blocked a punt."),
    (110, False, "player", "defense_qbhit", "Defensive player knocked the quarterback to the ground and the quarterback was not the ball carrier."),
    (89, False, "player", "defense_safe", "Tackle by a defensive player that resulted in a safety. This is in addition to a tackle."),
    (84, True, "player", "defense_sk", "Defensive player sacked the quarterback. Note that this is the only field that is a floating point number. Namely, there can be half-credit sacks."),
    (84, False, "player", "defense_sk_yds", "Yards lost as a result of a sack."),
    (80, False, "player", "defense_tkl", "A defensive player tackle. (This include defense_tkl_primary.)"),
    (120, False, "player", "defense_tkl_loss", "Defensive player tackled the runner behind the line of scrimmage. Play must have ended, player must have received a tackle stat, has to be an offensive player tackled."),
    (402, False, "player", "defense_tkl_loss_yds", "The number of yards lost caused by a defensive tackle behind the line of scrimmage."),
    (80, False, "player", "defense_tkl_primary", "Defensive player was the primary tackler."),
    (87, False, "player", "defense_xpblk", "Defensive player blocked the extra point."),
    (5, False, "play", "first_down", "A first down or TD occurred due to a penalty. A play can have a first down from a pass or rush and from a penalty."),
    (9, False, "play", "fourth_down_att", "4th down play."),
    (8, False, "play", "fourth_down_conv", "4th down play resulted in a first down or touchdown."),
    (9, False, "play", "fourth_down_failed", "4th down play did not result in a first down or touchdown."),
    (52, False, "player", "fumbles_forced", "Player fumbled the ball, fumble was forced by another player."),
    (106, False, "player", "fumbles_lost", "Player fumbled the ball and the opposing team recovered it."),
    (53, False, "player", "fumbles_notforced", "Player fumbled the ball that was not caused by a defensive player."),
    (54, False, "player", "fumbles_oob", "Player fumbled the ball, and the ball went out of bounds."),
    (56, False, "player", "fumbles_rec", "Fumble recovery from a player on the same team."),
    (58, False, "player", "fumbles_rec_tds", "A touchdown after a fumble recovery from a player on the same team."),
    (58, False, "player", "fumbles_rec_yds", "Yards gained after a fumble recovery from a player on the same team."),
    (54, False, "player", "fumbles_tot", "Total number of fumbles by a player. Includes forced, not forced and out-of-bounds."),
    (410, False, "player", "kicking_all_yds", "Kickoff and length of kick. Includes end zone yards for all kicks into the end zone, including kickoffs ending in a touchback."),
    (102, False, "player", "kicking_downed", "A downed kickoff. A kickoff is downed when touched by an offensive player within the 10 yard free zone, and the ball is awarded to the receivers at the spot of the touch."),
    (71, False, "player", "kicking_fga", "A field goal attempt, including blocked field goals. Unlike a punt, a field goal is statistically blocked even if the ball does go beyond the line of scrimmage."),
    (71, False, "player", "kicking_fgb", "Field goal was blocked. Unlike a punt, a field goal is statistically blocked even if the ball does go beyond the line of scrimmage."),
    (70, False, "player", "kicking_fgm", "A field goal."),
    (70, False, "player", "kicking_fgm_yds", "The length of a successful field goal."),
    (71, False, "player", "kicking_fgmissed", "The field goal was unsuccessful, including blocked field goals. Unlike a punt, a field goal is statistically blocked even if the ball does go beyond the line of scrimmage."),
    (71, False, "player", "kicking_fgmissed_yds", "The length of an unsuccessful field goal, including blocked field goals. Unlike a punt, a field goal is statistically blocked even if the ball does go beyond the line of scrimmage."),
    (42, False, "player", "kicking_i20", "Kickoff and length of kick, where return ended inside opponent's 20 yard line."),
    (108, False, "player", "kicking_rec", "Recovery of own kickoff, whether or not the kickoff is onside."),
    (108, False, "player", "kicking_rec_tds", "Touchdown resulting from direct recovery in endzone of own kickoff, whether or not the kickoff is onside."),
    (44, False, "player", "kicking_tot", "A kickoff."),
    (44, False, "player", "kicking_touchback", "A kickoff that resulted in a touchback."),
    (74, False, "player", "kicking_xpa", "An extra point attempt."),
    (74, False, "player", "kicking_xpb", "Extra point was blocked."),
    (72, False, "player", "kicking_xpmade", "Extra point good."),
    (74, False, "player", "kicking_xpmissed", "Extra point missed. This includes blocked extra points."),
    (44, False, "player", "kicking_yds", "The length of a kickoff."),
    (50, False, "player", "kickret_fair", "A fair catch kickoff return."),
    (49, False, "player", "kickret_oob", "Kicked ball went out of bounds."),
    (46, False, "player", "kickret_ret", "A kickoff return."),
    (48, False, "player", "kickret_tds", "A kickoff return touchdown."),
    (51, False, "player", "kickret_touchback", "A kickoff return that resulted in a touchback."),
    (48, False, "player", "kickret_yds", "Yards gained by a kickoff return."),
    (19, False, "player", "passing_att", "A pass attempt."),
    (16, False, "player", "passing_cmp", "A pass completion."),
    (111, False, "player", "passing_cmp_air_yds", "Length of a pass, not including the yards gained by the receiver after the catch."),
    (4, False, "play", "passing_first_down", "A first down or TD occurred due to a pass."),
    (19, False, "player", "passing_incmp", "Pass was incomplete."),
    (112, False, "player", "passing_incmp_air_yds", "Length of the pass, if it would have been a completion."),
    (19, False, "player", "passing_int", "Pass attempt that resulted in an interception."),
    (20, False, "player", "passing_sk", "The player was sacked."),
    (103, False, "player", "passing_sk_yds", "The yards lost by a player that was sacked."),
    (16, False, "player", "passing_tds", "A pass completion that resulted in a touchdown."),
    (78, False, "player", "passing_twopta", "A passing two-point conversion attempt."),
    (77, False, "player", "passing_twoptm", "A successful passing two-point conversion."),
    (78, False, "player", "passing_twoptmissed", "An unsuccessful passing two-point conversion."),
    (16, False, "player", "passing_yds", "Total yards resulting from a pass completion."),
    (93, False, "play", "penalty", "A penalty occurred."),
    (5, False, "play", "penalty_first_down", "A first down or TD occurred due to a penalty."),
    (93, False, "play", "penalty_yds", "The number of yards gained or lost from a penalty."),
    (2, False, "player", "punting_blk", "Punt was blocked. A blocked punt is a punt that is touched behind the line of scrimmage, and is recovered, or goes out of bounds, behind the line of scrimmage. If the impetus of the punt takes it beyond the line of scrimmage, it is not a blocked punt."),
    (30, False, "player", "punting_i20", "A punt where the punt return ended inside the opponent's 20 yard line."),
    (32, False, "player", "punting_tot", "A punt."),
    (32, False, "player", "punting_touchback", "A punt that results in a touchback."),
    (32, False, "player", "punting_yds", "The length of a punt."),
    (38, False, "player", "puntret_downed", "Punt return where the ball was downed by kicking team."),
    (39, False, "player", "puntret_fair", "Punt return resulted in a fair catch."),
    (37, False, "player", "puntret_oob", "Punt went out of bounds."),
    (36, False, "player", "puntret_tds", "A punt return touchdown."),
    (34, False, "player", "puntret_tot", "A punt return."),
    (40, False, "player", "puntret_touchback", "A punt return that resulted in a touchback."),
    (36, False, "player", "puntret_yds", "Yards gained by a punt return."),
    (22, False, "player", "receiving_rec", "A reception."),
    (115, False, "player", "receiving_tar", "Player was the target of a pass attempt."),
    (24, False, "player", "receiving_tds", "A reception that results in a touchdown."),
    (105, False, "player", "receiving_twopta", "A receiving two-point conversion attempt."),
    (104, False, "player", "receiving_twoptm", "A successful receiving two-point conversion."),
    (105, False, "player", "receiving_twoptmissed", "An unsuccessful receiving two-point conversion."),
    (113, False, "player", "receiving_yac_yds", "Yardage from where the ball was caught until the player's action was over."),
    (24, False, "player", "receiving_yds", "Yards resulting from a reception."),
    (11, False, "player", "rushing_att", "A rushing attempt."),
    (3, False, "play", "rushing_first_down", "A first down or TD occurred due to a rush."),
    (95, False, "player", "rushing_loss", "Ball carrier was tackled for a loss behind the line of scrimmage, where at least one defensive player is credited with ending the rush with a tackle, or tackle assist."),
    (95, False, "player", "rushing_loss_yds", "Yards lost from the ball carrier being tackled for a loss behind the line of scrimmage, where at least one defensive player is credited with ending the rush with a tackle, or tackle assist."),
    (13, False, "player", "rushing_tds", "A touchdown resulting from a rush attempt."),
    (76, False, "player", "rushing_twopta", "A rushing two-point conversion attempt."),
    (75, False, "player", "rushing_twoptm", "A successful rushing two-point conversion."),
    (76, False, "player", "rushing_twoptmissed", "An unsuccessful rushing two-point conversion."),
    (13, False, "player", "rushing_yds", "Yards resulting from a rush."),
    (7, False, "play", "third_down_att", "3rd down play."),
    (6, False, "play", "third_down_conv", "3rd down play resulted in a first down or touchdown."),
    (7, False, "play", "third_down_failed", "3rd down play did not result in a first down or touchdown."),
    (68, False, "play", "timeout", "Team took a time out."),
    (301, False, "play", "xp_aborted", "The extra point was aborted."),
]
|
"""
https://leetcode.com/problems/container-with-most-water/
Given n non-negative integers a1, a2, ..., an , where each represents a point at coordinate (i, ai). n vertical lines are drawn such that the two endpoints of line i is at (i, ai) and (i, 0). Find two lines, which together with x-axis forms a container, such that the container contains the most water.
Note: You may not slant the container and n is at least 2.
Example:
Input: [1,8,6,2,5,4,8,3,7]
Output: 49
"""
class Solution(object):
def maxArea(self, height):
"""
:type height: List[int]
:rtype: int
"""
low, high = height[0], height[len(height)-1]
container_area = []
low_index = 0
high_index = len(height)-1
#print, low_index, high_index
while low_index < high_index:
#print "append"
container_area.append(min(low,high)*(high_index-low_index))
#print container_area
if low < high:
low_index += 1
low = height[low_index]
else:
high_index -=1
high = height[high_index]
return max(container_area)
|
# Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: MIT-0
"""
Stubs for testing cloudformation.py
"""
# Canned response shaped like CloudFormation DescribeStacks output, used as a
# stub when testing cloudformation.py (see module docstring above).
describe_stack = {
    'Stacks': [{
        'Outputs': [{
            'OutputKey': "DeploymentFrameworkRegionalKMSKey",
            'OutputValue': "some_key_arn"
        }, {
            'OutputKey': "DeploymentFrameworkRegionalS3Bucket",
            'OutputValue': "some_bucket_name"
        }],
        'StackStatus': 'CREATE_IN_PROGRESS'
    }]
}
|
# Bazel labels for the tools and data needed by the Renaissance smoke tests,
# plus the options passed through to the PhasicJ agent.
JAVA_EXEC_LABEL="//third_party/openjdk:java"
PHASICJ_AGENT_LABEL="//phasicj/agent:libpjagent"
RENAISSANCE_JAR_LABEL="//third_party/renaissance:jar"
RENAISSANCE_MAIN_CLASS="org.renaissance.core.Launcher"
PHASICJ_EXEC="//phasicj/cli"
EXTRA_PHASICJ_AGENT_OPTIONS="verbose"
def smoke_test_benchmark(name):
    """Declares an sh_test that runs the Renaissance benchmark `name` for a
    single repetition under the PhasicJ agent, as a smoke test."""
    native.sh_test(
        name = name,
        srcs = ["test.sh"],
        data = [
            JAVA_EXEC_LABEL,
            PHASICJ_AGENT_LABEL,
            RENAISSANCE_JAR_LABEL,
            PHASICJ_EXEC,
        ],
        # Positional arguments consumed by test.sh; "$(rootpath ...)" expands
        # a label to its runfiles-relative path at build time.
        args = [
            "$(rootpath {})".format(JAVA_EXEC_LABEL),
            "$(rootpath {})".format(PHASICJ_AGENT_LABEL),
            "$(rootpath {})".format(RENAISSANCE_JAR_LABEL),
            RENAISSANCE_MAIN_CLASS,
            "$(rootpath {})".format(PHASICJ_EXEC),
            EXTRA_PHASICJ_AGENT_OPTIONS,
            "--repetitions",
            "1",
            name
        ],
    )
# Renaissance benchmarks known to run under the agent; each gets a smoke test.
KNOWN_BENCHMARKS = [
    "akka-uct",
    "als",
    "chi-square",
    # TODO(dwtj): Figure out why this crashes.
    # "db-shootout",
    "dec-tree",
    "dotty",
    "finagle-chirper",
    "finagle-http",
    "fj-kmeans",
    "future-genetic",
    "gauss-mix",
    "log-regression",
    "mnemonics",
    "movie-lens",
    "naive-bayes",
    "neo4j-analytics",
    "page-rank",
    "par-mnemonics",
    "philosophers",
    "reactors",
    "rx-scrabble",
    "scala-doku",
    "scala-kmeans",
    "scala-stm-bench7",
    "scrabble",
]
def test_each_known_renaissance_benchmark():
    """Declares one smoke-test target per benchmark in KNOWN_BENCHMARKS."""
    for benchmark in KNOWN_BENCHMARKS:
        smoke_test_benchmark(benchmark)
|
# Package metadata.
__author__ = """Christopher Bevan Barnett"""
__email__ = 'chrisbarnettster@gmail.com'
__version__ = '0.3.7'
|
def trace(func):
    """Decorator that prints entry/exit messages around calls to `func`.

    Fixes over the original: the wrapper now forwards positional and
    keyword arguments to `func` and returns its result (the original
    accepted no arguments and silently discarded the return value).
    Backward compatible: zero-argument callables behave exactly as before.
    """
    def wrapper(*args, **kwargs):
        func_name = func.__name__
        print(f'Entering "{func_name}" function')
        result = func(*args, **kwargs)
        print(f'Exiting from "{func_name}" function')
        return result
    return wrapper
# Applying trace via decorator syntax is equivalent to the manual
# rebinding `say_hello = trace(say_hello)`.
@trace
def say_hello():
    print('Hello!')

say_hello()
|
# Copyright 2019 Erik Maciejewski
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""system file creation rules"""
# Numeric uid/gid of the conventional unprivileged users.
NOBODY = 65534
NONROOT = 65532

def _pad(max_len, text, extra = " "):
    """Right-pad `text` to `max_len` columns with spaces, then append `extra`.

    Returns just `extra` when `text` is already `max_len` or longer.
    """
    pad_spcs = max_len - len(text)
    if pad_spcs <= 0:
        return extra
    return "".join([" " for i in range(0, pad_spcs)]) + extra
def _nsswitch_conf_file_impl(ctx):
    """Rule implementation: write an /etc/nsswitch.conf built from the
    `entries` attribute (database name -> service specification)."""
    doc = """# /etc/nsswitch.conf
#
# generated by bazel
#
# Example configuration of GNU Name Service Switch functionality.
# If you have the `glibc-doc-reference' and `info' packages installed, try:
# `info libc "Name Service Switch"' for information about this file.
\n"""
    # Width of the longest database name, used to align the service column.
    max_len = max([len(key) for key in ctx.attr.entries.keys()])
    data = doc + "".join(["%s:%s%s\n" % (
        entry[0], # db name
        _pad(max_len, entry[0]),
        entry[1], # db service spec
    ) for entry in ctx.attr.entries.items()])
    nsswitch_file = ctx.actions.declare_file(ctx.attr.name)
    ctx.actions.write(output = nsswitch_file, content = data)
    return DefaultInfo(files = depset([nsswitch_file]))

nsswitch_conf_file = rule(
    attrs = {
        "entries": attr.string_dict(
            allow_empty = False,
        ),
    },
    executable = False,
    implementation = _nsswitch_conf_file_impl,
)
def _os_release_file_impl(ctx):
    """Rule implementation: write an /etc/os-release style file from the
    rule's string attributes."""
    data = """PRETTY_NAME=\"%s\"
NAME=\"%s\"
VERSION_ID=\"%s\"
VERSION=\"%s\"
VERSION_CODENAME=%s
ID=%s
HOME_URL=\"%s\"
SUPPORT_URL=\"%s\"
BUG_REPORT_URL=\"%s\"\n""" % (
        ctx.attr.pretty_name,
        ctx.attr.release_name,
        ctx.attr.version_id,
        ctx.attr.version,
        ctx.attr.version_codename,
        ctx.attr.id,
        ctx.attr.home_url,
        ctx.attr.support_url,
        ctx.attr.bug_report_url,
    )
    release_file = ctx.actions.declare_file(ctx.attr.name)
    ctx.actions.write(output = release_file, content = data)
    return DefaultInfo(files = depset([release_file]))

os_release_file = rule(
    attrs = {
        "pretty_name": attr.string(mandatory = True),
        "release_name": attr.string(mandatory = True),
        "version_id": attr.string(mandatory = True),
        "version": attr.string(mandatory = True),
        "version_codename": attr.string(mandatory = True),
        "id": attr.string(mandatory = True),
        # URL attributes are optional and default to empty strings.
        "home_url": attr.string(default = ""),
        "support_url": attr.string(default = ""),
        "bug_report_url": attr.string(default = ""),
    },
    implementation = _os_release_file_impl,
)
|
# Convert a bitcoin amount plus an amount in Chinese yuan to euros, after
# deducting a commission.  All exchange rates are hard-coded.
bitcoin = int(input())              # number of bitcoins
yuans = float(input())              # amount in Chinese yuan
commission = float(input()) / 100   # commission given in percent
bitcoin_lv = bitcoin * 1168         # hard-coded rate: 1 bitcoin = 1168 lv
# yuan -> dollars -> lv (0.15 $/yuan, 1.76 lv/$) -- presumably "lv" is
# Bulgarian lev; the variable name yuans_dollars is misleading (value is lv).
yuans_dollars = yuans * (0.15 * 1.76)
sum_lv = bitcoin_lv + yuans_dollars
sum_eur = sum_lv / 1.95             # 1.95 lv per euro -- TODO confirm rate
sum_eur = round(sum_eur - (commission * sum_eur), 2)
print(sum_eur)
|
"""The simplest data reader."""
for line in open('ice-cream.csv'):
row = line.split(',')
print(row)
|
# NICOS setup for the neutron grating interferometer (nGI) motors.
description = 'Neutron Grating Interferometer'
group = 'optional'
# Base address of the Tango device server hosting all motor devices below.
tango_base = 'tango://antareshw.antares.frm2.tum.de:10000/antares/'
devices = dict(
    G0rz = device('nicos.devices.entangle.Motor',
        speed = 1,
        unit = 'deg',
        description = 'Rotation of G0 grating around beam direction',
        tangodevice = tango_base + 'fzjs7/G0rz',
        abslimits = (-400, 400),
        maxage = 5,
        pollinterval = 3,
        precision = 0.01,
    ),
    G0ry = device('nicos.devices.entangle.Motor',
        speed = 1,
        unit = 'deg',
        description = 'Rotation of G0 grating around vertical axis',
        tangodevice = tango_base + 'fzjs7/G0ry',
        abslimits = (-1, 400),
        maxage = 5,
        pollinterval = 3,
        precision = 0.01,
    ),
    G0tz = device('nicos.devices.entangle.Motor',
        speed = 0.5,
        unit = 'mm',
        description = 'Stepping of G0 perpendicular to the beam direction',
        # NOTE(review): device name is G0tz but the Tango path says G0tx --
        # confirm against the hardware mapping.
        tangodevice = tango_base + 'fzjs7/G0tx',
        abslimits = (-2, 25),
        maxage = 5,
        pollinterval = 3,
        precision = 0.01,
    ),
    G1tx = device('nicos.devices.entangle.Motor',
        speed = 50,
        unit = 'mum',
        description = 'Stepping of G1 perpendicular to the beam direction',
        tangodevice = tango_base + 'copley/m09',
        abslimits = (0, 25000),
        userlimits = (0, 25000),
        maxage = 5,
        pollinterval = 3,
        precision = 0.1,
    ),
    G1tz = device('nicos.devices.entangle.Motor',
        speed = 5,
        unit = 'mm',
        description = 'Translation of G1 parallel to the beam direction',
        tangodevice = tango_base + 'copley/m12',
        abslimits = (-1, 101),
        userlimits = (0, 100),
        maxage = 5,
        pollinterval = 3,
        precision = 0.001,
    ),
    G1rz = device('nicos.devices.entangle.Motor',
        speed = 5,
        unit = 'deg',
        description = 'Rotation of G1 around the beam axis',
        tangodevice = tango_base + 'copley/m10',
        abslimits = (-400, 400),
        userlimits = (-400, 400),
        maxage = 5,
        pollinterval = 3,
        precision = 0.001,
    ),
    G1ry = device('nicos.devices.entangle.Motor',
        speed = 5,
        unit = 'deg',
        description = 'Rotation of G1 around the y-axis',
        tangodevice = tango_base + 'copley/m14',
        abslimits = (-400,400),
        userlimits = (-400,400),
        maxage = 5,
        pollinterval = 3,
        precision = 0.001,
    ),
    G1gx = device('nicos.devices.entangle.Motor',
        speed = 5,
        unit = 'deg',
        description = 'Rotation of G1 around the x-axis',
        tangodevice = tango_base + 'copley/m15',
        abslimits = (-20, 20),
        userlimits = (-20, 20),
        maxage = 5,
        pollinterval = 3,
        precision = 0.001,
    ),
    G2rz_p = device('nicos.devices.entangle.Motor',
        speed = 0.2,
        # NOTE(review): description mentions G1 although the device is
        # named G2rz_p and drives fzjs7/G1rz -- confirm naming.
        unit = 'deg',
        description = 'Rotation of G1 grating around beam direction',
        tangodevice = tango_base + 'fzjs7/G1rz',
        abslimits = (-400, 400),
        maxage = 5,
        pollinterval = 3,
        precision = 0.0005,
    ),
    G2tz = device('nicos.devices.entangle.Motor',
        speed = 1,
        unit = 'mm',
        description = 'Translation of G1 in beam direction. (Talbot distance)',
        tangodevice = tango_base + 'fzjs7/G1tz',
        abslimits = (0, 20),
        maxage = 5,
        pollinterval = 3,
        precision = 0.05,
    ),
    G2rz = device('nicos.devices.entangle.Motor',
        speed = 1,
        unit = 'deg',
        description = 'Rotation of G2 and G1 around beam axis',
        tangodevice = tango_base + 'fzjs7/G12rz',
        abslimits = (-400, 400),
        userlimits = (-250, 250),
        maxage = 5,
        pollinterval = 3,
        precision = 0.01,
    ),
)
|
# Float constants mirroring the ones exported by the stdlib `math` module.
e = 2.718281828459045
"""
IEEE 754 floating-point representation of Euler's constant.
``e = 2.71828182845904523536028747135266249775724709369995...``
"""
inf = float('inf')
"""
IEEE 754 floating-point representation of (positive) infinity.
"""
nan = float('nan')
"""
IEEE 754 floating-point representation of Not a Number (``NaN``).
"""
pi = 3.141592653589793
"""
IEEE 754 floating-point representation of the mathematical constant ``π``.
``pi = 3.1415926535897932384626433...``
"""
__all__ = ['e', 'inf', 'nan', 'pi']
|
# -*- coding: utf-8 -*-
# coding: utf8
# web2py admin controller.  Every action below requires membership in the
# 'admin' auth group; actions that only `return locals()` are view-driven.
# `auth`, `db`, `SQLFORM`, `cache`, `request`, `response` are web2py
# framework globals injected into the controller environment.

@auth.requires_membership('admin')
def index():
    """Admin dashboard; no controller-side data."""
    return locals()

@auth.requires_membership('admin')
def products():
    """Grid CRUD over db.product."""
    products_grid = SQLFORM.grid(db.product, csv=False)
    return locals()

@auth.requires_membership('admin')
def product_categories():
    """Grid CRUD over db.category."""
    categories_grid = SQLFORM.grid(db.category, csv=False)
    return locals()

@auth.requires_membership('admin')
def orders():
    return locals()

@auth.requires_membership('admin')
def store_users():
    """Grid CRUD over db.auth_user."""
    users_grid = SQLFORM.grid(db.auth_user, csv=False)
    return locals()

@auth.requires_membership('admin')
def user_groups():
    """Grid CRUD over db.auth_membership."""
    groups_grid = SQLFORM.grid(db.auth_membership, csv=False)
    return locals()

@auth.requires_membership('admin')
def suppliers():
    """Grid CRUD over db.supplier."""
    suppliers_grid = SQLFORM.grid(db.supplier, csv=False)
    return locals()

@auth.requires_membership('admin')
def carriers():
    """Grid CRUD over db.carrier."""
    carriers_grid = SQLFORM.grid(db.carrier, csv=False)
    return locals()

@auth.requires_membership('admin')
def carriers_tax():
    """Grid CRUD over db.carrier_tax."""
    carriers_tax_grid = SQLFORM.grid(db.carrier_tax, csv=False)
    return locals()

@auth.requires_membership('admin')
def invoices():
    return locals()

@auth.requires_membership('admin')
def merchandise_returns():
    return locals()

@auth.requires_membership('admin')
def statuses():
    return locals()

@auth.requires_membership('admin')
def order_messages():
    return locals()

@auth.requires_membership('admin')
def costumers():
    # NOTE(review): 'costumers' is likely a typo for 'customers', but the
    # function name is URL surface, so it is left unchanged.  Also note
    # db(db.auth_user) builds a Set, not a grid/select -- confirm intent.
    costumer_grid = db(db.auth_user)
    return locals()

@auth.requires_membership('admin')
def costumer_groups():
    return locals()

@auth.requires_membership('admin')
def shopping_carts():
    return locals()

@cache.action()
def download():
    """
    allows downloading of uploaded files
    http://..../[app]/default/download/[filename]
    """
    return response.download(request, db)
num = int(input("Insert some numbers: "))
even = 0
odd = 0
while num > 0:
if num%2 == 0:
even += 1
else:
odd += 1
num = num//10
print("Even numbers = %d, Odd numbers = %d" % (even,odd))
|
"""A board is a list of list of str. For example, the board
ANTT
XSOB
is represented as the list
[['A', 'N', 'T', 'T'], ['X', 'S', 'O', 'B']]
A word list is a list of str. For example, the list of words
ANT
BOX
SOB
TO
is represented as the list
['ANT', 'BOX', 'SOB', 'TO']
"""
def is_valid_word(wordlist, word):
    """ (list of str, str) -> bool

    Return True if and only if word is an element of wordlist.

    >>> is_valid_word(['ANT', 'BOX', 'SOB', 'TO'], 'TO')
    True
    >>> is_valid_word(['ANT', 'BOX', 'SOB', 'TO'], '')
    False
    """
    # The original while-loop returned None for an empty wordlist and
    # repeated the membership test once per element; a single containment
    # check is equivalent and always returns a bool.
    return word in wordlist
def make_str_from_row(board, row_index):
    """ (list of list of str, int) -> str

    Return the characters from the row of the board with index row_index
    as a single string.

    >>> make_str_from_row([['A', 'N', 'T', 'T'], ['X', 'S', 'O', 'B']], 0)
    'ANTT'
    """
    # Each row is already a list of single characters, so joining it
    # produces the row string directly.
    return ''.join(board[row_index])
def make_str_from_column(board, column_index):
    """ (list of list of str, int) -> str

    Return the characters from the column of the board with index column_index
    as a single string.

    >>> make_str_from_column([['A', 'N', 'T', 'T'], ['X', 'S', 'O', 'B']], 1)
    'NS'
    """
    # Walk down the rows, picking the same column from each.
    return ''.join(row[column_index] for row in board)
def board_contains_word_in_row(board, word):
    """ (list of list of str, str) -> bool

    Return True if and only if one or more of the rows of the board contains
    word.

    Precondition: board has at least one row and one column, and word is a
    valid word.

    >>> board_contains_word_in_row([['A', 'N', 'T', 'T'], ['X', 'S', 'O', 'B']], 'SOB')
    True
    """
    # The row string is built inline (equivalent to make_str_from_row).
    for row in board:
        if word in ''.join(row):
            return True
    return False
def board_contains_word_in_column(board, word):
    """ (list of list of str, str) -> bool

    Return True if and only if one or more of the columns of the board
    contains word.

    Precondition: board has at least one row and one column, and word is a
    valid word.

    >>> board_contains_word_in_column([['A', 'N', 'T', 'T'], ['X', 'S', 'O', 'B']], 'NO')
    False
    """
    # Column count comes from the first row; each column string is built
    # inline (equivalent to make_str_from_column).
    num_columns = len(board[0])
    return any(
        word in ''.join(row[c] for row in board)
        for c in range(num_columns)
    )
def board_contains_word(board, word):
    """ (list of list of str, str) -> bool

    Return True if and only if word appears in board.

    Precondition: board has at least one row and one column.

    >>> board_contains_word([['A', 'N', 'T', 'T'], ['X', 'S', 'O', 'B']], 'ANT')
    True
    >>> board_contains_word([['A', 'N', 'T', 'T'], ['X', 'S', 'O', 'B']], 'AX')
    True
    """
    # A word counts if it appears horizontally or vertically.
    return (board_contains_word_in_row(board, word)
            or board_contains_word_in_column(board, word))
def word_score(word):
    """ (str) -> int

    Return the point value the word earns.

    Word length: < 3: 0 points
                 3-6: 1 point per character for all characters in word
                 7-9: 2 points per character for all characters in word
                 10+: 3 points per character for all characters in word

    >>> word_score('DRUDGERY')
    16
    """
    # Bug fix: the original guarded the scoring with
    # ``board_contains_word(board, word)``, but no ``board`` is in scope
    # here (NameError at call time) and the docstring scores by length
    # alone; board membership is the caller's responsibility.
    length = len(word)
    if length < 3:
        return 0
    elif length <= 6:
        return length
    elif length <= 9:
        return 2 * length
    else:
        return 3 * length
def update_score(player_info, word):
    """ ([str, int] list, str) -> NoneType

    player_info is a list with the player's name and score. Update player_info
    by adding the point value word earns to the player's score.

    >>> update_score(['Jonathan', 4], 'ANT')
    """
    # Mutates player_info in place; word_score supplies the point value.
    player_info[1] = player_info[1] + word_score(word)
def num_words_on_board(board, words):
    """ (list of list of str, list of str) -> int

    Return how many words appear on board.

    >>> num_words_on_board([['A', 'N', 'T', 'T'], ['X', 'S', 'O', 'B']], ['ANT', 'BOX', 'SOB', 'TO'])
    3
    """
    # Count the words found in some row or column of the board.
    return sum(1 for word in words if board_contains_word(board, word))
def read_words(words_file):
    """ (file open for reading) -> list of str

    Return a list of all words (with newlines removed) from open file
    words_file.

    Precondition: Each line of the file contains a word in uppercase characters
    from the standard English alphabet.
    """
    # Strip only the trailing newline so the word itself is untouched.
    return [line.strip('\n') for line in words_file]
def read_board(board_file):
    """ (file open for reading) -> list of list of str

    Return a board read from open file board_file. The board file will contain
    one row of the board per line. Newlines are not included in the board.
    """
    # Each line becomes a row: a list of its single-character strings.
    return [list(line.strip('\n')) for line in board_file]
|
# (A, B) are the coefficients of the LINE A*x + B.
def solve1(A, B, x):
    """Evaluate the line A*x + B at x."""
    return A * x + B
# (A, B, C) are the coefficients of the PARABOLA A*x**2 + B*x + C.
def solve2(A, B, C, x):
    """Evaluate the quadratic A*x**2 + B*x + C at x."""
    return A * x ** 2 + B * x + C
def foo(x=None):
    """Append "x" to the given list (or a fresh one) and return it.

    Bug fix: the original used a mutable default argument (``x=[]``), so
    repeated ``foo()`` calls shared and kept growing a single hidden list;
    the ``None`` sentinel avoids that pitfall. The original also returned
    ``list.append``'s ``None``; returning the list itself is strictly more
    useful, and callers that ignored the old return value are unaffected.
    """
    if x is None:
        x = []
    x.append("x")
    return x


def bar(x=None):
    """Return the number of elements in x (an empty list by default).

    Same mutable-default fix as foo(); with no argument the answer is
    always 0, matching a fresh list.
    """
    return len(x if x is not None else [])


foo()
bar()
class Owner(object):
    """Demonstrates classmethods and calling them from an instance method."""

    @classmethod
    def cm(cls, arg):
        """Return the class itself; arg is ignored."""
        return cls

    @classmethod
    def cm2(cls, arg):
        """Echo arg back unchanged."""
        return arg

    # Normal method
    def m(self):
        """Fetch the class via cm(), then echo 1 through its cm2()."""
        klass = self.cm(0)
        return klass.cm2(1)
|
# Sequence cleaner: drops the 6-mer flanks on both ends of a read and rejects
# reads that are too short or whose flank edges are ambiguous ("*"); any
# ambiguity left in the core is pushed right by rightShiftN so runs combine.
def sequenceCleaner(string):
    """Return (cleaned core, weighted length), or ("", 0) when rejected."""
    # Reject: too short to contain two 6-base flanks plus a core.
    if len(string) < 13:
        return "", 0
    # Reject: ambiguous base at the inner edge of either flank.
    if string[5] == "*" or string[len(string) - 6] == "*":
        return "", 0
    core = string[6:len(string) - 6]
    if "*" not in core:
        return core, len(string) - 12
    return rightShiftN(core)
def rightShiftN(string):
    """Move every ambiguous "*" in string to the right end.

    Returns (shifted string, weighted length) where a definite base counts
    1.0 and an ambiguous one 0.51. The additions happen in the original
    character order so floating-point rounding matches the previous
    implementation exactly.
    """
    definite = []
    ambiguous = []
    total = 0.0
    for ch in string:
        if ch == "*":
            ambiguous.append(ch)
            total += 0.51
        else:
            definite.append(ch)
            total += 1.0
    return "".join(definite) + "".join(ambiguous), total
# Copyright 2015 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
load(
"@io_bazel_rules_scala//scala:scala_maven_import_external.bzl",
_scala_maven_import_external = "scala_maven_import_external",
)
"""Helper functions for Scala cross-version support. Encapsulates the logic
of abstracting over Scala major version (2.11, 2.12, etc) for dependency
resolution."""
def default_scala_version():
    """Return the default Scala version for use in maven coordinates."""
    return "2.12.10"  # TODO: bump when the default toolchain moves on
def default_scala_version_jar_shas():
    """SHA-256 checksums of the jars matching default_scala_version()."""
    return {
        "scala_compiler": "cedc3b9c39d215a9a3ffc0cc75a1d784b51e9edc7f13051a1b4ad5ae22cfbc0c",
        "scala_library": "0a57044d10895f8d3dd66ad4286891f607169d948845ac51e17b4c1cf0ab569d",
        "scala_reflect": "56b609e1bab9144fb51525bfa01ccd72028154fc40a58685a1e9adcbe7835730",
    }
def extract_major_version(scala_version):
    """Return major Scala version given a full version, e.g. "2.11.11" -> "2.11" """
    # Everything up to (but excluding) the second dot.
    second_dot = scala_version.find(".", 2)
    return scala_version[:second_dot]
def extract_major_version_underscore(scala_version):
    """Return major Scala version with underscore given a full version,
    e.g. "2.11.11" -> "2_11" """
    major = extract_major_version(scala_version)
    return major.replace(".", "_")

def default_scala_major_version():
    """Major version (e.g. "2.12") of the default Scala toolchain."""
    return extract_major_version(default_scala_version())
def scala_mvn_artifact(
        artifact,
        major_scala_version = default_scala_major_version()):
    """Insert the Scala major version into a maven coordinate,
    e.g. ("org.foo:bar:1.0", "2.12") -> "org.foo:bar_2.12:1.0"."""
    coords = artifact.split(":")
    return "{}:{}_{}:{}".format(coords[0], coords[1], major_scala_version, coords[2])
def new_scala_default_repository(
        scala_version,
        scala_version_jar_shas,
        maven_servers):
    """Declare the three core Scala jar repositories (library, compiler,
    reflect) for the requested scala_version, pinned by SHA-256."""
    # Same three rules the original spelled out one by one, in the same order.
    for suffix in ("library", "compiler", "reflect"):
        _scala_maven_import_external(
            name = "io_bazel_rules_scala_scala_" + suffix,
            artifact = "org.scala-lang:scala-{}:{}".format(suffix, scala_version),
            artifact_sha256 = scala_version_jar_shas["scala_" + suffix],
            licenses = ["notice"],
            server_urls = maven_servers,
        )
|
def fibonaci(n):
    """Return the n-th Fibonacci number (fib(0)=0, fib(1)=1).

    Iterative instead of the original naive double recursion, which was
    O(2**n) time; behavior for n <= 1 (including negative n, which is
    returned unchanged) is preserved.
    """
    if n <= 1:
        return n
    prev, curr = 0, 1
    for _ in range(n - 1):
        prev, curr = curr, prev + curr
    return curr


fibonaci(0)
class Position:
    """Tracks a cursor (index, line, column) inside a source file."""

    def __init__(self, idx, ln, col, fn, ftxt) -> None:
        # idx: absolute character index; ln/col: 0-based line and column;
        # fn: file name (for error messages); ftxt: full file text.
        self.idx, self.ln, self.col = idx, ln, col
        self.fn, self.ftxt = fn, ftxt

    def advance(self, current_char=None):
        """Step one character forward; a newline starts a new line at col 0."""
        self.idx += 1
        if current_char == "\n":
            self.ln += 1
            self.col = 0
        else:
            self.col += 1
        return self

    def copy(self):
        """Return an independent Position with identical state."""
        return Position(self.idx, self.ln, self.col, self.fn, self.ftxt)
|
def on_config():
    """Plugin hook invoked with no arguments; do any setup you need here."""
    print("Called.")


def on_config_with_config(config):
    """Hook variant that receives the MkDocs config mapping."""
    print("Called with config.")
    print(config["docs_dir"])
    # The config may be mutated here, for example:
    #   config['docs_dir'] = 'other_directory'
    # and optionally returned to hand the altered config back to MkDocs.


def on_config_with_mkapi(config, mkapi):
    """Hook variant that additionally receives the mkapi plugin object."""
    print("Called with config and mkapi.")
    print(config["docs_dir"])
    print(mkapi)
|
# Tutorial: lists are mutable, tuples are not.
lista = [12, 10, 7, 5]
lista_animal = ['cachorro', 'gato', 'elefante', 'lobo', 'arara']
lista_animal[0] = 'macaco'  # a list element can be reassigned — a tuple's cannot!
print(lista_animal)
tupla = (1, 10, 12, 14)
print(len(tupla))
print(len(lista_animal))
tupla_animal = tuple(lista_animal)  # converts the list to a tuple
print(type(tupla_animal))
print(tupla_animal)
lista_numerica = list(tupla)  # converts the tuple to a list
print(type(lista_numerica))
print(lista_numerica)
lista_numerica[0] = 100
print(lista_numerica)
'''print(lista_animal[1])
nova_lista = lista_animal * 3
print(nova_lista)
lista.sort() #ordena por ordem alfabética.
lista_animal.sort()
print(lista)
print(lista_animal)
lista_animal.reverse() #ordena do final para o começo.
print(lista_animal)
if 'lobo' in lista_animal: # para procurar algo na lista.
    print('Existe um lobo na lista!')
else:
    print('Não existe um lobo na lista!')
lista_animal.append('lobo') # o .append inclui algo.
print(lista_animal)
lista_animal.pop() # o .pop retira sempre a última inclusão, aqui será o lobo.
print(lista_animal)
lista_animal.pop(0) # o .pop retira pela posição também, no 0 será o cachorro.
print(lista_animal)
lista_animal.remove('elefante') # se quiser remover um elemento que já conheço uso o .remove
print(lista_animal)'''
#print(max(lista_animal))  # largest value
#print(sum(lista))  # sum
'''soma = 0
for x in lista_animal:
    print(x)
    soma += x
print(soma)'''
##########################################################################
# Copyright (c) 2018-2019 NVIDIA Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# File: DL4AGX/tools/nvcc/private/constants.bzl
# Description: Constants for use in creating cuda rules
##########################################################################
# File extensions nvcc accepts as translation-unit sources.
CUDA_ACCEPTABLE_SRC_EXTENSIONS = [".cu", ".c", ".cc", ".cxx", ".cpp"]
# Header extensions allowed in a cuda rule's hdrs.
CUDA_ACCEPTABLE_HDR_EXTENSIONS = [".h", ".cuh", ".hpp", ".inl"]
# Pre-built binary inputs: device-code blobs, objects, archives, resources.
CUDA_ACCEPTABLE_BIN_EXTENSIONS = [".ptx", ".cubin", ".fatbin",
                                  ".o", ".obj", ".a", ".lib",
                                  ".res", ".so"]
# Union of everything a cuda rule may take as input.
CUDA_ACCEPTABLE_EXTENSIONS = CUDA_ACCEPTABLE_SRC_EXTENSIONS + CUDA_ACCEPTABLE_BIN_EXTENSIONS + CUDA_ACCEPTABLE_HDR_EXTENSIONS
class Car(object):
    """Simple vehicle record with a class-wide default condition."""

    condition = "new"  # class attribute shared by every Car

    def __init__(self, model, color, mpg):
        """Store the per-instance attributes of this car."""
        self.model = model
        self.color = color
        self.mpg = mpg


# Demo: build one car and show its attributes.
my_car = Car("Chevv", "GOLDEN", 1933)
for attribute in (my_car.model, my_car.color, my_car.mpg):
    print(attribute)
|
"""
Entradas
capital prestado-->float-->a
tiempo de interes-->int-->b
interes-->float-->c
Salidas
porcentaje de interes anual-->float-->d
"""
a=float(input("cual es la cantidad dde dinero prestado "))
b=int(input("cuantos años de interes tiene el prestamo "))
c=float(input("cual es la cantidad total del capital abonado "))
d=(c*100)/(b*a)
print("El procentaje de interes anual es el siguiente "+str(d)+" %") |
def validate(name, bracket, bracket_side, bfr):
    """
    Return True when the bracket's text in bfr is entirely lowercase.

    name and bracket_side belong to the plugin's validate() signature but are
    not consulted here; bracket supplies begin/end offsets into bfr.
    """
    text = bfr[bracket.begin:bracket.end]
    return text.islower()
"""
画出下列代码内存图
找出打印结果
"""
g01 = 100
g02 = 100
g03 = [100]
def func01():
g01 = 200# 创建一个局部变量
def func02():
global g02
g02 = 200
def func03():
g03[0] = 200 # 读取全局变量,修改列表元素
func01()
print(g01) # 100
func02()
print(g02) # 200
func03()
print(g03) # [200]
class MyClass:
cls01 = 300 # 饮水机
def __init__(self):
self.ins01 = 400 # 杯子
self.ins01 += 1
MyClass.cls01 += 1
instance01 = MyClass()# 400->401 300 -> 301
print(instance01.ins01) # 401
print(MyClass.cls01) # 301
instance02 = MyClass()# 400->401 301 -> 302
print(instance02.ins01) # 401
print(MyClass.cls01) # 302
|
def add(a, b):
    """Return the sum of a and b."""
    return a + b


# main app begins here
x = 2
y = 3
z = add(x, y)
print('Sum : ', z)
print("--- TRIANGULO ---")
a = int(input("Digite o 1o. valor: "))
b = int(input("Digite o 2o. valor: "))
c = int(input("Digite o 3o. valor: "))
if a < b + c and b < a + c and c < a + b:
print("Esses valores podem formar um triângulo", end="")
if a == b == c :
print (" e esse triangulo é equilátero.")
elif a != b != c != a:
print(" e esse triângulo é escaleno.")
else:
print (" e esse triângulo é isósceles.")
else:
print("Esses valores não podem formar um triângulo")
print("Obrigada por participar") |
# service.process.factory.proc_provider_factories
class ProcProviderFactories(object):
    """Lazily builds and caches the provider factory for a process."""

    def __init__(self, log):
        self._log = log
        self._factory = None  # created on first get_factory() call

    def get_factory(self, process):
        """Return the cached factory for process, creating it on first use.

        Imports service.process.factory.<name-lowercased> and instantiates
        its <Name>Factory class with the logger.
        """
        if self._factory is None:
            factory_name = process.name + 'Factory'
            module = __import__(
                'service.process.factory.' + process.name.lower(),
                fromlist=[factory_name])
            self._factory = getattr(module, factory_name)(self._log)
        return self._factory
# Time: ls: O(l + klogk), l is the path length, k is the number of entries in the last level directory
# mkdir: O(l)
# addContentToFile: O(l + c), c is the content size
# readContentFromFile: O(l + c)
# Space: O(n + s), n is the number of dir/file nodes, s is the total content size.
# Design an in-memory file system to simulate the following functions:
#
# ls: Given a path in string format. If it is a file path,
# return a list that only contains this file's name.
# If it is a directory path, return the list of file and directory names in this directory.
# Your output (file and directory names together) should in lexicographic order.
#
# mkdir: Given a directory path that does not exist,
# you should make a new directory according to the path.
# If the middle directories in the path don't exist either,
# you should create them as well. This function has void return type.
#
# addContentToFile: Given a file path and file content in string format.
# If the file doesn't exist, you need to create that file containing given content.
# If the file already exists, you need to append given content to original content.
# This function has void return type.
#
# readContentFromFile: Given a file path, return its content in string format.
#
# Example:
# Input:
# ["FileSystem","ls","mkdir","addContentToFile","ls","readContentFromFile"]
# [[],["/"],["/a/b/c"],["/a/b/c/d","hello"],["/"],["/a/b/c/d"]]
# Output:
# [null,[],null,null,["a"],"hello"]
#
# Note:
# 1. You can assume all file or directory paths are absolute paths
# which begin with / and do not end with / except that the path is just "/".
# 2. You can assume that all operations will be passed valid parameters and
# users will not attempt to retrieve file content or list a directory or file that does not exist.
# 3. You can assume that all directory names and file names only contain lower-case letters,
# and same names won't exist in the same directory.
class TrieNode(object):
    """One path component: a directory (children) or a file (content)."""

    def __init__(self):
        self.is_file = False
        self.children = {}  # component name -> TrieNode
        self.content = ""   # file payload; unused for directories


class FileSystem(object):
    """In-memory file system backed by a trie of path components."""

    def __init__(self):
        self.__root = TrieNode()

    def ls(self, path):
        """
        :type path: str
        :rtype: List[str]
        """
        node = self.__getNode(path)
        if node.is_file:
            # A file path lists only the file's own name.
            return [self.__split(path, '/')[-1]]
        return sorted(node.children.keys())

    def mkdir(self, path):
        """
        :type path: str
        :rtype: void
        """
        self.__putNode(path).is_file = False

    def addContentToFile(self, filePath, content):
        """
        :type filePath: str
        :type content: str
        :rtype: void
        """
        node = self.__putNode(filePath)
        node.is_file = True
        node.content += content  # append semantics per problem statement

    def readContentFromFile(self, filePath):
        """
        :type filePath: str
        :rtype: str
        """
        return self.__getNode(filePath).content

    def __getNode(self, path):
        # Walk an existing path; callers are guaranteed valid paths.
        node = self.__root
        for part in self.__split(path, '/'):
            node = node.children[part]
        return node

    def __putNode(self, path):
        # Walk the path, creating missing intermediate nodes as needed.
        node = self.__root
        for part in self.__split(path, '/'):
            node = node.children.setdefault(part, TrieNode())
        return node

    def __split(self, path, delim):
        # Bug fix: the original ignored its ``delim`` parameter and
        # hard-coded '/'; honor the argument (behavior is unchanged for
        # the '/' delimiter every caller uses).
        if path == delim:
            return []
        return path.split(delim)[1:]
# Your FileSystem object will be instantiated and called as such:
# obj = FileSystem()
# param_1 = obj.ls(path)
# obj.mkdir(path)
# obj.addContentToFile(filePath,content)
# param_4 = obj.readContentFromFile(filePath)
|
def get_input():
    """Read and return the entire bubble-sort input file as one string."""
    # The with-statement closes the handle exactly as the explicit
    # close() did.
    with open('inputs/bubble_sort.txt') as input_file:
        return input_file.read()
def bubble_sort(a, n):
    """Bubble-sort the first n elements of list a in place.

    Returns (a, swap_count). Bug fix: the original initialized swap_count
    but never incremented it, so it always reported 0 swaps.
    """
    swap_count = 0
    is_sorted = False
    while not is_sorted:
        is_sorted = True
        for i in range(n - 1):
            if a[i] > a[i + 1]:
                a[i], a[i + 1] = a[i + 1], a[i]  # tuple swap, no temp needed
                swap_count += 1
                is_sorted = False
    return a, swap_count
# Driver: the input's first line is the element count, the second the
# space-separated elements.
# NOTE(review): split('\n') into exactly two names assumes the file has no
# trailing newline — confirm the input format.
n, a = get_input().split('\n')
n = int(n)
a = list(map(int, a.split(' ')))
a, swap_count = bubble_sort(a, n)
print('Array is sorted in {} swaps.'.format(swap_count))
print('First Element: {}'.format(a[0]))
print('Last Element: {}'.format(a[n-1]))
PROJECT_ID_LIST_URL = "https://cloudresourcemanager.googleapis.com/v1/projects"
HTTP_GET_METHOD = "GET"


class UtilBase(object):
    """Fetches and caches the list of ACTIVE GCP projects visible to config."""

    def __init__(self, config):
        self.config = config
        self.__projectList = None  # lazy cache filled by getProjectList()

    def getProjectList(self):
        """Return the cached list of ACTIVE projects, fetching on first use.

        Raises Exception when the API reports an error or, in single-project
        mode, when the configured project is not ACTIVE.
        """
        if self.__projectList is not None:
            return self.__projectList
        if self.config.getIdType() == "PROJECT":
            # Single-project mode: fetch just the configured project and
            # wrap it in the same shape the list call produces.
            googleClient = self.config.getHttpClient()
            singleprojectData = googleClient.make_request(
                HTTP_GET_METHOD,
                PROJECT_ID_LIST_URL + "/" + self.config.getId(),
                None, None)
            if singleprojectData['isError']:
                projectData = singleprojectData
            else:
                projectData = {
                    'isError': singleprojectData.get("isError"),
                    'defaultErrorObject': singleprojectData.get("defaultErrorObject"),
                    "data": {
                        'projects': [singleprojectData.get("data")]
                    }
                }
                lifecycle_state = singleprojectData["data"].get('lifecycleState')
                if lifecycle_state != "ACTIVE":
                    # Bug fix: the original built this message from
                    # projectData['lifecycleState'], a key that does not
                    # exist in the wrapped dict, so raising itself raised
                    # a KeyError instead of the intended Exception.
                    raise Exception("Project Lifecycle state is: " + str(lifecycle_state))
        else:
            projectData = {
                'isError': False,
                'defaultErrorObject': {},
                "data": {
                    'projects': self.__get_Project_List(None, True)
                }
            }
        if projectData['isError']:
            raise Exception("Error fetching projects")
        self.__projectList = [
            project for project in projectData['data']['projects']
            if project['lifecycleState'] == "ACTIVE"
        ]
        return self.__projectList

    def __get_Project_List(self, pageToken, isBegin):
        """Recursively page through the projects.list API (100 per page)."""
        if pageToken is None and not isBegin:
            return []
        googleClient = self.config.getHttpClient()
        url = PROJECT_ID_LIST_URL
        if pageToken is not None:
            url = url + "?pageToken=" + pageToken + "&pageSize=100"
        else:
            url = url + "?pageSize=100"
        projectData = googleClient.make_request(HTTP_GET_METHOD, url, None, None)
        if projectData['isError']:
            raise Exception("Error fetching Project Information \n" +
                            str(projectData['defaultErrorObject']))
        if 'projects' not in projectData['data'] or len(projectData['data']['projects']) == 0:
            raise Exception("No Projects Found")
        projectList = projectData['data']['projects']
        if 'nextPageToken' in projectData['data']:
            nextPageToken = str(projectData['data']['nextPageToken'])
            projectList = projectList + self.__get_Project_List(nextPageToken, False)
        return projectList

    def validateProjectId(self, projectId):
        """Return True when projectId names an ACTIVE, visible project."""
        return any(project['projectId'] == projectId
                   for project in self.getProjectList())
# uWSGI plugin build description (uwsgiplugin.py convention — presumably
# consumed by the uwsgi build system; confirm).
NAME='logzmq'
# Extra compiler flags; none needed.
CFLAGS = []
# Extra linker flags; none needed.
LDFLAGS = []
# Link against libzmq for the ZeroMQ transport.
LIBS = ['-lzmq']
# Source files (without extension) compiled into the plugin.
GCC_LIST = ['plugin']
|
test = {
'name': 'Problem EC',
'points': 2,
'suites': [
{
'cases': [
{
'code': r"""
>>> # Testing status parameters
>>> slow = SlowThrower()
>>> scary = ScaryThrower()
>>> SlowThrower.food_cost
4
>>> ScaryThrower.food_cost
6
>>> slow.armor
1
>>> scary.armor
1
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> # Testing Slow
>>> slow = SlowThrower()
>>> bee = Bee(3)
>>> gamestate.places["tunnel_0_0"].add_insect(slow)
>>> gamestate.places["tunnel_0_4"].add_insect(bee)
>>> slow.action(gamestate)
>>> gamestate.time = 1
>>> bee.action(gamestate)
>>> bee.place.name # SlowThrower should cause slowness on odd turns
'tunnel_0_4'
>>> gamestate.time += 1
>>> bee.action(gamestate)
>>> bee.place.name # SlowThrower should cause slowness on odd turns
'tunnel_0_3'
>>> for _ in range(3):
... gamestate.time += 1
... bee.action(gamestate)
>>> bee.place.name
'tunnel_0_1'
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> # Testing Scare
>>> scary = ScaryThrower()
>>> bee = Bee(3)
>>> gamestate.places["tunnel_0_0"].add_insect(scary)
>>> gamestate.places["tunnel_0_4"].add_insect(bee)
>>> scary.action(gamestate)
>>> bee.action(gamestate)
>>> bee.place.name # ScaryThrower should scare for two turns
'tunnel_0_5'
>>> bee.action(gamestate)
>>> bee.place.name # ScaryThrower should scare for two turns
'tunnel_0_6'
>>> bee.action(gamestate)
>>> bee.place.name
'tunnel_0_5'
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> # Scary stings an ant
>>> scary = ScaryThrower()
>>> harvester = HarvesterAnt()
>>> bee = Bee(3)
>>> gamestate.places["tunnel_0_0"].add_insect(scary)
>>> gamestate.places["tunnel_0_4"].add_insect(bee)
>>> gamestate.places["tunnel_0_5"].add_insect(harvester)
>>> scary.action(gamestate)
>>> bee.action(gamestate)
>>> bee.place.name # ScaryThrower should scare for two turns
'tunnel_0_5'
>>> harvester.armor
1
>>> bee.action(gamestate)
>>> harvester.armor
0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> # Testing if statuses stack
>>> slow = SlowThrower()
>>> bee = Bee(3)
>>> slow_place = gamestate.places["tunnel_0_0"]
>>> bee_place = gamestate.places["tunnel_0_8"]
>>> slow_place.add_insect(slow)
>>> bee_place.add_insect(bee)
>>> slow.action(gamestate) # slow bee two times
>>> slow.action(gamestate)
>>> gamestate.time = 1
>>> bee.action(gamestate) # do nothing. The outer slow has 2 turns to go, the inner one still has 3 turns
>>> bee.place.name
'tunnel_0_8'
>>> gamestate.time = 2
>>> bee.action(gamestate) # moves forward. The outer slow has 1 turn to go, the inner one has 2 turns
>>> bee.place.name
'tunnel_0_7'
>>> gamestate.time = 3
>>> bee.action(gamestate) # do nothing. The outer slow has no turns left, the inner one has 2 turns
>>> bee.place.name
'tunnel_0_7'
>>> gamestate.time = 4
>>> bee.action(gamestate) # moves forward. The inner slow has 1 turn
>>> bee.place.name
'tunnel_0_6'
>>> gamestate.time = 5
>>> bee.action(gamestate) # does nothing. The inner slow has no turns
>>> bee.place.name
'tunnel_0_6'
>>> gamestate.time = 6 # slow status have worn off
>>> bee.action(gamestate)
>>> bee.place.name
'tunnel_0_5'
>>> gamestate.time = 7 # slow status have worn off
>>> bee.action(gamestate)
>>> bee.place.name
'tunnel_0_4'
>>> gamestate.time = 8 # slow status have worn off
>>> bee.action(gamestate)
>>> bee.place.name
'tunnel_0_3'
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> # Testing multiple scared bees
>>> scare1 = ScaryThrower()
>>> scare2 = ScaryThrower()
>>> bee1 = Bee(3)
>>> bee2 = Bee(3)
>>> gamestate.places["tunnel_0_0"].add_insect(scare1)
>>> gamestate.places["tunnel_0_1"].add_insect(bee1)
>>> gamestate.places["tunnel_0_4"].add_insect(scare2)
>>> gamestate.places["tunnel_0_5"].add_insect(bee2)
>>> scare1.action(gamestate)
>>> scare2.action(gamestate)
>>> bee1.action(gamestate)
>>> bee2.action(gamestate)
>>> bee1.place.name
'tunnel_0_2'
>>> bee2.place.name
'tunnel_0_6'
>>> bee1.action(gamestate)
>>> bee2.action(gamestate)
>>> bee1.place.name
'tunnel_0_3'
>>> bee2.place.name
'tunnel_0_7'
>>> bee1.action(gamestate)
>>> bee2.action(gamestate)
>>> bee1.place.name
'tunnel_0_2'
>>> bee2.place.name
'tunnel_0_6'
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> scare = ScaryThrower()
>>> bee = Bee(3)
>>> gamestate.places["tunnel_0_0"].add_insect(scare)
>>> gamestate.places["tunnel_0_1"].add_insect(bee)
>>> scare.action(gamestate)
>>> bee.action(gamestate)
>>> bee.place.name
'tunnel_0_2'
>>> bee.action(gamestate)
>>> bee.place.name
'tunnel_0_3'
>>> #
>>> # Same bee should not be scared more than once
>>> scare.action(gamestate)
>>> bee.action(gamestate)
>>> bee.place.name
'tunnel_0_2'
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> # Testing long status stack
>>> scary = ScaryThrower()
>>> slow = SlowThrower()
>>> bee = Bee(3)
>>> gamestate.places["tunnel_0_0"].add_insect(scary)
>>> gamestate.places["tunnel_0_1"].add_insect(slow)
>>> gamestate.places["tunnel_0_3"].add_insect(bee)
>>> scary.action(gamestate) # scare bee once
>>> gamestate.time = 0
>>> bee.action(gamestate) # scared
>>> bee.place.name
'tunnel_0_4'
>>> for _ in range(3): # slow bee three times
... slow.action(gamestate)
>>> gamestate.time = 1
>>> bee.action(gamestate) # scared, but also slowed thrice
>>> bee.place.name
'tunnel_0_4'
>>> gamestate.time = 2
>>> bee.action(gamestate) # scared and slowed thrice
>>> bee.place.name
'tunnel_0_5'
>>> gamestate.time = 3
>>> bee.action(gamestate) # slowed thrice
>>> bee.place.name
'tunnel_0_5'
>>> gamestate.time = 4
>>> bee.action(gamestate) # slowed twice
>>> bee.place.name
'tunnel_0_4'
>>> gamestate.time = 5
>>> bee.action(gamestate) # slowed twice
>>> bee.place.name
'tunnel_0_4'
>>> gamestate.time = 6
>>> bee.action(gamestate) # slowed once
>>> bee.place.name
'tunnel_0_3'
>>> gamestate.time = 7
>>> bee.action(gamestate) # statuses have worn off
>>> bee.place.name
'tunnel_0_2'
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> scary = ScaryThrower()
>>> slow = SlowThrower()
>>> bee = Bee(3)
>>> gamestate.places["tunnel_0_0"].add_insect(scary)
>>> gamestate.places["tunnel_0_1"].add_insect(slow)
>>> gamestate.places["tunnel_0_3"].add_insect(bee)
>>> slow.action(gamestate) # slow bee
>>> scary.action(gamestate) # scare bee
>>> bee.place.name
'tunnel_0_3'
>>> gamestate.time = 0
>>> bee.action(gamestate) # scared and slowed
>>> bee.place.name
'tunnel_0_4'
>>> gamestate.time = 1
>>> bee.action(gamestate) # scared and slowed
>>> bee.place.name
'tunnel_0_4'
>>> gamestate.time = 2
>>> bee.action(gamestate) # slowed
>>> bee.place.name
'tunnel_0_3'
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> from ants import *
>>> ScaryThrower.implemented
True
>>> SlowThrower.implemented
True
""",
'hidden': False,
'locked': False
}
],
'scored': True,
'setup': r"""
>>> from ants import *
>>> beehive, layout = Hive(AssaultPlan()), dry_layout
>>> dimensions = (1, 9)
>>> gamestate = GameState(None, beehive, ant_types(), layout, dimensions)
""",
'teardown': '',
'type': 'doctest'
}
]
}
|
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Recipe module dependencies resolved by the recipe engine.
DEPS = [
    'chromium',
    'chromium_android',
    'depot_tools/bot_update',
    'depot_tools/gclient',
]


def RunSteps(api):
    """Check out Chromium and upload APKs for the bisect service."""
    # Standard Chromium gclient/build configuration.
    api.gclient.set_config('chromium')
    api.chromium.set_config('chromium')
    update_step = api.bot_update.ensure_checkout()
    # Revision info comes from the bot_update step's output properties;
    # '%s' in path is presumably filled in by upload_apks_for_bisect —
    # confirm against chromium_android.
    api.chromium_android.upload_apks_for_bisect(
        update_properties=update_step.json.output['properties'],
        bucket='test-bucket',
        path='test/%s/path')


def GenTests(api):
    """Single simulation test exercising the default path."""
    yield api.test('basic')
|
# class Tree:
#     def __init__(self, val, left=None, right=None):
#         self.val = val
#         self.left = left
#         self.right = right
class Solution:
    def solve(self, root):
        """Mirror the binary tree rooted at root in place and return it."""
        if not root:
            return root
        # Recursively mirror both subtrees, then swap them.
        mirrored_right = self.solve(root.right)
        mirrored_left = self.solve(root.left)
        root.left, root.right = mirrored_right, mirrored_left
        return root
|
products = ['bread', 'meat', 'egg', 'cheese']

# Write one product per line, then read the whole file back.
# Fix: the original never closed the reading handle (file2); the
# with-statements close both deterministically.
with open('products.txt', 'w') as out_file:
    for product in products:
        out_file.write(product + '\n')

with open('products.txt') as in_file:
    var = in_file.readlines()
print(var)
def getMessage(status):
    """Print the message matching status: exact texts for 1 and 2, a range
    message above 10, otherwise a default."""
    exact = {1: 'status 1', 2: 'status 2'}
    if status in exact:
        print(exact[status])
    elif status > 10:
        print('status is more than 10')
    else:
        print('default message')
def isTrue(value):
    """Print whether value is truthy or falsy."""
    verdict = ' is true' if value else ' is false'
    print(str(value) + verdict)
# Exercise the branches defined above.
getMessage(1)
getMessage(2)
getMessage(3)
getMessage(4)
getMessage(20)
getMessage(-1)
isTrue(1)
isTrue(0)
isTrue(0.0)
isTrue(None)  # None is falsy
isTrue(True)
isTrue(False)
isTrue('')  # the empty string is falsy
isTrue("")  # an empty literal is falsy
isTrue([])  # an empty list is falsy
isTrue({})  # an empty dict is falsy
print('value1' == 'value2')  # not equal, so False
print('value1' != 'value2')  # not equal, so True
x = 'test'
print(x is None)  # not the same object, so False
print(x is not None)  # not the same object, so True


def getValue():
    return 1


# The walrus operator := (Python 3.8+) assigns and evaluates in a single
# expression, so there is no need to split it into
#   test = getValue()
#   if test:
if test := getValue():
    print('セイウチ演算子:=')
class Layer(object):
    """Doubly-linked network layer with no-op propagation hooks."""

    def __init__(self):
        # Neighbors in the layer chain; wired up externally.
        self.prevlayer = None
        self.nextlayer = None

    def forepropagation(self):
        """Forward pass; subclasses override."""
        pass

    def backpropagation(self):
        """Backward pass; subclasses override."""
        pass

    def initialization(self):
        """Parameter setup; subclasses override."""
        pass
|
def collapse_sequences(message, collapse_char, collapsing = False):
    """Collapse every run of collapse_char in message down to one occurrence.

    collapsing means the previously kept character was collapse_char, so
    further repeats are dropped until a different character appears.
    Iterative rather than the original recursion — identical output.
    """
    pieces = []
    for ch in message:
        if ch == collapse_char:
            if not collapsing:
                pieces.append(ch)
            collapsing = True
        else:
            pieces.append(ch)
            collapsing = False
    return ''.join(pieces)
def main():
    """Demo: collapse the runs of 'a' in a sample string."""
    print(collapse_sequences("aabaaccaaaaaada", 'a'))


if __name__ == '__main__':
    main()
class Solution:
    def threeSumClosest(self, nums, target):
        """
        :type nums: List[int]
        :type target: int
        :rtype: int

        Return the sum of three elements of nums closest to target.
        Sorts nums in place, then for each anchor index runs a two-pointer
        sweep over the remainder — O(n^2) overall.
        """
        # Bug fix: the best-difference seed was a magic 100000, so inputs
        # whose closest sum differs from target by more than that left
        # ``result`` unassigned (UnboundLocalError); float('inf') is safe
        # for any input.
        best_diff = float('inf')
        result = None
        nums.sort()
        for i in range(len(nums)):
            left = i + 1
            right = len(nums) - 1
            while left < right:
                total = nums[i] + nums[left] + nums[right]
                if abs(total - target) < best_diff:
                    best_diff = abs(total - target)
                    result = total
                if total == target:
                    # Cannot do better than an exact match.
                    return target
                if total <= target:
                    left += 1
                else:
                    right -= 1
        return result
# Zombie Damage Skin
# Add the skin via the script manager and confirm to the player on success.
if sm.addDamageSkin(2434661):
    sm.chat("The Zombie Damage Skin has been added to your account's damage skin collection.")
|
#
# Copyright (C) 2013 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
{
'includes': [
'../build/features.gypi',
'../build/scripts/scripts.gypi',
'../build/win/precompile.gypi',
'blink_platform.gypi',
'heap/blink_heap.gypi',
],
'targets': [{
'target_name': 'blink_common',
'type': '<(component)',
'variables': { 'enable_wexit_time_destructors': 1 },
'dependencies': [
'../config.gyp:config',
'../wtf/wtf.gyp:wtf',
# FIXME: Can we remove the dependency on Skia?
'<(DEPTH)/skia/skia.gyp:skia',
],
'all_dependent_settings': {
'include_dirs': [
'..',
],
},
'export_dependent_settings': [
'<(DEPTH)/skia/skia.gyp:skia',
],
'defines': [
'BLINK_COMMON_IMPLEMENTATION=1',
'INSIDE_BLINK',
],
'include_dirs': [
'<(SHARED_INTERMEDIATE_DIR)/blink',
],
'sources': [
'exported/WebCString.cpp',
'exported/WebString.cpp',
'exported/WebCommon.cpp',
],
},
{
'target_name': 'blink_heap_asm_stubs',
'type': 'static_library',
# VS2010 does not correctly incrementally link obj files generated
# from asm files. This flag disables UseLibraryDependencyInputs to
# avoid this problem.
'msvs_2010_disable_uldi_when_referenced': 1,
'includes': [
'../../../yasm/yasm_compile.gypi',
],
'sources': [
'<@(platform_heap_asm_files)',
],
'variables': {
'more_yasm_flags': [],
'conditions': [
['OS == "mac"', {
'more_yasm_flags': [
# Necessary to ensure symbols end up with a _ prefix; added by
# yasm_compile.gypi for Windows, but not Mac.
'-DPREFIX',
],
}],
['OS == "win" and target_arch == "x64"', {
'more_yasm_flags': [
'-DX64WIN=1',
],
}],
['OS != "win" and target_arch == "x64"', {
'more_yasm_flags': [
'-DX64POSIX=1',
],
}],
['target_arch == "ia32"', {
'more_yasm_flags': [
'-DIA32=1',
],
}],
['target_arch == "arm"', {
'more_yasm_flags': [
'-DARM=1',
],
}],
],
'yasm_flags': [
'>@(more_yasm_flags)',
],
'yasm_output_path': '<(SHARED_INTERMEDIATE_DIR)/webcore/heap'
},
},
{
'target_name': 'blink_prerequisites',
'type': 'none',
'conditions': [
['OS=="mac"', {
'direct_dependent_settings': {
'defines': [
# Chromium's version of WebCore includes the following Objective-C
# classes. The system-provided WebCore framework may also provide
# these classes. Because of the nature of Objective-C binding
# (dynamically at runtime), it's possible for the
# Chromium-provided versions to interfere with the system-provided
# versions. This may happen when a system framework attempts to
            # use WebCore.framework, such as when converting an HTML-flavored
# string to an NSAttributedString. The solution is to force
# Objective-C class names that would conflict to use alternate
# names.
#
# This list will hopefully shrink but may also grow. Its
# performance is monitored by the "Check Objective-C Rename"
# postbuild step, and any suspicious-looking symbols not handled
# here or whitelisted in that step will cause a build failure.
#
# If this is unhandled, the console will receive log messages
# such as:
# com.google.Chrome[] objc[]: Class ScrollbarPrefsObserver is implemented in both .../Google Chrome.app/Contents/Versions/.../Google Chrome Helper.app/Contents/MacOS/../../../Google Chrome Framework.framework/Google Chrome Framework and /System/Library/Frameworks/WebKit.framework/Versions/A/Frameworks/WebCore.framework/Versions/A/WebCore. One of the two will be used. Which one is undefined.
'WebCascadeList=ChromiumWebCoreObjCWebCascadeList',
'WebScrollAnimationHelperDelegate=ChromiumWebCoreObjCWebScrollAnimationHelperDelegate',
'WebScrollbarPainterControllerDelegate=ChromiumWebCoreObjCWebScrollbarPainterControllerDelegate',
'WebScrollbarPainterDelegate=ChromiumWebCoreObjCWebScrollbarPainterDelegate',
'WebScrollbarPartAnimation=ChromiumWebCoreObjCWebScrollbarPartAnimation',
'WebCoreFlippedView=ChromiumWebCoreObjCWebCoreFlippedView',
'WebCoreTextFieldCell=ChromiumWebCoreObjCWebCoreTextFieldCell',
],
'postbuilds': [
{
# This step ensures that any Objective-C names that aren't
# redefined to be "safe" above will cause a build failure.
'postbuild_name': 'Check Objective-C Rename',
'variables': {
'class_whitelist_regex':
'ChromiumWebCoreObjC|TCMVisibleView|RTCMFlippedView|ScrollerStyleObserver|LayoutThemeNotificationObserver',
'category_whitelist_regex':
'WebCoreFocusRingDrawing|WebCoreTheme',
},
'action': [
'../build/scripts/check_objc_rename.sh',
'<(class_whitelist_regex)',
'<(category_whitelist_regex)',
],
},
],
},
}],
],
},
{
'target_name': 'blink_platform',
'type': '<(component)',
'dependencies': [
'../config.gyp:config',
'../wtf/wtf.gyp:wtf',
'blink_common',
'blink_heap_asm_stubs',
'blink_prerequisites',
'<(DEPTH)/gpu/gpu.gyp:gles2_c_lib',
'<(DEPTH)/skia/skia.gyp:skia',
'<(DEPTH)/third_party/icu/icu.gyp:icui18n',
'<(DEPTH)/third_party/icu/icu.gyp:icuuc',
'<(DEPTH)/third_party/libpng/libpng.gyp:libpng',
'<(DEPTH)/third_party/libwebp/libwebp.gyp:libwebp',
'<(DEPTH)/third_party/ots/ots.gyp:ots',
'<(DEPTH)/third_party/qcms/qcms.gyp:qcms',
'<(DEPTH)/url/url.gyp:url_lib',
'<(DEPTH)/v8/tools/gyp/v8.gyp:v8',
'platform_generated.gyp:make_platform_generated',
'<(DEPTH)/third_party/iccjpeg/iccjpeg.gyp:iccjpeg',
'<(libjpeg_gyp_path):libjpeg',
],
'export_dependent_settings': [
'<(DEPTH)/gpu/gpu.gyp:gles2_c_lib',
'<(DEPTH)/skia/skia.gyp:skia',
'<(DEPTH)/third_party/libpng/libpng.gyp:libpng',
'<(DEPTH)/third_party/libwebp/libwebp.gyp:libwebp',
'<(DEPTH)/third_party/ots/ots.gyp:ots',
'<(DEPTH)/third_party/qcms/qcms.gyp:qcms',
'<(DEPTH)/v8/tools/gyp/v8.gyp:v8',
'<(DEPTH)/url/url.gyp:url_lib',
'<(DEPTH)/third_party/iccjpeg/iccjpeg.gyp:iccjpeg',
'<(libjpeg_gyp_path):libjpeg',
],
'defines': [
'BLINK_PLATFORM_IMPLEMENTATION=1',
'INSIDE_BLINK',
],
'include_dirs': [
'<(angle_path)/include',
'<(SHARED_INTERMEDIATE_DIR)/blink',
],
'xcode_settings': {
# Some Mac-specific parts of WebKit won't compile without having this
# prefix header injected.
'GCC_PREFIX_HEADER': '<(DEPTH)/third_party/WebKit/Source/build/mac/Prefix.h',
},
'sources': [
'<@(platform_files)',
'<@(platform_heap_files)',
# Additional .cpp files from platform_generated.gyp:make_platform_generated actions.
'<(blink_platform_output_dir)/FontFamilyNames.cpp',
'<(blink_platform_output_dir)/RuntimeEnabledFeatures.cpp',
'<(blink_platform_output_dir)/RuntimeEnabledFeatures.h',
'<(blink_platform_output_dir)/ColorData.cpp',
],
'sources/': [
# Exclude all platform specific things, reinclude them below on a per-platform basis
# FIXME: Figure out how to store these patterns in a variable.
['exclude', '(cf|cg|mac|opentype|win)/'],
['exclude', '(?<!Chromium)(CF|CG|Mac|Win)\\.(cpp|mm?)$'],
# *NEON.cpp files need special compile options.
# They are moved to the webcore_0_neon target.
['exclude', 'graphics/cpu/arm/.*NEON\\.(cpp|h)'],
['exclude', 'graphics/cpu/arm/filters/.*NEON\\.(cpp|h)'],
],
# Disable c4267 warnings until we fix size_t to int truncations.
# Disable c4724 warnings which is generated in VS2012 due to improper
# compiler optimizations, see crbug.com/237063
'msvs_disabled_warnings': [ 4267, 4334, 4724 ],
'conditions': [
['OS=="linux" or OS=="android" or OS=="win"', {
'sources/': [
# Cherry-pick files excluded by the broader regular expressions above.
['include', 'fonts/opentype/OpenTypeTypes\\.h$'],
['include', 'fonts/opentype/OpenTypeVerticalData\\.(cpp|h)$'],
],
'dependencies': [
'<(DEPTH)/third_party/harfbuzz-ng/harfbuzz.gyp:harfbuzz-ng',
],
},
],
['OS=="linux" or OS=="android"', {
'sources/': [
['include', 'fonts/linux/FontPlatformDataLinux\\.cpp$'],
]
}, { # OS!="linux" and OS!="android"
'sources/': [
['exclude', 'fonts/linux/FontPlatformDataLinux\\.cpp$'],
]
}],
['OS=="mac"', {
'dependencies': [
'<(DEPTH)/third_party/harfbuzz-ng/harfbuzz.gyp:harfbuzz-ng',
],
'link_settings': {
'libraries': [
'$(SDKROOT)/System/Library/Frameworks/Accelerate.framework',
'$(SDKROOT)/System/Library/Frameworks/Carbon.framework',
'$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
]
},
'sources/': [
# We use LocaleMac.mm instead of LocaleICU.cpp
['exclude', 'text/LocaleICU\\.(cpp|h)$'],
['include', 'text/LocaleMac\\.mm$'],
# The Mac uses mac/KillRingMac.mm instead of the dummy
# implementation.
['exclude', 'KillRingNone\\.cpp$'],
# The Mac build is USE(CF).
['include', 'CF\\.cpp$'],
# Use native Mac font code from core.
['include', '(fonts/)?mac/[^/]*Font[^/]*\\.(cpp|mm?)$'],
# TODO(dro): Merge the opentype vertical data files inclusion across all platforms.
['include', 'fonts/opentype/OpenTypeTypes\\.h$'],
['include', 'fonts/opentype/OpenTypeVerticalData\\.(cpp|h)$'],
# Cherry-pick some files that can't be included by broader regexps.
# Some of these are used instead of Chromium platform files, see
# the specific exclusions in the "exclude" list below.
['include', 'audio/mac/FFTFrameMac\\.cpp$'],
['include', 'fonts/mac/GlyphPageTreeNodeMac\\.cpp$'],
['include', 'mac/ColorMac\\.mm$'],
['include', 'mac/BlockExceptions\\.mm$'],
['include', 'mac/KillRingMac\\.mm$'],
['include', 'mac/LocalCurrentGraphicsContext\\.mm$'],
['include', 'mac/NSScrollerImpDetails\\.mm$'],
['include', 'mac/ScrollAnimatorMac\\.mm$'],
['include', 'mac/ThemeMac\\.h$'],
['include', 'mac/ThemeMac\\.mm$'],
['include', 'mac/WebCoreNSCellExtras\\.h$'],
['include', 'mac/WebCoreNSCellExtras\\.mm$'],
# Mac uses only ScrollAnimatorMac.
['exclude', 'scroll/ScrollbarThemeNonMacCommon\\.(cpp|h)$'],
['exclude', 'scroll/ScrollAnimatorNone\\.cpp$'],
['exclude', 'scroll/ScrollAnimatorNone\\.h$'],
['exclude', 'fonts/skia/FontCacheSkia\\.cpp$'],
['include', 'geometry/mac/FloatPointMac\\.mm$'],
['include', 'geometry/mac/FloatRectMac\\.mm$'],
['include', 'geometry/mac/FloatSizeMac\\.mm$'],
['include', 'geometry/mac/IntPointMac\\.mm$'],
['include', 'geometry/mac/IntRectMac\\.mm$'],
['include', 'geometry/cg/FloatPointCG\\.cpp$'],
['include', 'geometry/cg/FloatRectCG\\.cpp$'],
['include', 'geometry/cg/FloatSizeCG\\.cpp$'],
['include', 'geometry/cg/IntPointCG\\.cpp$'],
['include', 'geometry/cg/IntRectCG\\.cpp$'],
['include', 'geometry/cg/IntSizeCG\\.cpp$'],
],
}, { # OS!="mac"
'sources/': [
['exclude', 'mac/'],
['exclude', 'geometry/mac/'],
['exclude', 'geometry/cg/'],
['exclude', 'scroll/ScrollbarThemeMac'],
],
}],
['OS != "linux" and OS != "mac" and OS != "win"', {
'sources/': [
['exclude', 'VDMX[^/]+\\.(cpp|h)$'],
],
}],
['OS=="win"', {
'sources/': [
# We use LocaleWin.cpp instead of LocaleICU.cpp
['exclude', 'text/LocaleICU\\.(cpp|h)$'],
['include', 'text/LocaleWin\\.(cpp|h)$'],
['include', 'clipboard/ClipboardUtilitiesWin\\.(cpp|h)$'],
['include', 'fonts/opentype/'],
['include', 'fonts/win/FontCacheSkiaWin\\.cpp$'],
['include', 'fonts/win/FontFallbackWin\\.(cpp|h)$'],
['include', 'fonts/win/FontPlatformDataWin\\.cpp$'],
# SystemInfo.cpp is useful and we don't want to copy it.
['include', 'win/SystemInfo\\.cpp$'],
],
}, { # OS!="win"
'sources/': [
['exclude', 'win/'],
['exclude', 'Win\\.cpp$'],
['exclude', '/(Windows)[^/]*\\.cpp$'],
['include', 'fonts/opentype/OpenTypeSanitizer\\.cpp$'],
],
}],
['OS=="win" and chromium_win_pch==1', {
'sources/': [
['include', '<(DEPTH)/third_party/WebKit/Source/build/win/Precompile.cpp'],
],
}],
['OS=="android"', {
'sources/': [
['include', '^fonts/VDMXParser\\.cpp$'],
],
}, { # OS!="android"
'sources/': [
['exclude', 'Android\\.cpp$'],
],
}],
['OS=="linux"', {
'dependencies': [
'<(DEPTH)/build/linux/system.gyp:fontconfig',
],
'export_dependent_settings': [
'<(DEPTH)/build/linux/system.gyp:fontconfig',
],
}],
['use_default_render_theme==0', {
'sources/': [
['exclude', 'scroll/ScrollbarThemeAura\\.(cpp|h)'],
],
}],
['"WTF_USE_WEBAUDIO_FFMPEG=1" in feature_defines', {
'include_dirs': [
'<(DEPTH)/third_party/ffmpeg',
],
'dependencies': [
'<(DEPTH)/third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
],
}],
['"WTF_USE_WEBAUDIO_OPENMAX_DL_FFT=1" in feature_defines', {
'include_dirs': [
'<(DEPTH)/third_party/openmax_dl',
],
'dependencies': [
'<(DEPTH)/third_party/openmax_dl/dl/dl.gyp:openmax_dl',
],
}],
['target_arch=="arm"', {
'dependencies': [
'blink_arm_neon',
],
}],
],
'target_conditions': [
['OS=="android"', {
'sources/': [
['include', 'exported/linux/WebFontRenderStyle\\.cpp$'],
['include', 'fonts/linux/FontPlatformDataLinux\\.cpp$'],
],
}],
],
},
# The *NEON.cpp files fail to compile when -mthumb is passed. Force
# them to build in ARM mode.
# See https://bugs.webkit.org/show_bug.cgi?id=62916.
{
'target_name': 'blink_arm_neon',
'conditions': [
['target_arch=="arm"', {
'type': 'static_library',
'dependencies': [
'blink_common',
],
'hard_dependency': 1,
'sources': [
'<@(platform_files)',
],
'sources/': [
['exclude', '.*'],
['include', 'graphics/cpu/arm/filters/.*NEON\\.(cpp|h)'],
],
'cflags': ['-marm'],
'conditions': [
['OS=="android"', {
'cflags!': ['-mthumb'],
}],
],
},{ # target_arch!="arm"
'type': 'none',
}],
],
}],
}
|
"""
Provides data for the ISO 3166-1 Country codes.
Reference:
https://en.wikipedia.org/wiki/ISO_3166
"""
countries = [
("afghanistan", "af", "afg", "004"),
("aland islands", "ax", "ala", "248"),
("albania", "al", "alb", "008"),
("algeria", "dz", "dza", "012"),
("american samoa", "as", "asm", "016"),
("andorra", "ad", "and", "020"),
("angola", "ao", "ago", "024"),
("anguilla", "ai", "aia", "660"),
("antarctica", "aq", "ata", "010"),
("antigua and barbuda", "ag", "atg", "028"),
("argentina", "ar", "arg", "032"),
("armenia", "am", "arm", "051"),
("aruba", "aw", "abw", "533"),
("australia", "au", "aus", "036"),
("austria", "at", "aut", "040"),
("azerbaijan", "az", "aze", "031"),
("bahamas", "bs", "bhs", "044"),
("bahrain", "bh", "bhr", "048"),
("bangladesh", "bd", "bgd", "050"),
("barbados", "bb", "brb", "052"),
("belarus", "by", "blr", "112"),
("belgium", "be", "bel", "056"),
("belize", "bz", "blz", "084"),
("benin", "bj", "ben", "204"),
("bermuda", "bm", "bmu", "060"),
("bhutan", "bt", "btn", "064"),
("bolivia", "bo", "bol", "068"),
("bosnia and herzegovina", "ba", "bih", "070"),
("botswana", "bw", "bwa", "072"),
("bouvet island", "bv", "bvt", "074"),
("brazil", "br", "bra", "076"),
("british virgin islands", "vg", "vgb", "092"),
("british indian ocean territory", "io", "iot", "086"),
("brunei darussalam", "bn", "brn", "096"),
("bulgaria", "bg", "bgr", "100"),
("burkina faso", "bf", "bfa", "854"),
("burundi", "bi", "bdi", "108"),
("cambodia", "kh", "khm", "116"),
("cameroon", "cm", "cmr", "120"),
("canada", "ca", "can", "124"),
("cape verde", "cv", "cpv", "132"),
("cayman islands", "ky", "cym", "136"),
("central african republic", "cf", "caf", "140"),
("chad", "td", "tcd", "148"),
("chile", "cl", "chl", "152"),
("china", "cn", "chn", "156"),
("hong kong, sar china", "hk", "hkg", "344"),
("macao, sar china", "mo", "mac", "446"),
("christmas island", "cx", "cxr", "162"),
("cocos (keeling) islands", "cc", "cck", "166"),
("colombia", "co", "col", "170"),
("comoros", "km", "com", "174"),
("congo(brazzaville)", "cg", "cog", "178"),
("congo, (kinshasa)", "cd", "cod", "180"),
("cook islands", "ck", "cok", "184"),
("costa rica", "cr", "cri", "188"),
("côte d'ivoire", "ci", "civ", "384"),
("croatia", "hr", "hrv", "191"),
("cuba", "cu", "cub", "192"),
("cyprus", "cy", "cyp", "196"),
("czech republic", "cz", "cze", "203"),
("denmark", "dk", "dnk", "208"),
("djibouti", "dj", "dji", "262"),
("dominica", "dm", "dma", "212"),
("dominican republic", "do", "dom", "214"),
("ecuador", "ec", "ecu", "218"),
("egypt", "eg", "egy", "818"),
("el salvador", "sv", "slv", "222"),
("equatorial guinea", "gq", "gnq", "226"),
("eritrea", "er", "eri", "232"),
("estonia", "ee", "est", "233"),
("ethiopia", "et", "eth", "231"),
("falkland islands (malvinas)", "fk", "flk", "238"),
("faroe islands", "fo", "fro", "234"),
("fiji", "fj", "fji", "242"),
("finland", "fi", "fin", "246"),
("france", "fr", "fra", "250"),
("french guiana", "gf", "guf", "254"),
("french polynesia", "pf", "pyf", "258"),
("french southern territories", "tf", "atf", "260"),
("gabon", "ga", "gab", "266"),
("gambia", "gm", "gmb", "270"),
("georgia", "ge", "geo", "268"),
("germany", "de", "deu", "276"),
("ghana", "gh", "gha", "288"),
("gibraltar", "gi", "gib", "292"),
("greece", "gr", "grc", "300"),
("greenland", "gl", "grl", "304"),
("grenada", "gd", "grd", "308"),
("guadeloupe", "gp", "glp", "312"),
("guam", "gu", "gum", "316"),
("guatemala", "gt", "gtm", "320"),
("guernsey", "gg", "ggy", "831"),
("guinea", "gn", "gin", "324"),
("guinea-bissau", "gw", "gnb", "624"),
("guyana", "gy", "guy", "328"),
("haiti", "ht", "hti", "332"),
("heard and mcdonald islands", "hm", "hmd", "334"),
("holy see(vatican city state)", "va", "vat", "336"),
("honduras", "hn", "hnd", "340"),
("hungary", "hu", "hun", "348"),
("iceland", "is", "isl", "352"),
("india", "in", "ind", "356"),
("indonesia", "id", "idn", "360"),
("iran, islamic republic of", "ir", "irn", "364"),
("iraq", "iq", "irq", "368"),
("ireland", "ie", "irl", "372"),
("isle of man", "im", "imn", "833"),
("israel", "il", "isr", "376"),
("italy", "it", "ita", "380"),
("jamaica", "jm", "jam", "388"),
("japan", "jp", "jpn", "392"),
("jersey", "je", "jey", "832"),
("jordan", "jo", "jor", "400"),
("kazakhstan", "kz", "kaz", "398"),
("kenya", "ke", "ken", "404"),
("kiribati", "ki", "kir", "296"),
("korea(north)", "kp", "prk", "408"),
("korea(south)", "kr", "kor", "410"),
("kuwait", "kw", "kwt", "414"),
("kyrgyzstan", "kg", "kgz", "417"),
("lao pdr", "la", "lao", "418"),
("latvia", "lv", "lva", "428"),
("lebanon", "lb", "lbn", "422"),
("lesotho", "ls", "lso", "426"),
("liberia", "lr", "lbr", "430"),
("libya", "ly", "lby", "434"),
("liechtenstein", "li", "lie", "438"),
("lithuania", "lt", "ltu", "440"),
("luxembourg", "lu", "lux", "442"),
("macedonia, republic of", "mk", "mkd", "807"),
("madagascar", "mg", "mdg", "450"),
("malawi", "mw", "mwi", "454"),
("malaysia", "my", "mys", "458"),
("maldives", "mv", "mdv", "462"),
("mali", "ml", "mli", "466"),
("malta", "mt", "mlt", "470"),
("marshall islands", "mh", "mhl", "584"),
("martinique", "mq", "mtq", "474"),
("mauritania", "mr", "mrt", "478"),
("mauritius", "mu", "mus", "480"),
("mayotte", "yt", "myt", "175"),
("mexico", "mx", "mex", "484"),
("micronesia, federated states of", "fm", "fsm", "583"),
("moldova", "md", "mda", "498"),
("monaco", "mc", "mco", "492"),
("mongolia", "mn", "mng", "496"),
("montenegro", "me", "mne", "499"),
("montserrat", "ms", "msr", "500"),
("morocco", "ma", "mar", "504"),
("mozambique", "mz", "moz", "508"),
("myanmar", "mm", "mmr", "104"),
("namibia", "na", "nam", "516"),
("nauru", "nr", "nru", "520"),
("nepal", "np", "npl", "524"),
("netherlands", "nl", "nld", "528"),
("netherlands antilles", "an", "ant", "530"),
("new caledonia", "nc", "ncl", "540"),
("new zealand", "nz", "nzl", "554"),
("nicaragua", "ni", "nic", "558"),
("niger", "ne", "ner", "562"),
("nigeria", "ng", "nga", "566"),
("niue", "nu", "niu", "570"),
("norfolk island", "nf", "nfk", "574"),
("northern mariana islands", "mp", "mnp", "580"),
("norway", "no", "nor", "578"),
("oman", "om", "omn", "512"),
("pakistan", "pk", "pak", "586"),
("palau", "pw", "plw", "585"),
("palestinian territory", "ps", "pse", "275"),
("panama", "pa", "pan", "591"),
("papua new guinea", "pg", "png", "598"),
("paraguay", "py", "pry", "600"),
("peru", "pe", "per", "604"),
("philippines", "ph", "phl", "608"),
("pitcairn", "pn", "pcn", "612"),
("poland", "pl", "pol", "616"),
("portugal", "pt", "prt", "620"),
("puerto rico", "pr", "pri", "630"),
("qatar", "qa", "qat", "634"),
("réunion", "re", "reu", "638"),
("romania", "ro", "rou", "642"),
("russian federation", "ru", "rus", "643"),
("rwanda", "rw", "rwa", "646"),
("saint-barthélemy", "bl", "blm", "652"),
("saint helena", "sh", "shn", "654"),
("saint kitts and nevis", "kn", "kna", "659"),
("saint lucia", "lc", "lca", "662"),
("saint-martin (french part)", "mf", "maf", "663"),
("saint pierre and miquelon", "pm", "spm", "666"),
("saint vincent and grenadines", "vc", "vct", "670"),
("samoa", "ws", "wsm", "882"),
("san marino", "sm", "smr", "674"),
("sao tome and principe", "st", "stp", "678"),
("saudi arabia", "sa", "sau", "682"),
("senegal", "sn", "sen", "686"),
("serbia", "rs", "srb", "688"),
("seychelles", "sc", "syc", "690"),
("sierra leone", "sl", "sle", "694"),
("singapore", "sg", "sgp", "702"),
("slovakia", "sk", "svk", "703"),
("slovenia", "si", "svn", "705"),
("solomon islands", "sb", "slb", "090"),
("somalia", "so", "som", "706"),
("south africa", "za", "zaf", "710"),
("south georgia and the south sandwich islands", "gs", "sgs", "239"),
("south sudan", "ss", "ssd", "728"),
("spain", "es", "esp", "724"),
("sri lanka", "lk", "lka", "144"),
("sudan", "sd", "sdn", "736"),
("suriname", "sr", "sur", "740"),
("svalbard and jan mayen islands", "sj", "sjm", "744"),
("swaziland", "sz", "swz", "748"),
("sweden", "se", "swe", "752"),
("switzerland", "ch", "che", "756"),
("syrian arab republic(syria)", "sy", "syr", "760"),
("taiwan, republic of china", "tw", "twn", "158"),
("tajikistan", "tj", "tjk", "762"),
("tanzania, united republic of", "tz", "tza", "834"),
("thailand", "th", "tha", "764"),
("timor-leste", "tl", "tls", "626"),
("togo", "tg", "tgo", "768"),
("tokelau", "tk", "tkl", "772"),
("tonga", "to", "ton", "776"),
("trinidad and tobago", "tt", "tto", "780"),
("tunisia", "tn", "tun", "788"),
("turkey", "tr", "tur", "792"),
("turkmenistan", "tm", "tkm", "795"),
("turks and caicos islands", "tc", "tca", "796"),
("tuvalu", "tv", "tuv", "798"),
("uganda", "ug", "uga", "800"),
("ukraine", "ua", "ukr", "804"),
("united arab emirates", "ae", "are", "784"),
("united kingdom", "gb", "gbr", "826"),
("united states of america", "us", "usa", "840"),
("us minor outlying islands", "um", "umi", "581"),
("us minor islands", "um", "umi", "581"),
("uruguay", "uy", "ury", "858"),
("uzbekistan", "uz", "uzb", "860"),
("vanuatu", "vu", "vut", "548"),
("venezuela(bolivarian republic)", "ve", "ven", "862"),
("viet nam", "vn", "vnm", "704"),
("virgin islands, us", "vi", "vir", "850"),
("wallis and futuna islands", "wf", "wlf", "876"),
("western sahara", "eh", "esh", "732"),
("yemen", "ye", "yem", "887"),
("zambia", "zm", "zmb", "894"),
("zimbabwe", "zw", "zwe", "716"),
]
def makeColLook(rows, scol, dcol):
    """Build a lookup dict mapping column *scol* to column *dcol*.

    Args:
        rows: iterable of indexable records (e.g. the country tuples above).
        scol: index of the key column.
        dcol: index of the value column.

    Returns:
        dict of row[scol] -> row[dcol]; when keys repeat, the later row
        wins (matches the original loop's overwrite behavior).
    """
    return {row[scol]: row[dcol] for row in rows}
country2iso = makeColLook(countries, 0, 1)
|
def poscode2word(pos):
    """Translate a Penn Treebank part-of-speech tag to its description.

    Args:
        pos: the POS tag string (e.g. 'NN', 'VBD').

    Returns:
        The human-readable description, or *pos* unchanged when the tag
        is unknown.
    """
    # Fixed typos in the original table ('Noun, sigular', 'List item
    # maker', bare 'Existential') and added the missing 'VBN' tag.
    tag_des = {
        'CC': 'Coordinating conjunction',
        'CD': 'Cardinal number',
        'DT': 'Determiner',
        'EX': 'Existential there',
        'FW': 'Foreign word',
        'IN': 'Preposition',
        'JJ': 'Adjective',
        'JJR': 'Adjective, comparative',
        'JJS': 'Adjective, superlative',
        'LS': 'List item marker',
        'MD': 'Modal',
        'NN': 'Noun, singular',
        'NNS': 'Noun, plural',
        'NNP': 'Proper noun, singular',
        'NNPS': 'Proper noun, plural',
        'PDT': 'Predeterminer',
        'POS': 'Possessive ending',
        'PRP': 'Personal pronoun',
        'PRP$': 'Possessive pronoun',
        'RB': 'Adverb',
        'RBR': 'Adverb, comparative',
        'RBS': 'Adverb, superlative',
        'RP': 'Particle',
        'SYM': 'Symbol',
        'TO': 'to',
        'UH': 'Interjection',
        'VB': 'Verb, base form',
        'VBD': 'Verb, past tense',
        'VBG': 'Verb, gerund or present participle',
        'VBN': 'Verb, past participle',
        'VBP': 'Verb, non-3rd person singular present',
        'VBZ': 'Verb, 3rd person singular present',
        'WDT': 'Wh-determiner',
        'WP': 'Wh-pronoun',
        'WP$': 'Possessive wh-pronoun',
        'WRB': 'Wh-adverb'
    }
    # dict.get gives the same fallback-to-input behavior as the original
    # membership test, with a single lookup.
    return tag_des.get(pos, pos)


if __name__ == "__main__":
    print(poscode2word('NN'))
|
"""
Faster R-CNN with DIOU Assigner
Average Precision (AP) @[ IoU=0.50:0.95 | area= all | maxDets=1500 ] = 0.054
Average Precision (AP) @[ IoU=0.25 | area= all | maxDets=1500 ] = -1.000
Average Precision (AP) @[ IoU=0.50 | area= all | maxDets=1500 ] = 0.113
Average Precision (AP) @[ IoU=0.75 | area= all | maxDets=1500 ] = 0.040
Average Precision (AP) @[ IoU=0.50:0.95 | area=verytiny | maxDets=1500 ] = 0.000
Average Precision (AP) @[ IoU=0.50:0.95 | area= tiny | maxDets=1500 ] = 0.039
Average Precision (AP) @[ IoU=0.50:0.95 | area= small | maxDets=1500 ] = 0.122
Average Precision (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=1500 ] = 0.222
Average Recall (AR) @[ IoU=0.50:0.95 | area= all | maxDets=100 ] = 0.139
Average Recall (AR) @[ IoU=0.50:0.95 | area= all | maxDets=300 ] = 0.148
Average Recall (AR) @[ IoU=0.50:0.95 | area= all | maxDets=1500 ] = 0.149
Average Recall (AR) @[ IoU=0.50:0.95 | area=verytiny | maxDets=1500 ] = 0.000
Average Recall (AR) @[ IoU=0.50:0.95 | area= tiny | maxDets=1500 ] = 0.076
Average Recall (AR) @[ IoU=0.50:0.95 | area= small | maxDets=1500 ] = 0.345
Average Recall (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=1500 ] = 0.436
Optimal LRP @[ IoU=0.50 | area= all | maxDets=1500 ] = 0.944
Optimal LRP Loc @[ IoU=0.50 | area= all | maxDets=1500 ] = 0.289
Optimal LRP FP @[ IoU=0.50 | area= all | maxDets=1500 ] = 0.743
Optimal LRP FN @[ IoU=0.50 | area= all | maxDets=1500 ] = 0.843
# Class-specific LRP-Optimal Thresholds #
[0.777 0.871 0.9 0.918 0.947 0.538 0.46 0.303]
+----------+-------+---------------+-------+--------------+-------+
| category | AP | category | AP | category | AP |
+----------+-------+---------------+-------+--------------+-------+
| airplane | 0.150 | bridge | 0.004 | storage-tank | 0.090 |
| ship | 0.092 | swimming-pool | 0.021 | vehicle | 0.049 |
| person | 0.023 | wind-mill | 0.000 | None | None |
+----------+-------+---------------+-------+--------------+-------+
+----------+-------+---------------+-------+--------------+-------+
| category | oLRP | category | oLRP | category | oLRP |
+----------+-------+---------------+-------+--------------+-------+
| airplane | 0.856 | bridge | 0.997 | storage-tank | 0.910 |
| ship | 0.915 | swimming-pool | 0.957 | vehicle | 0.950 |
| person | 0.971 | wind-mill | 0.999 | None | None |
+----------+-------+---------------+-------+--------------+-------+
"""
# Inherit the AI-TOD Faster R-CNN model, dataset, schedule and runtime bases.
_base_ = [
    '../_base_/models/faster_rcnn_r50_fpn_aitod.py',
    '../_base_/datasets/aitod_detection.py',
    '../_base_/schedules/schedule_1x.py',
    '../_base_/default_runtime.py'
]
# Override both the RPN and R-CNN assigners to match candidates to ground
# truth with a distance-based IoU ('diou') metric instead of plain IoU.
model = dict(
    train_cfg=dict(
        rpn=dict(
            assigner=dict(
                iou_calculator=dict(type='BboxDistanceMetric'),
                assign_metric='diou')),
        rcnn=dict(
            assigner=dict(
                iou_calculator=dict(type='BboxDistanceMetric'),
                assign_metric='diou'))))
# optimizer
optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001)
# learning policy: save a checkpoint every 4 epochs
checkpoint_config = dict(interval=4)
|
def part1(arr):
    """Total fuel requirement: mass // 3 - 2, summed over all modules."""
    return sum(mass // 3 - 2 for mass in arr)
def part2(arr):
    """Total fuel including the fuel needed to carry the fuel itself.

    The first fuel step for each module is added unconditionally, matching
    the original (it can be negative for very small masses); only positive
    follow-on steps are accumulated.
    """
    total = 0
    for mass in arr:
        step = mass // 3 - 2
        total += step
        while (step := step // 3 - 2) > 0:
            total += step
    return total
def day1():
    """Run both parts of the puzzle on the hard-coded module-mass input."""
    arr = [
        80891, 109412, 149508, 114894, 97527, 59858, 113548, 110516, 97454, 84612, 84578, 87923, 102675, 114312, 144158,
        147190, 53051, 115477, 50870, 122198, 91019, 114350, 88592, 119617, 61012, 67012, 85425, 62185, 124628, 98505,
        53320, 123834, 105862, 113715, 149328, 72125, 107301, 110684, 86037, 102012, 133227, 66950, 64761, 141015,
        132134, 87171, 84142, 80355, 124967, 87973, 98062, 79312, 120108, 97537, 89584, 55206, 68135, 83286, 66726,
        101805, 72996, 113109, 116248, 132007, 128378, 52506, 113628, 62277, 73720, 101756, 141675, 107011, 81118,
        60598, 122703, 129905, 67786, 50982, 96193, 70006, 137087, 136121, 146902, 74781, 50569, 102645, 99426, 97857,
        122801, 55022, 81433, 60509, 66906, 142099, 126652, 103240, 141014, 55579, 143169, 125521
    ]
    print(part1(arr))
    print(part2(arr))
if __name__ == '__main__':
    day1()
|
# Path to the assembler's base JSON data on the deployment host.
BASE_JSON_PATH = '/home/mdd36/tools350/tools350/assembler/base_jsn'
# Local development copy of the same data — presumably the maintainer's
# machine; verify before relying on it.
BASE_JSON_LOCAL = '/Users/matthew/Documents/SchoolWork/TA/ECE350/2019s/350_tools_mk2/tools350/assembler/base_jsn'
class InstructionType:
    """Lookup helper over an instruction-type specification dict."""

    def __init__(self, types: dict):
        # Raw spec; expected shape: {"types": {...}, "branches": [...]}.
        self._instruction_types: dict = types

    def get_by_type(self, type_: str) -> dict:
        """Return the spec entry for the instruction class *type_*."""
        by_type = self._instruction_types["types"]
        return by_type[type_]

    def is_branch(self, instr: str) -> bool:
        """True when *instr* is listed among the branch instructions."""
        return instr in self._instruction_types["branches"]
# Instruction encodings as field-name -> value maps; presumably these
# values are field bit-widths of a 32-bit word — TODO confirm against the
# assembler that consumes them.
NOP = {"opcode": 5, "rd": 5, "rs": 5, "rt": 5, "shamt": 5, "aluop": 5, "zeroes": 2}
# Sentinel encoding: a single 32-wide error field.
ERROR = {"err": 32}
|
class Node:
    """Binary-tree node holding *data* and two child links."""

    def __init__(self, data):
        self.data = data
        # Children start empty and are attached by the caller.
        self.left = None
        self.right = None
def inorder(root):
    """Return the inorder traversal as a string of space-terminated values."""
    if root is None:
        return ""
    # left subtree, then this node's value, then right subtree
    return inorder(root.left) + "{} ".format(root.data) + inorder(root.right)
def all_subtree(root):
    """Collect node values together with nested child-subtree lists.

    Returns None for an empty tree. Each non-None child result is both
    spliced into the flat result AND appended as a nested list, so the
    output mixes values with lists (e.g. [8, 3, [3], 10, [10]]).
    NOTE(review): that double inclusion looks suspicious — confirm it is
    the intended output shape before changing it.
    """
    if root is None:
        return None
    res = [root.data]
    for child in (all_subtree(root.left), all_subtree(root.right)):
        if child is not None:
            res.extend(child)   # splice values (matches original res += child)
            res.append(child)   # and keep the nested list itself
    return res
def main():
    """Build a tiny demo tree and print its traversal and subtree dump."""
    tree = Node(8)
    tree.left = Node(3)
    tree.right = Node(10)
    print(inorder(tree))
    print(all_subtree(tree))


if __name__ == "__main__":
    main()
|
test = {
'name': 'Problem 6',
'points': 1,
'suites': [
{
'cases': [
{
'answer': 'fd4dd892ccea3adcf9446dc4a9738d47',
'choices': [
r"""
Pair('quote', Pair(A, nil)), where:
A is the quoted expression
""",
r"""
[A], where:
A is the quoted expression
""",
r"""
Pair(A, nil), where:
A is the quoted expression
""",
r"""
A, where:
A is the quoted expression
"""
],
'hidden': False,
'locked': True,
'question': 'What is the structure of the expressions argument to do_quote_form?'
}
],
'scored': False,
'type': 'concept'
},
{
'cases': [
{
'code': r"""
>>> do_quote_form(Pair(3, nil), global_frame)
3c7e8a3a2176a696c3a66418f78dff6b
# locked
>>> do_quote_form(Pair('hi', nil), global_frame)
95448591e64e04a7a7885d5fb9b45583
# locked
>>> expr = Pair(Pair('+', Pair('x', Pair(2, nil))), nil)
>>> do_quote_form(expr, global_frame)
2301ee746b57783004f00f39498fdaed
# locked
""",
'hidden': False,
'locked': True
}
],
'scored': True,
'setup': r"""
>>> from scheme import *
>>> global_frame = create_global_frame()
""",
'teardown': '',
'type': 'doctest'
},
{
'cases': [
{
'code': r"""
scm> ''hello
f675ad62f5f67e5229145843fd6bbcaa
# locked
# choice: (quote hello)
# choice: hello
# choice: (hello)
# choice: (quote (quote (hello)))
scm> (quote (1 2))
484e4b42665b2864d685ef07fe666107
# locked
scm> (car '(1 2 3))
eb892a26497f936d1f6cae54aacc5f51
# locked
scm> (cdr '(1 2))
750540b47bda75ff036b4a9aa741b087
# locked
scm> (eval (cons 'car '('(4 2))))
46beb7deeeb5e9af1c8d785b12558317
# locked
""",
'hidden': False,
'locked': True
}
],
'scored': True,
'setup': '',
'teardown': '',
'type': 'scheme'
},
{
'cases': [
{
'code': r"""
>>> read_line(" 'x ")
d88f877a51ba10d1c3a834a690bb43e0
# locked
# choice: Pair('x', nil)
# choice: 'x'
# choice: Pair('quote', 'x')
# choice: Pair('quote', Pair('x', nil))
>>> read_line(" '(a b) ")
e16dd0e729d41b52ddd5d4d38cbfc7e6
# locked
# choice: Pair('a', Pair('b', nil))
# choice: Pair('quote', Pair(Pair('a', Pair('b', nil)), nil))
# choice: Pair('quote', Pair('a', 'b'))
# choice: Pair('quote', Pair('a', Pair('b', nil)))
""",
'hidden': False,
'locked': True
},
{
'code': r"""
>>> read_line("(a (b 'c))")
Pair('a', Pair(Pair('b', Pair(Pair('quote', Pair('c', nil)), nil)), nil))
>>> read_line("(a (b '(c d)))")
Pair('a', Pair(Pair('b', Pair(Pair('quote', Pair(Pair('c', Pair('d', nil)), nil)), nil)), nil))
>>> read_line("')")
SyntaxError
>>> read_line("'()")
Pair('quote', Pair(nil, nil))
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> read_line("'('a)")
Pair('quote', Pair(Pair(Pair('quote', Pair('a', nil)), nil), nil))
>>> read_line("''a")
Pair('quote', Pair(Pair('quote', Pair('a', nil)), nil))
>>> read_line("'('('a 'b 'c))")
Pair('quote', Pair(Pair(Pair('quote', Pair(Pair(Pair('quote', Pair('a', nil)), Pair(Pair('quote', Pair('b', nil)), Pair(Pair('quote', Pair('c', nil)), nil))), nil)), nil), nil))
>>> read_line("(+ '(1 2) 3)")
Pair('+', Pair(Pair('quote', Pair(Pair(1, Pair(2, nil)), nil)), Pair(3, nil)))
>>> read_line("'('+ '(1 2) '3)")
Pair('quote', Pair(Pair(Pair('quote', Pair('+', nil)), Pair(Pair('quote', Pair(Pair(1, Pair(2, nil)), nil)), Pair(Pair('quote', Pair(3, nil)), nil))), nil))
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> scheme_read(Buffer(tokenize_lines(["'hello"])))
Pair('quote', Pair('hello', nil))
>>> read_line("(car '(1 2))")
Pair('car', Pair(Pair('quote', Pair(Pair(1, Pair(2, nil)), nil)), nil))
>>> print(read_line("(car '(1 2))"))
(car (quote (1 2)))
>>> read_line("'('a)")
Pair('quote', Pair(Pair(Pair('quote', Pair('a', nil)), nil), nil))
>>> read_line("''a")
Pair('quote', Pair(Pair('quote', Pair('a', nil)), nil))
>>> read_line("'('('a 'b 'c))")
Pair('quote', Pair(Pair(Pair('quote', Pair(Pair(Pair('quote', Pair('a', nil)), Pair(Pair('quote', Pair('b', nil)), Pair(Pair('quote', Pair('c', nil)), nil))), nil)), nil), nil))
>>> read_line("(+ '(1 2) 3)")
Pair('+', Pair(Pair('quote', Pair(Pair(1, Pair(2, nil)), nil)), Pair(3, nil)))
>>> read_line("'('+ '(1 2) '3)")
Pair('quote', Pair(Pair(Pair('quote', Pair('+', nil)), Pair(Pair('quote', Pair(Pair(1, Pair(2, nil)), nil)), Pair(Pair('quote', Pair(3, nil)), nil))), nil))
""",
'hidden': False,
'locked': False
}
],
'scored': True,
'setup': r"""
>>> from scheme_reader import *
""",
'teardown': '',
'type': 'doctest'
},
{
'cases': [
{
'code': r"""
scm> (quote hello)
hello
scm> 'hello
hello
scm> ''hello
(quote hello)
scm> (quote (1 2))
(1 2)
scm> '(1 2)
(1 2)
scm> (car (car '((1))))
1
scm> (quote 3)
3
scm> '(a b c)
(a b c)
scm> '(1 '2)
(1 (quote 2))
""",
'hidden': False,
'locked': False
}
],
'scored': True,
'setup': '',
'teardown': '',
'type': 'scheme'
}
]
}
|
# For each query (y, x), print the value assigned to that grid cell by a
# diagonal/ring numbering scheme: the ring is max(x, y); the previous rings
# hold (n-1)^2 cells, and the offset inside the ring depends on its parity.
# Fixes: no longer shadows the builtin `sum`; unused loop index renamed to _.
for _ in range(int(input())):
    y, x = map(int, input().split())
    n = max(x, y)                  # ring index of the cell
    total = (n - 1) * (n - 1)      # cells used up by all inner rings
    if n % 2 != 0:
        # odd ring: walk one direction
        total += x + (n - y)
    else:
        # even ring: walk the other direction
        total += y + (n - x)
    print(total)
|
# Demonstration of all()/any() and of Python truthiness rules.
entries = [1, 2, 3, 4, 5]
# Every element is truthy, so both all() and any() return True.
print("all: {}".format(all(entries)))
print("any: {}".format(any(entries)))
print("Iterable with a 'False' value")
entries_with_zero = [1, 2, 0, 4, 5]
# 0 is falsy: all() becomes False while any() remains True.
print("all: {}".format(all(entries_with_zero)))
print("any: {}".format(any(entries_with_zero)))
print()
print("Values interpreted as False in Python")
print("""False: {0}
None: {1}
0: {2}
0.0: {3}
empty list []: {4}
empty tuple (): {5}
empty string '': {6}
empty string "": {7}
empty mapping {{}}: {8}
""".format(False, bool(None), bool(0), bool(0.0), bool([]), bool(()), bool(''), bool(""), bool({})))
print("=" * 80)
# A non-empty string is truthy, so the greeting branch runs.
name = "Tim"
if name:
    print("Hello {}".format(name))
else:
    print("Welcome, person with no name")
|
# Read two grades from the user and print their arithmetic mean
# formatted to one decimal place.
n1 = float (input('Entre com a 1º nota: '))
n2 = float (input('Entre com a 2º nota: '))
r = (n1 + n2) /2
print ('A média do Aluno é: {:.1f}'.format(r))
|
# -*- coding: utf-8 -*-
#
# PySceneDetect: Python-Based Video Scene Detector
# ---------------------------------------------------------------
# [ Site: http://www.bcastell.com/projects/PySceneDetect/ ]
# [ Github: https://github.com/Breakthrough/PySceneDetect/ ]
# [ Documentation: http://pyscenedetect.readthedocs.org/ ]
#
# Copyright (C) 2014-2019 Brandon Castellano <http://www.bcastell.com>.
#
# PySceneDetect is licensed under the BSD 3-Clause License; see the included
# LICENSE file, or visit one of the following pages for details:
# - https://github.com/Breakthrough/PySceneDetect/
# - http://www.bcastell.com/projects/PySceneDetect/
#
# This software uses Numpy, OpenCV, click, tqdm, simpletable, and pytest.
# See the included LICENSE files or one of the above URLs for more information.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
""" Module: ``scenedetect.thirdparty``
This module includes all third-party libraries/dependencies that are distributed
with PySceneDetect. The source directory also includes the license files for all
packages that PySceneDetect depends on, to simplify distribution of binary builds.
"""
|
class Solution(object):
    def buddyStrings(self, A, B):
        """
        Return True if swapping exactly two characters of A makes it equal to B.

        :type A: str
        :type B: str
        :rtype: bool
        """
        if len(A) != len(B):
            return False
        # Positions where the two strings disagree.
        diffs = [(x, y) for x, y in zip(A, B) if x != y]
        if not diffs:
            # Identical strings: a mandatory swap only works if some
            # character repeats. (Bug fix: the old code compared len(A)
            # against a set of *mismatched* characters, which is empty
            # here, so e.g. "ab"/"ab" incorrectly returned True.)
            return len(set(A)) < len(A)
        if len(diffs) != 2:
            # A single swap fixes exactly two mismatched positions.
            return False
        # The two mismatches must be mirror images of each other.
        return diffs[0] == diffs[1][::-1]
def test_buddy_strings():
    """Spot checks for Solution.buddyStrings."""
    sol = Solution()
    for a, b in [("ab", "ba"), ("aa", "aa"), ("aaaaaaabc", "aaaaaaacb")]:
        assert sol.buddyStrings(a, b)
    for a, b in [("ab", "ab"), ("", "aa")]:
        assert sol.buddyStrings(a, b) is False
|
class Solution(object):
def numJewelsInStones(self, J, S):
"""
:type J: str
:type S: str
:rtype: int
"""
if len(J)==0 or len(S)==0:
return 0
answer=0
J_set = set(J)
for char in S:
if char in J_set:
answer+=1
return answer |
class GetCashgramStatus:
    """Request descriptor for the payout "get cashgram status" endpoint."""

    # REST endpoint path and HTTP verb used when this request is dispatched.
    end_point = "/payout/v1/getCashgramStatus"
    req_type = "GET"

    def __init__(self, *args, **kwargs):
        # Required keyword: the id of the cashgram to query
        # (raises KeyError when "cashgramId" is not supplied).
        self.cashgramId = kwargs["cashgramId"]
def reduceNum(n):
    """Print the prime factorisation of n, e.g. ``90 = 2 *3 *3 *5``.

    Invalid input (non-int or <= 0) prints an error message and exits.
    """
    print('{} = '.format(n), end='')
    if not isinstance(n, int) or n <= 0:
        # Reject non-positive or non-integer input (message kept verbatim).
        print('请输入一个正确的数字 !')
        exit(0)
    elif n == 1:
        # 1 has no prime factors; just echo it.
        print('{}'.format(n))
    while n != 1:
        # The smallest divisor found this way is necessarily prime.
        for factor in range(2, n + 1):
            if n % factor == 0:
                n //= factor
                if n == 1:
                    print(factor)
                else:
                    print('{} *'.format(factor), end='')
                break


reduceNum(90)
reduceNum(100)
reduceNum(1)
|
# Definition for singly-linked list.
class ListNode:
    """One node of a singly-linked list."""

    def __init__(self, x):
        self.val = x       # stored value (a single digit here)
        self.next = None   # successor node, or None at the tail
class Solution:
    def addTwoNumbers(self, l1, l2):
        """
        :type l1: ListNode
        :type l2: ListNode
        :rtype: ListNode
        The addends are stored as singly linked lists, least-significant
        digit first; this exercises basic linked-list manipulation.
        """
        # Dummy head: rtn.next will be the real result head.
        rtn = ListNode(0)
        r = rtn
        carry = 0
        while l1 or l2:
            # Treat a missing node as digit 0 so lists may differ in length.
            x = l1.val if l1 else 0
            y = l2.val if l2 else 0
            r.next = ListNode((x + y + carry) % 10)
            carry = 1 if (x + y + carry) > 9 else 0
            r = r.next
            if l1 is not None:
                l1 = l1.next
            if l2 is not None:
                l2 = l2.next
        # A leftover carry adds one final digit.
        if carry > 0:
            r.next = ListNode(1)
        #rtn.next.val: 7
        return rtn.next
    def addTwoNumbersGreat(self, l1, l2):
        """
        :type l1: ListNode
        :type l2: ListNode
        :rtype: ListNode
        """
        # l3.val doubles as the running carry; `before` trails one node
        # behind l3 so a trailing zero node can be trimmed at the end.
        l3 = ListNode(0)
        before = head = l3
        while l1 and l2:
            s = l1.val + l2.val + l3.val
            if s >= 10:
                # Digit overflows: store s-10 and push carry 1 into the next node.
                l3.val = s - 10
                l3.next = ListNode(1)
                before = l3
                l3 = l3.next
            else:
                l3.val = s
                l3.next = ListNode(0)
                before = l3
                l3 = l3.next
            l1 = l1.next
            l2 = l2.next
        if not (l1 or l2):
            pass
        else:
            # One list is longer: keep adding its digits plus the carry.
            con = l1 if l1 else l2
            while con:
                l3.val += con.val
                more = 0
                if l3.val >= 10:
                    l3.val = l3.val - 10
                    more = 1
                l3.next = ListNode(more)
                before = l3
                l3 = l3.next
                con = con.next
        # Drop a trailing zero node left over when no final carry occurred.
        if l3.val == 0: before.next = None
        return head
# Input: (2 -> 4 -> 3) + (5 -> 6 -> 6 -> 9 -> 9)
# Digits are least-significant first, i.e. 342 + 99665 = 100007.
l1 = ListNode(2)
l1.next = ListNode(4)
l1.next.next = ListNode(3)
print(l1.val, l1.next.val, l1.next.next.val)
l2 = ListNode(5)
l2.next = ListNode(6)
l2.next.next = ListNode(6)
l2.next.next.next = ListNode(9)
l2.next.next.next.next = ListNode(9)
print(l2.val, l2.next.val, l2.next.next.val)
sol = Solution()
r = sol.addTwoNumbers(l1, l2)
# Expected digits: 7 0 0 0 0 1 (100007 read least-significant first).
print(r.val, r.next.val, r.next.next.val, r.next.next.next.val,
      r.next.next.next.next.val, r.next.next.next.next.next.val)
|
# ------------------------------
# 25. Reverse Nodes in k-Group
#
# Description:
# Given a linked list, reverse the nodes of a linked list k at a time and return its modified list.
# k is a positive integer and is less than or equal to the length of the linked list. If the number of nodes is not a multiple of k then left-out nodes in the end should remain as it is.
#
# You may not alter the values in the nodes, only nodes itself may be changed.
# Only constant memory is allowed.
# For example,
# Given this linked list: 1->2->3->4->5
# For k = 2, you should return: 2->1->4->3->5
# For k = 3, you should return: 3->2->1->4->5
#
# Version: 1.0
# 09/21/17 by Jianfa
# ------------------------------
# Definition for singly-linked list.
# class ListNode(object):
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution(object):
    def reverseKGroup(self, head, k):
        """
        :type head: ListNode
        :type k: int
        :rtype: ListNode

        Recursively reverse the list k nodes at a time; a trailing group
        of fewer than k nodes is left in its original order.
        """
        # Index -> node map for the next group of up to k nodes.
        pointer = {}
        if not head:
            return None
        else:
            # Collect up to k nodes starting from head; `temp` ends up
            # pointing at the first node after the group.
            i = 0
            temp = head
            while temp and i < k:
                pointer[i] = temp
                temp = temp.next
                i += 1
            if i < k:
                # Fewer than k nodes remain: keep this tail unchanged.
                return head
            else:
                # Reverse the links within the group...
                while i > 1:
                    pointer[i-1].next = pointer[i-2]
                    i -= 1
                # ...then attach the recursively processed remainder and
                # return the group's new head (its former last node).
                pointer[0].next = self.reverseKGroup(temp, k)
                return pointer[k-1]
# Used for test
# if __name__ == "__main__":
# test = Solution()
# d = ListNode(2)
# d.next = None
# c = ListNode(1)
# c.next = d
# b = ListNode(4)
# b.next = c
# a = ListNode(3)
# a.next = b
# head = test.reverseKGroup(a, 3)
# while head != None:
# print(head.val)
# head = head.next
# ------------------------------
# Summary:
# Still using recursion solution
# Some ideas from other solutions are processing every k nodes every time, until there are
# no k nodes in the list. |
description = 'Small Beam Limiter in Experimental Chamber 1'
group = 'optional'
devices = dict(
nbl_l = device('nicos.devices.generic.VirtualReferenceMotor',
description = 'Beam Limiter Left Blade',
lowlevel = True,
abslimits = (-250, 260),
unit = 'mm',
speed = 10,
refswitch = 'high',
),
nbl_r = device('nicos.devices.generic.VirtualReferenceMotor',
description = 'Beam Limiter Right Blade',
lowlevel = True,
abslimits = (-250, 260),
unit = 'mm',
speed = 10,
refswitch = 'high',
),
nbl_t = device('nicos.devices.generic.VirtualReferenceMotor',
description = 'Beam Limiter Top Blade',
lowlevel = True,
abslimits = (-250, 260),
unit = 'mm',
speed = 10,
refswitch = 'high',
),
nbl_b = device('nicos.devices.generic.VirtualReferenceMotor',
description = 'Beam Limiter Bottom Blade',
lowlevel = True,
abslimits = (-250, 260),
unit = 'mm',
speed = 10,
refswitch = 'high',
),
nbl = device('nicos_mlz.nectar.devices.BeamLimiter',
description = 'NECTAR Beam Limiter',
left = 'nbl_l',
right = 'nbl_r',
top = 'nbl_t',
bottom = 'nbl_b',
opmode = 'centered',
coordinates = 'opposite',
pollinterval = 5,
maxage = 10,
parallel_ref = True,
),
)
|
async def test_admin_auth(client, admin, user):
    """Check that /admin redirects anonymous and non-admin users, and serves admins."""
    # Anonymous request: expect a temporary redirect away from the admin page.
    # NOTE(review): many HTTP test clients spell this keyword
    # ``follow_redirects`` (plural) — confirm against the client fixture's API.
    res = await client.get('/admin', follow_redirect=False)
    assert res.status_code == 307
    # Login as an simple user
    res = await client.post('/login', data={'email': user.email, 'password': 'pass'})
    assert res.status_code == 200
    # Still redirected: a plain user is not an admin.
    res = await client.get('/admin', follow_redirect=False)
    assert res.status_code == 307
    # Login as an admin
    res = await client.post('/login', data={'email': admin.email, 'password': 'pass'})
    assert res.status_code == 200
    # Admin gets the page; body contains the admin bootstrap marker.
    res = await client.get('/admin', follow_redirect=False)
    assert res.status_code == 200
    assert 'initAdmin' in await res.text()
|
# https://edabit.com/challenge/Yj2Rew5XQYpu7Nosq
# Create a function that returns the number of frames shown in a given number of minutes for a certain FPS.
def frames(minutes: int, fps: int) -> int:
    """Return the number of frames shown over *minutes* at *fps* frames per second."""
    try:
        # 60 seconds per minute, fps frames per second.
        return minutes * 60 * fps
    except TypeError as err:
        # Non-numeric input: report the problem and fall through (returns None).
        print(f"Error: {err}")


print(frames(1, 1))
print(frames(10, 1))
print(frames(10, 25))
print(frames("a", "b"))
|
"""
HTTP/1.0 301 Moved Permanently
Location: http://www.google.ca/
Content-Type: text/html; charset=UTF-8
Date: Wed, 03 Oct 2018 19:51:01 GMT
Expires: Fri, 02 Nov 2018 19:51:01 GMT
Cache-Control: public, max-age=2592000
Server: gws
Content-Length: 218
X-XSS-Protection: 1; mode=block
X-Frame-Options: SAMEORIGIN
<HTML><HEAD><meta http-equiv="content-type" content="text/html;charset=utf-8">
<TITLE>301 Moved</TITLE></HEAD><BODY>
<H1>301 Moved</H1>
The document has moved
<A HREF="http://www.google.ca/">here</A>.
</BODY></HTML>
"""
"""
HTTP/1.1 200 OK
Connection: close
Server: gunicorn/19.9.0
Date: Wed, 03 Oct 2018 20:08:34 GMT
Content-Type: application/json
Content-Length: 245
Access-Control-Allow-Origin: *
Access-Control-Allow-Credentials: true
Via: 1.1 vegur
{
"args": {
"assignment": "1",
"course": "networking"
},
"headers": {
"Connection": "close",
"Host": "httpbin.org"
},
"origin": "206.180.247.122",
"url": "http://httpbin.org/get?course=networking&assignment=1"
}
"""
class HttpRequest:
    """Builds raw HTTP/1.0 request bytes for GET and POST exchanges."""

    def __init__(self, host, path, query, headers):
        # Target host, resource path, query/body payload and extra header lines.
        self.path = path
        self.host = host
        self.query = query
        self.headers = headers

    def getGet(self):
        """Return the GET request as UTF-8 bytes (query carried in the URL)."""
        request = ( "GET "+ self.path + "?" + self.query + " HTTP/1.0\r\n"
                    "{headers}\r\n"
                    "Host:" + self.host + "\r\n\r\n")
        return request.format(headers=self.headers).encode('utf-8')

    def getPost(self):
        """Return the POST request as UTF-8 bytes (query sent as the body)."""
        template = ( "POST {path} HTTP/1.0\r\n"
                     "{headers}\r\n"
                     "Content-Length: {content_length}\r\n"
                     "Host: {host}\r\n"
                     "User-Agent: Concordia-HTTP/1.0\r\n"
                     "Connection: close\r\n\r\n")
        head = template.format(
            path=self.path,
            headers=self.headers,
            content_length=len(self.query),
            host=self.host,
        ).encode('utf-8')
        # Header block followed immediately by the encoded body.
        return head + self.query.encode('utf-8')
class HttpResponse:
    """Parses a raw HTTP response into header, body, status code and reason."""

    def __init__(self, response):
        # Raw socket bytes decoded as UTF-8 text, then parsed eagerly.
        self.text = response.decode('utf-8')
        self.parseText()

    def parseText(self):
        """Split header from body and pull the code/status off the first line."""
        segments = self.text.split("\r\n\r\n")
        self.header = segments[0]
        self.body = segments[1]
        header_lines = self.header.split("\r\n")
        status_parts = header_lines[0].split(" ")
        self.code = status_parts[1]
        self.status = status_parts[2]
        if self.code == HttpCode.redirect:
            # Assumes the second header line is "Location: http://host/";
            # keep the host part without the trailing slash.
            self.location = header_lines[1].split(" ")[1].split("//")[1][:-1]
            print("Redirect to " + self.location)
class HttpCode:
    """HTTP status codes used by the client, kept as strings for comparison."""

    redirect = "301"  # moved permanently
    ok = "200"        # success
|
class Node():
    """Singly-linked list node."""

    def __init__(self, value):
        self.value = value  # payload
        self.next = None    # next node, or None at the tail


class LinkedList():
    """Minimal singly linked list supporting append and string rendering."""

    def __init__(self):
        self.head = None  # first node, or None when the list is empty

    def __str__(self):
        # NOTE(review): the f-string replacement field below is a *set
        # literal* containing the stringified value, so each cell renders
        # like "{'1'} ->" — preserved verbatim from the original code.
        parts = []
        node = self.head
        while node:
            parts.append(f"{ {str(node.value)} } ->")
            node = node.next
        return ''.join(parts)

    def append(self, value):
        '''
        Append *value* as a new tail node.

        Raises TypeError when value is None (treated as a missing argument).
        '''
        if value is None:
            raise TypeError("insert() missing 1 required positional argument: 'value' ")
        tail = Node(value)
        if not self.head:
            self.head = tail
            return
        node = self.head
        while node.next:
            node = node.next
        node.next = tail
def zipLists(list1,list2):
    """
    Takes two linked lists as arguments. Zip the two linked lists together into one so that
    the nodes alternate between the two lists and return a reference to the head of the zipped list.

    NOTE(review): despite the docstring, this returns the *string rendering*
    of the zipped list (output.__str__()), not a head node. Also, the two
    falsiness guards below never fire for LinkedList instances (which are
    always truthy); they only catch None/other falsy arguments — and the
    second guard returns list1 rather than list2. Confirm intent.
    """
    if not list1 :
        return list1
    if not list2 :
        return list1
    output =LinkedList()
    current1 =list1.head
    current2 =list2.head
    # Alternate values from list1 and list2 while both have nodes left.
    while current1 :
        output.append(current1.value)
        if current2 :
            output.append(current2.value)
            current2 = current2.next
        current1= current1.next
    # Drain whatever remains of the second list.
    while current2 :
        output.append(current2.value)
        current2 =current2.next
    return output.__str__()
if __name__ == "__main__":
    # Demo data only — note zipLists itself is never invoked here.
    majd = LinkedList()
    majd.append(1)
    majd.append(2)
    ahmad = LinkedList()
    ahmad.append(5)
    ahmad.append(10)
# Build new lists by flattening existing ones with iterable unpacking (*).
list_one = [1, 2, 3]
list_two = [4, 5, 6,7]
lst = [0, *list_one, *list_two]
print(lst)
country_lst_one = ['Finland', 'Sweden', 'Norway']
country_lst_two = ['Denmark', 'Iceland']
# Concatenation via unpacking — equivalent to country_lst_one + country_lst_two.
nordic_countries = [*country_lst_one, *country_lst_two]
print(nordic_countries)
|
# Sample ASP (answer-set programming) program and its expected rewrite.
# NOTE(review): the module-level names `input`/`output` shadow the
# built-in input() — fine for a fixture module, but avoid importing *.
input = """
a(1) | a(3).
a(2).
c(1,1).
c(1,3).
d(1,5).
b(X) :- a(X), c(Y,X).
ok :- #max{V :b(V)} < X, d(Y,X).
"""
output = """
a(1) | a(3).
a(2).
c(1,1).
c(1,3).
d(1,5).
b(X) :- a(X), c(Y,X).
ok :- #max{V :b(V)} < X, d(Y,X).
"""
|
offices = []  # module-level in-memory store of created office records
expected_offices = ("Federal", "Legislative", "State", "Local Government")


class PoliticalOffice():
    """In-memory create/read helpers for political office records."""

    @staticmethod
    def exists(name):
        """
        Checks if an office with the same name exists
        Returns a boolean
        """
        return any(office["name"] == name for office in offices)

    @staticmethod
    def check_office_type(office_type):
        """Return True when *office_type* is one of the supported kinds."""
        return office_type in expected_offices

    @classmethod
    def create_political_office(cls, name, office_type):
        """Create, store and return a new office record with a sequential id."""
        record = {
            "office_id": len(offices) + 1,
            "name": name,
            "type": office_type,
        }
        offices.append(record)
        return record

    def get_political_office(self):
        """Return every stored office, printing a notice when none exist."""
        if not offices:
            print('List is empty')
        return offices

    def get_specific_political_office(self, office_id):
        """Return the office with *office_id*, or None when absent."""
        for office in offices:
            if office["office_id"] == office_id:
                return office
|
"""
Queue.py
Description: This file contains the implementation of the queue data structure
"""


class Queue:
    """List-backed FIFO queue: enqueue inserts at the front of the list,
    dequeue pops from the back, so the oldest item leaves first."""

    def __init__(self):
        """Create an empty queue."""
        self.items = []

    def isEmpty(self):
        """Return True when the queue holds no items."""
        return self.items == []

    def enqueue(self, item):
        """Add *item* to the queue (stored at list index 0)."""
        self.items.insert(0, item)

    def dequeue(self):
        """Remove and return the oldest item (IndexError when empty)."""
        return self.items.pop()

    def size(self):
        """Return the number of queued items."""
        return len(self.items)

    def find(self, item):
        """Return the internal list index of *item* (ValueError if absent)."""
        return self.items.index(item)

    def get(self, pos):
        """Return the item stored at internal position *pos*."""
        return self.items[pos]

    def remove(self, item):
        """Delete the first occurrence of *item* from the queue."""
        self.items.remove(item)
|
# https://www.codechef.com/problems/DEVARRAY
# Read n array values, then answer Q queries: print "Yes" when the queried
# integer lies within [min, max] of the array, otherwise "No".
n,Q=map(int,input().split())
# Sentinels wide enough for the problem's value range.
max1,min1,a=-99999999999,99999999999,list(map(int,input().split()))
for z in range(n): min1,max1 = min(min1,a[z]),max(max1,a[z])
for z in range(Q): print("Yes") if(int(input()) in range(min1,max1+1)) else print("No")
"""Task:"""
# Imports --------------------------------------------------------------
# Classes --------------------------------------------------------------
# Functions ------------------------------------------------------------
# Methods --------------------------------------------------------------
# Define Variables -----------------------------------------------------
# Execute Code ---------------------------------------------------------
|
# Hash Table
# A website domain like "discuss.leetcode.com" consists of various subdomains. At the top level, we have "com", at the next level, we have "leetcode.com", and at the lowest level, "discuss.leetcode.com". When we visit a domain like "discuss.leetcode.com", we will also visit the parent domains "leetcode.com" and "com" implicitly.
#
# Now, call a "count-paired domain" to be a count (representing the number of visits this domain received), followed by a space, followed by the address. An example of a count-paired domain might be "9001 discuss.leetcode.com".
#
# We are given a list cpdomains of count-paired domains. We would like a list of count-paired domains, (in the same format as the input, and in any order), that explicitly counts the number of visits to each subdomain.
#
# Example 1:
# Input:
# ["9001 discuss.leetcode.com"]
# Output:
# ["9001 discuss.leetcode.com", "9001 leetcode.com", "9001 com"]
# Explanation:
# We only have one website domain: "discuss.leetcode.com". As discussed above, the subdomain "leetcode.com" and "com" will also be visited. So they will all be visited 9001 times.
#
# Example 2:
# Input:
# ["900 google.mail.com", "50 yahoo.com", "1 intel.mail.com", "5 wiki.org"]
# Output:
# ["901 mail.com","50 yahoo.com","900 google.mail.com","5 wiki.org","5 org","1 intel.mail.com","951 com"]
# Explanation:
# We will visit "google.mail.com" 900 times, "yahoo.com" 50 times, "intel.mail.com" once and "wiki.org" 5 times. For the subdomains, we will visit "mail.com" 900 + 1 = 901 times, "com" 900 + 50 + 1 = 951 times, and "org" 5 times.
#
# Notes:
#
# The length of cpdomains will not exceed 100.
# The length of each domain name will not exceed 100.
# Each address will have either 1 or 2 "." characters.
# The input count in any count-paired domain will not exceed 10000.
# The answer output can be returned in any order.
class Solution:
    def subdomainVisits(self, cpdomains):
        """
        :type cpdomains: List[str]
        :rtype: List[str]

        Tally visit counts for every domain and each of its parent
        domains, returning "count domain" strings in first-seen order.
        """
        counts = {}
        for entry in cpdomains:
            count, domain = entry.split()
            count = int(count)
            # Credit the full domain, then strip one label at a time
            # ("a.b.c" -> "b.c" -> "c"), crediting every suffix.
            counts[domain] = counts.get(domain, 0) + count
            while "." in domain:
                domain = domain[domain.find('.') + 1:]
                counts[domain] = counts.get(domain, 0) + count
        return [str(total) + " " + str(dom) for dom, total in counts.items()]
|
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
"""
Real estate.
"""
__version__ = '4.0'
content = {
'real-estate_headline': ['<#realestate_shortheadline#>'],
'realestate_headline': ['<#realestate_shortheadline#>'],
'realestate_section': ['<#realestate_type#>'],
'realestate_shortheadline': [
'<#realestate_type#>',
'<#city#> <#realestate_types#>',
'<#country#> <#realestate_types#>',
'<#realestate_type#> <#realestate_verb#> <#realestate_subject#>',
'<#realestate_types#> <#realestate_verbplural#> <#realestate_subject#>',
'<#realestate_types#> <#time_thisyear#>',
'<#realestate_subject#>',
'<#realestate_subject#>',
'<#realestate_subject#> <#time_thisyear#>',
],
'realestate_type': [
'Your Home', 'Your House', 'Your Accommodation', 'Your Apartment', 'The Flat',
'Your Condo', 'Your Office', 'Your Site', 'Your Estate', 'Your area', 'Your Land',
],
'realestate_types': [
'Homes', 'Houses', 'Accommodations', 'Apartments', 'Flats', 'Condo’s', 'Offices',
'Sites', 'Estates', 'Areas', 'Investments',
],
'realestate_verb': [
'', '', '', 'for', 'needs', 'has', 'requires', 'takes', 'invokes', 'does', 'gets',
],
'realestate_verbplural': [
'', '', '', 'for', 'need', 'have', 'require', 'take', 'invoke', 'do', 'get',
],
'realestate_subject':[
'Advance Fee',
'Advertising',
'Aesthetic Value',
'Affirmative Lending',
'Affordability State',
'A-Frame',
'Agency Disclosure',
'Agent',
'Agreement',
'Annexation',
'Annual Cap',
'Annuity',
'Antitrust Laws',
'Appraiser',
'Appreciation',
'Appropriation',
'Attorney',
'Architecture',
'Asbestos',
'Asking Price',
'Assemblage',
'Assessment',
'Audit',
'Authentication',
'Bailiff',
'Bailment',
'Balance',
'Balustrade',
'Bankruptcy',
'Binder',
'Blight',
'Blighted Area',
'Blueprint',
'Blue-Sky Laws',
'Boiler Plate',
'Broker',
'Brokerage',
'Brownfields',
'Brownstone',
'Budget',
'Budget Mortgage',
'Builder',
'Warranty',
'Bulk Sales',
'Bulk Transfer',
'Bungalow',
'Buy-Down',
'Buyer’s Agent',
'Buyer’s Broker',
'Buyer’s Market',
'Buyout',
'By-Laws',
'Caisson',
'Cap',
'Cap Sheet',
'Capacity',
'Cash Equivalent',
'Cash Flow',
'Cash Method',
'Cash Out',
'Cash Rent',
'Casing',
'Caulk',
'Caulking',
'Cavity',
'Cavity Wall',
'Ceiling joist',
'Cement',
'Census',
'Chalk line',
'Chancellor',
'Chancery',
'Change Order',
'Channeling',
'Chase',
'Chattel',
'Checking',
'Chip Board',
'Chose',
'Clean out',
'Cleaning Fee',
'Closing',
'Closing Costs',
'Closing Date',
'Cold Applied',
'Cold Joint',
'Collar',
'Collar Beam',
'Collateral',
'Concession',
'Consent',
'Convey',
'Cooperative',
'Coping',
'Corbel',
'Cornerite',
'Cornice',
'Credit',
'Cubic Yard',
'Cupping',
'Curb',
'Cure',
'Curtesy',
'Custodian',
'Custom Builder',
'Decree',
'Dedication',
'Deductions',
'Deed',
'Depression',
'Depth',
'Deregulation',
'Dereliction',
'Descent',
'Devise',
'Devisee',
'Devisor',
'Economics',
'Elevation',
'Employee',
'Encroachment',
'Equity',
'Examination',
'Exception',
'Exchange',
'Exchangor',
'Exclusion',
'Execution',
'Executor',
'Fishmouth',
'Fishplate',
'Forbearance',
'Forced Sale',
'Forecasting',
'Foreclosure',
'Forgery',
'Form',
'Fuse',
'Gain',
'Gap',
'Gap Loan',
'Glazing',
'Goodwill',
'Gradient',
'Grading',
'Grain',
'Grant',
'Grant Deed',
'Grantee',
'Grantor',
'Grounds',
'Groundwater',
'Gutter',
'Hardware',
'Haunch',
'Header',
'Hearth',
'Heir',
'Highlights',
'Highrise',
'Hip',
'Implied',
'Impounds',
'Incentive',
'Incline',
'Income',
'Indemnify',
'Indenture',
'Infant',
'Infiltration',
'Inflation',
'Infrastructure',
'Ingress',
'Inheritance',
'Input',
'Joint',
'Joist',
'Judgment',
'Jumpers',
'Land Lease',
'Lattice',
'Law',
'Layout',
'Lead',
'Lease',
'Leaseback ',
'Listing',
'Loan',
'Location',
'Male',
'Mall',
'Mall Stores',
'Mandatory',
'Masonry',
'Mile',
'Mill',
'Molding',
'Monument',
'Mortar',
'Mortgage',
'Negotiable',
'Offeree',
'Offeror',
'Organic',
'Orientation',
'Parties',
'Patent',
'Permeability',
'Permit',
'Plat',
'Pledge',
'Plough',
'Plumb',
'Premises',
'Premium',
'Price',
'Probate',
'Promisee',
'Property',
'Quasi',
'Radius',
'Rafter',
'Redeem',
'Redemption',
'Reformation',
'Refrigerant',
'Regime',
'Regulations',
'Rehabilitate',
'Reinforcing',
'Reinstate',
'Reinstatement ',
'Residence',
'Reversion',
'Scarcity',
'Secured',
'Security',
'Solvent',
'Span',
'Split',
'Stakeholder',
'Status',
'Sublease',
'Subletting',
'Subordinate',
'Summation',
'Summons',
'Sump',
'Tee',
'Tempered',
'Trade',
'Tread',
'Treads',
'TREC',
'Visqueen',
'Waive',
'Waste',
'Zone',
],
}
|
class Solution(object):
    def singleNonDuplicate(self, nums):
        """
        Return the element appearing exactly once in a sorted array where
        every other element appears exactly twice.

        Binary search on pair alignment: before the single element, each
        pair starts at an even index; after it, pairs start at odd indices.

        :type nums: List[int]
        :rtype: int
        """
        if len(nums) == 1:
            return nums[0]
        lo = 0
        hi = len(nums) - 1
        # The single element may sit at either boundary.
        if nums[lo] != nums[lo + 1]:
            return nums[lo]
        if nums[hi] != nums[hi - 1]:
            return nums[hi]
        while lo < hi:
            mid = (lo + hi) // 2
            if mid % 2 == 0:
                # Even index: its partner should be the next element.
                if nums[mid] == nums[mid + 1]:
                    lo = mid
                elif nums[mid] == nums[mid - 1]:
                    hi = mid
                else:
                    return nums[mid]
            else:
                # Odd index: its partner should be the previous element.
                if nums[mid] == nums[mid + 1]:
                    hi = mid
                elif nums[mid] == nums[mid - 1]:
                    lo = mid
                else:
                    return nums[mid]
        return nums[lo]
_base_ = [
'../_base_/default_runtime.py', '../_base_/datasets/coco_detection.py'
]
# model settings
model = dict(
type='CenterNet',
pretrained='torchvision://resnet18',
backbone=dict(
type='ResNet',
depth=18,
num_stages=4,
out_indices=(0, 1, 2, 3),
frozen_stages=-1,
norm_cfg=dict(type='BN', requires_grad=True),
norm_eval=False,
zero_init_residual=False,
style='pytorch'),
neck=dict(
type='CenterFPN',
in_channels=(512, 256, 128, 64),
out_channels=64,
level_index=0,
reverse_levels=True,
with_last_norm=True,
with_last_relu=True,
upsample_cfg=dict(
type='deconv',
kernel_size=4,
stride=2,
padding=1,
output_padding=0,
bias=False)),
bbox_head=dict(
type='CenterHead',
num_classes=1,
in_channels=64,
feat_channels=64,
loss_heatmap=dict(
type='GaussianFocalLoss', alpha=2.0, gamma=4.0, loss_weight=1),
loss_offset=dict(type='L1Loss', loss_weight=1.0),
loss_bbox=dict(type='L1Loss', loss_weight=0.1)))
# training and testing settings
train_cfg = dict(
vis_every_n_iters=100,
min_overlap=0.7,
debug=False)
test_cfg = dict(
score_thr=0.01,
max_per_img=100)
# dataset settings, SEE: Normalize RGB https://aishack.in/tutorials/normalized-rgb/
img_norm_cfg = dict(
# NOTE: add `norm_rgb=True` if eval offical pretrained weights
# mean=[0.408, 0.447, 0.470], std=[0.289, 0.274, 0.278], to_rgb=False, norm_rgb=True)
# mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225], to_rgb=False, norm_rgb=True)
mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
train_pipeline = [
dict(type='LoadImageFromFile', to_float32=True),
dict(type='LoadAnnotations', with_bbox=True),
dict(type='PhotoMetricDistortion',
brightness_delta=32,
contrast_range=(0.5, 1.5),
saturation_range=(0.5, 1.5),
hue_delta=18),
dict(type='RandomLighting', scale=0.1),
dict(type='RandomCenterCropPad',
crop_size=(512, 512),
ratios=(0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.3),
test_mode=False,
test_pad_mode=None,
**img_norm_cfg),
dict(type='Resize', img_scale=(512, 512), keep_ratio=False),
dict(type='RandomFlip', flip_ratio=0.5),
dict(type='Pad', size_divisor=32),
dict(type='Normalize', **img_norm_cfg),
dict(type='DefaultFormatBundle'),
dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']),
]
test_pipeline = [
dict(type='LoadImageFromFile', to_float32=True),
dict(
type='MultiScaleFlipAug',
scale_factor=1.0,
flip=False,
transforms=[
dict(type='Resize'),
dict(type='RandomFlip'),
dict(type='Pad', size_divisor=32),
dict(type='Normalize', **img_norm_cfg),
dict(type='ImageToTensor', keys=['img']),
dict(
type='Collect',
keys=['img'],
meta_keys=('filename', 'ori_shape', 'img_shape', 'pad_shape',
'scale_factor', 'flip', 'img_norm_cfg')),
])
]
classes = ('person', )
data = dict(
samples_per_gpu=32,
workers_per_gpu=2,
train=dict(classes=classes, pipeline=train_pipeline),
val=dict(classes=classes, pipeline=test_pipeline),
test=dict(classes=classes, pipeline=test_pipeline))
# optimizer
optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0004,
paramwise_cfg=dict(bias_lr_mult=2., bias_decay_mult=0.))
optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2))
# learning policy
lr_config = dict(
policy='step',
warmup='linear',
warmup_iters=1000,
warmup_ratio=1.0 / 5,
step=[90, 120])
checkpoint_config = dict(interval=5)
evaluation = dict(interval=1, metric=['bbox'], multitask=True)
# runtime settings
total_epochs = 140
cudnn_benchmark = True
find_unused_parameters = True |
class CTX:
    """
    Global Class holding the configuration of the backward pass
    """

    active_exts = tuple()  # currently active extension instances
    debug = False          # verbose-debugging flag
    # Bug fix: always define hook_handles at class level so remove_hooks()
    # no longer raises AttributeError when called before any handle was added.
    hook_handles = []

    @staticmethod
    def set_active_exts(active_exts):
        """Replace the active extensions with the given iterable."""
        CTX.active_exts = tuple(active_exts)

    @staticmethod
    def get_active_exts():
        """Return the tuple of currently active extensions."""
        return CTX.active_exts

    @staticmethod
    def add_hook_handle(hook_handle):
        """Remember a hook handle so it can be removed later."""
        CTX.hook_handles.append(hook_handle)

    @staticmethod
    def remove_hooks():
        """Remove every registered hook and forget the handles.

        Safe no-op when no handle was ever registered (previously this
        raised AttributeError in that case).
        """
        for handle in CTX.hook_handles:
            handle.remove()
        CTX.hook_handles = []

    @staticmethod
    def is_extension_active(*extension_classes):
        """Return True when any active extension is an instance of the classes."""
        return any(
            isinstance(backpack_ext, extension_classes)
            for backpack_ext in CTX.get_active_exts()
        )

    @staticmethod
    def get_debug():
        """Return the debug flag."""
        return CTX.debug

    @staticmethod
    def set_debug(debug):
        """Set the debug flag."""
        CTX.debug = debug
|
"""
Exercício Python 25: Crie um programa que leia o nome de uma pessoa e diga se ela tem “SILVA” no nome.
"""
print('-' * 40)
print(f'{"Tem SILVA no nome":^40}')
print('-' * 40)
nome = str(input("Digite seu nome completo: "))
nome = nome.split()
if nome == "silva" or nome == "Silva":
print(f"Este nome tem SILVA")
else:
print(f"Este nome não tem SILVA") |
# Copyright (c) 2012 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Attribute map keyed by resource collection; each attribute entry carries the
# usual extension flags (allow_post / allow_put / is_visible).
EXTENDED_ATTRIBUTES_2_0 = {
    'networks': {
        'v2attrs:something': {
            'allow_post': False,
            'allow_put': False,
            'is_visible': True,
        },
        'v2attrs:something_else': {
            'allow_post': True,
            'allow_put': False,
            'is_visible': False,
        },
    }
}


class V2attributes(object):
    """Example extension that adds extended attributes to V2 core resources."""

    def get_name(self):
        """Human-readable extension name."""
        return "V2 Extended Attributes Example"

    def get_alias(self):
        """Short alias used in the attribute namespace prefix."""
        return "v2attrs"

    def get_description(self):
        """One-line description of what the extension demonstrates."""
        return "Demonstrates extended attributes on V2 core resources"

    def get_namespace(self):
        """XML namespace URI for this extension."""
        return "http://docs.openstack.org/ext/examples/v2attributes/api/v1.0"

    def get_updated(self):
        """Timestamp of the last update to this extension."""
        return "2012-07-18T10:00:00-00:00"

    def get_extended_resources(self, version):
        """Return the extended-attribute map for API version '2.0', else {}."""
        if version != "2.0":
            return {}
        return EXTENDED_ATTRIBUTES_2_0
|
# Tuples
# A tuple's elements cannot be reassigned after creation.
tupla = (10,12,13,15,18,20)
print(tupla)
print(tupla[1])
print(tupla[1:3])
# The line below would raise TypeError: tuples do not support item assignment.
# tupla[1] = 88
# Creating a tuple with mixed data types, including a (mutable) list.
tupla_aluno = ('Maykon','Diego',15,[10,8,5,6])
# The nested list itself is still mutable, so its items can be changed
# even though the enclosing tuple cannot.
tupla_aluno[3][2] = 8
print(tupla_aluno)
# Converting a tuple into a list.
lista_aluno = list(tupla_aluno)
print(lista_aluno)
# Dictionaries
aluno1 = {'nome':'Maycon','sobrenome':'Granemann','idade':18,'lista_notas':[10,8,9,7]}
aluno2 = {'nome':'Mateus','sobrenome':'Peschke','idade':22,'lista_notas':[7,6,8,9]}
lista = [aluno1,aluno2]
print(aluno1)
print(aluno1['nome'])
for a in lista:
    print(a['nome'])
    print(a['sobrenome'])
    print(a['idade'])
    print(a['lista_notas'])
# Dict keys may mix types (e.g. strings and ints), as in this example:
# dict_numero={'n1':10,2:3,3:5}
# print(dict_numero)
|
# Auto-generated IronPython stub for System.Windows.Media.HostVisual.
# All method bodies are placeholders (`pass`); the real implementation lives
# in the underlying .NET assembly. Docstrings reproduce the .NET signatures.
class HostVisual(ContainerVisual,IResource):
    """
    Represents a System.Windows.Media.Visual object that can be connected anywhere to a parent visual tree.
    HostVisual()
    """
    def AddVisualChild(self,*args):
        """
        AddVisualChild(self: Visual,child: Visual)
        Defines the parent-child relationship between two visuals.
        child: The child visual object to add to parent visual.
        AddVisualChild(self: Window_16$17,child: Window_16$17)AddVisualChild(self: Label_17$18,child: Label_17$18)AddVisualChild(self: TextBox_18$19,child: TextBox_18$19)AddVisualChild(self: Button_19$20,child: Button_19$20)AddVisualChild(self: CheckBox_20$21,child: CheckBox_20$21)AddVisualChild(self: ComboBox_21$22,child: ComboBox_21$22)AddVisualChild(self: Separator_22$23,child: Separator_22$23)
        """
        pass
    def GetVisualChild(self,*args):
        """
        GetVisualChild(self: ContainerVisual,index: int) -> Visual
        Returns a specified child System.Windows.Media.Visual for the parent
        System.Windows.Media.ContainerVisual.
        index: A 32-bit signed integer that represents the index value of the child
        System.Windows.Media.Visual. The value of index must be between 0 and
        System.Windows.Media.ContainerVisual.VisualChildrenCount - 1.
        Returns: The child System.Windows.Media.Visual.
        """
        pass
    def HitTestCore(self,*args):
        """
        HitTestCore(self: HostVisual,hitTestParameters: GeometryHitTestParameters) -> GeometryHitTestResult
        Implements
        System.Windows.Media.HostVisual.HitTestCore(System.Windows.Media.GeometryHitTest
        Parameters) to supply base hit testing behavior (returning
        System.Windows.Media.GeometryHitTestParameters).
        hitTestParameters: A value of type System.Windows.Media.GeometryHitTestParameters.
        Returns: Returns a value of type System.Windows.Media.GeometryHitTestResult. The
        System.Windows.Media.GeometryHitTestResult.VisualHit property contains the
        visual that was hit.
        HitTestCore(self: HostVisual,hitTestParameters: PointHitTestParameters) -> HitTestResult
        Implements
        System.Windows.Media.HostVisual.HitTestCore(System.Windows.Media.PointHitTestPar
        ameters) to supply base hit testing behavior (returning
        System.Windows.Media.PointHitTestParameters).
        hitTestParameters: A value of type System.Windows.Media.PointHitTestParameters.
        Returns: Returns a value of type System.Windows.Media.HitTestResult. The
        System.Windows.Media.HitTestResult.VisualHit property contains the visual
        object that was hit.
        """
        pass
    def OnDpiChanged(self,*args):
        """ OnDpiChanged(self: Visual,oldDpi: DpiScale,newDpi: DpiScale)OnDpiChanged(self: Window_16$17,oldDpi: DpiScale,newDpi: DpiScale)OnDpiChanged(self: Label_17$18,oldDpi: DpiScale,newDpi: DpiScale)OnDpiChanged(self: TextBox_18$19,oldDpi: DpiScale,newDpi: DpiScale)OnDpiChanged(self: Button_19$20,oldDpi: DpiScale,newDpi: DpiScale)OnDpiChanged(self: CheckBox_20$21,oldDpi: DpiScale,newDpi: DpiScale)OnDpiChanged(self: ComboBox_21$22,oldDpi: DpiScale,newDpi: DpiScale)OnDpiChanged(self: Separator_22$23,oldDpi: DpiScale,newDpi: DpiScale) """
        pass
    def OnPropertyChanged(self,*args):
        """
        OnPropertyChanged(self: DependencyObject,e: DependencyPropertyChangedEventArgs)
        Invoked whenever the effective value of any dependency property on this
        System.Windows.DependencyObject has been updated. The specific dependency
        property that changed is reported in the event data.
        e: Event data that will contain the dependency property identifier of interest,
        the property metadata for the type,and old and new values.
        OnPropertyChanged(self: Window_16$17,e: DependencyPropertyChangedEventArgs)OnPropertyChanged(self: Label_17$18,e: DependencyPropertyChangedEventArgs)OnPropertyChanged(self: TextBox_18$19,e: DependencyPropertyChangedEventArgs)OnPropertyChanged(self: Button_19$20,e: DependencyPropertyChangedEventArgs)OnPropertyChanged(self: CheckBox_20$21,e: DependencyPropertyChangedEventArgs)OnPropertyChanged(self: ComboBox_21$22,e: DependencyPropertyChangedEventArgs)OnPropertyChanged(self: Separator_22$23,e: DependencyPropertyChangedEventArgs)
        """
        pass
    def OnVisualChildrenChanged(self,*args):
        """
        OnVisualChildrenChanged(self: Visual,visualAdded: DependencyObject,visualRemoved: DependencyObject)
        Called when the System.Windows.Media.VisualCollection of the visual object is
        modified.
        visualAdded: The System.Windows.Media.Visual that was added to the collection
        visualRemoved: The System.Windows.Media.Visual that was removed from the collection
        OnVisualChildrenChanged(self: Window_16$17,visualAdded: DependencyObject,visualRemoved: DependencyObject)OnVisualChildrenChanged(self: Label_17$18,visualAdded: DependencyObject,visualRemoved: DependencyObject)OnVisualChildrenChanged(self: TextBox_18$19,visualAdded: DependencyObject,visualRemoved: DependencyObject)OnVisualChildrenChanged(self: Button_19$20,visualAdded: DependencyObject,visualRemoved: DependencyObject)OnVisualChildrenChanged(self: CheckBox_20$21,visualAdded: DependencyObject,visualRemoved: DependencyObject)OnVisualChildrenChanged(self: ComboBox_21$22,visualAdded: DependencyObject,visualRemoved: DependencyObject)OnVisualChildrenChanged(self: Separator_22$23,visualAdded: DependencyObject,visualRemoved: DependencyObject)
        """
        pass
    def OnVisualParentChanged(self,*args):
        """
        OnVisualParentChanged(self: Visual,oldParent: DependencyObject)
        Called when the parent of the visual object is changed.
        oldParent: A value of type System.Windows.DependencyObject that represents the previous
        parent of the System.Windows.Media.Visual object. If the
        System.Windows.Media.Visual object did not have a previous parent,the value of
        the parameter is null.
        OnVisualParentChanged(self: Window_16$17,oldParent: DependencyObject)OnVisualParentChanged(self: Label_17$18,oldParent: DependencyObject)OnVisualParentChanged(self: TextBox_18$19,oldParent: DependencyObject)OnVisualParentChanged(self: Button_19$20,oldParent: DependencyObject)OnVisualParentChanged(self: CheckBox_20$21,oldParent: DependencyObject)OnVisualParentChanged(self: ComboBox_21$22,oldParent: DependencyObject)OnVisualParentChanged(self: Separator_22$23,oldParent: DependencyObject)
        """
        pass
    def RemoveVisualChild(self,*args):
        """
        RemoveVisualChild(self: Visual,child: Visual)
        Removes the parent-child relationship between two visuals.
        child: The child visual object to remove from the parent visual.
        RemoveVisualChild(self: Window_16$17,child: Window_16$17)RemoveVisualChild(self: Label_17$18,child: Label_17$18)RemoveVisualChild(self: TextBox_18$19,child: TextBox_18$19)RemoveVisualChild(self: Button_19$20,child: Button_19$20)RemoveVisualChild(self: CheckBox_20$21,child: CheckBox_20$21)RemoveVisualChild(self: ComboBox_21$22,child: ComboBox_21$22)RemoveVisualChild(self: Separator_22$23,child: Separator_22$23)
        """
        pass
    def ShouldSerializeProperty(self,*args):
        """
        ShouldSerializeProperty(self: DependencyObject,dp: DependencyProperty) -> bool
        Returns a value that indicates whether serialization processes should serialize
        the value for the provided dependency property.
        dp: The identifier for the dependency property that should be serialized.
        Returns: true if the dependency property that is supplied should be value-serialized;
        otherwise,false.
        ShouldSerializeProperty(self: Window_16$17,dp: DependencyProperty) -> bool
        ShouldSerializeProperty(self: Label_17$18,dp: DependencyProperty) -> bool
        ShouldSerializeProperty(self: TextBox_18$19,dp: DependencyProperty) -> bool
        ShouldSerializeProperty(self: Button_19$20,dp: DependencyProperty) -> bool
        ShouldSerializeProperty(self: CheckBox_20$21,dp: DependencyProperty) -> bool
        ShouldSerializeProperty(self: ComboBox_21$22,dp: DependencyProperty) -> bool
        ShouldSerializeProperty(self: Separator_22$23,dp: DependencyProperty) -> bool
        """
        pass
    def __init__(self,*args):
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    VisualBitmapEffect=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Gets or sets the System.Windows.Media.Effects.BitmapEffect value for the System.Windows.Media.Visual.
    """
    VisualBitmapEffectInput=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Gets or sets the System.Windows.Media.Effects.BitmapEffectInput value for the System.Windows.Media.Visual.
    """
    VisualBitmapScalingMode=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Gets or sets the System.Windows.Media.BitmapScalingMode for the System.Windows.Media.Visual.
    """
    VisualCacheMode=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Gets or sets a cached representation of the System.Windows.Media.Visual.
    """
    VisualChildrenCount=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Gets the number of children for the System.Windows.Media.ContainerVisual.
    """
    VisualClearTypeHint=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Gets or sets the System.Windows.Media.ClearTypeHint that determines how ClearType is rendered in the System.Windows.Media.Visual.
    """
    VisualClip=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Gets or sets the clip region of the System.Windows.Media.Visual as a System.Windows.Media.Geometry value.
    """
    VisualEdgeMode=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Gets or sets the edge mode of the System.Windows.Media.Visual as an System.Windows.Media.EdgeMode value.
    """
    VisualEffect=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Gets or sets the bitmap effect to apply to the System.Windows.Media.Visual.
    """
    VisualOffset=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Gets or sets the offset value of the visual object.
    """
    VisualOpacity=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Gets or sets the opacity of the System.Windows.Media.Visual.
    """
    VisualOpacityMask=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Gets or sets the System.Windows.Media.Brush value that represents the opacity mask of the System.Windows.Media.Visual.
    """
    VisualParent=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Gets the visual tree parent of the visual object.
    """
    VisualScrollableAreaClip=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Gets or sets a clipped scrollable area for the System.Windows.Media.Visual.
    """
    VisualTextHintingMode=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Gets or sets the System.Windows.Media.TextHintingMode of the System.Windows.Media.Visual.
    """
    VisualTextRenderingMode=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Gets or sets the System.Windows.Media.TextRenderingMode of the System.Windows.Media.Visual.
    """
    VisualTransform=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Gets or sets the System.Windows.Media.Transform value for the System.Windows.Media.Visual.
    """
    VisualXSnappingGuidelines=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Gets or sets the x-coordinate (vertical) guideline collection.
    """
    VisualYSnappingGuidelines=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Gets or sets the y-coordinate (horizontal) guideline collection.
    """
|
# Python program to subtract two numbers.
# Read both operands as text from the user.
first_value = input("Enter first number: ")
second_value = input("Enter second number: ")
# Convert to float and compute the difference.
difference = float(first_value) - float(second_value)
# Report the result (same wording/format as before).
print(f"The sub of {first_value} and {second_value} is {difference}")
|
# Minimal well-formed flow payloads (single node, no edges).
# NOTE(review): presumably consumed as fixtures by flow-validation tests —
# confirm against the callers; only the structure shown here is guaranteed.

# Backtest flow: one DATA_BLOCK node.
BACKTEST_FLOW_OK = {
    "nodeList": {
        "1": {
            "blockType": "DATA_BLOCK",
            "blockId": 1,
            "equity_name": {"options": ["AAPL"], "value": ""},
            "data_type": {"options": ["intraday", "daily_adjusted"], "value": ""},
            "interval": {"options": ["1min"], "value": ""},
            "outputsize": {"options": ["compact", "full"], "value": ""},
            "start_date": {"value": ""},
            "end_date": {"value": ""},
        },
    },
    "edgeList": [],
}
# Screener flow: identical shape but uses a BULK_DATA_BLOCK node.
SCREENER_FLOW_OK = {
    "nodeList": {
        "1": {
            "blockType": "BULK_DATA_BLOCK",
            "blockId": 1,
            "equity_name": {"options": ["AAPL"], "value": ""},
            "data_type": {"options": ["intraday", "daily_adjusted"], "value": ""},
            "interval": {"options": ["1min"], "value": ""},
            "outputsize": {"options": ["compact", "full"], "value": ""},
            "start_date": {"value": ""},
            "end_date": {"value": ""},
        },
    },
    "edgeList": [],
}
|
numbers = [14, 2, 3, 4, 5, 6, 7, 6, 5, 7, 8, 8, 9, 10, 11, 12, 13, 14, 14]
# Deduplicate while preserving first-seen order.
# dict.fromkeys is O(n); the original `x not in list` loop was O(n^2).
numbers2 = list(dict.fromkeys(numbers))
print(numbers2)
def solution(n: int) -> int:
    """Return the longest binary gap of *n*.

    A binary gap is a maximal run of zeros bounded by ones in the binary
    representation of n. Returns 0 when there is no such gap.
    """
    b = bin(n)[2:]  # binary digits without the '0b' prefix
    if len(b) < 3:
        # Fewer than 3 digits cannot contain a 1-0...0-1 pattern.
        return 0
    gaps = [0]  # seed with 0 so max() is safe even when no gap closes
    gap_count = 0
    for digit in b:  # iterate digits directly instead of range(len(b))
        if digit == "1":
            # Gap (possibly empty) closed by a '1': record it and restart.
            gaps.append(gap_count)
            gap_count = 0
        else:
            gap_count += 1
    # Trailing zeros are deliberately not counted: they were never closed.
    # (Leftover debug print of `gaps` removed.)
    return max(gaps)
def solution2(n: int) -> int:
    """Return the longest binary gap of *n* (0 when none exists).

    Single pass that keeps the best run seen so far instead of collecting
    all runs first.
    """
    bits = to_bin(n)
    if len(bits) < 3:
        # Too short to contain a 1-0...0-1 pattern.
        return 0
    longest = 0
    current = 0
    for bit in bits:
        if bit == "1":
            # A '1' closes the current run; keep the best one.
            longest = max(longest, current)
            current = 0
        else:
            current += 1
    # Trailing zeros never close, so they are correctly ignored.
    return longest
def to_bin(n: int) -> str:
    """Return the binary representation of *n* without the '0b' prefix."""
    return bin(n)[2:]
if __name__ == "__main__":
print(solution(1041))
print(solution2(1041))
print(solution(32))
print(solution2(32))
|
class MyClass:
    # The class body executes once, when the `class` statement runs —
    # so this prints at definition time, not when instances are created.
    print('MyClass created')
# Instantiate the class (does NOT print again: no __init__ side effects).
my_var = MyClass()
print(type(my_var))
print(dir(my_var))
"""Provides a redirection point for platform specific implementations of starlark utilities."""
load(
"//tensorflow/core/platform:default/build_config.bzl",
_pyx_library = "pyx_library",
_tf_additional_all_protos = "tf_additional_all_protos",
_tf_additional_binary_deps = "tf_additional_binary_deps",
_tf_additional_core_deps = "tf_additional_core_deps",
_tf_additional_cupti_test_flags = "tf_additional_cupti_test_flags",
_tf_additional_cupti_utils_cuda_deps = "tf_additional_cupti_utils_cuda_deps",
_tf_additional_device_tracer_srcs = "tf_additional_device_tracer_srcs",
_tf_additional_env_hdrs = "tf_additional_env_hdrs",
_tf_additional_lib_deps = "tf_additional_lib_deps",
_tf_additional_lib_hdrs = "tf_additional_lib_hdrs",
_tf_additional_monitoring_hdrs = "tf_additional_monitoring_hdrs",
_tf_additional_proto_hdrs = "tf_additional_proto_hdrs",
_tf_additional_rpc_deps = "tf_additional_rpc_deps",
_tf_additional_tensor_coding_deps = "tf_additional_tensor_coding_deps",
_tf_additional_test_deps = "tf_additional_test_deps",
_tf_additional_test_srcs = "tf_additional_test_srcs",
_tf_fingerprint_deps = "tf_fingerprint_deps",
_tf_jspb_proto_library = "tf_jspb_proto_library",
_tf_kernel_tests_linkstatic = "tf_kernel_tests_linkstatic",
_tf_lib_proto_parsing_deps = "tf_lib_proto_parsing_deps",
_tf_proto_library = "tf_proto_library",
_tf_proto_library_cc = "tf_proto_library_cc",
_tf_proto_library_py = "tf_proto_library_py",
_tf_protobuf_compiler_deps = "tf_protobuf_compiler_deps",
_tf_protobuf_deps = "tf_protobuf_deps",
_tf_protos_all = "tf_protos_all",
_tf_protos_all_impl = "tf_protos_all_impl",
_tf_protos_grappler = "tf_protos_grappler",
_tf_protos_grappler_impl = "tf_protos_grappler_impl",
_tf_protos_profiler_impl = "tf_protos_profiler_impl",
_tf_py_clif_cc = "tf_py_clif_cc",
_tf_pyclif_proto_library = "tf_pyclif_proto_library",
)
# Re-export the platform-specific build_config symbols loaded above under
# their public (underscore-free) names, so other .bzl/BUILD files can load
# them from this redirection point instead of the platform directory.
pyx_library = _pyx_library
tf_additional_all_protos = _tf_additional_all_protos
tf_additional_binary_deps = _tf_additional_binary_deps
tf_additional_core_deps = _tf_additional_core_deps
tf_additional_cupti_test_flags = _tf_additional_cupti_test_flags
tf_additional_cupti_utils_cuda_deps = _tf_additional_cupti_utils_cuda_deps
tf_additional_device_tracer_srcs = _tf_additional_device_tracer_srcs
tf_additional_env_hdrs = _tf_additional_env_hdrs
tf_additional_lib_deps = _tf_additional_lib_deps
tf_additional_lib_hdrs = _tf_additional_lib_hdrs
tf_additional_monitoring_hdrs = _tf_additional_monitoring_hdrs
tf_additional_proto_hdrs = _tf_additional_proto_hdrs
tf_additional_rpc_deps = _tf_additional_rpc_deps
tf_additional_tensor_coding_deps = _tf_additional_tensor_coding_deps
tf_additional_test_deps = _tf_additional_test_deps
tf_additional_test_srcs = _tf_additional_test_srcs
tf_fingerprint_deps = _tf_fingerprint_deps
tf_jspb_proto_library = _tf_jspb_proto_library
tf_kernel_tests_linkstatic = _tf_kernel_tests_linkstatic
tf_lib_proto_parsing_deps = _tf_lib_proto_parsing_deps
tf_proto_library = _tf_proto_library
tf_proto_library_cc = _tf_proto_library_cc
tf_proto_library_py = _tf_proto_library_py
tf_protobuf_compiler_deps = _tf_protobuf_compiler_deps
tf_protobuf_deps = _tf_protobuf_deps
tf_protos_all = _tf_protos_all
tf_protos_all_impl = _tf_protos_all_impl
tf_protos_grappler = _tf_protos_grappler
tf_protos_grappler_impl = _tf_protos_grappler_impl
tf_protos_profiler_impl = _tf_protos_profiler_impl
tf_py_clif_cc = _tf_py_clif_cc
tf_pyclif_proto_library = _tf_pyclif_proto_library
|
"""Should raise SyntaxError: name 'cc' is assigned to prior to global declaration
"""
aa, bb, cc, dd = 1, 2, 3, 4
def fn():
cc = 1
global aa, bb, cc, dd
|
# -*- coding: utf-8 -*-
###############################################################################
# Copyright (c), Forschungszentrum Jülich GmbH, IAS-1/PGI-1, Germany. #
# All rights reserved. #
# This file is part of the Masci-tools package. #
# (Material science tools) #
# #
# The code is hosted on GitHub at https://github.com/judftteam/masci-tools. #
# For further information on the license, see the LICENSE.txt file. #
# For further information please visit http://judft.de/. #
# #
###############################################################################
"""
This module contains the dictionary with all defined tasks for the outxml_parser.
The entries in the TASK_DEFINITION dict specify how to parse specific attributes tags.
This needs to be maintained if the specifications do not work for a new schema version
because of changed attribute names for example.
Each entry in the TASK_DEFINITION dict can contain a series of keys, which by default
correspond to the keys in the output dictionary
The following keys are expected in each entry:
:param parse_type: str, defines which methods to use when extracting the information
:param path_spec: dict with all the arguments that should be passed to get_tag_xpath
or get_attrib_xpath to get the correct path
:param subdict: str, if present the parsed values are put into this key in the output dictionary
:param overwrite_last: bool, if True no list is inserted and each entry overwrites the last
For the allAttribs parse_type there are more keys that can appear:
:param base_value: str, optional. If given the attribute
with this name will be inserted into the key from the task_definition
all other keys are formatted as {task_key}_{attribute_name}
:param ignore: list of str, these attributes will be ignored
:param overwrite: list of str, these attributes will not create a list and overwrite any value
that might be there
:param flat: bool, if False the dict parsed from the tag is inserted as a dict into the corresponding key
if True the values will be extracted and put into the output dictionary with the
format {task_key}_{attribute_name}
Each task entry can have additional keys to specify, when to perform the task.
These are denoted with underscores in their names and are all optional:
:param _general: bool, default False. If True the parsing is not performed for each iteration on the
iteration node but beforehand and on the root node
:param _modes: list of tuples, sets conditions for the keys in fleur_modes to perform the task
                  e.g. [('jspin', 2), ('soc', True)] means only perform this task for a magnetic soc calculation
:param _minimal: bool, default False, denotes task to perform when minimal_mode=True is passed to the parser
:param _special: bool, default False, If true these tasks are not added by default and need to be added manually
:param _conversions: list of str, gives the names of functions in fleur_outxml_conversions to perform after parsing
The following keys are special at the moment:
- ```fleur_modes``` specifies how to identify the type of the calculation (e.g. SOC, magnetic, lda+u)
this is used to determine, whether additional things should be parsed
Following is the current specification of tasks
.. literalinclude:: ../../../masci_tools/io/parsers/fleur/default_parse_tasks.py
:language: python
:lines: 66-
:linenos:
"""
# Output-file schema versions these task definitions are known to work with.
__working_out_versions__ = {'0.34'}
# Schema version the path specifications below were written against.
__base_version__ = '0.34'
# Task definitions for the outxml_parser, keyed by task name. Each task maps
# output-dictionary keys to a `parse_type` plus a `path_spec` locating the
# XML attribute/tag; keys starting with '_' control when/how the task runs
# (see the module docstring above for the full key reference).
TASKS_DEFINITION = {
    #--------Definitions for general info from outfile (start, endtime, number_iterations)--------
    'general_out_info': {
        '_general': True,
        '_minimal': True,
        '_conversions': ['calculate_walltime'],
        'creator_name': {
            'parse_type': 'attrib',
            'path_spec': {
                'name': 'version',
                'not_contains': 'git'
            }
        },
        'creator_target_architecture': {
            'parse_type': 'text',
            'path_spec': {
                'name': 'targetComputerArchitectures'
            }
        },
        'output_file_version': {
            'parse_type': 'attrib',
            'path_spec': {
                'name': 'fleurOutputVersion'
            }
        },
        'number_of_iterations': {
            'parse_type': 'numberNodes',
            'path_spec': {
                'name': 'iteration'
            }
        },
        'number_of_atoms': {
            'parse_type': 'attrib',
            'path_spec': {
                'name': 'nat'
            }
        },
        'number_of_atom_types': {
            'parse_type': 'attrib',
            'path_spec': {
                'name': 'ntype'
            }
        },
        'number_of_kpoints': {
            'parse_type': 'attrib',
            'path_spec': {
                'name': 'count',
                'contains': 'numericalParameters'
            }
        },
        'start_date': {
            'parse_type': 'allAttribs',
            'path_spec': {
                'name': 'startDateAndTime'
            },
            'ignore': ['zone'],
            'flat': False,
        },
        'end_date': {
            'parse_type': 'allAttribs',
            'path_spec': {
                'name': 'endDateAndTime'
            },
            'ignore': ['zone'],
            'flat': False,
        }
    },
    #--------Definitions for general info from input section of outfile (kmax, symmetries, ..)--------
    'general_inp_info': {
        '_general': True,
        '_minimal': True,
        'title': {
            'parse_type': 'text',
            'path_spec': {
                'name': 'comment'
            }
        },
        'kmax': {
            'parse_type': 'attrib',
            'path_spec': {
                'name': 'Kmax'
            }
        },
        'gmax': {
            'parse_type': 'attrib',
            'path_spec': {
                'name': 'Gmax'
            }
        },
        'number_of_spin_components': {
            'parse_type': 'attrib',
            'path_spec': {
                'name': 'jspins'
            }
        },
        'number_of_symmetries': {
            'parse_type': 'numberNodes',
            'path_spec': {
                'name': 'symOp'
            }
        },
        'number_of_species': {
            'parse_type': 'numberNodes',
            'path_spec': {
                'name': 'species'
            }
        },
        'film': {
            'parse_type': 'exists',
            'path_spec': {
                'name': 'filmPos'
            }
        },
    },
    #--------Definitions for lda+u info from input section (species, ldau tags)--------
    'ldau_info': {
        '_general': True,
        '_modes': [('ldau', True)],
        '_conversions': ['convert_ldau_definitions'],
        'parsed_ldau': {
            'parse_type': 'allAttribs',
            'path_spec': {
                'name': 'ldaU',
                'contains': 'species'
            },
            'subdict': 'ldau_info',
            'flat': False,
            'only_required': True
        },
        'ldau_species': {
            'parse_type': 'parentAttribs',
            'path_spec': {
                'name': 'ldaU',
                'contains': 'species'
            },
            'subdict': 'ldau_info',
            'flat': False,
            'only_required': True
        }
    },
    #--------Definitions for relaxation info from input section (bravais matrix, atompos)
    #--------for Bulk and film
    'bulk_relax_info': {
        '_general': True,
        '_modes': [('relax', True), ('film', False)],
        '_conversions': ['convert_relax_info'],
        'lat_row1': {
            'parse_type': 'text',
            'path_spec': {
                'name': 'row-1',
                'contains': 'bulkLattice/bravais'
            }
        },
        'lat_row2': {
            'parse_type': 'text',
            'path_spec': {
                'name': 'row-2',
                'contains': 'bulkLattice/bravais'
            }
        },
        'lat_row3': {
            'parse_type': 'text',
            'path_spec': {
                'name': 'row-3',
                'contains': 'bulkLattice/bravais'
            }
        },
        'atom_positions': {
            'parse_type': 'text',
            'path_spec': {
                'name': 'relPos'
            }
        },
        'position_species': {
            'parse_type': 'parentAttribs',
            'path_spec': {
                'name': 'relPos'
            },
            'flat': False,
            'only_required': True
        },
        'element_species': {
            'parse_type': 'allAttribs',
            'path_spec': {
                'name': 'species'
            },
            'flat': False,
            'ignore': ['vcaAddCharge', 'magField']
        },
    },
    'film_relax_info': {
        '_general': True,
        '_modes': [('relax', True), ('film', True)],
        '_conversions': ['convert_relax_info'],
        'lat_row1': {
            'parse_type': 'text',
            'path_spec': {
                'name': 'row-1',
                'contains': 'filmLattice/bravais'
            }
        },
        'lat_row2': {
            'parse_type': 'text',
            'path_spec': {
                'name': 'row-2',
                'contains': 'filmLattice/bravais'
            }
        },
        'lat_row3': {
            'parse_type': 'text',
            'path_spec': {
                'name': 'row-3',
                'contains': 'filmLattice/bravais'
            }
        },
        'atom_positions': {
            'parse_type': 'text',
            'path_spec': {
                'name': 'filmPos'
            }
        },
        'position_species': {
            'parse_type': 'parentAttribs',
            'path_spec': {
                'name': 'filmPos'
            },
            'flat': False,
            'only_required': True
        },
        'element_species': {
            'parse_type': 'allAttribs',
            'path_spec': {
                'name': 'species'
            },
            'flat': False,
            'ignore': ['vcaAddCharge', 'magField']
        },
    },
    #----General iteration tasks
    # iteration number
    # total energy (only total or also contributions, also lda+u correction)
    # distances (nonmagnetic and magnetic, lda+u density matrix)
    # charges (total, interstitial, mt sphere)
    # fermi energy and bandgap
    # magnetic moments
    # orbital magnetic moments
    # forces
    'iteration_number': {
        '_minimal': True,
        'number_of_iterations_total': {
            'parse_type': 'attrib',
            'path_spec': {
                'name': 'overallNumber'
            },
            'overwrite_last': True,
        }
    },
    'total_energy': {
        '_minimal': True,
        '_conversions': ['convert_total_energy'],
        'energy_hartree': {
            'parse_type': 'singleValue',
            'path_spec': {
                'name': 'totalEnergy'
            }
        },
    },
    'distances': {
        '_minimal': True,
        'density_convergence': {
            'parse_type': 'attrib',
            'path_spec': {
                'name': 'distance',
                'tag_name': 'chargeDensity'
            }
        },
        'density_convergence_units': {
            'parse_type': 'attrib',
            'path_spec': {
                'name': 'units',
                'tag_name': 'densityConvergence',
            },
            'overwrite_last': True,
        }
    },
    'magnetic_distances': {
        '_minimal': True,
        '_modes': [('jspin', 2)],
        'overall_density_convergence': {
            'parse_type': 'attrib',
            'path_spec': {
                'name': 'distance',
                'tag_name': 'overallChargeDensity'
            }
        },
        'spin_density_convergence': {
            'parse_type': 'attrib',
            'path_spec': {
                'name': 'distance',
                'tag_name': 'spinDensity'
            }
        }
    },
    'total_energy_contributions': {
        'sum_of_eigenvalues': {
            'parse_type': 'singleValue',
            'path_spec': {
                'name': 'sumOfEigenvalues'
            },
            'only_required': True
        },
        'energy_core_electrons': {
            'parse_type': 'singleValue',
            'path_spec': {
                'name': 'coreElectrons',
                'contains': 'sumOfEigenvalues'
            },
            'only_required': True
        },
        'energy_valence_electrons': {
            'parse_type': 'singleValue',
            'path_spec': {
                'name': 'valenceElectrons'
            },
            'only_required': True
        },
        'charge_den_xc_den_integral': {
            'parse_type': 'singleValue',
            'path_spec': {
                'name': 'chargeDenXCDenIntegral'
            },
            'only_required': True
        },
    },
    'ldau_energy_correction': {
        '_modes': [('ldau', True)],
        'ldau_energy_correction': {
            'parse_type': 'singleValue',
            'path_spec': {
                'name': 'dftUCorrection'
            },
            'subdict': 'ldau_info',
            'only_required': True
        },
    },
    'nmmp_distances': {
        '_minimal': True,
        '_modes': [('ldau', True)],
        'density_matrix_distance': {
            'parse_type': 'attrib',
            'path_spec': {
                'name': 'distance',
                'contains': 'ldaUDensityMatrixConvergence'
            },
            'subdict': 'ldau_info'
        },
    },
    'fermi_energy': {
        'fermi_energy': {
            'parse_type': 'singleValue',
            'path_spec': {
                'name': 'FermiEnergy'
            },
        }
    },
    'bandgap': {
        '_modes': [('bz_integration', 'hist')],
        'bandgap': {
            'parse_type': 'singleValue',
            'path_spec': {
                'name': 'bandgap'
            },
        }
    },
    'magnetic_moments': {
        '_modes': [('jspin', 2)],
        'magnetic_moments': {
            'parse_type': 'allAttribs',
            'path_spec': {
                'name': 'magneticMoment'
            },
            'base_value': 'moment',
            'ignore': ['atomType']
        }
    },
    'orbital_magnetic_moments': {
        '_modes': [('jspin', 2), ('soc', True)],
        'orbital_magnetic_moments': {
            'parse_type': 'allAttribs',
            'path_spec': {
                'name': 'orbMagMoment'
            },
            'base_value': 'moment',
            'ignore': ['atomType']
        }
    },
    'forces': {
        '_minimal': True,
        '_modes': [('relax', True)],
        '_conversions': ['convert_forces'],
        'force_units': {
            'parse_type': 'attrib',
            'path_spec': {
                'name': 'units',
                'tag_name': 'totalForcesOnRepresentativeAtoms'
            },
            'overwrite_last': True
        },
        'parsed_forces': {
            'parse_type': 'allAttribs',
            'path_spec': {
                'name': 'forceTotal'
            },
            'flat': False,
            'only_required': True
        }
    },
    'charges': {
        '_conversions': ['calculate_total_magnetic_moment'],
        'spin_dependent_charge': {
            'parse_type': 'allAttribs',
            'path_spec': {
                'name': 'spinDependentCharge',
                'contains': 'allElectronCharges',
                'not_contains': 'fixed'
            },
            'only_required': True
        },
        'total_charge': {
            'parse_type': 'singleValue',
            'path_spec': {
                'name': 'totalCharge',
                'contains': 'allElectronCharges',
                'not_contains': 'fixed'
            },
            'only_required': True
        }
    },
    #-------Tasks for forcetheorem Calculations
    # DMI, JIJ, MAE, SSDISP
    'forcetheorem_dmi': {
        '_special': True,
        'dmi_force': {
            'parse_type': 'allAttribs',
            'path_spec': {
                'name': 'Entry',
                'contains': 'DMI'
            }
        },
        'dmi_force_qs': {
            'parse_type': 'attrib',
            'path_spec': {
                'name': 'qpoints',
                'contains': 'Forcetheorem_DMI'
            }
        },
        'dmi_force_angles': {
            'parse_type': 'attrib',
            'path_spec': {
                'name': 'Angles',
                'contains': 'Forcetheorem_DMI'
            }
        },
        'dmi_force_units': {
            'parse_type': 'attrib',
            'path_spec': {
                'name': 'units',
                'contains': 'Forcetheorem_DMI'
            }
        }
    },
    'forcetheorem_ssdisp': {
        '_special': True,
        'spst_force': {
            'parse_type': 'allAttribs',
            'path_spec': {
                'name': 'Entry',
                'contains': 'SSDISP'
            }
        },
        'spst_force_qs': {
            'parse_type': 'attrib',
            'path_spec': {
                'name': 'qvectors',
                'contains': 'Forcetheorem_SSDISP'
            }
        },
        'spst_force_units': {
            'parse_type': 'attrib',
            'path_spec': {
                'name': 'units',
                'contains': 'Forcetheorem_SSDISP'
            }
        }
    },
    'forcetheorem_mae': {
        '_special': True,
        'mae_force': {
            'parse_type': 'allAttribs',
            'path_spec': {
                'name': 'Angle',
                'contains': 'MAE'
            }
        },
        'mae_force_units': {
            'parse_type': 'attrib',
            'path_spec': {
                'name': 'units',
                'contains': 'Forcetheorem_MAE'
            }
        }
    },
    'forcetheorem_jij': {
        '_special': True,
        'jij_force': {
            'parse_type': 'allAttribs',
            'path_spec': {
                'name': 'Config',
                'contains': 'JIJ'
            }
        },
        'jij_force_units': {
            'parse_type': 'attrib',
            'path_spec': {
                'name': 'units',
                'contains': 'Forcetheorem_JIJ'
            }
        }
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.