hexsha stringlengths 40 40 | size int64 4 1.02M | ext stringclasses 8 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 209 | max_stars_repo_name stringlengths 5 121 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 209 | max_issues_repo_name stringlengths 5 121 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 209 | max_forks_repo_name stringlengths 5 121 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 4 1.02M | avg_line_length float64 1.07 66.1k | max_line_length int64 4 266k | alphanum_fraction float64 0.01 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4a330bad99b320ba04765b901bc520c06807b5b9 | 3,040 | py | Python | examples/vrabie2009.py | VNOpenAI/OpenControl | 0087408c57bc77f34f524b28f8c4363b116700bb | [
"MIT"
] | 5 | 2021-07-05T18:03:45.000Z | 2021-07-16T09:31:02.000Z | examples/vrabie2009.py | VNOpenAI/OpenControl | 0087408c57bc77f34f524b28f8c4363b116700bb | [
"MIT"
] | null | null | null | examples/vrabie2009.py | VNOpenAI/OpenControl | 0087408c57bc77f34f524b28f8c4363b116700bb | [
"MIT"
] | null | null | null | ### This examples the simulation examples in the paper of Vrabie and F.Lewis, the very first paper in the ADP control area using neural networks. But in this python implementation, we dont use the algorithm in the paper because the original one requires knowledge the system dynamics. Instead, we simulate with OpenControl, Off Policy ADP Controller. The results that is the same as the paper's results demonstrate our python packages.
### You can check the paper at https://www.sciencedirect.com/science/article/abs/pii/S0893608009000446
### Run the simulation from the shell with: python vrabie2009.py
from OpenControl.ADP_control import NonLin, NonLinController
from matplotlib import pyplot as plt
import numpy as np
########################## Example 1
print("Example 1")
# define system
def dot_x(t,x,u):
    # dynamics of the system
x1 = x[0]
x2 = x[1]
# coefficient
# system dynamics
dx1 = -x1 + x2
dx2 = -0.5*(x1+x2)+0.5*x2*np.sin(x1)**2 + np.sin(x1)*u
dx = [dx1, dx2]
return dx
dimension = (2,1)
sys = NonLin(dot_x, dimension)
# setup simulation
t_start = 0; t_stop = 20
x0 = np.array([-1, 1])
sample_time = 0.01
sys.setSimulationParam(t_sim=(t_start, t_stop), sample_time=sample_time, x0=x0)
# define and setup controller
ctrl = NonLinController(sys)
u0 = lambda x: -1.5*np.sin(x[0])*(x[0] + x[1])
data_eval = 0.1; num_data = 30
explore_noise = lambda t: 0.2*np.sum(np.sin(np.array([1, 3, 7, 11, 13, 15])*t))
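# phi_func below defines the polynomial basis used to approximate the value
# function; the critic weights Wc printed later are the coefficients of these
# basis terms (a standard reading of off-policy ADP critics, stated here as
# an explanatory assumption rather than OpenControl documentation).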
phi_func = lambda x: np.array([x[0]**2, x[0]*x[1], x[1]**2])
ctrl.setPolicyParam(data_eval=data_eval, num_data=num_data, explore_noise=explore_noise, u0=u0, phi_func=phi_func)
# run simulation
Wc, Wa = ctrl.offPolicy()
print("The optimal weight of the critic")
print(Wc)
# for visualizing the results, use tensorboard
########################## Example 2
print("example 2")
def dot_x(t,x,u):
    # dynamics of the system
x1 = x[0]
x2 = x[1]
# coefficient
# system dynamics
dx1 = -x1 + x2 + 2*x2**3
dx2 = -0.5*(x1+x2)+0.5*x2*(1 + 2*x2**2)*np.sin(x1)**2 + np.sin(x1)*u
dx = [dx1, dx2]
return dx
dimension = (2,1)
sys = NonLin(dot_x, dimension)
t_start = 0; t_stop = 20
x0 = np.array([-1, 1])
sample_time = 0.01
sys.setSimulationParam(t_sim=(t_start, t_stop), sample_time=sample_time, x0=x0)
ctrl = NonLinController(sys)
def u0(x):
x1 = x[0]
x2 = x[1]
return -0.5*np.sin(x1)*(3*x2 - 0.2*x1**2*x2 + 12*x2**3)
data_eval = 0.1; num_data = 40
explore_noise = lambda t: 0.2*np.sum(np.sin(np.array([1, 3, 7, 11, 13, 15])*t))
# define a custom phi_func (quadratic and quartic terms) instead of the default
def phi_func(x):
x1 = x[0]
x2 = x[1]
return np.array([x1**2, x1*x2, x2**2, x1**4, x1**3*x2, x1**2*x2**2, x1*x2**3, x2**4])
def q_func(x):
x1 = x[0]
x2 = x[1]
return x1**2 + x2**2 + 2*x2**4
ctrl.setPolicyParam(data_eval=data_eval, num_data=num_data, explore_noise=explore_noise, u0=u0, q_func=q_func, phi_func=phi_func)
Wc, Wa = ctrl.offPolicy()
print("The optimal weight of the critic")
print(Wc)
# for visualizing the results, use tensorboard | 27.636364 | 435 | 0.660197 |
7a56072c3ea64ec6cb6501acb57fa2145a472055 | 1,891 | py | Python | doajtest/unit/test_lib_normalise_url.py | glauberm/doaj | dc24dfcbf4a9f02ce5c9b09b611a5766ea5742f7 | [
"Apache-2.0"
] | 47 | 2015-04-24T13:13:39.000Z | 2022-03-06T03:22:42.000Z | doajtest/unit/test_lib_normalise_url.py | glauberm/doaj | dc24dfcbf4a9f02ce5c9b09b611a5766ea5742f7 | [
"Apache-2.0"
] | 1,215 | 2015-01-02T14:29:38.000Z | 2022-03-28T14:19:13.000Z | doajtest/unit/test_lib_normalise_url.py | glauberm/doaj | dc24dfcbf4a9f02ce5c9b09b611a5766ea5742f7 | [
"Apache-2.0"
] | 14 | 2015-11-27T13:01:23.000Z | 2021-05-21T07:57:23.000Z | from parameterized import parameterized
from combinatrix.testintegration import load_parameter_sets
from unittest import TestCase
from portality.lib.paths import rel2abs
from portality.lib.normalise import normalise_url
def load_cases():
return load_parameter_sets(rel2abs(__file__, "..", "matrices", "lib_normalise_url"), "normalise_url", "test_id",
{"test_id" : []})
EXCEPTIONS = {
"ValueError" : ValueError
}
class TestLibNormaliseURL(TestCase):
def setUp(self):
super(TestLibNormaliseURL, self).setUp()
def tearDown(self):
super(TestLibNormaliseURL, self).tearDown()
@parameterized.expand(load_cases)
def test_01_normalise_url(self, name, kwargs):
url_arg = kwargs.get("url")
scheme_arg = kwargs.get("scheme")
whitespace_arg = kwargs.get("whitespace")
raises_arg = kwargs.get("raises")
raises = EXCEPTIONS.get(raises_arg)
###############################################
## set up
canonicalUrl = None
if url_arg != "none":
canonicalUrl = "//example.com/path;p=1?query=one&two=three#frag"
url = canonicalUrl
if scheme_arg == "none" and url is not None:
url = url[2:]
if scheme_arg not in ["-", "invalid", "none", "//"]:
url = scheme_arg + ":" + url
elif scheme_arg == "invalid":
url = "somerubbish:" + url
elif scheme_arg == "unknown":
url = "unknown:" + url
if whitespace_arg == "yes":
url = " " + url + "\t\n"
###########################################################
# Execution
if raises is not None:
with self.assertRaises(raises):
norm = normalise_url(url)
else:
norm = normalise_url(url)
assert norm == canonicalUrl
| 30.015873 | 116 | 0.556319 |
e6f87f72b60f13ecc5d37e17f7cf24e62a96f39a | 1,228 | py | Python | ott/examples/fairness/config.py | theouscidda6/ott | 53a06973e417f579fc79795403f81ee829dbd575 | [
"Apache-2.0"
] | 110 | 2021-12-28T17:18:08.000Z | 2022-03-31T00:20:57.000Z | ott/examples/fairness/config.py | theouscidda6/ott | 53a06973e417f579fc79795403f81ee829dbd575 | [
"Apache-2.0"
] | 14 | 2022-01-31T14:04:47.000Z | 2022-03-24T17:20:35.000Z | ott/examples/fairness/config.py | theouscidda6/ott | 53a06973e417f579fc79795403f81ee829dbd575 | [
"Apache-2.0"
] | 17 | 2022-01-31T11:59:26.000Z | 2022-03-31T11:28:44.000Z | # coding=utf-8
# Copyright 2022 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Configuration to train a fairness aware classifier on the adult dataset."""
import ml_collections
def get_config():
"""Returns a ConfigDict."""
config = ml_collections.ConfigDict()
config.folder = '/tmp/adult_dataset/'
config.training_filename = 'adult.data'
config.test_filename = 'adult.test'
config.info_filename = 'adult.names'
config.protected = 'sex'
config.batch_size = 256
config.num_epochs = 20
config.embed_dim = 16
config.hidden_layers = (64, 64)
config.learning_rate = 1e-4
config.epsilon = 1e-3
config.quantization = 16
config.num_groups = 2
config.fair_weight = 1.0
return config
| 29.238095 | 78 | 0.739414 |
97382167d784a56a343fb759bb77bdd9bd7429a2 | 30,376 | py | Python | door_jam.py | danieljabailey/door_jam | 1848941e96569d9d5e9eaf7ca88b03ebbffe2338 | [
"MIT",
"Unlicense"
] | null | null | null | door_jam.py | danieljabailey/door_jam | 1848941e96569d9d5e9eaf7ca88b03ebbffe2338 | [
"MIT",
"Unlicense"
] | null | null | null | door_jam.py | danieljabailey/door_jam | 1848941e96569d9d5e9eaf7ca88b03ebbffe2338 | [
"MIT",
"Unlicense"
] | null | null | null | #!/usr/bin/env python3
import math
import sys
import pygame
import threading
import time
from enum import Enum
import pytmx
from pytmx.util_pygame import load_pygame as load_tmx
import traceback
import networkx as nx
target_fps = 30
RENDER = pygame.event.custom_type()
def surface_geom(w, h, tw, th):
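    """ Returns the pixel (width, height) of a surface just large enough
    to hold a w x h isometric map of tw x th diamond tiles """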
return (
((w*tw) + (h*tw)) /2
,((h*th) + (w*th)) /2
)
def grid_to_surface(x, y, w, h, tw, th):
""" Converts grid coordinates to screen coordinates
with 0,0 on the top corner of the topmost tile in the map """
return (
((tw * (h-1)) + ((x-y)*tw)) / 2
,((x+y)*th) / 2
)
def surface_to_grid(x, y, w, h, tw, th):
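    """ Inverse of grid_to_surface: converts surface coordinates back to
    the (floored) grid cell by solving for x+y and x-y """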
x_plus_y = (y*2)/th
x_minus_y = ((x*2) - (tw * (h-1)))/tw
sx = (x_plus_y + x_minus_y) / 2
sy = x_plus_y - sx
return (
math.floor(sx)
,math.floor(sy)
)
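# A minimal round-trip sketch (illustrative only; _grid_round_trip_demo is an
# added helper that the game never calls): converting a cell's top corner to
# surface coordinates and back should recover the same cell.
def _grid_round_trip_demo():
    w, h, tw, th = 10, 10, 64, 32  # assumed map/tile geometry for the demo
    for cell in [(0, 0), (3, 7), (9, 9)]:
        sx, sy = grid_to_surface(*cell, w, h, tw, th)
        assert surface_to_grid(sx, sy, w, h, tw, th) == cell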
def add(c1,c2):
((x1,y1),(x2,y2)) = (c1,c2)
return (
x1+x2
,y1+y2
)
def neg(c1):
(x,y) = c1
return (-x,-y)
def sub(c1,c2):
return add(c1, neg(c2))
def mul(a, b):
x,y=a
return (x*b, y*b)
def vmul(a, b):
x1,y1=a
x2,y2=b
return (x1*x2, y1*y2)
NORTH = (0,-1)
EAST = (1,0)
SOUTH = (0,1)
WEST = (-1,0)
SCREEN_NORTH = (1,-1)
SCREEN_EAST = (1,1)
SCREEN_SOUTH = (-1,1)
SCREEN_WEST = (-1,-1)
def heading_to_screen(heading):
return {
NORTH:SCREEN_NORTH
,EAST:SCREEN_EAST
,SOUTH:SCREEN_SOUTH
,WEST:SCREEN_WEST
}[heading]
def turn_left(h):
return {
NORTH:WEST
,EAST:NORTH
,SOUTH:EAST
,WEST:SOUTH
}[h]
def turn_right(h):
return {
NORTH:EAST
,EAST:SOUTH
,SOUTH:WEST
,WEST:NORTH
}[h]
def heading_name(h):
return {
NORTH:'north'
,EAST:'east'
,SOUTH:'south'
,WEST:'west'
}[h]
class Animation:
def __init__(self, filename, size, start_frame, end_frame):
w,h = size
self.img = pygame.image.load(filename)
self.frame_width = w
self.frame_height = h
self.size = size
self.n_frames = end_frame - start_frame + 1
self.start_frame = start_frame
(iw,ih) = self.img.get_size()
frames_per_row = math.floor(iw/w)
self.frames = [
self.img.subsurface(pygame.Rect(
(f%frames_per_row)*w, (f//frames_per_row)*h,
self.frame_width, self.frame_height
)) for f in range(start_frame, end_frame+1)
]
def get_frame(self, n):
i = (n % self.n_frames)
return self.frames[i]
class Character:
def __init__(self, marker, tile_unit):
self.anims = {}
self.tile_unit = tile_unit
self.cur_anim = None
self.cur_frame = 0
self.size = None
self.pos = (0,0)
self.target = (0,0)
self.destination = (0,0)
self.selected = False
self.marker = marker
self.frames_per_tile = 20
self.step_progress = 0
self.path = None
self.heading = EAST
self.screen_heading = SCREEN_EAST
self.selectable = True
self.walking = False
def is_selected(self):
return self.selected
def select(self):
self.selected = True
def clear_selection(self):
self.selected = False
def draw(self, surf, pos, scale):
if self.cur_anim is None:
return
anim = self.anims[self.cur_anim]
f = self.cur_frame
img = anim.get_frame(f)
lpos = add(pos, mul(vmul(self.screen_heading, mul(self.tile_unit, (self.step_progress/self.frames_per_tile)/2)), scale))
newsize = mul(self.size, scale)
scaled_char = pygame.transform.scale(img, newsize)
surf.blit(scaled_char, lpos)
if self.selected:
newsize = mul(self.marker.size, scale)
scaled_marker = pygame.transform.scale(self.marker.get_frame(f), newsize)
surf.blit(scaled_marker, add(lpos, mul((16, -4), scale)))
def next_frame(self):
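        # step_progress counts up through the first half of a tile move; once
        # it reaches frames_per_tile/2 the logical position snaps to the
        # target and the counter restarts from -frames_per_tile/2 to animate
        # the second half before the next path step begins.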
self.cur_frame += 1
if self.pos != self.target or self.step_progress < 0:
self.step_progress = self.step_progress + 1
if self.step_progress >= self.frames_per_tile/2:
self.pos = self.target
self.step_progress = -(self.frames_per_tile/2)
if self.step_progress == 0:
self.walk_path(self.path)
def add_anim(self, name, filename, size, start, end):
a = Animation(filename, size, start, end)
self.anims[name] = a
if self.size is None:
self.size = (a.frame_width, a.frame_height)
def set_anim(self, name):
self.cur_anim = name
def warp_to(self, pos):
self.pos = pos
self.target = pos
self.step_progress = 0
def idle(self):
self.target = self.pos
self.destination = self.pos
self.walking = False
self.set_anim(
{
NORTH: 'idle_north'
,EAST: 'idle_east'
,SOUTH: 'idle_south'
,WEST: 'idle_west'
}[self.heading]
)
def walk_path(self, path):
self.step_progress = 0
self.walking = True
if path is not None:
if len(path) > 0:
self.destination = path[-1]
[self.target, *self.path] = path
if self.target == self.pos:
self.walk_path(self.path)
else:
self.heading = sub(self.target, self.pos)
self.screen_heading = heading_to_screen(self.heading)
self.set_anim(
{
NORTH: 'walk_north'
,EAST: 'walk_east'
,SOUTH: 'walk_south'
,WEST: 'walk_west'
}[self.heading]
)
else:
self.idle()
else:
self.idle()
class Game:
def __init__(self):
self.win = pygame.display.set_mode((1000,700), pygame.RESIZABLE)
self.stop_event = threading.Event()
self.can_render = threading.Event()
self.can_render.set()
self.last_time = time.time()
self.font = pygame.font.SysFont("monospace", 18)
self.big_font = pygame.font.SysFont("sans", 70)
self.button_font = pygame.font.SysFont("sans", 40)
self.tip_font = pygame.font.SysFont("sans", 18)
self.guard_points = []
self.levels = [
'Tiled/Map1.tmx'
,'Tiled/Map2.tmx'
,'Tiled/Map3.tmx'
,'Tiled/Map4.tmx'
,'Tiled/Map5.tmx'
]
self.cur_level = 0
self.load_next_level()
self.cursor = None
self.selection = None
self.path_plan = None
self.retry_button = None
self.game_is_over = False
self.panning = False
self.hover_occupied = None
self.scale = 1
self.scroll = 10
self.three_frame = 0
self.marker = Animation('Pointer.png', (16,16), 0, 15)
self.restart_level()
def load_next_level(self):
self.load_map(self.levels[self.cur_level])
self.cur_level += 1
def restart_level(self):
self.all_player_chars = []
for c in self.char_points:
player = self.load_character('Character1.png')
player.warp_to(c)
player.set_anim('idle_south')
self.all_player_chars.append(player)
self.guard = self.load_character('Guard.png')
self.guard.selectable = False
if self.guard_points:
pos = self.guard_points[0]
guard_path = [pos]
for p in self.guard_points[1:]:
guard_path.extend(nx.shortest_path(self.room, pos, p)[1:])
pos = p
new_paths = []
if self.guard_start is not None:
first_point = guard_path[0]
line = self.line(self.guard_start, first_point)
guard_path = [*line, *guard_path[1:]]
self.guard.warp_to(guard_path[0])
this_path = []
for p in guard_path:
door = self.door_from(p)
if door is None:
this_path.append(p)
else:
from_, to = door
this_path.append(p)
new_paths.append(this_path)
this_path = []
if self.guard_end is not None:
if len(this_path) >0:
last_point = this_path[-1]
else:
last_point = new_paths[-1][-1]
line = self.line(last_point, self.guard_end)
this_path.extend(line)
new_paths.append(this_path)
next_path, *self.guard_paths = new_paths
self.guard.walk_path(next_path)
self.guard_state = 'walk'
self.guard_done = False
self.guard_passes = self.init_guard_passes
self.all_guards = [self.guard]
self.all_chars = self.all_guards + self.all_player_chars
self.check_guard_vision()
self.apply_scale()
def door_from(self, pos):
for (f,t) in self.doors:
if f == pos:
return (f,t)
return None
def load_character(self, sprite_sheet, size=(48,48)):
new_char = Character(self.marker, (self.tw, self.th))
for i, heading in enumerate(['east', 'south', 'west', 'north']):
new_char.add_anim(f'idle_{heading}', sprite_sheet, size, i*9, i*9)
new_char.add_anim(f'walk_{heading}', sprite_sheet, size, (i*9)+1, (i*9)+8)
new_char.set_anim('idle_east')
new_char.warp_to((0,0))
return new_char
def grid_to_surface(self, x, y):
return grid_to_surface(x,y,self.w,self.h,self.tw,self.th)
def surface_to_grid(self, x, y):
return surface_to_grid(x,y,self.w,self.h,self.tw,self.th)
def coords(self, pos, size=None):
if size is None:
size = (self.tw/2, self.th)
delta = sub((self.tw/2, self.th), size)
return add(self.offset, mul(add(delta, self.grid_to_surface(*pos)),self.scale))
def load_map(self, name):
self.map = load_tmx(name)
self.w = self.map.width
self.h = self.map.height
self.tw = self.map.tilewidth
self.th = self.map.tileheight
self.sw, self.sh = surface_geom(self.w, self.h, self.tw, self.th)
self.map_surface = pygame.Surface((self.sw,self.sh))
self.overlay_surface = pygame.Surface((self.sw,self.sh))
self.overlay_surface.convert_alpha()
self.overlay_surface.set_alpha(255)
self.map_parts = {}
self.doors = []
g = nx.Graph()
layer_id = lambda layer: next(i for i, l in enumerate(self.map.layers) if l==layer)
def layer_by_name(name):
try:
return self.map.get_layer_by_name(name)
except ValueError:
return None
for layer,name in [(layer_by_name(name),name) for name in ['Floor', 'Walls', 'Doors', 'GuardEntrance', 'GuardExit']]:
if not isinstance(layer, pytmx.pytmx.TiledTileLayer):
continue
for x, y, img_gid in layer.iter_data():
img = self.map.get_tile_image_by_gid(img_gid)
if img is None:
continue
delta = sub((self.tw/2, self.th), img.get_size())
pos = add(self.grid_to_surface(x,y), delta)
if name == 'Floor':
self.map_surface.blit(img, pos)
elif name.startswith('Guard'):
self.overlay_surface.blit(img, pos)
else:
depth = x+y+1
part = self.map_parts.get(depth)
if part is None:
part = pygame.Surface((self.sw, self.th*2))
self.map_parts[depth]=part
part.blit(img, sub(pos, (0,-self.th+((depth-1)*(self.th/2)))))
props = self.map.get_tile_properties_by_gid(img_gid)
if props and props.get('floor', False):
g.add_node((x,y))
for ox, oy in [(x-1,y),(x,y-1)]:
other = self.map.get_tile_properties(ox, oy, layer_id(layer))
if other and other.get('floor',False):
g.add_edge((x,y), (ox,oy))
east_wall = props is not None and props.get('wall_east', False)
south_wall = props is not None and props.get('wall_south', False)
east_door = props is not None and props.get('door_east', False)
south_door = props is not None and props.get('door_south', False)
to_remove = []
if east_wall:
other = add(EAST, (x,y))
to_remove.append(((x,y), other))
if south_wall:
other = add(SOUTH, (x,y))
to_remove.append(((x,y), other))
if east_door:
other = add(EAST, (x,y))
self.doors.append(((x,y), other))
self.doors.append((other, (x,y)))
if south_door:
other = add(SOUTH, (x,y))
self.doors.append(((x,y), other))
self.doors.append((other, (x,y)))
for from_, to in to_remove:
try:
g.remove_edge(from_, to)
except nx.NetworkXError:
pass
self.offset = (100,100)
self.room = g
points = layer_by_name('Points')
self.rooms = []
self.guard_start = None
self.guard_end = None
self.guard_on_point = False
self.char_points = []
if points:
guard_points = {}
for p in points:
props = p.properties
if props.get('guard_start', False):
self.guard_start = (math.floor(p.x/self.th),math.floor(p.y/self.th))
if props.get('guard_end', False):
self.guard_end = (math.floor(p.x/self.th),math.floor(p.y/self.th))
passes = props.get('guard_passes')
if passes is not None:
self.init_guard_passes = passes
self.guard_passes = passes
self.guard_pass_point = (math.floor(p.x/self.th),math.floor(p.y/self.th))
if 'index' in props:
i = props['index']
guard_points[i] = (math.floor(p.x/self.th),math.floor(p.y/self.th))
else:
x,y,w,h = [math.floor(a/self.th) for a in [p.x, p.y, p.width, p.height]]
is_goal = props.get('goal', False)
start, end = ((x,y),(x+w,y+h))
self.rooms.append((start, end))
if is_goal:
self.goal_room = (start, end)
if props.get('character', False):
self.char_points.append((math.floor(p.x/self.th),math.floor(p.y/self.th)))
self.guard_points = [guard_points [k] for k in sorted(guard_points.keys())]
def line(self, from_, to):
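        """ Returns the grid cells on the axis-aligned segment from `from_` to
        `to` (inclusive); assumes the two points share an x or a y coordinate """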
fx, fy = from_
tx, ty = to
line = [from_]
x, y = from_
if fx==tx:
dir_ = 1 if ty>fy else -1
while (x,y) != to:
y += dir_
line.append((x,y))
if fy==ty:
dir_ = 1 if tx>fx else -1
while (x,y) != to:
x += dir_
line.append((x,y))
return line
def squares_in_room(self, start, end):
room_squares = []
sx,sy=start
ex,ey=end
for x in range(sx,ex):
for y in range(sy,ey):
room_squares.append((x,y))
return room_squares
def is_in_room(self, pos, start, end):
sx,sy=start
ex,ey=end
x,y=pos
return all([
x>=sx
,x<ex
,y>=sy
,y<ey
])
def game_over(self):
self.game_is_over = True
def check_guard_vision(self):
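        """ Collects every tile the guard can currently see (its own tile, the
        whole room it stands in, and the unobstructed tiles straight ahead)
        and triggers game over if a player character stands on one """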
p = self.guard.pos
seen_points = [p]
for room in self.rooms:
if self.is_in_room(p, *room):
seen_points.extend(self.squares_in_room(*room))
h = self.guard.heading
next_tile = add(p, h)
while self.room.has_edge(p, next_tile):
seen_points.append(next_tile)
p = next_tile
next_tile = add(next_tile, h)
self.guard_vision = seen_points
for tile in seen_points:
for c in self.all_player_chars:
if tile == c.pos:
self.game_over()
def winning_condition(self):
return self.guard_done and all(self.is_in_room(c.pos, *self.goal_room) for c in self.all_player_chars)
def update(self, timediff):
if not self.game_is_over:
            # Characters move one animation frame every 2 game frames
self.three_frame = (self.three_frame + 1) % 2
if self.three_frame == 0:
for c in self.all_chars:
c.next_frame()
match self.guard_state:
case 'walk':
if self.guard.pos == self.guard_pass_point and not self.guard_on_point:
self.guard_on_point = True
self.guard_passes -= 1
if self.guard_passes == 0:
self.guard_done = True
if self.guard.pos != self.guard_pass_point and self.guard_on_point:
self.guard_on_point = False
if not self.guard.walking:
door = self.door_from(self.guard.pos)
if door is not None:
from_, to = door
self.guard.walk_path([to])
self.guard_exit = from_
self.guard_counter = 0
self.guard_state = 'enter_room'
else:
self.guard_done = True
case 'enter_room':
if not self.guard.walking:
if self.guard_counter == 0:
new_heading = turn_left(self.guard.heading)
self.guard.heading = new_heading
self.guard.idle()
if self.guard_counter == 10:
new_heading = turn_right(self.guard.heading)
self.guard.heading = new_heading
self.guard.idle()
if self.guard_counter == 20:
new_heading = turn_right(self.guard.heading)
self.guard.heading = new_heading
self.guard.idle()
if self.guard_counter > 30:
next_path, *self.guard_paths = self.guard_paths
self.guard.walk_path([self.guard_exit, *next_path])
self.guard_state = 'walk'
self.guard_counter += 1
self.check_guard_vision()
def draw_cursor(self, pos, color):
cx,cy = pos
gx,gy = add(self.offset, self.grid_to_surface(cx,cy))
gx,gy = self.coords((cx,cy))
s = self.scale
pygame.draw.aalines(self.win, color, True, [
(gx,gy)
,(gx+(self.tw/2*s), gy+(self.th/2*s))
,(gx,gy+self.th*s)
,(gx-(self.tw/2*s), gy+(self.th/2*s))
])
def draw_box(self, pos, color):
cx,cy = pos
gx,gy = add(self.offset, self.grid_to_surface(cx,cy))
gx,gy = self.coords((cx,cy))
s = self.scale
pygame.draw.polygon(self.win, color, [
(gx,gy)
,(gx+(self.tw/2*s), gy+(self.th/2*s))
,(gx,gy+self.th*s)
,(gx-(self.tw/2*s), gy+(self.th/2*s))
])
def draw_path(self, path, color):
if len(path) < 2:
return
pygame.draw.aalines(self.win, color, False, [
add(self.offset, mul(add(self.grid_to_surface(*p), (0, self.th/2)), self.scale)) for p in path
])
def render(self):
self.win.fill((0,0,0))
self.win.blit(self.scaled_map, self.offset)
self.win.convert_alpha()
self.win.set_alpha(255)
if self.cursor is not None:
self.draw_cursor(self.cursor, (0,255,0))
if self.selection is not None:
self.draw_cursor(self.selection, (0,255,0))
if self.hover_occupied is not None:
self.draw_cursor(self.hover_occupied, (255,0,0))
if self.path_plan is not None:
self.draw_path(self.path_plan, (0,0,255))
for p in self.guard_vision:
self.draw_cursor(p, (255,238,77))
chars_for_depth = {}
for c in self.all_chars:
pos = self.coords(c.pos, c.size)
cx,cy=c.pos
depth = cx+cy
chars = chars_for_depth.get(depth, None)
if chars is None:
chars = list()
chars_for_depth[depth] = chars
chars.append(c)
for depth in range(0, self.w+self.h):
part = self.scaled_map_parts.get(depth)
if part is not None:
self.win.blit(part, add(self.offset, (0,math.ceil(self.scale * (-self.th+(depth-1) * (self.th/2))))))
chars = chars_for_depth.get(depth, [])
for char in chars:
pos = self.coords(char.pos, char.size)
char.draw(self.win, pos, self.scale)
self.win.blit(self.scaled_overlay, self.offset)
if self.game_is_over:
msg = self.big_font.render(f"You got caught! Game Over!", 1, (255,0,0))
msg_pos = sub(mul(self.win.get_size(), 1/2), mul(msg.get_size(), 1/2))
self.win.blit(msg, msg_pos)
msg = self.button_font.render(f"retry", 1, (0,0,255))
msg_pos = add(sub(mul(self.win.get_size(), 1/2), mul(msg.get_size(), 1/2)), (0,70))
(x,y) = sub(msg_pos, (5,5))
(w,h) = sub(add(msg_pos, add((5,5), msg.get_size())), (x,y))
self.retry_button = pygame.Rect(x, y, w, h)
color = (155,155,155)
if self.retry_button.collidepoint(self.last_mouse_pos):
color = (255,255,255)
pygame.draw.rect(self.win, color, self.retry_button)
self.win.blit(msg, msg_pos)
else:
self.retry_button = None
if self.winning_condition():
msg = self.big_font.render(f"You made it!", 1, (0,255,0))
msg_pos = sub(mul(self.win.get_size(), 1/2), mul(msg.get_size(), 1/2))
self.win.blit(msg, msg_pos)
msg = self.button_font.render(f"next level", 1, (0,0,255))
msg_pos = add(sub(mul(self.win.get_size(), 1/2), mul(msg.get_size(), 1/2)), (0,70))
(x,y) = sub(msg_pos, (5,5))
(w,h) = sub(add(msg_pos, add((5,5), msg.get_size())), (x,y))
self.next_button = pygame.Rect(x, y, w, h)
color = (155,155,155)
if self.next_button.collidepoint(self.last_mouse_pos):
color = (255,255,255)
pygame.draw.rect(self.win, color, self.next_button)
self.win.blit(msg, msg_pos)
else:
self.next_button = None
def apply_scale(self):
ssize = mul(self.map_surface.get_size(), self.scale)
self.scaled_map = pygame.transform.scale(self.map_surface, ssize)
self.scaled_overlay = pygame.transform.scale(self.overlay_surface, ssize)
self.scaled_map.convert_alpha()
self.scaled_overlay.convert_alpha()
self.scaled_map.set_alpha(255)
self.scaled_overlay.set_alpha(255)
self.scaled_map_parts = {}
for depth in self.map_parts:
part = self.map_parts[depth]
ssize = mul(part.get_size(), self.scale)
self.scaled_map_parts[depth] = pygame.transform.scale(part, ssize)
self.scaled_map_parts[depth].convert_alpha()
self.scaled_map_parts[depth].set_colorkey((0, 0, 0))
def to_cursor_pos(self, pos):
mouse_pos = mul(sub(pos, self.offset), 1/self.scale)
return self.surface_to_grid(*mouse_pos)
def select_character(self, pos):
selected = None
for c in self.all_chars:
if selected is None and c.pos == pos and c.selectable and not c.walking:
c.select()
selected = c
else:
c.clear_selection()
return selected
def space_is_free(self, pos):
        return not any(c.destination == pos for c in self.all_player_chars)
def event(self, ev):
match ev.type:
case pygame.MOUSEMOTION:
self.last_mouse_pos = ev.pos
mouse_pos = self.to_cursor_pos(ev.pos)
props = None
try:
props = self.map.get_tile_properties(*mouse_pos,0)
except Exception:
pass
if props and props.get('floor',False):
if not self.selection or self.space_is_free(mouse_pos):
self.cursor = mouse_pos
self.hover_occupied = None
if self.selection:
self.path_plan = nx.shortest_path(self.room, self.selection, self.cursor)
else:
self.path_plan = None
else:
self.hover_occupied = mouse_pos
self.path_plan = None
self.cursor = None
else:
self.cursor = None
self.path_plan = None
self.hover_occupied = None
if self.panning:
self.offset = add(self.pan_start_offset, sub(ev.pos, self.pan_start_mouse))
case pygame.MOUSEBUTTONDOWN:
if ev.button == 1:
if self.winning_condition() and self.next_button is not None:
if self.next_button.collidepoint(self.last_mouse_pos):
self.load_next_level()
self.restart_level()
if self.game_is_over and self.retry_button is not None:
if self.retry_button.collidepoint(self.last_mouse_pos):
self.game_is_over = False
self.restart_level()
if self.selection:
if self.cursor:
self.selected_char.walk_path(self.path_plan)
self.selected_char.clear_selection()
self.path_plan = None
self.selection = None
self.selected_char = None
else:
char = self.select_character(self.cursor)
if char is not None:
self.selection = self.cursor
self.path_plan = None
self.selected_char = char
if ev.button == 2:
self.panning = True
self.pan_start_offset = self.offset
self.pan_start_mouse = ev.pos
case pygame.MOUSEBUTTONUP:
if ev.button == 2:
self.panning = False
case pygame.MOUSEWHEEL:
self.scroll = max(5, min(100, self.scroll + ev.y))
old_scale = self.scale
self.scale = self.scroll/10
self.offset = sub(self.last_mouse_pos, mul(mul(sub(self.last_mouse_pos, self.offset), 1/old_scale), self.scale))
self.apply_scale()
case _:
print(f"Unknown event: {ev}")
def fps_counter(self, diff):
fps = 1/diff
count = self.font.render(f"FPS: {int(fps)}/30", 1, (255,255,255))
self.win.blit(count, (1, 1))
def quit(self):
self.stop_event.set()
def render_poll(self):
while not self.stop_event.is_set():
self.can_render.wait()
self.can_render.clear()
pygame.fastevent.post(pygame.event.Event(RENDER, {}))
time.sleep(1/target_fps)
def run(self):
threading.Thread(target=self.render_poll).start()
try:
while ev := pygame.fastevent.wait():
match ev.type:
case pygame.QUIT:
break
case _RENDER if _RENDER == RENDER:
now = time.time()
diff = now - self.last_time
self.last_time = now
try:
self.update(diff)
self.render()
except Exception as e:
traceback.print_exception(e, file=sys.stderr)
self.fps_counter(diff)
pygame.display.flip()
self.can_render.set()
case _:
try:
self.event(ev)
except Exception as e:
traceback.print_exception(e, file=sys.stderr)
except KeyboardInterrupt:
pass
self.quit()
def main():
pygame.init()
pygame.fastevent.init()
game = Game()
game.run()
if __name__=="__main__":
main()
| 36.59759 | 128 | 0.505794 |
8100c2285c71e303d814843ad98ed22ea5a53ac3 | 1,762 | py | Python | blog/models.py | Attila-Sasvari/django_blog | 7aea29932b62d0c46ba1963a3685b6320730a73e | [
"MIT",
"Unlicense"
] | null | null | null | blog/models.py | Attila-Sasvari/django_blog | 7aea29932b62d0c46ba1963a3685b6320730a73e | [
"MIT",
"Unlicense"
] | null | null | null | blog/models.py | Attila-Sasvari/django_blog | 7aea29932b62d0c46ba1963a3685b6320730a73e | [
"MIT",
"Unlicense"
] | null | null | null | from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.conf import settings
from django.shortcuts import reverse
class Tag(models.Model):
name = models.CharField(max_length=50, unique=True)
def __str__(self):
return self.name
class Blog(models.Model):
title = models.CharField(max_length=200, unique=True)
lead = models.TextField(blank=False)
slug = models.SlugField(max_length=255, unique=True, null=True)
cover_img = models.ImageField(
upload_to='photos/%Y/%m/%d/', blank=True, default=settings.DEFUALT_COVER_IMG)
author = models.ForeignKey(User, on_delete=models.PROTECT, null=True)
content = models.TextField(default="-")
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
is_published = models.BooleanField(default=True)
is_featured = models.BooleanField(default=False)
tags = models.ManyToManyField(Tag, blank=True)
class Meta:
ordering = ["-updated_at"]
app_label = "blog"
@property
def img_url(self):
return self.cover_img.url
def save(self, *args, **kwargs):
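        # Ensure a matching BlogCounts row exists for every saved post.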
super().save(*args, **kwargs)
post = Blog.objects.get(pk=self.id)
obj, created = BlogCounts.objects.get_or_create(blog_id=post)
def __str__(self):
return self.title
def get_absolute_url(self):
return reverse("article", kwargs={"slug": self.slug})
class BlogCounts(models.Model):
blog_id = models.OneToOneField(Blog, on_delete=models.CASCADE)
read_number = models.PositiveIntegerField(default=0)
stars_number = models.PositiveIntegerField(default=0)
| 32.62963 | 85 | 0.713394 |
5c3292281d4cf2312c30c7cd1d35ba38de7e0cf7 | 3,443 | py | Python | cloudfront-traffic-csv/cloudfront-traffic-csv.py | lakil00/aws-cloudfront-samples | 2a08d719d84ed63a1446be22db4c9c30c6a61477 | [
"Apache-2.0"
] | null | null | null | cloudfront-traffic-csv/cloudfront-traffic-csv.py | lakil00/aws-cloudfront-samples | 2a08d719d84ed63a1446be22db4c9c30c6a61477 | [
"Apache-2.0"
] | null | null | null | cloudfront-traffic-csv/cloudfront-traffic-csv.py | lakil00/aws-cloudfront-samples | 2a08d719d84ed63a1446be22db4c9c30c6a61477 | [
"Apache-2.0"
] | null | null | null | import boto3
import argparse
import datetime
#GLOBAL_SETTINGS
timezone = datetime.timezone(datetime.timedelta(hours=0)) #UTC +0, this should be set accordingly.
#global resource to connect API
cf_client = boto3.client('cloudfront')
def get_cname(distribution):
'''
returns 1st CNAME alias or '' if not found
'''
return distribution['Aliases']['Items'][0] if distribution['Aliases']['Quantity'] > 0 else ''
def get_tag(arn,tag_key):
'''
returns given tag value of resource, or '' if not found
'''
tags = cf_client.list_tags_for_resource(Resource=arn)
result = ''
if len(tags['Tags']['Items']) != 0:
for tag in tags['Tags']['Items']:
if tag['Key'] == tag_key:
result = tag['Value']
return result
def get_traffic_csv_list(start_date, end_date, metric_name, reporting_tag=''):
'''
    returns the given CloudWatch metric for all CloudFront distributions, as a list of comma-separated rows.
'''
#first load all distributions
distributions = cf_client.list_distributions()['DistributionList']['Items']
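    # Note: list_distributions may truncate its result set (IsTruncated /
    # NextMarker); accounts with many distributions would need to page
    # through the remainder for complete coverage.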
#prepare metric query
metric_data_queries = []
for dist in distributions:
metric_data_queries.append({
'Id': 'm_'+dist['Id'],
'Label': '%s,%s' % (dist['Id'], get_cname(dist), ) + (',%s' % (get_tag(dist['ARN'],reporting_tag), ) if reporting_tag else ''),
'MetricStat': {
'Metric': {
'MetricName': metric_name,
'Namespace': 'AWS/CloudFront',
'Dimensions': [
{'Name': 'DistributionId', 'Value': dist['Id']},
{'Name': 'Region', 'Value': 'Global'}
]
},
'Period': 86400,
'Stat': 'Sum',
'Unit': 'None'
}
})
#call Cloudwatch get_metric_data
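    # (GetMetricData accepts at most 500 queries per call, so a very large
    # set of distributions would need metric_data_queries chunked.)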
cw_client = boto3.client('cloudwatch', region_name='us-east-1')
result = cw_client.get_metric_data(MetricDataQueries=metric_data_queries, StartTime=start_date, EndTime=end_date)
#result csv
csv=['Distribution Id, CNAME, ' + ('Tag, ' if reporting_tag else '') + 'Date, '+metric_name]
for r in result['MetricDataResults']:
for i in range(len(r['Timestamps'])):
csv.append('%s,%s,%f' % (r['Label'],r['Timestamps'][i].astimezone().strftime('%Y-%m-%d'), r['Values'][i],))
return csv
if __name__ == '__main__':
#define command arguments
parser = argparse.ArgumentParser(description='Read CloudWatch Metric of all CloudFront distribution')
parser.add_argument('startdate', action='store', type=lambda x: datetime.datetime.strptime(x, '%Y-%m-%d').replace(tzinfo=timezone),
help='Start date of data period, YYYY-MM-DD.')
parser.add_argument('enddate', action='store', type=lambda x: datetime.datetime.strptime(x+' 23:59:59', '%Y-%m-%d %H:%M:%S').replace(tzinfo=timezone),
help='End date of data period, YYYY-MM-DD.')
parser.add_argument('-m','--metric', default='BytesDownloaded', choices=['BytesDownloaded','Requests', 'BytesUploaded'],
help='Reporting metric, default is BytesDownloaded')
parser.add_argument('-t','--tag', help='Reporting Tag key')
args = parser.parse_args()
#call functions
csv_list = get_traffic_csv_list(args.startdate, args.enddate, args.metric, args.tag)
for line in csv_list:
print(line) | 38.685393 | 155 | 0.611385 |
0854a725c0328a590bc1df576c10f16c7615e19b | 1,703 | py | Python | interval_intersection.py | m10singh94/Python-programs | a83083044b4a85afcf70c4b7024287a808b01fee | [
"Apache-2.0"
] | null | null | null | interval_intersection.py | m10singh94/Python-programs | a83083044b4a85afcf70c4b7024287a808b01fee | [
"Apache-2.0"
] | null | null | null | interval_intersection.py | m10singh94/Python-programs | a83083044b4a85afcf70c4b7024287a808b01fee | [
"Apache-2.0"
] | null | null | null |
# Implement the function interval_intersection below.
# You can define other functions if it helps you decompose and solve
# the problem.
# Do not import any module that you do not use!
# Remember that if this were an exam problem, in order to be marked
# this file must meet certain requirements:
# - it must contain ONLY syntactically valid python code (any syntax
# or indentation error that stops the file from running would result
# in a mark of zero);
# - you MAY NOT use global variables; the function must use only the
# input provided to it in its arguments.
def interval_intersection(lA, uA, lB, uB):
if (lB >= lA and lB <= uA):
if uB < uA:
return uB - lB
else:
return uA - lB
elif (lA >= lB and lA <= uB):
if uA < uB:
return uA - lA
else:
return uB - lA
return 0
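# An equivalent closed-form sketch (an added alternative, not part of the
# original solution): the overlap of [lA, uA] and [lB, uB] is the distance
# between the larger lower bound and the smaller upper bound, clamped at 0.
def interval_intersection_closed_form(lA, uA, lB, uB):
    return max(0, min(uA, uB) - max(lA, lB))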
def test_interval_intersection():
"""
This function runs a number of tests of the interval_intersection function.
If it works ok, you will just see the output ("all tests passed") at
the end when you call this function; if some test fails, there will
be an error message.
"""
assert interval_intersection(0, 2, 4, 7.5) == 0.0, "no intersection (uA < lB)"
assert interval_intersection(1, 3, 2.5, 6) == 0.5, "intersection is [2.5, 3]"
assert interval_intersection(1, 3, 1.5, 5) == 1.5, "intersection is [1.5, 3]"
assert interval_intersection(0, 2, -2, 1.5) == 1.5, "intersection is [0, 1.5]"
assert interval_intersection(1, 3, 0, 3.5) == 2.0, "A is contained in B"
assert interval_intersection(1.5, 3.5, 0, 3.5) == 2.0, "A is contained in B"
print("all tests passed")
| 36.234043 | 82 | 0.647093 |
177f285cbbcf4bbac268526b21c11d505366d5cf | 5,174 | py | Python | plugins/modules/panos_commit_firewall.py | bkarypid/pan-os-ansible | d7b376192b24fd7c8f0af6debc099a0aa676b6fd | [
"Apache-2.0"
] | null | null | null | plugins/modules/panos_commit_firewall.py | bkarypid/pan-os-ansible | d7b376192b24fd7c8f0af6debc099a0aa676b6fd | [
"Apache-2.0"
] | 22 | 2020-10-19T06:12:10.000Z | 2022-03-07T10:04:30.000Z | plugins/modules/panos_commit_firewall.py | patrickdaj/pan-os-ansible | 1e3daf5fe0d862516561cc95e420691c03a38403 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2020 Palo Alto Networks, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = r'''
---
module: panos_commit_firewall
short_description: Commit the firewall's candidate configuration.
description:
- Module that will commit the candidate configuration of a PAN-OS firewall.
- The new configuration will become active immediately.
author:
- Robert Hagen (@stealthllama)
version_added: '2.0.0'
requirements:
- pan-os-python
extends_documentation_fragment:
- paloaltonetworks.panos.fragments.provider
options:
description:
description:
- A description of the commit.
type: str
admins:
description:
- Commit only the changes made by specified list of administrators.
type: list
elements: str
exclude_device_and_network:
description:
- Exclude network and device configuration changes.
type: bool
default: False
exclude_shared_objects:
description:
- Exclude shared object configuration changes.
type: bool
default: False
exclude_policy_and_objects:
description:
- Exclude policy and object configuration changes.
type: bool
default: False
force:
description:
- Force the commit.
type: bool
default: False
sync:
description:
- Wait for the commit to complete.
type: bool
default: True
'''
EXAMPLES = r'''
- name: commit candidate configs on firewall
panos_commit_firewall:
provider: '{{ credentials }}'
- name: commit changes by specified admins on firewall
panos_commit_firewall:
provider: '{{ credentials }}'
admins: ['netops','secops','cloudops']
description: 'Saturday change window'
- name: commit only policy and object changes on firewall
panos_commit_firewall:
provider: '{{ credentials }}'
exclude_device_and_network: True
'''
RETURN = r'''
jobid:
description: The ID of the PAN-OS commit job.
type: int
returned: always
sample: 49152
details:
description: Commit job completion messages.
type: str
returned: on success
sample: Configuration committed successfully
'''
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.paloaltonetworks.panos.plugins.module_utils.panos import get_connection
try:
from panos.firewall import FirewallCommit
except ImportError:
pass
def main():
# Instantiate the connection helper
helper = get_connection(
min_pandevice_version=(1, 0, 0),
min_panos_version=(8, 0, 0),
argument_spec=dict(
description=dict(type='str'),
admins=dict(type='list', elements='str'),
exclude_device_and_network=dict(type='bool'),
exclude_shared_objects=dict(type='bool'),
exclude_policy_and_objects=dict(type='bool'),
force=dict(type='bool'),
sync=dict(type='bool', default=True)
)
)
# Initialize the Ansible module
module = AnsibleModule(
argument_spec=helper.argument_spec,
supports_check_mode=False,
required_one_of=helper.required_one_of
)
# Verify libs are present, get the parent object.
parent = helper.get_pandevice_parent(module)
# Construct the commit command
cmd = FirewallCommit(
description=module.params['description'],
admins=module.params['admins'],
exclude_device_and_network=module.params['exclude_device_and_network'],
exclude_shared_objects=module.params['exclude_shared_objects'],
exclude_policy_and_objects=module.params['exclude_policy_and_objects'],
force=module.params['force']
)
# Execute the commit
commit_results = dict(changed=False, jobid=0)
# commit_results = {}
sync = module.params['sync']
result = parent.commit(cmd=cmd, sync=sync)
# Exit with status
if result is None:
# No commit was needed
pass
elif not sync:
# When sync is False only jobid is returned
commit_results['jobid'] = int(result)
elif not result['success']:
# The commit failed
module.fail_json(msg=' | '.join(result["messages"]))
else:
# The commit succeeded
commit_results['changed'] = True
commit_results['jobid'] = result['jobid']
commit_results['details'] = result['messages']
module.exit_json(**commit_results)
if __name__ == '__main__':
main()
| 29.735632 | 96 | 0.674333 |
a0d3ca90476d77c4a62795d33ae3244ddfe23184 | 1,999 | py | Python | Doc/tools/jinja2/testsuite/debug.py | cocoatomo/Python3.2_C_API_Tutorial | e33d4a285429935aca3178dc2a97aca3ab484232 | [
"PSF-2.0"
] | 2 | 2019-03-03T00:04:36.000Z | 2020-10-06T16:22:38.000Z | Doc/tools/jinja2/testsuite/debug.py | cocoatomo/Python3.2_C_API_Tutorial | e33d4a285429935aca3178dc2a97aca3ab484232 | [
"PSF-2.0"
] | null | null | null | Doc/tools/jinja2/testsuite/debug.py | cocoatomo/Python3.2_C_API_Tutorial | e33d4a285429935aca3178dc2a97aca3ab484232 | [
"PSF-2.0"
] | 1 | 2019-03-03T00:04:38.000Z | 2019-03-03T00:04:38.000Z | # -*- coding: utf-8 -*-
"""
jinja2.testsuite.debug
~~~~~~~~~~~~~~~~~~~~~~
Tests the debug system.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import sys
import unittest
from jinja2.testsuite import JinjaTestCase, filesystem_loader
from jinja2 import Environment, TemplateSyntaxError
env = Environment(loader=filesystem_loader)
class DebugTestCase(JinjaTestCase):
if sys.version_info[:2] != (2, 4):
def test_runtime_error(self):
def test():
tmpl.render(fail=lambda: 1 / 0)
tmpl = env.get_template('broken.html')
self.assert_traceback_matches(test, r'''
File ".*?broken.html", line 2, in (top-level template code|<module>)
\{\{ fail\(\) \}\}
File ".*?debug.pyc?", line \d+, in <lambda>
tmpl\.render\(fail=lambda: 1 / 0\)
ZeroDivisionError: int(eger)? division or modulo by zero
''')
def test_syntax_error(self):
# XXX: the .*? is necessary for python3 which does not hide
# some of the stack frames we don't want to show. Not sure
# what's up with that, but that is not that critical. Should
# be fixed though.
self.assert_traceback_matches(lambda: env.get_template('syntaxerror.html'), r'''(?sm)
File ".*?syntaxerror.html", line 4, in (template|<module>)
\{% endif %\}.*?
(jinja2\.exceptions\.)?TemplateSyntaxError: Encountered unknown tag 'endif'. Jinja was looking for the following tags: 'endfor' or 'else'. The innermost block that needs to be closed is 'for'.
''')
def test_regular_syntax_error(self):
def test():
raise TemplateSyntaxError('wtf', 42)
self.assert_traceback_matches(test, r'''
File ".*debug.pyc?", line \d+, in test
raise TemplateSyntaxError\('wtf', 42\)
(jinja2\.exceptions\.)?TemplateSyntaxError: wtf
line 42''')
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(DebugTestCase))
return suite
| 32.770492 | 192 | 0.645823 |
db8d35379e4529229eb7ca7faee49f951d3c2bf8 | 1,284 | py | Python | Examples/expn.py | mystor/garter | be79188d67923ca18c97f4d3cfcfdf89d18b4057 | [
"PSF-2.0"
] | null | null | null | Examples/expn.py | mystor/garter | be79188d67923ca18c97f4d3cfcfdf89d18b4057 | [
"PSF-2.0"
] | null | null | null | Examples/expn.py | mystor/garter | be79188d67923ca18c97f4d3cfcfdf89d18b4057 | [
"PSF-2.0"
] | null | null | null | # Evaluate an input expression e of the form t { + t } where
# t is of the form p { * p } and p is of the form ( e ) or an explicit real constant
# For example, the value of 1.5 + 3.0 * ( 0.5 + 1.5 ) followed by a terminator such as "halt" is 7.5
def expn(tokens : [str]) -> float:
v := term(tokens)
while tokens[0] == "+":
tokens.pop(0)
v += term(tokens)
return v
def term(tokens : [str]) -> float:
v := primary(tokens)
while tokens[0] == "*":
tokens.pop(0)
v *= primary(tokens)
return v
def primary(tokens : [str]) -> float:
v := 0.0
if tokens[0] == "(":
tokens.pop(0)
v = expn(tokens)
assert tokens[0] == ")"
else:
v = float(tokens[0])
tokens.pop(0)
return v
print("This is a recursive evaluator for infix expressions.")
print("The only operators accepted are + and *, as well as parentheses.")
print("Besides that, everything has to be separated by blanks.")
print("Terminate an expression by anything handy, such as a single period.")
print("Be gentle, this is just a demo of Garter mutual recursion.")
print()
while True:
print()
print("Give an arithmetic expression, with operators surrounded by blanks")
tokens := input().split(" ")
answer := expn(tokens)
print("Answer is:", answer)
| 29.181818 | 79 | 0.602025 |
cc1944ef5a8a47c1c22f98e7fdc304dce7cbbfc2 | 11,349 | py | Python | mpf/tests/test_Blinkenlight.py | haggispinball/mpf_fathom_fast | 1035c3fb90bb279de84cc3ed4aa1e1df38d0d563 | [
"MIT"
] | 163 | 2015-01-25T02:19:50.000Z | 2022-03-26T12:00:28.000Z | mpf/tests/test_Blinkenlight.py | haggispinball/mpf_fathom_fast | 1035c3fb90bb279de84cc3ed4aa1e1df38d0d563 | [
"MIT"
] | 1,086 | 2015-03-23T19:53:17.000Z | 2022-03-24T20:46:11.000Z | mpf/tests/test_Blinkenlight.py | haggispinball/mpf_fathom_fast | 1035c3fb90bb279de84cc3ed4aa1e1df38d0d563 | [
"MIT"
] | 148 | 2015-01-28T02:31:39.000Z | 2022-03-22T13:54:01.000Z | from mpf.tests.MpfGameTestCase import MpfGameTestCase
from mpf.core.rgb_color import RGBColor
class TestBlinkenlight(MpfGameTestCase):
def get_config_file(self):
return 'config.yaml'
def get_platform(self):
return 'smart_virtual'
def get_machine_path(self):
return 'tests/machine_files/blinkenlight/'
def test_add_color_to_one_blinkenlight(self):
self.post_event('start_mode1')
self.assertPlaceholderEvaluates(0, 'device.blinkenlights.my_blinkenlight1.num_colors')
self.assertPlaceholderEvaluates(0, 'device.blinkenlights.my_blinkenlight2.num_colors')
self.post_event('add_color_to_first_blinkenlight')
self.assertPlaceholderEvaluates(1, 'device.blinkenlights.my_blinkenlight1.num_colors')
self.assertPlaceholderEvaluates(0, 'device.blinkenlights.my_blinkenlight2.num_colors')
def test_add_color_to_two_blinkenlights(self):
self.post_event('start_mode1')
self.assertPlaceholderEvaluates(0, 'device.blinkenlights.my_blinkenlight1.num_colors')
self.assertPlaceholderEvaluates(0, 'device.blinkenlights.my_blinkenlight2.num_colors')
self.post_event('add_color_to_all_blinkenlights')
self.assertPlaceholderEvaluates(1, 'device.blinkenlights.my_blinkenlight1.num_colors')
self.assertPlaceholderEvaluates(1, 'device.blinkenlights.my_blinkenlight2.num_colors')
def test_remove_color_from_one_blinkenlight(self):
self.post_event('start_mode1')
self.post_event('add_color_to_second_blinkenlight')
self.assertPlaceholderEvaluates(0, 'device.blinkenlights.my_blinkenlight1.num_colors')
self.assertPlaceholderEvaluates(1, 'device.blinkenlights.my_blinkenlight2.num_colors')
self.post_event('remove_color_from_first_blinkenlight')
self.assertPlaceholderEvaluates(0, 'device.blinkenlights.my_blinkenlight1.num_colors')
self.assertPlaceholderEvaluates(1, 'device.blinkenlights.my_blinkenlight2.num_colors')
self.post_event('remove_color_from_second_blinkenlight')
self.assertPlaceholderEvaluates(0, 'device.blinkenlights.my_blinkenlight1.num_colors')
self.assertPlaceholderEvaluates(0, 'device.blinkenlights.my_blinkenlight2.num_colors')
def test_remove_all_colors_from_all_blinkenlights(self):
self.post_event('start_mode1')
self.post_event('start_mode2')
self.post_event('add_color_to_first_blinkenlight')
self.post_event('add_color_to_second_blinkenlight')
self.post_event('add_color_to_third_blinkenlight')
self.post_event('add_color_to_all_blinkenlights')
self.post_event('mode2_add_color_to_first_blinkenlight')
self.assertPlaceholderEvaluates(3, 'device.blinkenlights.my_blinkenlight1.num_colors')
self.assertPlaceholderEvaluates(2, 'device.blinkenlights.my_blinkenlight2.num_colors')
self.assertPlaceholderEvaluates(1, 'device.blinkenlights.my_blinkenlight3.num_colors')
self.post_event('remove_all_colors_from_all_blinkenlights')
self.assertPlaceholderEvaluates(0, 'device.blinkenlights.my_blinkenlight1.num_colors')
self.assertPlaceholderEvaluates(0, 'device.blinkenlights.my_blinkenlight2.num_colors')
self.assertPlaceholderEvaluates(0, 'device.blinkenlights.my_blinkenlight3.num_colors')
def test_remove_mode_colors_from_one_blinkenlight(self):
self.post_event('start_mode1')
self.post_event('start_mode2')
self.post_event('add_color_to_first_blinkenlight')
self.post_event('mode2_add_color_to_first_blinkenlight')
self.post_event('mode2_add_color2_to_first_blinkenlight')
self.assertPlaceholderEvaluates(3, 'device.blinkenlights.my_blinkenlight1.num_colors')
self.post_event('mode2_remove_mode_colors_from_first_blinkenlight')
self.assertPlaceholderEvaluates(1, 'device.blinkenlights.my_blinkenlight1.num_colors')
def test_remove_mode_colors_when_mode_ends(self):
self.post_event('start_mode1')
self.post_event('start_mode2')
self.post_event('add_color_to_first_blinkenlight')
self.post_event('add_color_to_second_blinkenlight')
self.post_event('mode2_add_color_to_first_blinkenlight')
self.post_event('mode2_add_color2_to_first_blinkenlight')
self.post_event('mode2_add_color_to_second_blinkenlight')
self.assertPlaceholderEvaluates(3, 'device.blinkenlights.my_blinkenlight1.num_colors')
self.assertPlaceholderEvaluates(2, 'device.blinkenlights.my_blinkenlight2.num_colors')
self.post_event('stop_mode2')
self.assertPlaceholderEvaluates(1, 'device.blinkenlights.my_blinkenlight1.num_colors')
self.assertPlaceholderEvaluates(1, 'device.blinkenlights.my_blinkenlight2.num_colors')
def test_flashing_cycle(self):
self.post_event('start_mode1')
self.post_event('add_color_to_all_blinkenlights')
self.post_event('add_color_to_first_blinkenlight')
self.post_event('add_color_to_second_blinkenlight')
self.post_event('add_color_to_third_blinkenlight')
self.assertPlaceholderEvaluates(2, 'device.blinkenlights.my_blinkenlight1.num_colors')
self.assertPlaceholderEvaluates(2, 'device.blinkenlights.my_blinkenlight2.num_colors')
self.assertPlaceholderEvaluates(1, 'device.blinkenlights.my_blinkenlight3.num_colors')
blinkenlight1 = self.machine.blinkenlights['my_blinkenlight1']
blinkenlight2 = self.machine.blinkenlights['my_blinkenlight2']
blinkenlight3 = self.machine.blinkenlights['my_blinkenlight3']
blue = RGBColor('blue')
green = RGBColor('green')
red = RGBColor('red')
yellow = RGBColor('yellow')
purple = RGBColor('purple')
cyan = RGBColor('cyan')
off = RGBColor('off')
self.assertEqual(blue, blinkenlight1.light._color)
self.assertEqual(green, blinkenlight2.light._color)
self.assertEqual(purple, blinkenlight3.light._color)
self.advance_time_and_run(1)
self.assertEqual(red, blinkenlight1.light._color)
self.assertEqual(green, blinkenlight2.light._color)
self.assertEqual(off, blinkenlight3.light._color)
self.advance_time_and_run(1)
self.assertEqual(off, blinkenlight1.light._color)
self.assertEqual(yellow, blinkenlight2.light._color)
self.assertEqual(purple, blinkenlight3.light._color)
self.advance_time_and_run(1)
self.assertEqual(blue, blinkenlight1.light._color)
self.assertEqual(yellow, blinkenlight2.light._color)
self.assertEqual(off, blinkenlight3.light._color)
self.advance_time_and_run(1)
self.assertEqual(red, blinkenlight1.light._color)
self.assertEqual(green, blinkenlight2.light._color)
self.assertEqual(purple, blinkenlight3.light._color)
self.advance_time_and_run(1)
self.assertEqual(off, blinkenlight1.light._color)
self.assertEqual(green, blinkenlight2.light._color)
self.assertEqual(off, blinkenlight3.light._color)
self.advance_time_and_run(1)
self.assertEqual(blue, blinkenlight1.light._color)
self.assertEqual(yellow, blinkenlight2.light._color)
self.assertEqual(purple, blinkenlight3.light._color)
self.post_event("remove_color_from_third_blinkenlight")
self.advance_time_and_run(1)
self.assertEqual(red, blinkenlight1.light._color)
self.assertEqual(yellow, blinkenlight2.light._color)
self.assertEqual(off, blinkenlight3.light._color)
self.advance_time_and_run(1)
self.assertEqual(off, blinkenlight1.light._color)
self.assertEqual(green, blinkenlight2.light._color)
self.assertEqual(off, blinkenlight3.light._color)
self.advance_time_and_run(1)
self.assertEqual(blue, blinkenlight1.light._color)
self.assertEqual(green, blinkenlight2.light._color)
self.assertEqual(off, blinkenlight3.light._color)
def test_priority_order(self):
self.post_event('start_mode1')
self.post_event('start_mode2')
blinkenlight1 = self.machine.blinkenlights['my_blinkenlight1']
red = RGBColor('red')
orange = RGBColor('orange')
off = RGBColor('off')
self.post_event('add_color_to_first_blinkenlight')
self.post_event('mode2_add_color_to_first_blinkenlight')
self.assertEqual(orange, blinkenlight1.light._color)
self.advance_time_and_run(1)
self.assertEqual(red, blinkenlight1.light._color)
self.advance_time_and_run(1)
self.assertEqual(off, blinkenlight1.light._color)
self.post_event('remove_all_colors_from_all_blinkenlights')
self.advance_time_and_run(1)
self.post_event('mode2_add_color_to_first_blinkenlight')
self.post_event('add_color_to_first_blinkenlight')
self.assertEqual(orange, blinkenlight1.light._color)
self.advance_time_and_run(1)
self.assertEqual(red, blinkenlight1.light._color)
self.advance_time_and_run(1)
self.assertEqual(off, blinkenlight1.light._color)
def test_replace_existing_color(self):
self.post_event('start_mode1')
blinkenlight1 = self.machine.blinkenlights['my_blinkenlight1']
blue = RGBColor('blue')
darkred = RGBColor('darkred')
off = RGBColor('off')
self.post_event('add_color_to_all_blinkenlights')
self.assertPlaceholderEvaluates(1, 'device.blinkenlights.my_blinkenlight1.num_colors')
self.assertEqual(blue, blinkenlight1.light._color)
self.advance_time_and_run(1.5)
self.assertEqual(off, blinkenlight1.light._color)
self.advance_time_and_run(1.5)
self.assertEqual(blue, blinkenlight1.light._color)
self.advance_time_and_run(1.5)
self.assertEqual(off, blinkenlight1.light._color)
self.advance_time_and_run(1.5)
self.post_event('add_color_to_first_blinkenlight_with_duplicate_key')
self.assertPlaceholderEvaluates(1, 'device.blinkenlights.my_blinkenlight1.num_colors')
self.assertEqual(darkred, blinkenlight1.light._color)
self.advance_time_and_run(1.5)
self.assertEqual(off, blinkenlight1.light._color)
self.advance_time_and_run(1.5)
self.assertEqual(darkred, blinkenlight1.light._color)
self.advance_time_and_run(1.5)
self.assertEqual(off, blinkenlight1.light._color)
def test_show_with_tokens(self):
self.post_event('start_mode2')
blinkenlight = self.machine.blinkenlights['my_blinkenlight2']
gray = RGBColor('gray')
off = RGBColor('off')
self.assertPlaceholderEvaluates(0, 'device.blinkenlights.my_blinkenlight2.num_colors')
self.post_event('play_blinkenlight_token_show')
self.assertPlaceholderEvaluates(1, 'device.blinkenlights.my_blinkenlight2.num_colors')
self.assertEqual(gray, blinkenlight.light._color)
self.advance_time_and_run(2)
self.assertEqual(off, blinkenlight.light._color)
self.advance_time_and_run(2)
self.assertEqual(gray, blinkenlight.light._color)
self.advance_time_and_run(2)
self.assertEqual(off, blinkenlight.light._color)
| 50.44 | 94 | 0.737686 |
2f0fc428ee557dd390c19b519605376b7ac8c0ed | 1,578 | py | Python | setup.py | ifrn-oficial/suap-duf | 264b4fc0a8d240aea9b4c960d530ce308bdd854d | [
"MIT"
] | null | null | null | setup.py | ifrn-oficial/suap-duf | 264b4fc0a8d240aea9b4c960d530ce308bdd854d | [
"MIT"
] | null | null | null | setup.py | ifrn-oficial/suap-duf | 264b4fc0a8d240aea9b4c960d530ce308bdd854d | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import os
from setuptools import find_packages, setup
from url_filter import __author__, __version__
def read(fname):
with open(os.path.join(os.path.dirname(__file__), fname), "rb") as fid:
return fid.read().decode("utf-8")
authors = read("AUTHORS.rst")
history = read("HISTORY.rst").replace(".. :changelog:", "")
licence = read("LICENSE.rst")
readme = read("README.rst")
req = read("requirements.txt").splitlines()
dev_req = read("requirements-dev.txt").splitlines()[2:]
requirements = req + ["setuptools"]
test_requirements = req + dev_req
setup(
name="suap-duf",
version=__version__,
author=__author__,
description="Django URL Filter provides a safe way to filter data via human-friendly URLs.",
long_description="\n\n".join([readme, history, authors, licence]),
url="https://github.com/miki725/django-url-filter",
license="MIT",
packages=find_packages(exclude=["test_project*", "tests*"]),
install_requires=requirements,
test_suite="tests",
tests_require=test_requirements,
keywords=" ".join(["django django-rest-framework"]),
classifiers=[
"Intended Audience :: Developers",
"Natural Language :: English",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.10",
"Development Status :: 2 - Pre-Alpha",
],
)
| 30.941176 | 96 | 0.665399 |
b25c708bf987b6e987a8f142b6e6d3e6b5fb931a | 4,083 | py | Python | cde-root/local/munk/data/phd/phd/ptgraph/ptgraphviz.py | NirBenTalLab/proorigami-cde-package | 273379075830a9b94d3f2884661a54f853777ff6 | [
"MIT"
] | null | null | null | cde-root/local/munk/data/phd/phd/ptgraph/ptgraphviz.py | NirBenTalLab/proorigami-cde-package | 273379075830a9b94d3f2884661a54f853777ff6 | [
"MIT"
] | null | null | null | cde-root/local/munk/data/phd/phd/ptgraph/ptgraphviz.py | NirBenTalLab/proorigami-cde-package | 273379075830a9b94d3f2884661a54f853777ff6 | [
"MIT"
] | null | null | null | ###############################################################################
#
# ptgraphviz.py - write H-bond or topology graphs with GraphViz
#
# File: ptgraphviz.py
# Author: Alex Stivala
# Created: July 2007
#
# $Id: ptgraphviz.py 1142 2008-02-25 06:21:06Z astivala $
#
# Output H-bond or topology graph using GraphViz (for the -h, -n and -d options
# of ptgraph2).
#
# pydot: python interface to GraphViz dot language (0.9.10)
# http://dkbza.org/pydot.html
#
# which in turn requires pyparsing
# http://pyparsing.sourceforge.net/
#
# For the -n and -d options, GraphViz itself is also required (2.12)
# http://www.research.att.com/sw/tools/graphviz
#
###############################################################################
import pydot
###from ptgraph2 import PTGraph2
from ptnode import *
#-----------------------------------------------------------------------------
#
# Function definitions
#
#-----------------------------------------------------------------------------
def make_graphviz_graph(ptgraph):
"""
Build a pydot (interface to GraphViz) representation of
this PTGraph2 object. This can then be used to create the image
representation with write_gif() etc. methods of the pydot Dot object
returned.
Parameters: ptgraph - PTGraph2 object to draw graph from
Uses PTGraph2 member data: (readonly)
nodelist - list of nodes
stride_struct - StrideStruct representing the STRIDE output
Note also uses (Readonly) member data in each node
Return value:
pydot Dot object representing the graph for GraphViz
Precondition: node_list is sorted (by start res seq ascending);
this is done by build_graph_from_stride() before calling.
"""
g = pydot.Dot()
g.set_label(ptgraph.secstruct.pdb_header)
g.set_overlap('scale') #overlap removal
for nodelist in ptgraph.iter_chains():
# add every node and an edge from each node to next in sequence
prevnode = None
for node in nodelist:
dot_node = pydot.Node(node.nodeid)
dot_node.set_fixedsize(True) # don't expand width to fit label
if isinstance(node, PTNodeStrand):
dot_node.set_shape('rect')
dot_node.set_width(float(node.get_span()) / 5.0) # inches
if node.get_sheet_id() != None:
dot_node.set_label(node.nodeid +
" (sheet " + node.get_sheet_id()+")")
elif isinstance(node, PTNodeHelix):
dot_node.set_shape('ellipse')
dot_node.set_width(float(node.get_span()) / 5.0) # inches
g.add_node(dot_node)
if prevnode != None:
dot_edge = pydot.Edge(prevnode.nodeid, node.nodeid)
g.add_edge(dot_edge)
prevnode = node
if ptgraph.use_hbonds:
# add an edge for every hydrogen bond between structural elements
for node in nodelist:
for (other_node, rn1_unused, rn2_unused, dist) in \
node.get_hbond_list():
g.add_edge(pydot.Edge(node.nodeid, other_node.nodeid,
style='dotted',
label=str(dist),
len=dist/6.0, # inches
weight=100.0
)
)
else:
# add an edge for bridges between beta strands
for node in [ node for node in nodelist \
if isinstance(node, PTNodeStrand) ]:
for (other_node, bdir, side) in node.get_bridge_list():
g.add_edge(pydot.Edge(node.nodeid, other_node.nodeid,
style='dotted',
label=bdir + ' ' + side,
)
)
return g
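# Typical usage (a sketch; assumes a populated PTGraph2 object and an
# installed GraphViz, as noted in the header comments above):
#
#     g = make_graphviz_graph(ptgraph)
#     g.write_gif('topology.gif')   # or any other pydot write_*() method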
| 36.783784 | 79 | 0.512123 |
a0b6da379dfcc3d32a1e150d7b35ebaee7a5d7bc | 976 | py | Python | custom_user/models.py | manoj-makkuboy/feed-ninja | 339bac6f24f9bf968e5cd0d83eb71e36d7c525cd | [
"MIT"
] | null | null | null | custom_user/models.py | manoj-makkuboy/feed-ninja | 339bac6f24f9bf968e5cd0d83eb71e36d7c525cd | [
"MIT"
] | null | null | null | custom_user/models.py | manoj-makkuboy/feed-ninja | 339bac6f24f9bf968e5cd0d83eb71e36d7c525cd | [
"MIT"
] | null | null | null | from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.dispatch import receiver
import logging
class Category(models.Model):
category_title = models.CharField(max_length=20, blank=False)
category_summary = models.CharField(max_length=60, blank=True)
class CustomUser(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE)
location = models.CharField(max_length=50, blank=True)
interested_categories = models.ForeignKey(Category,
on_delete=models.CASCADE,
null=False, default=1)
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
if created:
CustomUser.objects.create(user=instance)
@receiver(post_save, sender=User)
def save_user_profile(sender, instance, **kwargs):
instance.customuser.save()
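# With the two signal handlers above, creating a User transparently creates
# and saves its CustomUser profile, e.g. (sketch; "alice" is illustrative):
#
#     user = User.objects.create(username="alice")
#     profile = user.customuser   # created automatically by create_user_profile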
| 28.705882 | 71 | 0.701844 |
a10141dc432ceec566a8d56272d1d5ae5b41a623 | 505 | py | Python | basis_func_list.py | jun63x/sparse_coding | 335416cf6522e2436316cf4264f7955e31aa3711 | [
"MIT"
] | null | null | null | basis_func_list.py | jun63x/sparse_coding | 335416cf6522e2436316cf4264f7955e31aa3711 | [
"MIT"
] | null | null | null | basis_func_list.py | jun63x/sparse_coding | 335416cf6522e2436316cf4264f7955e31aa3711 | [
"MIT"
] | null | null | null | import matplotlib.pyplot as plt
from tqdm import tqdm
class BasisFuncList:
def __init__(self, basis_func_list):
self.basis_func_list = basis_func_list
def save(self, img_dir):
for i, func in tqdm(
enumerate(self.basis_func_list),
total=len(self.basis_func_list),
ncols=50
):
plt.imsave(
img_dir+'/patch'+str(i).zfill(3)+'.png',
func,
cmap='binary'
)
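# A minimal usage sketch (an assumption for illustration: the random 8x8
# "patches" below stand in for learned basis functions, and the target
# directory must already exist):
if __name__ == "__main__":
    import numpy as np
    demo = BasisFuncList([np.random.rand(8, 8) for _ in range(4)])
    demo.save(".")  # writes ./patch000.png ... ./patch003.png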
| 25.25 | 56 | 0.532673 |
c4d324195d1d2c87b73274e1da3dd09d9ab9f0e6 | 2,357 | py | Python | Numpy/code1.py | ALDOR99/Python | a76f37bb3e573cd3fdcfc19f4f73494cafa9140e | [
"MIT"
] | 2 | 2021-05-27T19:13:02.000Z | 2021-06-02T13:26:35.000Z | Numpy/code1.py | ALDOR99/Python | a76f37bb3e573cd3fdcfc19f4f73494cafa9140e | [
"MIT"
] | null | null | null | Numpy/code1.py | ALDOR99/Python | a76f37bb3e573cd3fdcfc19f4f73494cafa9140e | [
"MIT"
] | 1 | 2021-06-07T18:17:35.000Z | 2021-06-07T18:17:35.000Z | # -*- coding: utf-8 -*-
"""
Created on Sat Jun 5 11:02:44 2021
@author: ali_d
"""
import numpy as np
liste = [0,1,2,3,4,5,6,7,8,9]
#numpy array
num_array = np.array([0,1,2,3,4,5,6,7,8,9])
print(num_array)
print(type(num_array))
#%% Finding the number of dimensions of an array: ndarray.ndim
num_array1 = np.array([0,1,2,3,4,5,6,7,8,9])
print(num_array1.ndim)
# array dimensionality
num_array2 = np.array([[0,1,2,3,4,5,6,7,8,9]])
print(num_array2)
print(num_array2.ndim)
#%% Finding the number of rows and columns of an array: ndarray.shape
num_array1 = np.array([0,1,2,3,4,5,6,7,8,9])
print(num_array1.shape,num_array1.ndim)
# a 1-dimensional array (vector) consisting of 10 elements
#%%
num_array2 = np.array([[0,1,2,3,4,5,6,7,8,9]])
print(num_array2.ndim,num_array2.shape)
# a 2-dimensional array (matrix) with 1 row and 10 columns
#%%
# Changing the number of rows and columns of an array: ndarray.reshape()
num_array = np.array([0,1,2,3,4,5,6,7,8,9])
print(num_array)
print("-"*10)
print(num_array.reshape(10,1))
print()
print(num_array.reshape(5,2))
print()
print(num_array.reshape(2,5))
print()
# the original matrix and the reshaped matrix must have the same number of elements.
#%% np.arange()
# np.arange(start, stop, step)
a = np.arange(0,10,2)
print(a)
b = np.arange(0,20)
print(b)
print()
c = np.arange(10)
print(c)
#%% Selecting elements of the array
numpy_array = np.array([0,1,2,3,4,5,6,7,8,9])
numpy_array = numpy_array.reshape(5,2)
print(numpy_array)
print("-")
#%%
# selecting any element of the array
# 1st row
first_row = numpy_array[0]
print(first_row)
print("-")
second_row = numpy_array[1]
print(second_row)
print()
#%%
# take the 1st and 2nd rows
first_and_second_rows = numpy_array[0:2]
print(first_and_second_rows)
print("-")
print()
#%%
## Selecting any column of the array
first_column = numpy_array[:,0]
print(first_column)
print()
#%%
# 1st and 2nd columns
first_and_second_column = numpy_array[:,0:2]
print(first_and_second_column)
print()
print(first_column)
print()
#%% Selecting a single element of the array
selecting_item = numpy_array[3,1]
print(selecting_item)
selecting_item = numpy_array[0,0]
print(selecting_item)
selecting_item = numpy_array[0,1]
print(selecting_item)
selecting_item = numpy_array[1,1]
print(selecting_item)
selecting_item = numpy_array[2,1]
print(selecting_item)
| 12.087179 | 79 | 0.700042 |
120c8da586e55147941ec7fb9ca5e646f672ef52 | 3,782 | py | Python | trueconsensus/fastchain/ecdsa_sig.py | truechain/py-trueconsensus | 6f792fcc4247db492a68b8277dba3c6db7b2490d | [
"Apache-2.0"
] | 53 | 2018-05-25T16:34:04.000Z | 2019-09-02T07:56:00.000Z | trueconsensus/fastchain/ecdsa_sig.py | truechain/py-trueconsensus | 6f792fcc4247db492a68b8277dba3c6db7b2490d | [
"Apache-2.0"
] | null | null | null | trueconsensus/fastchain/ecdsa_sig.py | truechain/py-trueconsensus | 6f792fcc4247db492a68b8277dba3c6db7b2490d | [
"Apache-2.0"
] | 29 | 2018-06-04T02:08:34.000Z | 2022-01-25T08:46:13.000Z | import os
import sys
import hmac
import ecdsa
import hashlib
# import pickle
# import logging
# from trueconsensus.fastchain.config import KD
from fastchain.config import KD, \
_logger
C = ecdsa.NIST256p
SIG_SIZE = 256
HASH_SIZE = 32
# MAC_SIZE = 32
ASYMM_FILE_FORMATS = {
"sign": ".pem",
"verify": ".pub"
}
ASYMM_FUNC_MAP = {
"sign": ecdsa.SigningKey.from_pem,
"verify": ecdsa.VerifyingKey.from_pem
}
def generate_keys():
sk = ecdsa.SigningKey.generate(curve=C)
vk = sk.get_verifying_key()
return sk, vk
def sign(key, message):
return key.sign(message)
def sign_proto_key(key, message):
# import pdb; pdb.set_trace()
key = bytes(key.to_string())
h = hmac.new(key, message, hashlib.sha256)
return h.digest()
def verify(key, sig, message):
try:
rc = key.verify(sig, message)
return rc
except Exception as E:
_logger.error(E)
return False
def verify_proto_key(key, dig1, message):
# import pdb; pdb.set_trace()
key = bytes(key.to_string())
h = hmac.new(key, message, hashlib.sha256)
# return hmac.compare_digest(dig1, h.digest())
return dig1 == h.digest()
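# A minimal round-trip sketch of the helpers above (illustrative only).
# Messages must be bytes, and the HMAC-style helpers are symmetric, so the
# same key object is deliberately used on both sides:
def _demo_roundtrip():
    sk, vk = generate_keys()
    msg = b"hello"
    assert verify(vk, sign(sk, msg), msg)
    assert verify_proto_key(sk, sign_proto_key(sk, msg), msg)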
def get_key_path(i, ktype):
try:
KEY_NAME = ktype + str(i) + ASYMM_FILE_FORMATS[ktype]
# _logger.info("KPATH - FETCH - %s -- %s" % (ktype, KEY_NAME))
return os.path.join(KD, KEY_NAME)
# generic catch
except Exception as E:
_logger.error(E)
return
# raise
def write_new_keys(n):
if not os.path.isdir(KD):
os.mkdir(KD)
for i in range(0, n):
s_file = open(get_key_path(i, "sign"), 'wb')
v_file = open(get_key_path(i, "verify"), 'wb')
sk, vk = generate_keys()
s_file.write(sk.to_pem())
v_file.write(vk.to_pem())
s_file.close()
v_file.close()
msg = "written new keys to %s" % KD
_logger.info(msg)
return msg
def get_asymm_key(i, ktype=None):
kpath = get_key_path(i, ktype)
found_error = False
try:
if not os.path.isfile(kpath):
result = "File Not Found: %s" % kpath
_logger.error(result)
found_error = True
else:
key_pem = open(kpath, 'rb').read()
result = ASYMM_FUNC_MAP[ktype](key_pem)
except Exception as result:
found_error = True
if found_error:
_logger.error("%s" % result)
return
return result
#
# def get_verifying_key(i):
# kpath = get_key_path(i, "verify")
# if not os.path.isfile(kpath):
# _logger.error("can't find key file: ", kpath)
# sys.exit(0)
# key_pub = open(kpath, 'rb').read()
# return ecdsa.VerifyingKey.from_pem(key_pem)
def read_keys_test(n, validate=False):
if not os.path.isdir(KD):
_logger.error("Can't find key directory")
sys.exit(0)
s = []
v = []
for i in range(0, n):
secret_key = open(get_key_path(i, "sign"), "rb").read()
public_key = open(get_key_path(i, "verify"), "rb").read()
s.append(ecdsa.SigningKey.from_pem(secret_key))
v.append(ecdsa.VerifyingKey.from_pem(public_key))
if validate:
assert validate_keypair(i, s[-1], v[-1]) is True
return s, v
def validate_keypair(i, s, v):
msg = "message" + str(i)
sig = s.sign(msg)
ver = v.verify(sig, msg)
if not ver:
_logger.error("Error while reading keypair: " % i)
return False
_logger.info("Round succeeded for keypair: " % i)
return True
if __name__ == "__main__":
N = int(sys.argv[1])
write_new_keys(N)
s, v = read_keys_test(N)
| 25.213333 | 71 | 0.582232 |
605be762ee0d2a89875af7ec468ccb0c519fb409 | 1,552 | py | Python | Day3/main.py | d4yvie/advent_of_code_2021 | bb74b9dc7b23c5ba338dcd8d2e698c7ea4e34c59 | [
"Apache-2.0"
] | null | null | null | Day3/main.py | d4yvie/advent_of_code_2021 | bb74b9dc7b23c5ba338dcd8d2e698c7ea4e34c59 | [
"Apache-2.0"
] | null | null | null | Day3/main.py | d4yvie/advent_of_code_2021 | bb74b9dc7b23c5ba338dcd8d2e698c7ea4e34c59 | [
"Apache-2.0"
] | null | null | null | from file_util import read_lines
from print_aoc import finish_task1, finish_task2
from typing import Callable
def count_amount_of_ones(nums: list[str] | set[str], number_len=12) -> list[int]:
return [sum(num[i] == "1" for num in nums) for i in range(number_len)]
def task1(lines: list[str], number_len=12) -> int:
amount_of_ones = count_amount_of_ones(lines, number_len)
gamma = sum(2 ** (number_len - digit_index - 1) * (amount_of_ones[digit_index] > len(lines) / 2)
for digit_index in range(number_len))
epsilon = (2 ** number_len + ~gamma)  # bitwise complement of gamma within number_len bits
return gamma * epsilon
def to_decimal(num: str) -> int:
return int(num, 2)
def calc_rating(lines: list[str], comparator: Callable[[float, float], bool], number_len=12) -> int:
numbers_left = set(lines)
for i in range(number_len):
amount_of_ones = count_amount_of_ones(numbers_left, number_len)  # pass number_len so bit widths other than 12 work
criteria_bit = int(comparator(len(numbers_left) / 2, amount_of_ones[i]))
numbers_left -= set(number_left for number_left in numbers_left if int(number_left[i]) != criteria_bit)
if len(numbers_left) == 1:
(number_left,) = numbers_left
return to_decimal(number_left)
def task2(lines: list[str], number_len=12) -> int:
return calc_rating(lines, float.__le__, number_len) * calc_rating(lines, float.__gt__, number_len)
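# A hedged sanity check against the well-known sample from the puzzle
# statement (the sample values are an assumption, not read from this repo's
# input files; call this manually to exercise task1/task2 on 5-bit data):
def sample_sanity_check():
    sample = ["00100", "11110", "10110", "10111", "10101", "01111",
              "00111", "11100", "10000", "11001", "00010", "01010"]
    assert task1(sample, 5) == 198   # gamma = 22, epsilon = 9
    assert task2(sample, 5) == 230   # oxygen = 23, CO2 scrubber = 10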
if __name__ == '__main__':
lines = read_lines()
number_len = len(lines[0])
finish_task1(3, task1(lines, number_len), 1307354)
finish_task2(3, task2(lines, number_len), 482500)
| 36.952381 | 111 | 0.690077 |
fcc2474000a70f1195d234a13567f965e1a3c3c8 | 19,412 | py | Python | sdk/python/lib/pulumi/runtime/rpc.py | Charliekenney23/pulumi | 504478c64109864345f6bec4e8e6de5d59eb0d1c | [
"Apache-2.0"
] | null | null | null | sdk/python/lib/pulumi/runtime/rpc.py | Charliekenney23/pulumi | 504478c64109864345f6bec4e8e6de5d59eb0d1c | [
"Apache-2.0"
] | null | null | null | sdk/python/lib/pulumi/runtime/rpc.py | Charliekenney23/pulumi | 504478c64109864345f6bec4e8e6de5d59eb0d1c | [
"Apache-2.0"
] | null | null | null | # Copyright 2016-2018, Pulumi Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Support for serializing and deserializing properties going into or flowing
out of RPC calls.
"""
import asyncio
import functools
import inspect
from typing import List, Any, Callable, Dict, Optional, TYPE_CHECKING
from google.protobuf import struct_pb2
from . import known_types, settings
from .. import log
if TYPE_CHECKING:
from ..output import Inputs, Input
from ..resource import Resource
UNKNOWN = "04da6b54-80e4-46f7-96ec-b56ff0331ba9"
"""If a value is None, we serialize as UNKNOWN, which tells the engine that it may be computed later."""
_special_sig_key = "4dabf18193072939515e22adb298388d"
"""_special_sig_key is sometimes used to encode type identity inside of a map. See pkg/resource/properties.go."""
_special_asset_sig = "c44067f5952c0a294b673a41bacd8c17"
"""special_asset_sig is a randomly assigned hash used to identify assets in maps. See pkg/resource/asset.go."""
_special_archive_sig = "0def7320c3a5731c473e5ecbe6d01bc7"
"""special_archive_sig is a randomly assigned hash used to identify assets in maps. See pkg/resource/asset.go."""
_special_secret_sig = "1b47061264138c4ac30d75fd1eb44270"
"""special_secret_sig is a randomly assigned hash used to identify secrets in maps. See pkg/resource/properties.go"""
async def serialize_properties(inputs: 'Inputs',
property_deps: Dict[str, List['Resource']],
input_transformer: Optional[Callable[[str], str]] = None) -> struct_pb2.Struct:
"""
Serializes an arbitrary Input bag into a Protobuf structure, keeping track of the list
of dependent resources in the `deps` list. Serializing properties is inherently async
because it awaits any futures that are contained transitively within the input bag.
"""
struct = struct_pb2.Struct()
for k, v in inputs.items():
deps = []
result = await serialize_property(v, deps, input_transformer)
# We treat properties that serialize to None as if they don't exist.
if result is not None:
# While serializing to a pb struct, we must "translate" all key names to be what the engine is going to
# expect. Resources provide the "transform" function for doing this.
translated_name = k
if input_transformer is not None:
translated_name = input_transformer(k)
log.debug(f"top-level input property translated: {k} -> {translated_name}")
# pylint: disable=unsupported-assignment-operation
struct[translated_name] = result
property_deps[translated_name] = deps
return struct
# pylint: disable=too-many-return-statements, too-many-branches
async def serialize_property(value: 'Input[Any]',
deps: List['Resource'],
input_transformer: Optional[Callable[[str], str]] = None) -> Any:
"""
Serializes a single Input into a form suitable for remoting to the engine, awaiting
any futures required to do so.
"""
if isinstance(value, list):
props = []
for elem in value:
props.append(await serialize_property(elem, deps, input_transformer))
return props
if known_types.is_custom_resource(value):
deps.append(value)
return await serialize_property(value.id, deps, input_transformer)
if known_types.is_asset(value):
# Serializing an asset requires the use of a magical signature key, since otherwise it would look
# like any old weakly typed object/map when received by the other side of the RPC boundary.
obj = {
_special_sig_key: _special_asset_sig
}
if hasattr(value, "path"):
obj["path"] = await serialize_property(value.path, deps, input_transformer)
elif hasattr(value, "text"):
obj["text"] = await serialize_property(value.text, deps, input_transformer)
elif hasattr(value, "uri"):
obj["uri"] = await serialize_property(value.uri, deps, input_transformer)
else:
raise AssertionError(f"unknown asset type: {value}")
return obj
if known_types.is_archive(value):
# Serializing an archive requires the use of a magical signature key, since otherwise it would look
# like any old weakly typed object/map when received by the other side of the RPC boundary.
obj = {
_special_sig_key: _special_archive_sig
}
if hasattr(value, "assets"):
obj["assets"] = await serialize_property(value.assets, deps, input_transformer)
elif hasattr(value, "path"):
obj["path"] = await serialize_property(value.path, deps, input_transformer)
elif hasattr(value, "uri"):
obj["uri"] = await serialize_property(value.uri, deps, input_transformer)
else:
raise AssertionError(f"unknown archive type: {value}")
return obj
if inspect.isawaitable(value):
# Coroutines and Futures are both awaitable. Coroutines need to be scheduled.
# asyncio.ensure_future returns futures verbatim while converting coroutines into
# futures by arranging for the execution on the event loop.
#
# The returned future can then be awaited to yield a value, which we'll continue
# serializing.
future_return = await asyncio.ensure_future(value)
return await serialize_property(future_return, deps, input_transformer)
if known_types.is_output(value):
deps.extend(value.resources())
# When serializing an Output, we will either serialize it as its resolved value or the "unknown value"
# sentinel. We will do the former for all outputs created directly by user code (such outputs always
# resolve isKnown to true) and for any resource outputs that were resolved with known values.
is_known = await value._is_known
is_secret = await value._is_secret
value = await serialize_property(value.future(), deps, input_transformer)
if not is_known:
return UNKNOWN
if is_secret and await settings.monitor_supports_secrets():
# Serializing an output with a secret value requires the use of a magical signature key,
# which the engine detects.
return {
_special_sig_key: _special_secret_sig,
"value": value
}
return value
if isinstance(value, dict):
obj = {}
for k, v in value.items():
transformed_key = k
if input_transformer is not None:
transformed_key = input_transformer(k)
log.debug(f"transforming input property: {k} -> {transformed_key}")
obj[transformed_key] = await serialize_property(v, deps, input_transformer)
return obj
return value
# pylint: disable=too-many-return-statements
def deserialize_properties(props_struct: struct_pb2.Struct) -> Any:
"""
Deserializes a protobuf `struct_pb2.Struct` into a Python dictionary containing normal
Python types.
"""
# Check out this link for details on what sort of types Protobuf is going to generate:
# https://developers.google.com/protocol-buffers/docs/reference/python-generated
#
# We assume that we are deserializing properties that we got from a Resource RPC endpoint,
# which has type `Struct` in our gRPC proto definition.
if _special_sig_key in props_struct:
if props_struct[_special_sig_key] == _special_asset_sig:
# This is an asset. Re-hydrate this object into an Asset.
if "path" in props_struct:
return known_types.new_file_asset(props_struct["path"])
if "text" in props_struct:
return known_types.new_string_asset(props_struct["text"])
if "uri" in props_struct:
return known_types.new_remote_asset(props_struct["uri"])
raise AssertionError("Invalid asset encountered when unmarshaling resource property")
elif props_struct[_special_sig_key] == _special_archive_sig:
# This is an archive. Re-hydrate this object into an Archive.
if "assets" in props_struct:
return known_types.new_asset_archive(deserialize_property(props_struct["assets"]))
if "path" in props_struct:
return known_types.new_file_archive(props_struct["path"])
if "uri" in props_struct:
return known_types.new_remote_archive(props_struct["uri"])
elif props_struct[_special_sig_key] == _special_secret_sig:
return {
_special_sig_key: _special_secret_sig,
"value": deserialize_property(props_struct["value"])
}
raise AssertionError("Unrecognized signature when unmarshaling resource property")
# Struct is duck-typed like a dictionary, so we can iterate over it in the normal ways. Note
# that if the struct had any secret properties, we push the secretness of the object up to us
# since we can only set secret outputs on top level properties.
output = {}
had_secret = False
for k, v in list(props_struct.items()):
value = deserialize_property(v)
# We treat values that deserialize to "None" as if they don't exist.
if value is not None:
if isinstance(value, dict) and _special_sig_key in value and value[_special_sig_key] == _special_secret_sig:
had_secret = True
value = value["value"]
output[k] = value
if had_secret:
return {
_special_sig_key: _special_secret_sig,
"value": output
}
return output
def deserialize_property(value: Any) -> Any:
"""
Deserializes a single protobuf value (either `Struct` or `ListValue`) into idiomatic
Python values.
"""
if value == UNKNOWN:
return None
# ListValues are projected to lists
if isinstance(value, struct_pb2.ListValue):
return [deserialize_property(v) for v in value]
# Structs are projected to dictionaries
if isinstance(value, struct_pb2.Struct):
return deserialize_properties(value)
# Everything else is identity projected.
return value
Resolver = Callable[[Any, bool, bool, Optional[Exception]], None]
"""
A Resolver is a function that takes four arguments:
1. A value, which represents the "resolved" value of a particular output (from the engine)
2. A boolean "is_known", which represents whether or not this value is known to have a particular value at this
point in time (not always true for previews), and
3. A boolean "is_secret", which represents whether or not this value is contains secret data, and
4. An exception, which (if provided) is an exception that occured when attempting to create the resource to whom
this resolver belongs.
If argument 4 is not none, this output is considered to be abnormally resolved and attempts to await its future will
result in the exception being re-thrown.
"""
def transfer_properties(res: 'Resource', props: 'Inputs') -> Dict[str, Resolver]:
resolvers: Dict[str, Resolver] = {}
for name in props.keys():
if name in ["id", "urn"]:
# these properties are handled specially elsewhere.
continue
resolve_value = asyncio.Future()
resolve_is_known = asyncio.Future()
resolve_is_secret = asyncio.Future()
def do_resolve(value_fut: asyncio.Future,
known_fut: asyncio.Future,
secret_fut: asyncio.Future,
value: Any,
is_known: bool,
is_secret: bool,
failed: Optional[Exception]):
# Was an exception provided? If so, this is an abnormal (exceptional) resolution. Resolve the futures
# using set_exception so that any attempts to wait for their resolution will also fail.
if failed is not None:
value_fut.set_exception(failed)
known_fut.set_exception(failed)
secret_fut.set_exception(failed)
else:
value_fut.set_result(value)
known_fut.set_result(is_known)
secret_fut.set_result(is_secret)
# Important to note here is that the resolver's future is assigned to the resource object using the
# name before translation. When properties are returned from the engine, we must first translate the name
# using res.translate_output_property and then use *that* name to index into the resolvers table.
log.debug(f"adding resolver {name}")
resolvers[name] = functools.partial(do_resolve, resolve_value, resolve_is_known, resolve_is_secret)
res.__setattr__(name, known_types.new_output({res}, resolve_value, resolve_is_known, resolve_is_secret))
return resolvers
def translate_output_properties(res: 'Resource', output: Any) -> Any:
"""
Recursively rewrite keys of objects returned by the engine to conform with a naming
convention specified by the resource's implementation of `translate_output_property`.
If output is a `dict`, every key is translated using `translate_output_property` while every value is transformed
by recursing.
If output is a `list`, every value is recursively transformed.
If output is a primitive (i.e. not a dict or list), the value is returned without modification.
"""
if isinstance(output, dict):
return {res.translate_output_property(k): translate_output_properties(res, v) for k, v in output.items()}
if isinstance(output, list):
return [translate_output_properties(res, v) for v in output]
return output
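# For example (a hypothetical mapping): if res.translate_output_property maps
# "autoScalingGroupName" to "auto_scaling_group_name", the engine payload
# {"autoScalingGroupName": {"minSize": 1}} is rewritten key-by-key to
# {"auto_scaling_group_name": {"min_size": 1}}, with nested dicts and lists
# transformed recursively and primitives returned unchanged.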
async def resolve_outputs(res: 'Resource', props: 'Inputs', outputs: struct_pb2.Struct, resolvers: Dict[str, Resolver]):
# Produce a combined set of property states, starting with inputs and then applying
# outputs. If the same property exists in the inputs and outputs states, the output wins.
all_properties = {}
for key, value in deserialize_properties(outputs).items():
# Outputs coming from the provider are NOT translated. Do so here.
translated_key = res.translate_output_property(key)
translated_value = translate_output_properties(res, value)
log.debug(f"incoming output property translated: {key} -> {translated_key}")
log.debug(f"incoming output value translated: {value} -> {translated_value}")
all_properties[translated_key] = translated_value
for key, value in props.items():
if key not in all_properties:
# input prop the engine didn't give us a final value for. Just use the value passed into the resource
# after round-tripping it through serialization. We do the round-tripping primarily to ensure that
# Output values are handled properly w.r.t. unknowns.
input_prop = await serialize_property(value, [])
if input_prop is None:
continue
all_properties[key] = deserialize_property(input_prop)
for key, value in all_properties.items():
# Skip "id" and "urn", since we handle those specially.
if key in ["id", "urn"]:
continue
# Otherwise, unmarshal the value, and store it on the resource object.
log.debug(f"looking for resolver using translated name {key}")
resolve = resolvers.get(key)
if resolve is None:
# engine returned a property that was not in our initial property-map. This can happen
# for outputs that were registered through direct calls to 'registerOutputs'. We do
# *not* want to do anything with these returned properties. First, the component
# resources that were calling 'registerOutputs' will have already assigned these fields
# directly on them themselves. Second, if we were to try to assign here we would have
# an incredibly bad race condition for two reasons:
#
# 1. This call to 'resolveProperties' happens asynchronously at some point far after
# the resource was constructed. So the user will have been able to observe the
# initial value up until we get to this point.
#
# 2. The component resource will have often assigned a value of some arbitrary type
# (say, a 'string'). If we overwrite this with an `Output<string>` we'll be changing
# the type at some non-deterministic point in the future.
continue
# Secrets are passed back as objects with our special signature key set to _special_secret_sig; in this case
# we have to unwrap the object to get the actual underlying value.
is_secret = False
if isinstance(value, dict) and _special_sig_key in value and value[_special_sig_key] == _special_secret_sig:
is_secret = True
value = value["value"]
# If either we are performing a real deployment, or this is a stable property value, we
# can propagate its final value. Otherwise, it must be undefined, since we don't know
# if it's final.
if not settings.is_dry_run():
# normal 'pulumi up'. resolve the output with the value we got back
# from the engine. That output can always run its .apply calls.
resolve(value, True, is_secret, None)
else:
# We're previewing. If the engine was able to give us a reasonable value back,
# then use it. Otherwise, inform the Output that the value isn't known.
resolve(value, value is not None, is_secret, None)
# `allProps` may not have contained a value for every resolver: for example, optional outputs may not be present.
# We will resolve all of these values as `None`, and will mark the value as known if we are not running a
# preview.
for key, resolve in resolvers.items():
if key not in all_properties:
resolve(None, not settings.is_dry_run(), False, None)
def resolve_outputs_due_to_exception(resolvers: Dict[str, Resolver], exn: Exception):
"""
Resolves all outputs with resolvers exceptionally, using the given exception as the reason why the resolver has
failed to resolve.
:param resolvers: Resolvers associated with a resource's outputs.
:param exn: The exception that occurred when trying (and failing) to create this resource.
"""
for key, resolve in resolvers.items():
log.debug(f"sending exception to resolver for {key}")
resolve(None, False, False, exn)
| 47.002421 | 120 | 0.673913 |
4fe0799f108d9a214677aa3c00d866bae92850a5 | 689 | py | Python | treecorr/_version.py | zchvsre/TreeCorr | 825dc0a9d4754f9d98ebcf9c26dee9597915d650 | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | treecorr/_version.py | zchvsre/TreeCorr | 825dc0a9d4754f9d98ebcf9c26dee9597915d650 | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | treecorr/_version.py | zchvsre/TreeCorr | 825dc0a9d4754f9d98ebcf9c26dee9597915d650 | [
"BSD-2-Clause-FreeBSD"
] | 1 | 2020-12-14T16:23:33.000Z | 2020-12-14T16:23:33.000Z | # Copyright (c) 2003-2019 by Mike Jarvis
#
# TreeCorr is free software: redistribution and use in source and binary forms,
# with or without modification, are permitted provided that the following
# conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions, and the disclaimer given in the accompanying LICENSE
# file.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the disclaimer given in the documentation
# and/or other materials provided with the distribution.
__version__ = '4.0.10'
__version_info__ = tuple(map(int, __version__.split('.')))
| 43.0625 | 80 | 0.766328 |
077912bddf7ed90e426df99852acb1c2d75b5f80 | 15,798 | py | Python | oss_server/explorer/update_db.py | pallet-io/Pallet-API | fd3b8ed4c8063d9010ed53ace6ec068c983ae22e | [
"BSD-3-Clause"
] | 10 | 2018-01-30T06:21:43.000Z | 2022-01-03T12:18:07.000Z | oss_server/explorer/update_db.py | pallet-io/Pallet-API | fd3b8ed4c8063d9010ed53ace6ec068c983ae22e | [
"BSD-3-Clause"
] | 4 | 2018-02-21T02:30:37.000Z | 2018-03-04T05:20:30.000Z | oss_server/explorer/update_db.py | pallet-io/Pallet-API | fd3b8ed4c8063d9010ed53ace6ec068c983ae22e | [
"BSD-3-Clause"
] | 2 | 2018-01-31T04:22:52.000Z | 2018-03-10T14:04:35.000Z | import logging
import os
from time import sleep
from django.conf import settings
from django.core.exceptions import MultipleObjectsReturned
from django.db import transaction
from django.db import connections
from blocktools.block import Block
from blocktools.blocktools import *
from .models import Address, Datadir, Tx, TxIn, TxOut, Orphan, Witness, OrphanTxIn
from .models import Block as BlockDb
logger = logging.getLogger(__name__)
BLK_DIR = settings.BTC_DIR + '/' + BLK_PATH[settings.NET]
# Orpahn block
# { hash_of_parent_block : list_of_orphan_block_object }
orphan_block = {}
# Orphan txin
# { hash_of_parent_tx : list_of(orphan_txin_object, outindex) }
orphan_txin = {}
MAX_BULK_CREATE_SIZE = 5000
MAX_THREAD = 90
def close_old_connections():
for conn in connections.all():
conn.close_if_unusable_or_obsolete()
class BlockDbException(Exception):
"""Exception for block db contents."""
class BlockUpdateDaemon(object):
def __init__(self, sleep_time=1, blk_dir=BLK_DIR, batch_num=50):
self.blk_dir = blk_dir
self.batch_num = batch_num
self.sleep_time = sleep_time
self.updater = BlockDBUpdater(self.blk_dir, self.batch_num)
def run_forever(self):
self._load_orphan_state()
while True:
try:
self.updater.update()
except Exception as e:
logger.exception('Error when updater.update(): {}'.format(e))
close_old_connections()
sleep(self.sleep_time)
def _load_orphan_state(self):
""" To ensure data integrity. """
for orphan in Orphan.objects.all():
orphan_list = orphan_block.setdefault(orphan.hash, [])
try:
orphan_db = BlockDb.objects.get(hash=orphan.orphan_hash)
if orphan_db.prev_block or orphan_db.height or orphan_db.chain_work:
# Because of the atomic transaction, the Orphan table is updated whenever the BlockDb of an orphan block is updated, so these fields should still be None here.
logger.error('Error, it must be None.')
orphan_list.append(orphan_db)
except Exception:
logger.exception('Error when load orphan state: {}'.format(orphan.orphan_hash))
for orphan in OrphanTxIn.objects.all():
orphan_list = orphan_txin.setdefault(orphan.hash, [])
try:
tx_db = Tx.objects.get(txid=orphan.txid)
txin_db = tx_db.tx_ins.get(position=orphan.position)
if txin_db.txout:
logger.error('Error, it must be None.')
orphan_list.append((txin_db, orphan.out_index))
except Exception:
logger.exception('Error when load orphan txin state: {} {}'.format(orphan.txid, orphan.position))
class BlockDBUpdater(object):
def __init__(self, blk_dir=BLK_DIR, batch_num=50):
self.blk_dir = blk_dir
self.batch_num = batch_num
self.blocks_hash_cache = []
def update(self):
# Read the blk file (possibly from last read position) as many as possible, and check if
# there's a following blk file to read. If so, continue to parse the file.
file_path, file_offset = self._get_blk_file_info()
self._parse_raw_block_to_db(file_path, file_offset)
self._get_next_blk_file_info()
def _update_chain_related_info(self):
self.blocks_hash_cache = []
self._update_block_in_longest()
self._update_txout_spent()
def _update_block_in_longest(self):
# Get the block with biggest chain_work.
current_block = BlockDb.objects.latest('chain_work')
main_branch_next_block = None
# From `current_block`, move backward and set `in_longest` of previous blocks
# to 1 until we meet the fork point.
while current_block and not current_block.in_longest:
self.blocks_hash_cache.append(current_block.hash)
current_block.in_longest = 1
current_block.save()
main_branch_next_block = current_block
current_block = current_block.prev_block
# From the fork point, set `in_longest` of blocks in the other fork to 0.
while current_block and main_branch_next_block:
self.blocks_hash_cache.append(current_block.hash)
next_blocks = (current_block.next_blocks
.filter(in_longest=1).exclude(hash=main_branch_next_block.hash))
if next_blocks.count() > 1:
raise BlockDbException('Discovered more than one fork with `in_longest=1`.')
elif next_blocks.count() == 0:
next_block = None
else:
next_block = next_blocks[0]
next_block.in_longest = 0
next_block.save()
current_block = next_block
def _update_txout_spent(self):
txout_with_txin = TxOut.objects.filter(tx_in__tx__block__hash__in=self.blocks_hash_cache)
txout_with_txin.filter(tx_in__tx__block__in_longest=0).update(spent=False)
txout_with_txin.filter(tx_in__tx__block__in_longest=1).update(spent=True)
def _parse_raw_block_to_db(self, file_path, file_offset):
try:
with open(file_path, 'rb') as blockchain:
blockchain.seek(file_offset)
blocks = []
for raw_block in self._parse_raw_block(blockchain):
blocks.append(raw_block)
# Use atomic transaction for every `batch_num` blocks.
if len(blocks) == self.batch_num:
self._batch_update_blocks(blockchain, blocks)
blocks = []
self._batch_update_blocks(blockchain, blocks)
except Exception, e:
logger.error('Failed to read blk files: ' + file_path)
def _batch_update_blocks(self, blockchain, block_batch):
try:
with transaction.atomic():
self._store_blocks(blockchain, block_batch)
self._update_chain_related_info()
except Exception, e:
logger.error('Failed to store blocks: ' + str(e) + '\n' +
str(blockchain) + '\n' +
str(block_batch))
def _parse_raw_block(self, blockchain_file):
continue_parsing = True
while continue_parsing:
# Keep current file offset.
file_offset = blockchain_file.tell()
block = Block(blockchain_file)
continue_parsing = block.continueParsing
if continue_parsing:
yield block
else:
# Revert to previous file offset if we didn't parse anything.
blockchain_file.seek(file_offset)
def _store_blocks(self, blockchain, blocks):
# Write blocks and update blk file offset in the database. Blk file offset is retrieved
# by calling tell() on `blockchain`. Transaction is used to ensure data integrity.
for block in blocks:
self._raw_block_to_db(block)
datadir = self._get_or_create_datadir()
datadir.blkfile_offset = blockchain.tell()
datadir.save()
self._store_orphan_state()
def _raw_block_to_db(self, block):
blockheader = block.blockHeader
block_db = BlockDb(
hash=blockheader.blockHash,
merkle_root=hashStr(blockheader.merkleHash),
time=blockheader.time,
bits=blockheader.bits,
nonce=blockheader.nonce,
version=blockheader.version,
size=block.blocksize,
in_longest=0, # `in_longest` set to 0 first and update later
tx_count=len(block.Txs)
)
prev_hash = hashStr(blockheader.previousHash)
prev_block_list = BlockDb.objects.filter(hash=prev_hash)
if len(prev_block_list) > 1:
logger.error("More than 1 parent block. {}".format(block_db.hash))
elif len(prev_block_list) == 1 and (prev_block_list[0].prev_block or prev_block_list[0].height == 0):
prev_block = prev_block_list[0]
block_db.prev_block = prev_block
block_db.chain_work = prev_block.chain_work + blockheader.blockWork
block_db.height = prev_block.height + 1
else:
if prev_hash == '0000000000000000000000000000000000000000000000000000000000000000':
# Genesis block
block_db.chain_work = blockheader.blockWork
block_db.height = 0
else:
# Orpahn block
orphan_list = orphan_block.setdefault(prev_hash, [])
orphan_list.append(block_db)
logger.info("Orphan!! Miss parent block: {}".format(prev_hash))
block_db.save()
logger.info("Block saved: {}".format(block_db.hash))
if block_db.prev_block:
# Try to update orphan block
self._orphan_to_db(block_db)
self._raw_txs_to_db(block.Txs, block_db)
# Try to save as many orphan blocks as possible via BFS.
@staticmethod
def _orphan_to_db(parent_db):
block_stack = [parent_db]
while block_stack:
parent_db = block_stack.pop()
orphan_list = orphan_block.get(parent_db.hash, [])
for orphan_db in orphan_list:
try:
orphan_db.prev_block = parent_db
orphan_db.height = parent_db.height + 1
orphan_db.chain_work = parent_db.chain_work + 1
orphan_db.save()
logger.info("Orphan block update: {}".format(orphan_db.hash))
tx_list = Tx.objects.filter(block=orphan_db)
tx_list.update(valid=True)
for tx_db in tx_list:
TxOut.objects.filter(tx=tx_db).update(valid=True)
orphan_block[parent_db.hash].remove(orphan_db)
if not orphan_block[parent_db.hash]:
del orphan_block[parent_db.hash]
except Exception as e:
logger.error("Fail to fetch orphan block.{}".format(e))
block_stack.append(orphan_db)
def _raw_txs_to_db(self, tx_list, block_db):
for tx in tx_list:
tx_db = Tx.objects.create(hash=tx.txHash,
block=block_db,
version=tx.version,
locktime=tx.lockTime,
size=tx.size,
time=block_db.time,
valid=True if block_db.prev_block else False,
txid=tx.txID
)
for i in range(tx.outCount):
self._raw_txout_to_db(tx.outputs[i], i, tx_db)
for i, txin in enumerate(tx.inputs):
self._raw_txin_to_db(txin, i, tx_db)
def _raw_txin_to_db(self, txin, position, tx_db):
txin_db = TxIn(
tx=tx_db,
scriptsig=txin.scriptSig,
sequence=txin.seqNo,
position=position
)
if hashStr(txin.prevhash) == '0000000000000000000000000000000000000000000000000000000000000000':
txin_db.txout = None
else:
try:
prev_tx = Tx.objects.get(txid=hashStr(txin.prevhash))
txin_db.txout = prev_tx.tx_outs.get(position=txin.txOutId)
except (Tx.DoesNotExist, TxOut.DoesNotExist):  # tuple required: "except A, B" bound B as the variable instead of catching it
orphan_list = orphan_txin.setdefault(hashStr(txin.prevhash), [])
orphan_list.append((txin_db, txin.txOutId))
except MultipleObjectsReturned:
block = tx_db.block
while block:
try:
prev_tx = Tx.objects.get(txid=hashStr(txin.prevhash), block=block)
txin_db.txout = prev_tx.tx_outs.get(position=txin.txOutId)
break
except Tx.DoesNotExist:
block = block.prev_block
txin_db.save()
if txin.witnessCount > 0:
for witness in txin.witnesses:
self._raw_witness_to_db(witness, txin_db)
def _raw_txout_to_db(self, txout, position, tx_db):
address = Address.objects.get_or_create(address=txout.address)[0]
txout_db = TxOut.objects.create(tx=tx_db,
value=txout.value,
position=position,
scriptpubkey=txout.pubkey,
address=address,
valid=tx_db.valid
)
orphan_list = orphan_txin.get(tx_db.txid, [])
for txin_db, index in orphan_list:
if index == position:
txin_db.txout = txout_db
txin_db.save()
logger.info('Orphan txin id {} updated!'.format(txin_db.tx.txid))
orphan_txin[tx_db.txid].remove((txin_db, index))
if not orphan_txin[tx_db.txid]:
del orphan_txin[tx_db.txid]
break
@staticmethod
def _raw_witness_to_db(witness, txin_db):
Witness.objects.create(txin=txin_db, scriptsig=witness.scriptSig)
def _get_or_create_datadir(self):
"""
Get the last created Datadir object from self.blk_dir, or create one if no Datadir exists.
We get the last created Datadir to make sure we are updating with latest blk file.
"""
datadir_all = Datadir.objects.filter(dirname=self.blk_dir).order_by('-create_time')
if datadir_all.count() == 0:
datadir = Datadir(dirname=self.blk_dir, blkfile_number=0, blkfile_offset=0)
datadir.save()
return datadir
else:
return datadir_all[0]
def _get_blk_file_info(self):
datadir = self._get_or_create_datadir()
file_name = 'blk{:05d}.dat'.format(datadir.blkfile_number)
file_path = os.path.join(datadir.dirname, file_name)
file_offset = datadir.blkfile_offset
return file_path, file_offset
def _get_next_blk_file_info(self):
"""Return next blk file path according to the latest Datadir or None if next blk file does not exist."""
datadir = self._get_or_create_datadir()
file_name = 'blk{:05d}.dat'.format(datadir.blkfile_number + 1)
file_path = os.path.join(datadir.dirname, file_name)
if os.path.exists(file_path):
Datadir(dirname=self.blk_dir,
blkfile_number=(datadir.blkfile_number + 1),
blkfile_offset=0).save()
return file_path
else:
return None
def _store_orphan_state(self):
""" To ensure data integrity. """
# Clean
Orphan.objects.all().delete()
OrphanTxIn.objects.all().delete()
# Store
for parent, orphan_list in orphan_block.iteritems():
for orphan in orphan_list:
Orphan.objects.create(hash=parent,
orphan_hash=orphan.hash
)
for parent, orphan_list in orphan_txin.iteritems():
for orphan, out_index in orphan_list:
OrphanTxIn.objects.create(hash=parent,
txid=orphan.tx.txid,
position=orphan.position,
out_index=out_index
)
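# Typical entry point (a sketch; assumes Django settings are configured and
# that the blk directory derived from settings.BTC_DIR exists on disk):
#
#     daemon = BlockUpdateDaemon(sleep_time=1)
#     daemon.run_forever()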
| 40.611825 | 113 | 0.587923 |
aa8d99fd0f8d64fbd9853e64fbbf79dfb90fbc75 | 3,709 | py | Python | user_interface.py | cashmy/SweepstakesProject | a4fb2582e7e3beaf9b0f69746b6784ab1d10b0b6 | [
"MIT"
] | null | null | null | user_interface.py | cashmy/SweepstakesProject | a4fb2582e7e3beaf9b0f69746b6784ab1d10b0b6 | [
"MIT"
] | null | null | null | user_interface.py | cashmy/SweepstakesProject | a4fb2582e7e3beaf9b0f69746b6784ab1d10b0b6 | [
"MIT"
] | null | null | null | import os
def simulation_main_menu():
"""Main menu prompting user to choose an option"""
validate_user_selection = (False, None)
while validate_user_selection[0] is False:
print("\n\t\t-Sweepstake Processing-")
print("\tPress -1- to create a sweepstakes")
print("\tPress -2- to create and assign a contestant")
print("\tPress -3- to list all contestants")
print("\tPress -4- to generate a sweepstakes winner")
print("\tPress -5- to remove a sweepstakes")
print("\tPress -6- to exit")
user_input = try_parse_int(input())
validate_user_selection = validate_main_menu(user_input)
if validate_user_selection[0] is False:
print("Not a valid selection try again")
return validate_user_selection[1]
def validate_main_menu(user_input):
"""Validation function that checks if 'user_input' argument is an int 1-4. No errors."""
switcher = {
1: (True, 1),
2: (True, 2),
3: (True, 3),
4: (True, 4),
5: (True, 5),
6: (True, 6)
}
return switcher.get(user_input, (False, None))
def display_welcome():
"""Initial method asking user if they'll make a purchase. No errors."""
print("\nWelcome to the Sweepstakes backend management. \n")
user_response = bool_prompt("Would you like to work on your sweepstakes database? (y/n): ")
if user_response:
return True
else:
print("Please sign off and conserve your precious technological resources.")
return False
def output_text(text):
"""User input method that will print to console any string passed in as an argument"""
print(text)
def clear_console():
"""Used for clearing out the console. No errors."""
os.system('cls' if os.name == 'nt' else "clear")
def bool_prompt(text):
"""Validates a 'y' or 'yes' string and returns a True value. No errors."""
switcher = {
"y": True,
"yes": True
}
user_input = input(text).lower()
return switcher.get(user_input, False)
def try_parse_int(value):
"""Attempts to parse a string into an integer, returns 0 if unable to parse. No errors."""
try:
return int(value)
except:
return 0
def contest_info_print():
# TODO: print out information here
pass
def sweeps_type_menu():
validate_user_selection = (False, None)
while validate_user_selection[0] is False:
print("\n\t\t-Sweepstakes MANAGER Type-")
print("\tPress -1- to create a sweepstakes Stack")
print("\tPress -2- to create a sweepstakes Queue")
print("\tPress -3- to exit")
user_input = try_parse_int(input())
validate_user_selection = validate_sweeps_type_menu(user_input)
if validate_user_selection[0] is False:
print("Not a valid selection try again")
return validate_user_selection[1]
def validate_sweeps_type_menu(user_input):
"""Validation function that checks if 'user_input' argument is an int 1-3. No errors."""
switcher = {
1: (True, 1),
2: (True, 2),
3: (True, 3),
}
return switcher.get(user_input, (False, None))
def enter_sweepstake_name():
return input('Please enter the name of this sweepstake: ')
def get_contestant_info():
first_name = input("Enter contestant's first name: ")
last_name = input("Enter contestant's last name: ")
email = input("Enter contestant's email: ")
return first_name, last_name, email
def get_manager_name():
name = input('\nPlease enter the name of the manager: ')
return name
def require_sweepstakes_error():
print('You must create a sweepstake first.')
return
| 30.401639 | 95 | 0.648423 |
aea6a4cf5bd0af23771a7ee6253fc232dfa586be | 9,869 | py | Python | arepytools/_utils.py | rpitonak/BioPAL | 08c57b3ba2d8e5a06105f930b1067c2541636bb6 | [
"MIT"
] | 35 | 2021-04-26T13:11:34.000Z | 2022-03-13T02:27:53.000Z | arepytools/_utils.py | rpitonak/BioPAL | 08c57b3ba2d8e5a06105f930b1067c2541636bb6 | [
"MIT"
] | 13 | 2021-04-28T20:56:53.000Z | 2022-03-25T09:15:26.000Z | arepytools/_utils.py | rpitonak/BioPAL | 08c57b3ba2d8e5a06105f930b1067c2541636bb6 | [
"MIT"
] | 12 | 2021-04-26T13:11:43.000Z | 2022-03-28T06:19:19.000Z | # SPDX-FileCopyrightText: Aresys S.r.l. <info@aresys.it>
# SPDX-License-Identifier: MIT
"""
Utilities
---------
"""
import os
from functools import wraps
import numpy as np
_ERROR_CHECK_TYPE = "Wrong {} type: {} != {}"
_ERROR_CHECK_SIZE_OF_NUMPY_ARRAY = "Vector {} has wrong size: {} != {}"
_ERROR_CHECK_DTYPE_OF_NUMPY_ARRAY = "Vector {} has wrong dtype: {} != {}"
_ERROR_CHECK_SHAPE_OF_NUMPY_ARRAY = "Vector {} has wrong shape: {} != {}"
_ERROR_CHECK_NDIM_OF_NUMPY_ARRAY = "Vector {} has wrong ndim: {} != {}"
_ERROR_CHECK_FIRST_AXIS_SIZE_OF_NUMPY_ARRAY = "Vector {} has wrong first axis size: {} != {}"
def check_type(data, dtype, name="", throw_on_error=True) -> bool:
"""
Checks that data is of type dtype; raises a RuntimeError otherwise.
:param data: input variable
:param dtype: expected type
:param name: name of the variable (for the error msg) [optional]
:param throw_on_error: whether to raise a RuntimeError on check failure [optional, default true]
"""
if not isinstance(data, dtype):
if throw_on_error:
raise RuntimeError(_ERROR_CHECK_TYPE.format(name, type(data), dtype))
return False
return True
def check_size_of_numpy_array(vector: np.ndarray, size, name="", throw_on_error=True) -> bool:
"""
Checks the size of vector, raises a RuntimeError if it is not as expected.
(It does not check if it is a numpy array)
:param vector: should be a np.ndarray
:param size: expected size
:param name: name of the variable (for the error msg) [optional]
:param throw_on_error: whether to raise a RuntimeError on check failure [optional, default true]
"""
if size != vector.size:
if throw_on_error:
raise RuntimeError(_ERROR_CHECK_SIZE_OF_NUMPY_ARRAY.format(name, vector.size, size))
return False
return True
def check_dtype_of_numpy_array(vector: np.ndarray, dtype, name="", throw_on_error=True) -> bool:
"""
Checks the data type (dtype) of vector, raises a RuntimeError if not as expected.
(It does not check if it is a numpy array)
:param vector: should be a np.ndarray
:param dtype: expected type of the data inside
:param name: name of the variable (for the error msg) [optional]
:param throw_on_error: whether to raise a RuntimeError on check failure [optional, default true]
"""
if dtype != vector.dtype:
if throw_on_error:
raise RuntimeError(_ERROR_CHECK_DTYPE_OF_NUMPY_ARRAY.format(name, vector.dtype, dtype))
return False
return True
def check_shape_of_numpy_array(vector: np.ndarray, shape, name="", throw_on_error=True) -> bool:
"""
Checks the shape of vector, raises a RuntimeError if not as expected.
(It does not check if it is a numpy array)
:param vector: should be a np.ndarray
:param shape: expected shape of the data inside
:param name: name of the variable (for the error msg) [optional]
:param throw_on_error: whether to raise a RuntimeError on check failure [optional, default true]
"""
if shape != vector.shape or len(shape) != len(vector.shape):
if throw_on_error:
raise RuntimeError(_ERROR_CHECK_SHAPE_OF_NUMPY_ARRAY.format(name, vector.shape, shape))
return False
return True
def check_ndim_of_numpy_array(vector: np.ndarray, ndim, name="", throw_on_error=True) -> bool:
"""
Checks the number of dimensions (ndim) of vector, raises a RuntimeError if not as expected.
(It does not check if it is a numpy array)
:param vector: should be a np.ndarray
:param ndim: expected ndim of the data
:param name: name of the variable (for the error msg) [optional]
:param throw_on_error: whether to raise a RuntimeError on check failure [optional, default true]
"""
if ndim != vector.ndim:
if throw_on_error:
raise RuntimeError(_ERROR_CHECK_NDIM_OF_NUMPY_ARRAY.format(name, vector.ndim, ndim))
return False
return True
def check_first_axis_size_of_numpy_array(vector: np.ndarray, first_axis_size, name="", throw_on_error=True) -> bool:
"""
Checks that the size of the first axis (first_axis_size) of vector is equal to first_axis_size,
raises a RuntimeError if not as expected.
Example: check first_axis_size=3 is ok for (3,) (3,1,1), but not for (1,3,1)
(It does not check if it is a numpy array)
:param vector: should be a np.ndarray
:param first_axis_size: expected first_axis_size of the data
:param name: name of the variable (for the error msg) [optional]
:param throw_on_error: whether to raise a RuntimeError on check failure [optional, default true]
"""
if first_axis_size != vector.shape[0]:
if throw_on_error:
raise RuntimeError(
_ERROR_CHECK_FIRST_AXIS_SIZE_OF_NUMPY_ARRAY.format(name, vector.shape[0], first_axis_size))
return False
return True
def input_data_to_numpy_array_with_checks(data, dtype=None, size=None, shape=None, ndim=None, first_axis_size=None,
name="") -> np.ndarray:
"""
It takes data and returns it converted to a numpy vector, after some checks.
If data is a np.ndarray it checks dtype, size, shape, ndim (if provided) and returns the data as it is.
If data is a list it converts it to a np.ndarray and then performs the same checks as stated above.
If data is a single object, the object is converted to a numpy array following shape (if given) and the
checks are then performed. When shape is not given, the resulting shape will
be (1,) if ndim is not provided, or (1, 1, ...) of length ndim if ndim was provided.
:param data: the data
:param dtype: expected type
:param size: expected size
:param ndim: expected ndim
:param shape: expected shape
:param first_axis_size: expected value for shape[0]
:param name: name for error msg
:return: a numpy vector with the expected dtype, size and shape; raises a RuntimeError otherwise
"""
if isinstance(data, list):
data_array = np.array(data)
elif isinstance(data, np.ndarray):
data_array = data
else:
if shape is not None:
data_array = np.full(shape, data)
else:
if ndim is not None:
shape_out = tuple(1 for _ in range(ndim))
else:
shape_out = (1,)
data_array = np.full(shape_out, data)
check_numpy_array(data_array, dtype=dtype, size=size, shape=shape, ndim=ndim, first_axis_size=first_axis_size,
name=name)
return data_array
def check_numpy_array(data, dtype=None, size=None, shape=None, ndim=None, first_axis_size=None, name="",
throw_on_error=True) -> bool:
"""
Verify that data is a numpy.ndarray and that it satisfies the specified constraints.
    If some constraint is not as expected, an exception is raised.
To disable this behavior and just get a boolean specify throw_on_error=False.
Specify a name to customize the error messages.
:param data: the data
:param dtype: expected type
:param size: expected size
:param ndim: expected ndim
    :param first_axis_size: expected value for shape[0]
:param shape: expected shape
:param name: name for error msg
:param throw_on_error: whether to raise or not an exception on check failure [optional, default true]
"""
correctness = check_type(data, np.ndarray, name=name, throw_on_error=throw_on_error)
if dtype is not None:
correctness = correctness and check_dtype_of_numpy_array(data, dtype, name=name, throw_on_error=throw_on_error)
if size is not None:
correctness = correctness and check_size_of_numpy_array(data, size, name=name, throw_on_error=throw_on_error)
if shape is not None:
correctness = correctness and check_shape_of_numpy_array(data, shape, name=name, throw_on_error=throw_on_error)
if ndim is not None:
correctness = correctness and check_ndim_of_numpy_array(data, ndim, name=name, throw_on_error=throw_on_error)
if first_axis_size is not None:
correctness = correctness and check_first_axis_size_of_numpy_array(data, first_axis_size, name=name,
throw_on_error=throw_on_error)
return correctness
def _size_from_shape(shape: tuple) -> int:
"""
    From a tuple representing the shape of a vector, it retrieves the size.
For example (5, 1, 8) --> 40
:param shape: the shape
:return: the corresponding size
"""
if not shape:
return 0
out = 1
for k in shape:
out *= k
return out
def check_exists(path_to_file, tag=None):
"""
    Check if the path exists; if not, raise a RuntimeError.
:param path_to_file: the path to be checked
:param tag: tag for error msg
"""
if not os.path.exists(path_to_file):
raise RuntimeError("{}: {} not found".format("Requested file" if tag is None else tag, path_to_file))
def check_file_exists(*positions):
"""
Parametrized decorator.
:param positions: position(s) of the arguments to be checked.
Example:
@check_file_exists(2, 4)
def fun(a, b , path_in, c, path_in2, path_out):
pass
when calling fun the path_in and path_in2 will be validated
the default arguments will be ignored
"""
for pos in positions:
if not isinstance(pos, int):
raise RuntimeError("Decorator parameter should be one or more int")
def decorator(fun):
@wraps(fun)
def decorated_fun(*args_fun):
for position in positions:
if position < len(args_fun):
check_exists(args_fun[position], "Argument in position {}".format(position))
return fun(*args_fun)
return decorated_fun
return decorator
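# --- Hedged usage sketch (added for illustration; not part of the original
# module). It exercises the helpers defined above, assuming the earlier part
# of this file (check_type and the _ERROR_CHECK_* message templates) is intact.
if __name__ == "__main__":
    # A scalar is broadcast to the requested shape before the checks run.
    vec = input_data_to_numpy_array_with_checks(1.5, shape=(2, 3), name="demo")
    assert vec.shape == (2, 3)

    # With throw_on_error=False a failed constraint returns False instead of raising.
    assert check_numpy_array(vec, ndim=3, name="demo", throw_on_error=False) is False

    # The decorator validates positional path arguments before the call runs.
    @check_file_exists(0)
    def read_path(path_in):
        return path_in

    read_path(__file__)  # passes: this file exists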
| 37.957692 | 119 | 0.67788 |
b83d75b106fbab1689aa91df34530d5455cd6c7c | 3,103 | py | Python | polynomial_regression.py | abrumador/Polynomial-regression | d3b8d8acaf436223505b934103a4306efc5c90e6 | [
"MIT"
] | null | null | null | polynomial_regression.py | abrumador/Polynomial-regression | d3b8d8acaf436223505b934103a4306efc5c90e6 | [
"MIT"
] | null | null | null | polynomial_regression.py | abrumador/Polynomial-regression | d3b8d8acaf436223505b934103a4306efc5c90e6 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Aug 18 17:20:20 2019
@author: ahmetkaanipekoren
"""
def main():
df = pd.read_csv("kc_house_data.csv")
data_x = df["sqft_living"]
data_y = df["price"]
    train_x, test_x, train_y, test_y = train_test_split(data_x, data_y, test_size=0.3, random_state=42)
train_x = normalization(train_x)
test_x = normalization(test_x)
train_y = normalization(train_y)
test_y = normalization(test_y)
return train_x, test_x, train_y , test_y
def normalization(df):
df = (df - df.min()) / (df.max() - df.min())
return df
def sorting_the_inputs(x,y):
x = np.array(x)
y = np.array(y)
mapped = zip(x,y)
mapped = sorted(mapped)
x,y = list(zip(*mapped))
x = np.array(x)
y = np.array(y)
return x,y
def weights(x_column,identity_matrix,ridge,y):
x_x = np.linalg.inv(np.add(np.dot(x_column.T,x_column), ridge * identity_matrix))
x_y = np.dot(x_column.T, y)
weight = np.dot(x_x,x_y)
return weight
def identity_and_input_matrix(x,degree):
identity_matrix = np.identity(degree + 1)
full_ones = np.ones(len(x))
x_column = np.array([x,full_ones]).T
temp = 2
while temp < degree + 1:
x_column = np.append(np.array(np.power(x,temp))[np.newaxis].T,x_column,axis=1)
temp += 1
return x_column,identity_matrix
def polynomial_regression(train_x,train_y):
complexity = 1
prediction_list = []
error_list = []
while complexity < 7 :
x_column,identity_matrix = identity_and_input_matrix(train_x,complexity)
weight = weights(x_column,identity_matrix,0.00001,train_y)
reverse_weights = weight[::-1]
y_prediction = reverse_weights[0]
i = 1
while i < len(reverse_weights):
y_prediction = y_prediction + (train_x**i) * reverse_weights[i]
i += 1
plt.scatter(train_x,train_y)
plt.plot(train_x,y_prediction)
plt.show()
error_list.append(mse(np.array(train_y),np.array(y_prediction)))
prediction_list.append(y_prediction)
complexity += 1
return error_list
def mse(y_true, y_prediction):
    # Note: despite the name, this returns the root mean square error
    # (RMSE), which is what rmse_plot() labels and reports.
    return np.sqrt(np.mean((y_true - y_prediction)**2))
def rmse_plot(error_list,color):
plt.title("Root Mean Square Error of Train and Test Data")
plt.xlabel("Complexity")
plt.ylabel("RMSE score")
plt.ylim([0,1])
plt.plot(range(0,len(error_list)),error_list,color= color)
if __name__=="__main__":
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
    train_x, test_x, train_y, test_y = main()
train_x,train_y = sorting_the_inputs(train_x,train_y)
error_list = polynomial_regression(train_x, train_y)
print(error_list)
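# --- Hedged self-check (added): weights() above implements the ridge normal
# equation w = (X^T X + ridge*I)^(-1) X^T y, so with ridge = 0 it should
# recover an exact linear fit. This block only runs after the demo above,
# which needs kc_house_data.csv on disk and binds the numpy import.
if __name__ == "__main__":
    X = np.array([[1., 2., 3.], [1., 1., 1.]]).T  # design matrix, columns [x, 1]
    y = np.array([2., 4., 6.])                    # y = 2x exactly
    w = weights(X, np.identity(2), 0.0, y)
    print("recovered [slope, intercept]:", w)     # approximately [2., 0.]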
| 21.108844 | 106 | 0.601998 |
aadeb28be276fe49bab36868f86acefc990cc991 | 202 | py | Python | examples/autopipegen/testp/touch1.py | microns-ariadne/ariadne-pipeline-test-harness | 73e749c48d1ff103fee2044833778e33c70be73b | [
"MIT"
] | 2 | 2016-03-15T15:07:06.000Z | 2016-05-10T23:01:05.000Z | examples/autopipegen/testp/touch1.py | microns-ariadne/ariadne-pipeline-test-harness | 73e749c48d1ff103fee2044833778e33c70be73b | [
"MIT"
] | null | null | null | examples/autopipegen/testp/touch1.py | microns-ariadne/ariadne-pipeline-test-harness | 73e749c48d1ff103fee2044833778e33c70be73b | [
"MIT"
] | null | null | null | # generated by shell2pipe on 2016-03-15
import os
import plugin
plugin_class='touch1'
class touch1(plugin.AriadneOp):
name='touch1'
def run(self, args):
os.system('touch tmp/yarr')
| 14.428571 | 39 | 0.688119 |
eddd8d8944b9738dea8f6befdd9c11cd2eceba09 | 1,309 | py | Python | tools/gn-utils/__copy.py | omega-graphics/autom | 1434152fe1fea89c3d77a347bbab28c301436183 | [
"BSD-3-Clause"
] | null | null | null | tools/gn-utils/__copy.py | omega-graphics/autom | 1434152fe1fea89c3d77a347bbab28c301436183 | [
"BSD-3-Clause"
] | null | null | null | tools/gn-utils/__copy.py | omega-graphics/autom | 1434152fe1fea89c3d77a347bbab28c301436183 | [
"BSD-3-Clause"
] | null | null | null | import sys
import os,glob,argparse,json
import shutil
parser = argparse.ArgumentParser()
parser.add_argument("--src",type=str)
parser.add_argument("--dest",type=str)
parser.add_argument("--calc-glob-outputs",action="store_const",const=True,default=False)
parser.add_argument("--glob",dest="glob",action="store_const",const=True,default=False)
args = parser.parse_args()
src = args.src
dest = args.dest
_glob:bool = args.glob
if args.calc_glob_outputs:
if os.path.isdir(dest) == False:
print(f"This is NOT a dir:{dest}")
exit(1)
results = glob.glob(src)
out_results:"list[str]" = []
for res in results:
_file_dest = f"{dest}/{os.path.basename(res)}"
out_results.append(_file_dest)
sys.stdout.write(json.dumps(out_results))
elif _glob:
if os.path.isdir(dest) == False:
print(f"This is NOT a dir:{dest}")
exit(1)
results = glob.glob(src)
for res in results:
_file_dest = f"{dest}/{os.path.basename(res)}"
if os.path.exists(_file_dest) == False:
shutil.copy2(res,dest)
# Fall back to a plain copy only when neither glob mode was requested.
elif os.path.isfile(src):
if os.path.exists(dest):
os.remove(dest)
shutil.copy2(src,dest)
elif os.path.isdir(src):
if os.path.exists(dest):
shutil.rmtree(dest)
shutil.copytree(src,dest,True)
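# --- Usage examples (added for illustration; flags as defined above) ---
#   python __copy.py --src 'out/*.h' --dest include --calc-glob-outputs
#       prints a JSON list of the would-be destination paths, copies nothing
#   python __copy.py --src 'out/*.h' --dest include --glob
#       copies every glob match into include/, skipping files already there
#   python __copy.py --src a.txt --dest b.txt
#       plain single-file copy (a directory --src is copied recursively)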
| 25.666667 | 88 | 0.655462 |
4ca4f06b5219de2ab9ca51bc08222031fc8f0e0a | 5,301 | py | Python | rlkit/samplers/data_collector/path_collector.py | Asap7772/railrl_evalsawyer | baba8ce634d32a48c7dfe4dc03b123e18e96e0a3 | [
"MIT"
] | null | null | null | rlkit/samplers/data_collector/path_collector.py | Asap7772/railrl_evalsawyer | baba8ce634d32a48c7dfe4dc03b123e18e96e0a3 | [
"MIT"
] | null | null | null | rlkit/samplers/data_collector/path_collector.py | Asap7772/railrl_evalsawyer | baba8ce634d32a48c7dfe4dc03b123e18e96e0a3 | [
"MIT"
] | null | null | null | from collections import deque, OrderedDict
from functools import partial
import numpy as np
from rlkit.envs.vae_wrappers import VAEWrappedEnv
from rlkit.misc.eval_util import create_stats_ordered_dict
from rlkit.samplers.data_collector.base import PathCollector
from rlkit.samplers.rollout_functions import rollout
class MdpPathCollector(PathCollector):
def __init__(
self,
env,
policy,
max_num_epoch_paths_saved=None,
render=False,
render_kwargs=None,
rollout_fn=rollout,
save_env_in_snapshot=True,
):
if render_kwargs is None:
render_kwargs = {}
self._env = env
self._policy = policy
self._max_num_epoch_paths_saved = max_num_epoch_paths_saved
self._epoch_paths = deque(maxlen=self._max_num_epoch_paths_saved)
self._render = render
self._render_kwargs = render_kwargs
self._rollout_fn = rollout_fn
self._num_steps_total = 0
self._num_paths_total = 0
self._save_env_in_snapshot = save_env_in_snapshot
def collect_new_paths(
self,
max_path_length,
num_steps,
discard_incomplete_paths,
):
paths = []
num_steps_collected = 0
while num_steps_collected < num_steps:
max_path_length_this_loop = min( # Do not go over num_steps
max_path_length,
num_steps - num_steps_collected,
)
path = self._rollout_fn(
self._env,
self._policy,
max_path_length=max_path_length_this_loop,
render=self._render,
render_kwargs=self._render_kwargs,
)
path_len = len(path['actions'])
if (
path_len != max_path_length
and not path['terminals'][-1]
and discard_incomplete_paths
):
break
num_steps_collected += path_len
paths.append(path)
self._num_paths_total += len(paths)
self._num_steps_total += num_steps_collected
self._epoch_paths.extend(paths)
return paths
def get_epoch_paths(self):
return self._epoch_paths
def end_epoch(self, epoch):
self._epoch_paths = deque(maxlen=self._max_num_epoch_paths_saved)
def get_diagnostics(self):
path_lens = [len(path['actions']) for path in self._epoch_paths]
stats = OrderedDict([
('num steps total', self._num_steps_total),
('num paths total', self._num_paths_total),
])
stats.update(create_stats_ordered_dict(
"path length",
path_lens,
always_show_all_stats=True,
))
return stats
def get_snapshot(self):
snapshot_dict = dict(
policy=self._policy,
)
if self._save_env_in_snapshot:
snapshot_dict['env'] = self._env
return snapshot_dict
class GoalConditionedPathCollector(MdpPathCollector):
def __init__(
self,
*args,
observation_key='observation',
desired_goal_key='desired_goal',
goal_sampling_mode=None,
**kwargs
):
def obs_processor(o):
return np.hstack((o[observation_key], o[desired_goal_key]))
rollout_fn = partial(
rollout,
preprocess_obs_for_policy_fn=obs_processor,
)
super().__init__(*args, rollout_fn=rollout_fn, **kwargs)
self._observation_key = observation_key
self._desired_goal_key = desired_goal_key
self._goal_sampling_mode = goal_sampling_mode
def collect_new_paths(self, *args, **kwargs):
self._env.goal_sampling_mode = self._goal_sampling_mode
return super().collect_new_paths(*args, **kwargs)
def get_snapshot(self):
snapshot = super().get_snapshot()
snapshot.update(
observation_key=self._observation_key,
desired_goal_key=self._desired_goal_key,
)
return snapshot
class ObsDictPathCollector(MdpPathCollector):
def __init__(
self,
*args,
observation_key='observation',
**kwargs
):
def obs_processor(obs):
return obs[observation_key]
rollout_fn = partial(
rollout,
preprocess_obs_for_policy_fn=obs_processor,
)
super().__init__(*args, rollout_fn=rollout_fn, **kwargs)
self._observation_key = observation_key
def get_snapshot(self):
snapshot = super().get_snapshot()
snapshot.update(
observation_key=self._observation_key,
)
return snapshot
class VAEWrappedEnvPathCollector(GoalConditionedPathCollector):
def __init__(
self,
env: VAEWrappedEnv,
policy,
decode_goals=False,
**kwargs
):
super().__init__(env, policy, **kwargs)
self._decode_goals = decode_goals
def collect_new_paths(self, *args, **kwargs):
self._env.decode_goals = self._decode_goals
return super().collect_new_paths(*args, **kwargs)
| 30.641618 | 73 | 0.606867 |
42c1c46ce15aa519befa0cc28b3dad68bf8d8f8b | 3,729 | py | Python | tensorflow_v2/dragen1860/Tutorials/10-ColorBot/model.py | gottaegbert/penter | 8cbb6be3c4bf67c7c69fa70e597bfbc3be4f0a2d | [
"MIT"
] | 13 | 2020-01-04T07:37:38.000Z | 2021-08-31T05:19:58.000Z | tensorflow_v2/dragen1860/Tutorials/10-ColorBot/model.py | gottaegbert/penter | 8cbb6be3c4bf67c7c69fa70e597bfbc3be4f0a2d | [
"MIT"
] | 3 | 2020-06-05T22:42:53.000Z | 2020-08-24T07:18:54.000Z | tensorflow_v2/dragen1860/Tutorials/10-ColorBot/model.py | gottaegbert/penter | 8cbb6be3c4bf67c7c69fa70e597bfbc3be4f0a2d | [
"MIT"
] | 9 | 2020-10-19T04:53:06.000Z | 2021-08-31T05:20:01.000Z | import tensorflow as tf
from tensorflow import keras
class RNNColorbot(keras.Model):
"""
Multi-layer (LSTM) RNN that regresses on real-valued vector labels.
"""
def __init__(self, rnn_cell_sizes, label_dimension, keep_prob):
"""Constructs an RNNColorbot.
Args:
rnn_cell_sizes: list of integers denoting the size of each LSTM cell in
the RNN; rnn_cell_sizes[i] is the size of the i-th layer cell
label_dimension: the length of the labels on which to regress
keep_prob: (1 - dropout probability); dropout is applied to the outputs of
each LSTM layer
"""
super(RNNColorbot, self).__init__(name="")
self.rnn_cell_sizes = rnn_cell_sizes
self.label_dimension = label_dimension
self.keep_prob = keep_prob
self.cells = [keras.layers.LSTMCell(size) for size in rnn_cell_sizes]
self.relu = keras.layers.Dense(label_dimension, activation=tf.nn.relu)
def call(self, inputs, training=None):
"""
Implements the RNN logic and prediction generation.
Args:
inputs: A tuple (chars, sequence_length), where chars is a batch of
one-hot encoded color names represented as a Tensor with dimensions
[batch_size, time_steps, 256] and sequence_length holds the length
of each character sequence (color name) as a Tensor with dimension
[batch_size].
training: whether the invocation is happening during training
Returns:
A tensor of dimension [batch_size, label_dimension] that is produced by
passing chars through a multi-layer RNN and applying a ReLU to the final
hidden state.
"""
(chars, sequence_length) = inputs
# Transpose the first and second dimensions so that chars is of shape
# [time_steps, batch_size, dimension].
chars = tf.transpose(chars, [1, 0, 2])
# The outer loop cycles through the layers of the RNN; the inner loop
# executes the time steps for a particular layer.
batch_size = int(chars.shape[1])
for l in range(len(self.cells)): # for each layer
cell = self.cells[l]
outputs = []
# h_zero, c_zero
state = (tf.zeros((batch_size, self.rnn_cell_sizes[l])),
tf.zeros((batch_size, self.rnn_cell_sizes[l])))
# Unstack the inputs to obtain a list of batches, one for each time step.
chars = tf.unstack(chars, axis=0)
for ch in chars: # for each time stamp
output, state = cell(ch, state)
outputs.append(output)
# The outputs of this layer are the inputs of the subsequent layer.
# [t, b, h]
chars = tf.stack(outputs, axis=0)
if training:
                # TF2's tf.nn.dropout takes the drop rate, not keep_prob.
                chars = tf.nn.dropout(chars, rate=1 - self.keep_prob)
# Extract the correct output (i.e., hidden state) for each example. All the
# character sequences in this batch were padded to the same fixed length so
# that they could be easily fed through the above RNN loop. The
# `sequence_length` vector tells us the true lengths of the character
# sequences, letting us obtain for each sequence the hidden state that was
# generated by its non-padding characters.
batch_range = [i for i in range(batch_size)]
# stack [64] with [64] => [64, 2]
indices = tf.stack([sequence_length - 1, batch_range], axis=1)
# [t, b, h]
# print(chars.shape)
hidden_states = tf.gather_nd(chars, indices)
# print(hidden_states.shape)
        return self.relu(hidden_states)
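# --- Hedged smoke test (added; random one-hot data, TF2 eager mode assumed).
# Shapes follow the call() docstring: chars is [batch_size, time_steps, 256]
# and sequence_length holds each name's true length.
if __name__ == "__main__":
    import numpy as np
    model = RNNColorbot(rnn_cell_sizes=[32, 16], label_dimension=3, keep_prob=0.5)
    chars = tf.one_hot(np.random.randint(0, 256, size=(4, 10)), depth=256)
    lengths = tf.constant([10, 7, 5, 10])
    rgb = model((chars, lengths), training=False)
    print(rgb.shape)  # expected: (4, 3)
| 46.6125 | 85 | 0.628587 |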
e456686b2ae00f96dc8327beba644ee613881e7d | 497 | py | Python | setup.py | csomaati/clockify-cli | 92f7b260679cfd2fa4b00b4894c28e808d77c201 | [
"MIT"
] | null | null | null | setup.py | csomaati/clockify-cli | 92f7b260679cfd2fa4b00b4894c28e808d77c201 | [
"MIT"
] | null | null | null | setup.py | csomaati/clockify-cli | 92f7b260679cfd2fa4b00b4894c28e808d77c201 | [
"MIT"
] | null | null | null | from setuptools import setup
setup(
name='clockify_cli',
version='0.10',
py_modules=['clockify_cli'],
author='Theodore Hu',
url='https://github.com/t5/clockify-cli',
install_requires=[
'click>=7.0',
'certifi==2018.8.13',
'chardet==3.0.4',
'idna==2.7',
'requests>=2.20.0',
'urllib3==1.23',
],
entry_points='''
[console_scripts]
clockify=clockify_cli.clockify_cli:main
''',
)
| 21.608696 | 47 | 0.541247 |
918d81a5a848a1779898a75a82fbac6371b002eb | 1,270 | py | Python | layout/review.py | gbkim000/wxPython | b1604d71cf04801f9efa8b26b935561a88ef1daa | [
"BSD-2-Clause"
] | 80 | 2018-05-25T00:37:25.000Z | 2022-03-13T12:31:02.000Z | layout/review.py | gbkim000/wxPython | b1604d71cf04801f9efa8b26b935561a88ef1daa | [
"BSD-2-Clause"
] | 1 | 2021-01-08T20:22:52.000Z | 2021-01-08T20:22:52.000Z | layout/review.py | gbkim000/wxPython | b1604d71cf04801f9efa8b26b935561a88ef1daa | [
"BSD-2-Clause"
] | 32 | 2018-05-24T05:40:55.000Z | 2022-03-24T00:32:11.000Z | #!/usr/bin/python
"""
ZetCode wxPython tutorial
In this example we create review
layout with wx.FlexGridSizer.
author: Jan Bodnar
website: www.zetcode.com
last modified: April 2018
"""
import wx
class Example(wx.Frame):
def __init__(self, parent, title):
super(Example, self).__init__(parent, title=title)
self.InitUI()
self.Centre()
self.Show()
def InitUI(self):
panel = wx.Panel(self)
hbox = wx.BoxSizer(wx.HORIZONTAL)
fgs = wx.FlexGridSizer(3, 2, 9, 25)
title = wx.StaticText(panel, label="Title")
author = wx.StaticText(panel, label="Author")
review = wx.StaticText(panel, label="Review")
tc1 = wx.TextCtrl(panel)
tc2 = wx.TextCtrl(panel)
tc3 = wx.TextCtrl(panel, style=wx.TE_MULTILINE)
fgs.AddMany([(title), (tc1, 1, wx.EXPAND), (author),
(tc2, 1, wx.EXPAND), (review, 1, wx.EXPAND), (tc3, 1, wx.EXPAND)])
fgs.AddGrowableRow(2, 1)
fgs.AddGrowableCol(1, 1)
hbox.Add(fgs, proportion=1, flag=wx.ALL|wx.EXPAND, border=15)
panel.SetSizer(hbox)
def main():
app = wx.App()
ex = Example(None, title='Review')
ex.Show()
app.MainLoop()
if __name__ == '__main__':
main()
| 20.819672 | 78 | 0.60315 |
df883b2d7ecc94a0676b79f0dc1bf9f224c207ae | 1,042 | py | Python | python_program/cball2.py | LiuKaiqiang94/PyStudyExample | b30212718b218c71e06b68677f55c33e3a1dbf46 | [
"MIT"
] | 5 | 2018-09-10T02:52:35.000Z | 2018-09-20T07:50:42.000Z | python_program/cball2.py | LiuKaiqiang94/PyStudyExample | b30212718b218c71e06b68677f55c33e3a1dbf46 | [
"MIT"
] | null | null | null | python_program/cball2.py | LiuKaiqiang94/PyStudyExample | b30212718b218c71e06b68677f55c33e3a1dbf46 | [
"MIT"
] | null | null | null | #炮弹问题,面向对象方案
from math import sin,cos,radians
class Projectile:
def __init__(self,angle,velocity,height):
self.xpos=0.0
self.ypos=height
theta=radians(angle)
self.xvel=velocity * cos(theta)
self.yvel=velocity * sin(theta)
def update(self,time):
self.xpos=self.xpos+time*self.xvel
yvel1=self.yvel-9.8*time
self.ypos=self.ypos+time*(self.yvel+yvel1)/2.0
self.yvel=yvel1
def getY(self):
return self.ypos
def getX(self):
return self.xpos
def getInputs():
a = float(input("Enter the launch angle (in degrees):"))
v = float(input("Enter the initial velocity (in meters/sec):"))
h = float(input("Enter the initial height (in meters):"))
    t = float(input("Enter the time interval between position calculations:"))
return a,v,h,t
def main():
angle,vel,h0,time=getInputs()
cball=Projectile(angle,vel,h0)
while cball.getY()>=0:
cball.update(time)
print("Distance traveled:{0:0.1f}".format(cball.getX()))
| 26.717949 | 72 | 0.634357 |
2a3b4d920c1db65e3f6ecffe3a7f2284610a1947 | 4,991 | py | Python | mce/spec.py | mvcisback/mce-spec-inference | 58432b35e35b75cab1c77cbbe2057aff94794597 | [
"MIT"
] | null | null | null | mce/spec.py | mvcisback/mce-spec-inference | 58432b35e35b75cab1c77cbbe2057aff94794597 | [
"MIT"
] | null | null | null | mce/spec.py | mvcisback/mce-spec-inference | 58432b35e35b75cab1c77cbbe2057aff94794597 | [
"MIT"
] | null | null | null | __all__ = ["ConcreteSpec", "concretize"]
from functools import lru_cache
from typing import FrozenSet, Sequence, Mapping
import attr
import aiger_bdd
import aiger_bv as BV
import aiger_coins as C
import funcy as fn
from aiger_bv.bundle import BundleMap
from bdd2dfa.b2d import to_dfa, BNode
from bidict import bidict
from dfa import DFA
from mce.preimage import preimage
from mce.order import BitOrder
from mce.bdd import to_bdd2
from mce.utils import TIMED_INPUT_MATCHER
Action = Mapping[str, Sequence[bool]]
Actions = Sequence[Action]
Bits = Sequence[bool]
def xor(x, y):
return (x | y) & ~(x & y)
@attr.s(frozen=True, auto_attribs=True, eq=False)
class ConcreteSpec:
"""
    Models a concrete specification over sequences of
system/environment action pairs encoded as bitvectors.
"""
bexpr: "BDD" # noqa: F821
order: BitOrder # TODO: Make this a derived quantity.
mdp: C.MDP
sys_inputs: FrozenSet[str] = attr.ib(converter=frozenset)
@property
def dyn(self) -> BV.AIGBV:
return self.mdp.aigbv
@property
def imap(self) -> BundleMap:
"""System input map."""
return self.dyn.imap.project(self.sys_inputs)
@property
def emap(self) -> BundleMap:
"""Environment input map."""
return self.dyn.imap.omit(self.sys_inputs)
@property
def horizon(self) -> int:
return self.order.horizon
@property
def manager(self):
return self.bexpr.bdd
def flatten(self, actions: Actions) -> Bits:
"""
Converts structured sequence of (sys, env) actions to a
sequence of bits that this concrete specification recognizes.
"""
timed_actions = {}
bmap = self.imap + self.emap
for t, action in enumerate(actions):
old2new = {k: f'{k}##time_{t}' for k in bmap.keys()}
bmap_t = bmap.relabel(old2new)
action_t = fn.walk_keys(old2new.get, action)
timed_actions.update(bmap_t.blast(action_t))
idx2key = bidict(self.bexpr.bdd.vars).inv
return [timed_actions[idx2key[i]] for i in range(len(idx2key))]
def unflatten(self, bits: Bits) -> Actions:
"""
Take a sequence of bits and group into bitvector inputs for
dynamics circuit.
"""
assert (len(bits) % self.order.total_bits) == 0
return list(self._unflatten(bits))
def _unflatten(self, bits: Bits):
size = self.order.total_bits
for i, chunk in enumerate(fn.chunks(self.order.total_bits, bits)):
mapping = {}
for j, bit in enumerate(chunk):
lvl = i*size + j
var = self.bexpr.bdd.var_at_level(lvl)
name, _, idx = TIMED_INPUT_MATCHER.match(var).groups()
mapping[f'{name}[{idx}]'] = bit
yield self.dyn.imap.unblast(mapping)
def accepts(self, actions: Actions) -> bool:
"""Does this spec accept the given sequence of (sys, env) actions."""
flattened = self.flatten(actions)
assert len(flattened) == self.order.horizon * self.order.total_bits
return self._as_dfa(qdd=True).label(flattened)
def toggle(self, actions: Actions):
"""Toggles a sequence of (sys, env) actions."""
assert len(actions) == self.horizon
aps = fn.lpluck(0, self.dyn.simulate(actions))
expr = preimage(aps=aps, mdp=self.dyn)
bexpr, *_ = aiger_bdd.to_bdd(
expr, manager=self.manager, renamer=lambda _, x: x
)
return attr.evolve(self, bexpr=xor(self.bexpr, bexpr))
@lru_cache
def _unrolled(self) -> BV.AIGBV:
return self.dyn.unroll(self.horizon)
@lru_cache
def _as_dfa(self, qdd=False) -> DFA:
"""
Returns a dfa with binary alphabet which models the
ConcreteSpecification with the order given by self.bexpr.
"""
return to_dfa(self.bexpr, qdd=qdd)
def abstract_trace(self, actions: Actions) -> Sequence[BNode]:
"""Path a set of (sys, env) actions takes through BDD."""
return self._as_dfa(qdd=True).trace(self.flatten(actions))
def concretize(
monitor, sys: C.MDP, horizon: int, manager=None
) -> ConcreteSpec:
"""
    Convert an abstract specification monitor and an i/o transition
    system into a concrete specification over the horizon.
"""
# Make format correct.
if not isinstance(monitor, C.MDP):
assert hasattr(monitor, 'aig') or hasattr(monitor, 'aigbv')
if hasattr(monitor, 'aigbv'):
monitor = monitor.aigbv
else:
monitor = BV.aig2aigbv(monitor.aig)
monitor = C.MDP(monitor)
# Remove ignored outputs of sys.
for sym in (monitor.inputs ^ sys.outputs):
size = sys._aigbv.omap[sym].size
monitor >>= C.MDP(BV.sink(size, [sym]))
bexpr, manager, order = to_bdd2(sys >> monitor, horizon)
return ConcreteSpec(bexpr, order, sys_inputs=sys.inputs, mdp=sys)
| 31.789809 | 77 | 0.633741 |
c460c05b8a1a5cbd5eb6e559ea30e2c7b9fa5a66 | 368 | py | Python | idb/common/video.py | amonshiz/idb | 3a5ef5c76966657c617133dd1f5d58db99d15d89 | [
"MIT"
] | 1 | 2021-03-09T07:29:18.000Z | 2021-03-09T07:29:18.000Z | idb/common/video.py | amonshiz/idb | 3a5ef5c76966657c617133dd1f5d58db99d15d89 | [
"MIT"
] | 6 | 2021-05-10T08:32:56.000Z | 2022-02-26T01:41:09.000Z | idb/common/video.py | fakeNetflix/facebook-repo-idb | eb4ed5a7dc4a14b224a22e833294d7366fe4725e | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
from typing import AsyncIterator
from idb.grpc.idb_pb2 import RecordResponse
async def generate_video_bytes(
stream: AsyncIterator[RecordResponse],
) -> AsyncIterator[bytes]:
async for response in stream:
data = response.payload.data
yield data
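# --- Hedged usage sketch (added): consuming the generator from async code.
#
#   async for chunk in generate_video_bytes(stream):
#       out_file.write(chunk)
#
# where `stream` yields RecordResponse messages from idb's gRPC video
# recorder and `out_file` is an open binary file (both placeholders).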
| 24.533333 | 71 | 0.73913 |
fb454bde1ed50353b461c53016515cc29fbdfe02 | 1,309 | py | Python | _app/text_file.py | Wilfongjt/lb-data | eca16bcec6cae5822146dfce8ea56e5f533c7f87 | [
"MIT"
] | null | null | null | _app/text_file.py | Wilfongjt/lb-data | eca16bcec6cae5822146dfce8ea56e5f533c7f87 | [
"MIT"
] | 6 | 2020-04-15T10:15:30.000Z | 2020-07-10T16:12:42.000Z | _app/text_file.py | Wilfongjt/lb-data | eca16bcec6cae5822146dfce8ea56e5f533c7f87 | [
"MIT"
] | null | null | null | from file import ListFile
class TextFile(ListFile):
def __init__(self, folder, filename):
super().__init__(folder, filename)
def main():
from pathlib import Path
from util import Util
import os
from app_settings import AppSettings, AppSettingsTest
os.environ['LB-TESTING'] = '1'
appSettings = AppSettingsTest()
foldername = appSettings.getFolder('temp-folder')
filename = 'test.tmpl'
filename2 = 'test2.tmpl'
textFile = TextFile(foldername, filename)
textFile.append('line A')
textFile.append('line B')
textFile.append('line C')
assert textFile[0] == 'line A'
assert textFile[1] == 'line B'
assert textFile[2] == 'line C'
assert textFile == ['line A', 'line B', 'line C']
    textFile.write() # write original
assert textFile.exists()
textFile.copy(foldername, filename2) # copy
textFile2 = TextFile(foldername, filename2).read() # read
assert textFile2 == ['line A', 'line B', 'line C']
# cleanup
textFile.delete() # delete original
assert not textFile.exists()
textFile2.delete() # delete copy
assert not textFile2.exists()
if __name__ == "__main__":
main() | 28.456522 | 73 | 0.59893 |
24f798fa0b34bd8ceb380a9d2c30a42982d1e811 | 1,420 | py | Python | qa/rpc-tests/reindex.py | sibbstar/Money | f471093d48fb938787fc9aa1a4a744881b9e5ea8 | [
"MIT"
] | null | null | null | qa/rpc-tests/reindex.py | sibbstar/Money | f471093d48fb938787fc9aa1a4a744881b9e5ea8 | [
"MIT"
] | null | null | null | qa/rpc-tests/reindex.py | sibbstar/Money | f471093d48fb938787fc9aa1a4a744881b9e5ea8 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test -reindex and -reindex-chainstate with CheckBlockIndex
#
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
import os.path
import time
class ReindexTest(BitcoinTestFramework):
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 1
def setup_network(self):
self.nodes = []
self.is_network_split = False
self.nodes.append(start_node(0, self.options.tmpdir))
def reindex(self, justchainstate=False):
self.nodes[0].generate(3)
blockcount = self.nodes[0].getblockcount()
stop_node(self.nodes[0], 0)
wait_bitcoinds()
self.nodes[0]=start_node(0, self.options.tmpdir, ["-debug", "-reindex-chainstate" if justchainstate else "-reindex", "-checkblockindex=1"])
while self.nodes[0].getblockcount() < blockcount:
time.sleep(0.1)
assert_equal(self.nodes[0].getblockcount(), blockcount)
print("Success")
def run_test(self):
self.reindex(False)
self.reindex(True)
self.reindex(False)
self.reindex(True)
if __name__ == '__main__':
ReindexTest().main()
| 31.555556 | 147 | 0.678169 |
82242517f2120d205b3fb5939942035ec8f92ee4 | 6,296 | py | Python | mogptk/models/sm_lmc.py | vishalbelsare/mogptk | 4f7001fbfacea778bd62a1e4e6c5b404c473e313 | [
"MIT"
] | null | null | null | mogptk/models/sm_lmc.py | vishalbelsare/mogptk | 4f7001fbfacea778bd62a1e4e6c5b404c473e313 | [
"MIT"
] | null | null | null | mogptk/models/sm_lmc.py | vishalbelsare/mogptk | 4f7001fbfacea778bd62a1e4e6c5b404c473e313 | [
"MIT"
] | null | null | null | import numpy as np
from ..dataset import DataSet
from ..model import Model, Exact, logger
from ..gpr import LinearModelOfCoregionalizationKernel, SpectralKernel
class SM_LMC(Model):
"""
Spectral Mixture Linear Model of Coregionalization kernel with `Q` components and `Rq` latent functions. The SM kernel as proposed by [1] is combined with the LMC kernel as proposed by [2]. The parameters will be randomly instantiated, use `init_parameters()` to initialize the parameters to reasonable values for the current data set.
Args:
dataset (mogptk.dataset.DataSet): `DataSet` object of data for all channels.
Q (int): Number of components.
Rq (int): Number of subcomponents.
model: Gaussian process model to use, such as `mogptk.model.Exact`.
mean (mogptk.gpr.mean.Mean): The mean class.
name (str): Name of the model.
rescale_x (bool): Rescale the X axis to [0,1000] to help training.
Attributes:
dataset: The associated mogptk.dataset.DataSet.
model: The mogptk.gpr.model.Model.
kernel: The mogptk.gpr.kernel.Kernel.
Examples:
>>> import numpy as np
>>> import mogptk
>>>
>>> t = np.linspace(0, 10, 100)
>>> y1 = np.sin(0.5 * t)
>>> y2 = 2.0 * np.sin(0.2 * t)
>>>
>>> dataset = mogptk.DataSet(t, [y1, y2])
>>> model = mogptk.SM_LMC(dataset, Q=2)
>>> model.init_parameters()
>>> model.train()
>>> model.predict()
>>> dataset.plot()
[1] A.G. Wilson and R.P. Adams, "Gaussian Process Kernels for Pattern Discovery and Extrapolation", International Conference on Machine Learning 30, 2013\
[2] P. Goovaerts, "Geostatistics for Natural Resource Evaluation", Oxford University Press, 1997
"""
def __init__(self, dataset, Q=1, Rq=1, inference=Exact(), mean=None, name="SM-LMC", rescale_x=True):
if not isinstance(dataset, DataSet):
dataset = DataSet(dataset)
spectral = SpectralKernel(dataset.get_input_dims()[0])
kernel = LinearModelOfCoregionalizationKernel(
spectral,
output_dims=dataset.get_output_dims(),
input_dims=dataset.get_input_dims()[0],
Q=Q,
Rq=Rq)
super().__init__(dataset, kernel, inference, mean, name, rescale_x)
self.Q = Q
self.Rq = Rq
nyquist = np.amin(self.dataset.get_nyquist_estimation(), axis=0)
for q in range(Q):
self.model.kernel[q].weight.assign(1.0, trainable=False) # handled by LMCKernel
self.model.kernel[q].mean.assign(upper=nyquist)
def init_parameters(self, method='BNSE', sm_init='BNSE', sm_method='Adam', sm_iters=100, sm_params={}, sm_plot=False):
"""
Estimate kernel parameters from the data set. The initialization can be done using three methods:
- BNSE estimates the PSD via Bayesian non-parametris spectral estimation (Tobar 2018) and then selecting the greater Q peaks in the estimated spectrum, and use the peak's position, magnitude and width to initialize the mean, magnitude and variance of the kernel respectively.
- LS is similar to BNSE but uses Lomb-Scargle to estimate the spectrum, which is much faster but may give poorer results.
- SM fits independent Gaussian processes for each channel, each one with a spectral mixture kernel, and uses the fitted parameters as initial values for the multi-output kernel.
In all cases the noise is initialized with 1/30 of the variance of each channel.
Args:
method (str): Method of estimation, such as BNSE, LS, or SM.
sm_init (str): Parameter initialization strategy for SM initialization.
sm_method (str): Optimization method for SM initialization.
sm_iters (str): Number of iterations for SM initialization.
sm_params (object): Additional parameters for PyTorch optimizer for SM initialization.
sm_plot (bool): Show the PSD of the kernel after fitting for SM initialization.
"""
output_dims = self.dataset.get_output_dims()
if not method.lower() in ['bnse', 'ls', 'sm']:
raise ValueError("valid methods of estimation are BNSE, LS, and SM")
if method.lower() == 'bnse':
amplitudes, means, variances = self.dataset.get_bnse_estimation(self.Q)
elif method.lower() == 'ls':
amplitudes, means, variances = self.dataset.get_lombscargle_estimation(self.Q)
else:
amplitudes, means, variances = self.dataset.get_sm_estimation(self.Q, method=sm_init, optimizer=sm_method, iters=sm_iters, params=sm_params, plot=sm_plot)
if len(amplitudes) == 0:
logger.warning('{} could not find peaks for SM-LMC'.format(method))
return
# flatten output_dims and mixtures
channels = [channel for channel, amplitude in enumerate(amplitudes) for q in range(amplitude.shape[0])]
amplitudes = [amplitude[q,:] for amplitude in amplitudes for q in range(amplitude.shape[0])]
means = [mean[q,:] for mean in means for q in range(mean.shape[0])]
variances = [variance[q,:] for variance in variances for q in range(variance.shape[0])]
idx = np.argsort([amplitude.mean() for amplitude in amplitudes])[::-1]
constant = np.random.random((output_dims, self.Q, self.Rq))
for q in range(len(idx)):
i = idx[q]
channel = channels[i]
constant[channel,q % self.Q,:] = amplitudes[i].mean()
if q < self.Q:
self.model.kernel[q].mean.assign(means[i])
self.model.kernel[q].variance.assign(variances[i] * 2.0)
# normalize proportional to channel variance
for i, channel in enumerate(self.dataset):
_, y = channel.get_train_data(transformed=True)
if 0.0 < constant[i,:,:].sum():
constant[i,:,:] = constant[i,:,:] / constant[i,:,:].sum() * y.var() * 2
self.model.kernel.weight.assign(constant)
noise = np.empty((output_dims,))
for i, channel in enumerate(self.dataset):
_, y = channel.get_train_data(transformed=True)
noise[i] = y.var() / 30.0
self.model.kernel.noise.assign(noise)
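# --- Hedged usage note (added): selecting among the initialization
# strategies documented in init_parameters() above.
#
#   model.init_parameters()               # BNSE (default)
#   model.init_parameters(method='LS')    # Lomb-Scargle: faster, rougher
#   model.init_parameters(method='SM',    # per-channel SM fits as seeds
#                         sm_iters=200)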
| 49.968254 | 339 | 0.647078 |
547b9cffa039dbfb9483af5aba073c580085f598 | 2,466 | py | Python | MyGPyOpt/testing/functional_tests/test_duplicate_manager.py | Paul1298/GPyOpt | d26bd4fb57a0b83a5cdaa0e7e97aa3692eb31187 | [
"BSD-3-Clause"
] | null | null | null | MyGPyOpt/testing/functional_tests/test_duplicate_manager.py | Paul1298/GPyOpt | d26bd4fb57a0b83a5cdaa0e7e97aa3692eb31187 | [
"BSD-3-Clause"
] | null | null | null | MyGPyOpt/testing/functional_tests/test_duplicate_manager.py | Paul1298/GPyOpt | d26bd4fb57a0b83a5cdaa0e7e97aa3692eb31187 | [
"BSD-3-Clause"
] | 1 | 2021-03-22T17:21:15.000Z | 2021-03-22T17:21:15.000Z | import unittest
import numpy as np
from MyGPyOpt.core.task.space import Design_space
from MyGPyOpt.experiment_design import initial_design
from MyGPyOpt.util.duplicate_manager import DuplicateManager
class TestDuplicateManager(unittest.TestCase):
def test_duplicate(self):
space = [
{'name': 'var_1', 'type': 'continuous', 'domain':(-3,1), 'dimensionality': 1},
{'name': 'var_2', 'type': 'discrete', 'domain': (0,1,2,3)},
{'name': 'var_3', 'type': 'categorical', 'domain': (0, 1)}
]
design_space = Design_space(space)
np.random.seed(666)
number_points = 5
zipped_X = initial_design("random",design_space,number_points)
d = DuplicateManager(design_space, zipped_X)
duplicate = np.atleast_2d(zipped_X[0,:].copy())
assert d.is_zipped_x_duplicate(duplicate)
assert d.is_unzipped_x_duplicate(design_space.unzip_inputs(duplicate))
non_duplicate = np.array([[-2.5, 2., 0.]])
for x in zipped_X:
assert not np.all(non_duplicate==x)
assert not d.is_zipped_x_duplicate(non_duplicate)
assert not d.is_unzipped_x_duplicate(design_space.unzip_inputs(non_duplicate))
def test_duplicate_with_ignored_and_pending(self):
space = [
{'name': 'var_1', 'type': 'continuous', 'domain':(-3,1), 'dimensionality': 1},
{'name': 'var_2', 'type': 'discrete', 'domain': (0,1,2,3)},
{'name': 'var_3', 'type': 'categorical', 'domain': (0, 1)}
]
design_space = Design_space(space)
np.random.seed(666)
number_points = 5
zipped_X = initial_design("random",design_space,number_points)
pending_zipped_X = initial_design("random", design_space, number_points)
ignored_zipped_X = initial_design("random", design_space, number_points)
d = DuplicateManager(design_space, zipped_X, pending_zipped_X, ignored_zipped_X)
duplicate_in_pending_state = np.atleast_2d(pending_zipped_X[0,:].copy())
assert d.is_zipped_x_duplicate(duplicate_in_pending_state)
assert d.is_unzipped_x_duplicate(design_space.unzip_inputs(duplicate_in_pending_state))
duplicate_in_ignored_state = np.atleast_2d(ignored_zipped_X[0,:].copy())
assert d.is_zipped_x_duplicate(duplicate_in_ignored_state)
assert d.is_unzipped_x_duplicate(design_space.unzip_inputs(duplicate_in_ignored_state))
| 35.228571 | 95 | 0.673966 |
8bafecc928b7d33f3fb71a8774d8d7c73e417453 | 739 | py | Python | SnakeBot/basic/code.py | gamblor21/Adafruit_Learning_System_Guides | f5dab4a758bc82d0bfc3c299683fe89dc093912a | [
"MIT"
] | 665 | 2017-09-27T21:20:14.000Z | 2022-03-31T09:09:25.000Z | SnakeBot/basic/code.py | gamblor21/Adafruit_Learning_System_Guides | f5dab4a758bc82d0bfc3c299683fe89dc093912a | [
"MIT"
] | 641 | 2017-10-03T19:46:37.000Z | 2022-03-30T18:28:46.000Z | SnakeBot/basic/code.py | gamblor21/Adafruit_Learning_System_Guides | f5dab4a758bc82d0bfc3c299683fe89dc093912a | [
"MIT"
] | 734 | 2017-10-02T22:47:38.000Z | 2022-03-30T14:03:51.000Z | import time
from adafruit_crickit import crickit
ss = crickit.seesaw
left_wheel = crickit.dc_motor_1
right_wheel = crickit.dc_motor_2
# These allow easy correction for motor speed variation.
# Factors are determined by observation and fiddling.
# Start with both having a factor of 1.0 (i.e. none) and
# adjust until the bot goes more or less straight
def set_right(speed):
right_wheel.throttle = speed * 0.9
def set_left(speed):
left_wheel.throttle = speed
# Uncomment this to find the above factors
# set_right(1.0)
# set_left(1.0)
# while True:
# pass
while True:
# tack left
set_left(0.25)
set_right(1.0)
time.sleep(0.75)
# tack right
set_left(1.0)
set_right(0.25)
time.sleep(0.75)
| 19.972973 | 56 | 0.710419 |
941779ad9a9f38fe024c2598994b1b324e89a61c | 1,857 | py | Python | lib/lambdascrapers/modules/js2py/pyjs.py | proxium/script.module.lambdascrapers | f96ad4c7c44c011c9d0007a83edde8c4797e0e2f | [
"Beerware"
] | 11 | 2018-12-21T22:52:37.000Z | 2021-09-02T02:13:50.000Z | lib/lambdascrapers/modules/js2py/pyjs.py | proxium/script.module.lambdascrapers | f96ad4c7c44c011c9d0007a83edde8c4797e0e2f | [
"Beerware"
] | null | null | null | lib/lambdascrapers/modules/js2py/pyjs.py | proxium/script.module.lambdascrapers | f96ad4c7c44c011c9d0007a83edde8c4797e0e2f | [
"Beerware"
] | 2 | 2020-04-01T22:11:12.000Z | 2020-05-07T23:54:52.000Z | from .base import *
from .constructors.jsmath import Math
from .constructors.jsdate import Date
from .constructors.jsobject import Object
from .constructors.jsfunction import Function
from .constructors.jsstring import String
from .constructors.jsnumber import Number
from .constructors.jsboolean import Boolean
from .constructors.jsregexp import RegExp
from .constructors.jsarray import Array
from .prototypes.jsjson import JSON
from .host.console import console
from .host.jseval import Eval
from .host.jsfunctions import parseFloat, parseInt, isFinite, isNaN
# Now we have all the necessary items to create global environment for script
__all__ = ['Js', 'PyJsComma', 'PyJsStrictEq', 'PyJsStrictNeq',
'PyJsException', 'PyJsBshift', 'Scope', 'PyExceptionToJs',
'JsToPyException', 'JS_BUILTINS', 'appengine', 'set_global_object',
'JsRegExp', 'PyJsException', 'PyExceptionToJs', 'JsToPyException', 'PyJsSwitchException']
# these were defined in base.py
builtins = ('true','false','null','undefined','Infinity',
'NaN', 'console', 'String', 'Number', 'Boolean', 'RegExp',
'Math', 'Date', 'Object', 'Function', 'Array',
'parseFloat', 'parseInt', 'isFinite', 'isNaN')
#Array, Function, JSON, Error is done later :)
# also some built in functions like eval...
def set_global_object(obj):
obj.IS_CHILD_SCOPE = False
this = This({})
this.own = obj.own
this.prototype = obj.prototype
PyJs.GlobalObject = this
# make this available
obj.register('this')
obj.put('this', this)
scope = dict(zip(builtins, [globals()[e] for e in builtins]))
# Now add errors:
for name, error in ERRORS.items():
scope[name] = error
#add eval
scope['eval'] = Eval
scope['JSON'] = JSON
JS_BUILTINS = {}
for k, v in scope.items():  # .items() works under both Python 2 and 3
JS_BUILTINS[k]=v
| 34.388889 | 100 | 0.695207 |
f5e95bd7706f36d0e04f9f5e3c3f3254d5e28b53 | 1,080 | py | Python | prepare/yt.py | osfans/MCPDict | 89b9dfdb4964ae9979b4a8d26f1494c0f25b3534 | [
"MIT"
] | 40 | 2020-10-16T05:32:58.000Z | 2022-03-07T16:06:46.000Z | prepare/yt.py | osfans/MCPDict | 89b9dfdb4964ae9979b4a8d26f1494c0f25b3534 | [
"MIT"
] | 5 | 2021-10-03T22:35:01.000Z | 2022-03-07T05:47:59.000Z | prepare/yt.py | osfans/MCPDict | 89b9dfdb4964ae9979b4a8d26f1494c0f25b3534 | [
"MIT"
] | 7 | 2020-09-28T14:46:48.000Z | 2022-03-20T09:27:22.000Z | #!/usr/bin/env python3
from openpyxl import load_workbook
from collections import defaultdict
import os.path
def get_dict():
yt = dict()
if os.path.exists("yt.tsv"):
f = open("yt.tsv")
for line in f:
fs = line.strip().split("\t")
yt[int(fs[0])] = fs[1]
f.close()
else:
for sheet in load_workbook("韵图音系同音字表.xlsx"):
for row in sheet.rows:
y = row[0].value
for cell in row[1:]:
if v := cell.value:
if type(v) is float:
yt[int(v)]=y
elif type(v) is str and "#" in v:
for i in v.split("#"):
yt[int(i)]=y
f = open("yt.tsv", "w")
for i in sorted(yt.keys()):
y = yt[i].replace('g', 'ɡ')
t = y[-1]
if not t.isdigit(): y = y + "4"
            yt[i] = y  # keep the normalized reading in memory, matching the yt.tsv branch above
            print("%s\t%s" %(i,y),file=f)
f.close()
pq = dict()
for line in open("PrengQim.txt"):
line = line.strip()
if line.startswith("#"): continue
fs = line.split(" ")
pq[fs[0]] = yt[int(fs[0])]
d=defaultdict(list)
for line in open("Dzih.txt"):
line = line.strip()
fs = line.split(" ")
hz = fs[0]
if len(hz) == 1:
d[hz].append(yt[int(fs[1])])
return d
| 22.040816 | 46 | 0.557407 |
ee49edff1ed84638c98a19f61619a5cef872d1e1 | 1,091 | py | Python | tests/unit/h_api/bulk_api/executor_test.py | hypothesis/h-api | 9e8b6a46abdae796241c61e41ad02b695446dc00 | [
"BSD-2-Clause"
] | null | null | null | tests/unit/h_api/bulk_api/executor_test.py | hypothesis/h-api | 9e8b6a46abdae796241c61e41ad02b695446dc00 | [
"BSD-2-Clause"
] | 7 | 2020-04-16T12:58:42.000Z | 2021-05-11T08:13:30.000Z | tests/unit/h_api/bulk_api/executor_test.py | hypothesis/h-api | 9e8b6a46abdae796241c61e41ad02b695446dc00 | [
"BSD-2-Clause"
] | 1 | 2020-05-28T16:31:09.000Z | 2020-05-28T16:31:09.000Z | from unittest.mock import sentinel
import pytest
from h_matchers import Any
from h_api.bulk_api.executor import AutomaticReportExecutor
from h_api.bulk_api.model.report import Report
from h_api.enums import CommandType
class TestAutomaticReportExecutor:
@pytest.mark.parametrize(
"command_type",
(CommandType.CREATE, CommandType.UPSERT),
)
def test_execute_batch_returns_an_appropriate_type(self, command_type):
results = AutomaticReportExecutor().execute_batch(
command_type, sentinel.data_type, sentinel.config, batch=[sentinel.command]
)
assert results == [Any.instance_of(Report)]
def test_execute_batch_generates_fake_ids(self):
results = AutomaticReportExecutor().execute_batch(
sentinel.command_type,
sentinel.data_type,
sentinel.config,
batch=[sentinel.command, sentinel.command, sentinel.command],
)
assert results == Any.list.comprised_of(Any.instance_of(Report)).of_size(3)
assert len({report.id for report in results}) == 3
| 33.060606 | 87 | 0.712191 |
e575ca0a2b93c20ff9c039826bdc2f3ddeba4091 | 10,314 | py | Python | lib/filetype_unittest.py | khromiumos/chromiumos-chromite | a42a85481cdd9d635dc40a04585e427f89f3bb3f | [
"BSD-3-Clause"
] | null | null | null | lib/filetype_unittest.py | khromiumos/chromiumos-chromite | a42a85481cdd9d635dc40a04585e427f89f3bb3f | [
"BSD-3-Clause"
] | 2 | 2021-03-26T00:29:32.000Z | 2021-04-30T21:29:33.000Z | lib/filetype_unittest.py | khromiumos/chromiumos-chromite | a42a85481cdd9d635dc40a04585e427f89f3bb3f | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright 2014 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unittests for the filetype.py module."""
from __future__ import print_function
import os
import stat
import sys
from chromite.lib import cros_test_lib
from chromite.lib import osutils
from chromite.lib import filetype
from chromite.lib import unittest_lib
pytestmark = cros_test_lib.pytestmark_inside_only
assert sys.version_info >= (3, 6), 'This module requires Python 3.6+'
class SplitShebangTest(cros_test_lib.TestCase):
"""Test the SplitShebang function."""
def testSimpleCase(self):
"""Test a simple case."""
self.assertEqual(('/bin/sh', ''), filetype.SplitShebang('#!/bin/sh'))
def testCaseWithArguments(self):
"""Test a case with arguments."""
self.assertEqual(('/bin/sh', '-i -c "ls"'),
filetype.SplitShebang('#!/bin/sh -i -c "ls"'))
def testCaseWithEndline(self):
"""Test a case finished with a newline char."""
self.assertEqual(('/bin/sh', '-i'),
filetype.SplitShebang('#!/bin/sh -i\n'))
def testCaseWithSpaces(self):
"""Test a case with several spaces in the line."""
self.assertEqual(('/bin/sh', '-i'),
filetype.SplitShebang('#! /bin/sh -i \n'))
def testValidBytes(self):
"""Test bytes inputs."""
self.assertEqual(('/foo', '-v'), filetype.SplitShebang(b'#!/foo -v'))
def testInvalidBytes(self):
"""Test bytes input but not valid UTF-8."""
self.assertRaises(ValueError, filetype.SplitShebang, b'#!/fo\xff')
def testInvalidCases(self):
"""Thes invalid cases."""
self.assertRaises(ValueError, filetype.SplitShebang, '/bin/sh -i')
self.assertRaises(ValueError, filetype.SplitShebang, '#!')
self.assertRaises(ValueError, filetype.SplitShebang, '#!env python')
class FileTypeDecoderTest(cros_test_lib.TempDirTestCase):
"""Test the FileTypeDecoder class."""
def testSpecialFiles(self):
"""Tests special files, such as symlinks, directories and named pipes."""
somedir = os.path.join(self.tempdir, 'somedir')
osutils.SafeMakedirs(somedir)
self.assertEqual('inode/directory',
filetype.FileTypeDecoder.DecodeFile(somedir))
a_fifo = os.path.join(self.tempdir, 'a_fifo')
os.mknod(a_fifo, stat.S_IFIFO)
self.assertEqual('inode/special',
filetype.FileTypeDecoder.DecodeFile(a_fifo))
empty_file = os.path.join(self.tempdir, 'empty_file')
osutils.WriteFile(empty_file, '')
self.assertEqual('inode/empty',
filetype.FileTypeDecoder.DecodeFile(empty_file))
a_link = os.path.join(self.tempdir, 'a_link')
os.symlink('somewhere', a_link)
self.assertEqual('inode/symlink',
filetype.FileTypeDecoder.DecodeFile(a_link))
def testTextShebangFiles(self):
"""Test shebangs (#!) file decoding based on the executed path."""
# If the file has only one line is considered a "shebang" rather than a
# script.
shebang = os.path.join(self.tempdir, 'shebang')
osutils.WriteFile(shebang, '#!/bin/python --foo --bar\n')
self.assertEqual('text/shebang',
filetype.FileTypeDecoder.DecodeFile(shebang))
# A shebang with contents is considered a script.
script = os.path.join(self.tempdir, 'script')
osutils.WriteFile(script, '#!/bin/foobar --foo --bar\n\nexit 1\n')
self.assertEqual('text/script',
filetype.FileTypeDecoder.DecodeFile(script))
bash_script = os.path.join(self.tempdir, 'bash_script')
osutils.WriteFile(bash_script,
'#!/bin/bash --debug\n# Copyright\nexit 42\n')
self.assertEqual('text/script/bash',
filetype.FileTypeDecoder.DecodeFile(bash_script))
pyscript = os.path.join(self.tempdir, 'pyscript')
osutils.WriteFile(pyscript,
'#!/usr/bin/env PYTHONPATH=/foo python-2.7 -3\n# foo\n')
self.assertEqual('text/script/python',
filetype.FileTypeDecoder.DecodeFile(pyscript))
perlscript = os.path.join(self.tempdir, 'perlscript')
osutils.WriteFile(perlscript, '#!/usr/local/bin/perl\n#\n')
self.assertEqual('text/script/perl',
filetype.FileTypeDecoder.DecodeFile(perlscript))
def testTextPEMFiles(self):
"""Test decoding various PEM files."""
# A RSA private key (sample from update_engine unittest).
some_cert = os.path.join(self.tempdir, 'some_cert')
osutils.WriteFile(some_cert,
"""-----BEGIN CERTIFICATE-----
MIIDJTCCAo6gAwIBAgIJAP6IycaMXlqsMA0GCSqGSIb3DQEBBQUAMIGLMQswCQYD
VQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTETMBEGA1UEChMKR29vZ2xlIElu
YzESMBAGA1UECxMJQ2hyb21lIE9TMRgwFgYDVQQDEw9PcGVuU1NMIFRlc3QgQ0Ex
JDAiBgkqhkiG9w0BCQEWFXNlY3VyaXR5QGNocm9taXVtLm9yZzAgFw0xMjA1MTcx
OTQ1MjJaGA8yMTEyMDExNDE5NDUyMlowgZ0xCzAJBgNVBAYTAlVTMRMwEQYDVQQI
EwpDYWxpZm9ybmlhMRYwFAYDVQQHEw1Nb3VudGFpbiBWaWV3MRMwEQYDVQQKEwpH
b29nbGUgSW5jMRIwEAYDVQQLEwlDaHJvbWUgT1MxEjAQBgNVBAMTCTEyNy4wLjAu
MTEkMCIGCSqGSIb3DQEJARYVc2VjdXJpdHlAY2hyb21pdW0ub3JnMIGfMA0GCSqG
SIb3DQEBAQUAA4GNADCBiQKBgQC5bxzyvNJFDmyThIGoFoZkN3rlQB8QoR80rS1u
8pLyqW5Vk2A0pNOvcxPrUHAUTgWhikqzymz4a4XoLxat53H/t/XmRYwZ9GVNZocz
Q4naWxtPyPqIBosMLnWu6FHUVO1lTdvhC6Pjw2i1S9Rq3dMsANU1IER4NR8XM+v6
qBg1XQIDAQABo3sweTAJBgNVHRMEAjAAMCwGCWCGSAGG+EIBDQQfFh1PcGVuU1NM
IEdlbmVyYXRlZCBDZXJ0aWZpY2F0ZTAdBgNVHQ4EFgQU+biqy5tbRGlUNLsEtjmy
7v1eYqowHwYDVR0jBBgwFoAUC0E889mD7bn2fXcEtA3HvUimV+0wDQYJKoZIhvcN
AQEFBQADgYEA2fJxpGwDbnUF5Z3mvZ81/pk8fVQdJvV5U93EA29VE1SaFA5S5qYS
zP1Ue0MX+RqMLKjnH+E6yEoo+kYD9rzagnvORefbJeM92SiHgHPeSm8F1nQtGclj
p8izLBlcKgPHwQLKxELmbS/xvt4cyHaLSIy50lLrdJeKtXjqq4PbH3Y=
-----END CERTIFICATE-----
""")
self.assertEqual('text/pem/cert',
filetype.FileTypeDecoder.DecodeFile(some_cert))
# A RSA private key (sample from vboot_reference unittest).
rsa_key = os.path.join(self.tempdir, 'rsa_key')
osutils.WriteFile(rsa_key,
"""-----BEGIN RSA PRIVATE KEY-----
MIICXAIBAAKBgQCdYBOJIJvGX9vC4E5XD1jb9zJ99FzR4G0n8HNyWy5ZKyy/hi80
ibXpy6QdWcm4wqTvmVjU+20sP4AgzKC65fKyFvvAHUiD4yGr1qWtg4YFUcBbUiXO
CQ66W3AC4g2Ju9C16AzMpBk043bQsUQvxILEumQqQ1VS33uM7Kq8dWpL6QIDAQAB
AoGAb12y1WIu+gWRtWkX5wHkRty6bWmEWbzwYcgFWlJuDQnBg9MICqy8/7Js85w7
ZLTRFQC2XRmDW0GggRVtVHUu9X2jwkHR9+TWza4xAtYcSwDl6VJTHX2ygptrG/n9
qPFinfvnpiP7b2WNjC53V3cnjg3m+1B5zrmFxsVLDMVLQhECQQDN7i1NWZFVNfYa
GT2GSgMpD0nPXA1HHUvFFgnI9xJkBCewHzega+PrrrpMKZZWLpc4YCm3PK9nI8Nk
EmJE5HwNAkEAw6OpiOgWdRaJWx3+XBsFOhz6K86xwV0NpVb6ocrBKU/t0OqP+gZh
B/YBDfwXPr2w5FCwozUs/MrBdoYR3WnsTQJABNn/pzrc+azzx1mg4XEM8gKyMnhw
t6QxDMugH2Pywvh2FuglX1orXHoZWYIBULZ4SZO6Z96+IyfsiocEWasoYQJBALZ/
onO7BM/+0Oz1osSq1Aps45Yf/0OAmW0mITDyIZR3IkJjvSEf+D3j5wHzqn91lmC1
QMFOpoO+ZBA7asjfuXUCQGmHgpC0BuD4S1QlcF0nrVHTG7Y8KZ18s9qPJS3csuGf
or10mrNRF3tyGy8e/sw88a74Q/6v/PgChZHmq6QjOOU=
-----END RSA PRIVATE KEY-----
""")
self.assertEqual('text/pem/rsa-private',
filetype.FileTypeDecoder.DecodeFile(rsa_key))
def testBinaryELFFiles(self):
"""Test decoding ELF files."""
liba_so = os.path.join(self.tempdir, 'liba.so')
unittest_lib.BuildELF(liba_so, ['func_a'])
self.assertEqual('binary/elf/dynamic-so',
filetype.FileTypeDecoder.DecodeFile(liba_so))
prog = os.path.join(self.tempdir, 'prog')
unittest_lib.BuildELF(prog,
undefined_symbols=['func_a'],
used_libs=['a'],
executable=True)
self.assertEqual('binary/elf/dynamic-bin',
filetype.FileTypeDecoder.DecodeFile(prog))
prog_static = os.path.join(self.tempdir, 'prog_static')
unittest_lib.BuildELF(prog_static, executable=True, static=True)
self.assertEqual('binary/elf/static',
filetype.FileTypeDecoder.DecodeFile(prog_static))
def testBinaryCompressedFiles(self):
"""Test decoding compressed files."""
compressed = os.path.join(self.tempdir, 'compressed')
# `echo hola | gzip -9`
osutils.WriteFile(compressed,
b'\x1f\x8b\x08\x00<\xce\x07T\x02\x03\xcb\xc8\xcfI\xe4\x02'
b'\x00x\xad\xdb\xd1\x05\x00\x00\x00', mode='wb')
self.assertEqual('binary/compressed/gzip',
filetype.FileTypeDecoder.DecodeFile(compressed))
# `echo hola | bzip2 -9`
osutils.WriteFile(compressed,
b'BZh91AY&SY\xfa\xd4\xdb5\x00\x00\x01A\x00\x00\x10 D\xa0'
b'\x00!\x83A\x9a\t\xa8qw$S\x85\t\x0f\xadM\xb3P',
mode='wb')
self.assertEqual('binary/compressed/bzip2',
filetype.FileTypeDecoder.DecodeFile(compressed))
# `echo hola | xz -9`
osutils.WriteFile(
compressed,
b'\xfd7zXZ\x00\x00\x04\xe6\xd6\xb4F\x02\x00!\x01\x16\x00\x00\x00t/\xe5'
b'\xa3\x01\x00\x04hola\n\x00\x00\x00\x00\xdd\xb0\x00\xac6w~\x9d\x00\x01'
b'\x1d\x05\xb8-\x80\xaf\x1f\xb6\xf3}\x01\x00\x00\x00\x00\x04YZ',
mode='wb')
self.assertEqual('binary/compressed/xz',
filetype.FileTypeDecoder.DecodeFile(compressed))
def testBinaryMiscFiles(self):
"""Test for various binary file formats."""
# A timezone file.
some_timezone = os.path.join(self.tempdir, 'some_timezone')
osutils.WriteFile(
some_timezone,
b'TZif2\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x01\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00UTC\x00\x00\x00TZif2'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x01\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00UTC\x00\x00\x00\nUTC0\n',
mode='wb')
self.assertEqual('binary/tzfile',
filetype.FileTypeDecoder.DecodeFile(some_timezone))
# A x86 boot sector with just nops.
bootsec = os.path.join(self.tempdir, 'bootsec')
osutils.WriteFile(bootsec, b'\x90' * 510 + b'\x55\xaa', mode='wb')
self.assertEqual('binary/bootsector/x86',
filetype.FileTypeDecoder.DecodeFile(bootsec))
| 43.154812 | 80 | 0.702055 |
c050eb32a8ccd103a253d96b636c3daf5ab377f8 | 15,450 | py | Python | jgem/specific.py | kensugino/JUGEMu | 3ebf19c96e41f1c90d63d772fd4c9c5cc3d6886f | [
"MIT"
] | null | null | null | jgem/specific.py | kensugino/JUGEMu | 3ebf19c96e41f1c90d63d772fd4c9c5cc3d6886f | [
"MIT"
] | null | null | null | jgem/specific.py | kensugino/JUGEMu | 3ebf19c96e41f1c90d63d772fd4c9c5cc3d6886f | [
"MIT"
] | null | null | null | """
.. module:: assembler
:synopsis: assemble genes from RNASeq data (normalized genome coverage (bigwig) and junctions)
.. moduleauthor:: Ken Sugino <ken.sugino@gmail.com>
"""
# system imports
import gzip
import os
from functools import reduce
from operator import iadd, iand
from collections import Counter
from itertools import repeat
import logging
logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)
# 3rd party imports
import pandas as PD
import numpy as N
import matplotlib.pyplot as P
# library imports
from jgem import utils as UT
# class
from ponder import plotutils as PU
def make_dict(df,f1,f2):
dic = {}
for k,v in df[[f1,f2]].values:
dic.setdefault(k,set()).add(v)
dic = {k:list(dic[k]) for k in dic}
return dic
class SpecificByDM(object):
"""
Args:
si: sample info dataframe
gcov: gene coverage (normalized)
gcovlevel: column name in sample info dataframe containing
gcov column entries (usually name)
maxeth: min maxexpression required (default 1)
        zth1: normed logdiff smaller than this will be ignored for pairs whose minmin < mmth
        zth2: normed logdiff smaller than this will be ignored for pairs whose minmin >= mmth
mmth: threshold to decide a pair is both expressed (min of min)
rdth: threshold for calculating RD (reproducible detectable ratio)
gbed: for annotation
"""
def __init__(self, si, gcov, gbed, gcovlevel='name',maxeth=1,zth1=0.2,zth2=0.6,mmth=1,rdth=0.5):
# first make sure si is limited to gcov
self.snames = snames = [x for x in si[gcovlevel] if x in gcov.columns]
print('#sname={0}, #si={1}, #gcov={2}'.format(len(snames),len(si),gcov.shape[1]))
self.si = si[si[gcovlevel].isin(snames)].copy()
self.gcov = gcov[snames] # use intersection
self.gcovlevel = gcovlevel # sampleinfo column name for gcov columns
self.gbed = gbed
self.maxeth = maxeth
self.zth1 = zth1
self.zth2 = zth2
self.mmth = mmth
self.rdth = rdth
self.dms = {} # holds dm
def make_dm(self, targetlevel):
"""calculate 2 DMs (logdiff and minmin) at specified level """
# first make gcovlevel <=> targetlevel mapping
si = self.si
gl = self.gcovlevel
gc = self.gcov
ts = si.groupby(targetlevel, sort=False).first().index.values
g2t = UT.df2dict(si, gl, targetlevel)
t2g = make_dict(si, targetlevel, gl)
lgc = N.log2(gc+1)
v0 = lgc.groupby(g2t, axis=1).mean() # target level
maxe = v0.max(axis=1)
gids = maxe[maxe>N.log2(self.maxeth+1)].index.values
v = v0.ix[gids][ts] # restrict to expressed
# do the math in numpy to get normalized logdiff DM
m = v.values
logdiff = N.abs(m[:,:,N.newaxis]-m[:,N.newaxis,:])
maxdiff = logdiff.max(axis=2).max(axis=1)
normdiff = logdiff/maxdiff[:,N.newaxis,N.newaxis] # normalized
dm = PD.Panel(normdiff, v.index, ts, ts)
# calculate minmin DM
gmin = gc.ix[gids].groupby(g2t, axis=1).min()[ts].values
a = gmin[:,:,N.newaxis] # i
b = gmin[:,N.newaxis,:] # j
minmin = N.minimum(a,b)
mm = PD.Panel(minmin, v.index, ts, ts)
self.dms[targetlevel] = dict(ts=ts,g2t=g2t,t2g=t2g,dm=dm,mm=mm,v=v)
def get_snames(self, targetlevel, levelnames):
si = self.si
# [levelnames,...] in targetlevel column (group or cg1) => sample names (name)
return si[si[targetlevel].isin(levelnames)]['name'].values
def calc_one_specific(self, targetlevel, levelnames, gids=None):
"""
Args:
targetlevel: a column name in sampleinfo dataframe
g1: a set of names in the targetlevel column
"""
dm = self.dms[targetlevel]['dm']
mm = self.dms[targetlevel]['mm']
v = self.dms[targetlevel]['v']
if gids is not None:
dm = dm.ix[gids]
mm = mm.ix[gids]
v = v.ix[gids]
c = dm.major_axis
i1 = c.isin(levelnames)
i2 = ~i1
n1 = N.sum(i1)
n2 = N.sum(i2)
print(n1,n2)
mask1 = i1[:,N.newaxis]*i1[N.newaxis,:]+i2[:,N.newaxis]*i2[N.newaxis,:]
mask2 = N.ones(len(i1)) - mask1
sum1 = mask1.sum() - (n1+n2)
sum2 = mask2.sum()
dmv = dm.values
dmv1 = dmv*mask1[N.newaxis,:,:]
idx1 = dmv1<self.zth1
idx2 = dmv1<self.zth2
mmv = mm.values
mmmask = mmv>=self.mmth
idx1 = idx1*(~mmmask)
idx2 = idx2*mmmask
dmv1[idx1] = 0. # ignore those pairs within groups and min of the pair > minminth
dmv1[idx2] = 0.
dmv2 = dmv*mask2[N.newaxis,:,:] # don't apply zeroth to between groups
sc1 = dmv1.sum(axis=2).sum(axis=1)
sc2 = dmv2.sum(axis=2).sum(axis=1)
df = PD.DataFrame({'_gidx':dm.items, 'sc1':sc1,'sc2':sc2,})
df['score'] = (df['sc2']-df['sc1'])/(2.*n1*n2)
df['di1'] = df['sc1']/float(sum1)
df['di2'] = df['sc2']/float(sum2)
df['didiff'] = df['di2']-df['di1']
# calculate rd <=== should do this at sample level!
#v1 = v.ix[df['_gidx'].values]
#df['rd'] = (v1[levelnames]>self.rdth).mean(axis=1).values
#df['gcov'] = v1[levelnames].mean(axis=1).values
#cmpl = [x for x in v1.columns if x not in levelnames]
#df['gcov2'] = v1[cmpl].mean(axis=1).values
gc = self.gcov.ix[df['_gidx'].values] # align
sn = self.get_snames(targetlevel, levelnames) # corresponding samples
df['rd'] = (gc[sn]>self.rdth).mean(axis=1).values
df['gcov'] = gc[sn].mean(axis=1).values
cmpl = [x for x in gc.columns if x not in sn]
df['gcov2'] = gc[cmpl].mean(axis=1).values
df = df.sort_values('score',ascending=False)
self._locals = locals()
return df
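# A rough reading of the score above (illustrative numbers, not from the
# data): with n1=2 target groups and n2=3 others there are 2*n1*n2 = 12
# between-group pairs; a gene whose between-group dissimilarities sum to
# sc2=10 while the within-group sum sc1 stays near 0 scores
# (10-0)/12 ~ 0.83, close to the maximum of a perfectly group-specific
# expression profile.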
def calc_many_specific(self, targetlevel, key2names, scoreth=None, rdratioth=0.6):
"""
Args:
targetlevel: name or cg1
key2names: dict groupname (key) to names in targetlevel
"""
dfs = []
for k, ln in key2names.items():
print('{0}...'.format(k))
df = self.calc_one_specific(targetlevel, ln)
cols = list(df.columns)
if scoreth is not None:
df = df[df['score']>scoreth].copy()
print('scoreth{0}:{1}'.format(scoreth,len(df)))
if rdratioth is not None:
idx1 = (df['gcov']>df['gcov2'])&(df['rd']>rdratioth)
idx2 = (df['gcov']<=df['gcov2'])&((1-df['rd'])>rdratioth)
df = df[idx1|idx2].copy()
print('rdratioth{0}:{1}'.format(rdratioth,len(df)))
df['key'] = k
df = df.sort_values('score',ascending=False)
df['rank'] = N.arange(len(df))
df['id'] = df['key']+'.'+df['rank'].astype(str)
dfs.append(df)
df0 = PD.concat(dfs, ignore_index=True)
g2cg1 = UT.df2dict(self.si, 'group', 'cg1')
df0['region'] = [g2cg1.get(x,x) for x in df0['key']]
df0 = df0[['region','key','id']+cols]
df0 = self.annotate(df0)
return df0
def annotate(self, df):
gbed = self.gbed
gbedcols=['gen9_sym0','glocus','#uexons','#junc','pCSF','p60','gknown']
for c in gbedcols:
df[c] = gbed.ix[df['_gidx']][c].values
df = df.rename(columns={'gen9_sym0':'symbol'}).set_index('_gidx')
idx = df['symbol'].isnull()
df.loc[idx, 'symbol'] = gbed.ix[df[idx].index]['gname']
return df
def plot_dm(self, targetlevel, gid, key2pos, lvmax, title=None, fontsize=6,
ymax=None, plotylim=True, g1=None, ax=None):
if ax is None:
fig, ax = P.subplots(1,1,figsize=(3,4))
# make axes
dmrect = (0.,0.25,1.,0.750) # main
exrect = (0.,0.00,1.,0.145) # expression
sbrect = (0.,0.15,1.,0.090) # legend
args = dict(axisbg='w',frameon=True, xticks=[], yticks=[])
ax_dm = PU.add_subplot_axes(ax,dmrect,args)
ax_sb = PU.add_subplot_axes(ax,sbrect,args)
args['frameon'] = False
ax_ex = PU.add_subplot_axes(ax,exrect,args)
dm1 = self.dms[targetlevel]['dm'].ix[gid]
mm1 = self.dms[targetlevel]['mm'].ix[gid]
v1 = self.dms[targetlevel]['v'].ix[gid]
gc1 = self.gcov.ix[gid]
cols = [x[1] for x in sorted([(key2pos[x],x) for x in dm1.columns])]
dm1 = dm1[cols].ix[cols]
mm1 = mm1[cols].ix[cols]
v1 = v1.ix[cols]
snames = [y for x in cols for y in self.get_snames(targetlevel, [x])]
gc1 = gc1.ix[snames]
# main
dmv = dm1.values.copy()
if g1 is not None:
mmv = mm1.values
c = dm1.columns
i1 = c.isin(g1)
i2 = ~i1
mask = i1[:,N.newaxis]*i1[N.newaxis,:]+i2[:,N.newaxis]*i2[N.newaxis,:]
th1 = dmv.max()*self.zth1
th2 = dmv.max()*self.zth2
idx1 = (mmv<self.mmth)&(dmv<th1)
idx2 = (mmv>=self.mmth)&(dmv<th2)
dmv[idx1*mask] = 0.
dmv[idx2*mask] = 0.
print(N.sum(i1),N.sum(i2))
ax_dm.imshow(dmv, aspect='auto', interpolation='none', cmap='gray')
# expression
#ax_ex.plot(v1.values, '-', color='k', lw=0.3)
ax_ex.plot(gc1.values, '-', color='k', lw=0.3)
ymin = min(0, N.floor(N.min(v1)))
if ymax is None:
ymax = max(1, N.ceil(N.max(v1)))
ax_ex.set_ylim((ymin,ymax))
xmax = ax_ex.get_xlim()[1]
if plotylim:
ax_ex.text(xmax,ymax,'{:.0f}'.format(ymax), va='top',ha='left',fontsize=fontsize-2)
ax_ex.text(xmax,ymin,'{:.0f}'.format(ymin), va='bottom',ha='left',fontsize=fontsize-2)
# legend
self._plot_legend(cols, key2pos, lvmax, ax_sb)
# title
if title is None:
title='gid:{0}'.format(gid)
ax.set_title(title,fontsize=fontsize)
P.setp(ax, xticks=[], yticks=[], frame_on=False)
return ax
def plot_all_gcov(self, df, targetlevel, key2pos, key2level, lvmax,
figsize=(9,6), normrow=True, proportional=False):
# df: output of calc_many_specific
gcov = self.dms[targetlevel]['v'] # log2(ug1kn+1).groupby(...)
cols = [x[1] for x in sorted([(key2pos[x],x) for x in gcov.columns])]
gcov = gcov[cols]
# make sure highest score is selected
if df.index.name == '_gidx':
df = df.reset_index()
df = df.sort_values(['_gidx','score'],ascending=False)
df = df.groupby('_gidx', sort=False).first()
# set pos & level
df['_pos_'] = [key2pos[x] for x in df['key']]
df['_lvl_'] = [key2level[x] for x in df['key']]
# make axes
fig,ax = P.subplots(1,1,figsize=figsize)
args = dict(axisbg='w', frameon=True, xticks=[], yticks=[])
axl = PU.add_subplot_axes(ax, [0,0,0.9,0.07],args) # legends at the bottom
if proportional:
nsub = N.array([N.sum(df['_lvl_']==3-i) for i in range(3)])
hs = 0.9*nsub/N.sum(nsub)
ys = [0.08, 0.08+0.01+hs[0], 0.08+0.02+hs[0]+hs[1]]
else:
hs = [0.3 for i in range(3)]
ys = [0.08+0.31*i for i in range(3)]
axm = [PU.add_subplot_axes(ax, [0.00,ys[i],0.90,hs[i]],args) for i in range(3)]
axu = [PU.add_subplot_axes(ax, [0.91,ys[i],0.03,hs[i]],args) for i in range(3)]
axc = [PU.add_subplot_axes(ax, [0.96,ys[i],0.03,hs[i]/2.],args) for i in range(3)]
def _plot_main(sub,gcov,ax):
gids = [x for x in sub.index.values]
m = gcov.ix[gids].values
mmin = m.min(axis=1)
mmax = m.max(axis=1)
#mn = (m-mmin[:,N.newaxis])/(mmax-mmin)[:,N.newaxis]
if normrow:
mn = m/mmax[:,N.newaxis]
zmin,zmax = 0,1
else:
mn = m
zmin = 0 #mmin.min()
zmax = mmax.max()
ax.imshow(mn, aspect='auto', interpolation='nearest', cmap='gray_r')
ax.text(0,0,str(len(sub)),horizontalalignment='right', transform=ax.transAxes)
#ax.text(0,1,'0',horizontalalignment='right', transform=ax.transAxes)
return zmin,zmax
def _plot_ukbar(sub, ax):
uk = N.array([[x.startswith('u.') for x in sub['gknown']]]).T
ax.imshow(uk, aspect='auto', interpolation='nearest', cmap='Reds')
def _plot_cbar(zmin,zmax,ax):
cb = N.array([N.arange(0,zmax,zmax/32.)[::-1]]).T
ax.imshow(cb,aspect='auto', interpolation='nearest', cmap='gray_r', vmax=zmax)
ax.text(1,32,str(zmin))
ax.text(1,0,str(zmax))
# main&known/unknwon&colorbar
for i in range(3):
sub = df[df['_lvl_']==3-i].sort_values('_pos_')
zmin,zmax = _plot_main(sub, gcov, axm[i])
_plot_ukbar(sub, axu[i])
if normrow:
if i==0:
_plot_cbar(zmin, zmax, axc[i])
else:
P.setp(axc[i], xticks=[], yticks=[], frame_on=False)
else:
_plot_cbar(zmin, zmax, axc[i])
# legends
self._plot_legend(cols, key2pos, lvmax, axl)
P.setp(ax, xticks=[], yticks=[], frame_on=False)
return fig
def _plot_legend(self, cols, key2pos, lvmax, ax):
si = self.si
h = 1./3
args = dict(axisbg='w', frameon=False, xticks=[], yticks=[])
#args['frameon'] = False
ax_sb = [PU.add_subplot_axes(ax, [0,h*i,1,h],args) for i in range(3)]
#flds = ['gtrans3','region','type']
clrs = ['jet','nipy_spectral','gist_rainbow']
#for i,(a,fld,cm) in enumerate(zip(ax_sb,flds,clrs)):
for i,(a,cm) in enumerate(zip(ax_sb,clrs)):
m = N.array([[key2pos[x][2-i] for x in cols]])
vmax = lvmax[2-i]
a.imshow(m, aspect='auto', interpolation='none', cmap=cm, vmin=0, vmax=vmax)
CLS = dict(gtrans3='jet',region='nipy_spectral',type='gist_rainbow')
FLDS = ['gtrans3','region','type']
def make_legend_bars(orders,flds=FLDS, cls=CLS):
nfld = len(flds)
nums = N.array([len(orders[x]) for x in flds])
tot = N.sum(nums)
spaces = 0.01*(nfld-1)
dy = (1-spaces)/tot
hs = [dy*x for x in nums]
ys = [0.01*i+N.sum(hs[:i]) for i in range(nfld)]
fig, ax = P.subplots(1,1,figsize=(3,0.25*tot+0.1*(nfld-1)))
args = dict(axisbg='w',frameon=True, xticks=[], yticks=[])
for i in range(nfld):
axi = PU.add_subplot_axes(ax, [0,ys[i],0.15, hs[i]], args)
m = N.array([range(nums[i])]).T
axi.imshow(m, aspect='auto', interpolation='nearest', cmap=cls[flds[i]], vmin=0, vmax=nums[i])
axi.set_yticks(range(nums[i]))
axi.set_yticklabels(orders[flds[i]])
axi.tick_params(axis='both', which='both',length=0)
P.setp(ax, frame_on=False, xticks=[], yticks=[])
return fig | 39.514066 | 102 | 0.543819 |
8a562dc169d196dd76d30b9b0de9ce4aedecef0d | 761 | py | Python | hubblestack/matchers/grain_match.py | buddwm/hubble | b384ee48556ca144ae6f09dd0b45db29288e5293 | [
"Apache-2.0"
] | 363 | 2017-01-10T22:02:47.000Z | 2022-03-21T10:44:40.000Z | hubblestack/matchers/grain_match.py | buddwm/hubble | b384ee48556ca144ae6f09dd0b45db29288e5293 | [
"Apache-2.0"
] | 439 | 2017-01-12T22:39:42.000Z | 2021-10-11T18:43:28.000Z | hubblestack/matchers/grain_match.py | buddwm/hubble | b384ee48556ca144ae6f09dd0b45db29288e5293 | [
"Apache-2.0"
] | 138 | 2017-01-05T22:10:59.000Z | 2021-09-01T14:35:00.000Z | # -*- coding: utf-8 -*-
"""
This is the default grains matcher function.
"""
import logging
import hubblestack.utils.data # pylint: disable=3rd-party-module-not-gated
from hubblestack.defaults import ( # pylint: disable=3rd-party-module-not-gated
DEFAULT_TARGET_DELIM,
)
log = logging.getLogger(__name__)
def match(tgt, delimiter=DEFAULT_TARGET_DELIM, opts=None):
"""
Reads in the grains glob match
"""
if not opts:
opts = __opts__
log.debug("grains target: %s", tgt)
if delimiter not in tgt:
log.error(
"Got insufficient arguments for grains match " "statement from master"
)
return False
return hubblestack.utils.data.subdict_match(opts["grains"], tgt, delimiter=delimiter)
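# A minimal sketch of the expected target format (``fake_opts`` is a
# stand-in, not a real configuration); the part before the delimiter names
# the grain and the part after it is a glob matched against its value:
# >>> fake_opts = {'grains': {'os': 'Ubuntu'}}
# >>> match('os:Ubu*', opts=fake_opts)
# True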
| 24.548387 | 89 | 0.676741 |
c17dc1a93121d6f79d45417c38785923b43619a8 | 35,735 | py | Python | skimage/filters/thresholding.py | smheidrich/scikit-image | e9cf8b850c4c2800cc221be6f1dfff6a2a32a4eb | [
"BSD-3-Clause"
] | 1 | 2019-02-17T23:16:44.000Z | 2019-02-17T23:16:44.000Z | skimage/filters/thresholding.py | smheidrich/scikit-image | e9cf8b850c4c2800cc221be6f1dfff6a2a32a4eb | [
"BSD-3-Clause"
] | null | null | null | skimage/filters/thresholding.py | smheidrich/scikit-image | e9cf8b850c4c2800cc221be6f1dfff6a2a32a4eb | [
"BSD-3-Clause"
] | 1 | 2022-02-05T07:52:54.000Z | 2022-02-05T07:52:54.000Z | import itertools
import math
import numpy as np
from scipy import ndimage as ndi
from collections import OrderedDict
from collections.abc import Iterable
from ..exposure import histogram
from .._shared.utils import assert_nD, warn, deprecated
from ..transform import integral_image
from ..util import crop, dtype_limits
__all__ = ['try_all_threshold',
'threshold_otsu',
'threshold_yen',
'threshold_isodata',
'threshold_li',
'threshold_local',
'threshold_minimum',
'threshold_mean',
'threshold_niblack',
'threshold_sauvola',
'threshold_triangle',
'apply_hysteresis_threshold']
def _try_all(image, methods=None, figsize=None, num_cols=2, verbose=True):
"""Returns a figure comparing the outputs of different methods.
Parameters
----------
image : (N, M) ndarray
Input image.
methods : dict, optional
Names and associated functions.
Functions must take and return an image.
figsize : tuple, optional
Figure size (in inches).
num_cols : int, optional
Number of columns.
verbose : bool, optional
Print function name for each method.
Returns
-------
fig, ax : tuple
Matplotlib figure and axes.
"""
from matplotlib import pyplot as plt
# Handle default value
methods = methods or {}
num_rows = math.ceil((len(methods) + 1.) / num_cols)
fig, ax = plt.subplots(num_rows, num_cols, figsize=figsize,
sharex=True, sharey=True)
ax = ax.ravel()
ax[0].imshow(image, cmap=plt.cm.gray)
ax[0].set_title('Original')
i = 1
for name, func in methods.items():
ax[i].set_title(name)
try:
ax[i].imshow(func(image), cmap=plt.cm.gray)
except Exception as e:
ax[i].text(0.5, 0.5, "%s" % type(e).__name__,
ha="center", va="center", transform=ax[i].transAxes)
i += 1
if verbose:
print(func.__orifunc__)
for a in ax:
a.axis('off')
fig.tight_layout()
return fig, ax
def try_all_threshold(image, figsize=(8, 5), verbose=True):
"""Returns a figure comparing the outputs of different thresholding methods.
Parameters
----------
image : (N, M) ndarray
Input image.
figsize : tuple, optional
Figure size (in inches).
verbose : bool, optional
Print function name for each method.
Returns
-------
fig, ax : tuple
Matplotlib figure and axes.
Notes
-----
The following algorithms are used:
* isodata
* li
* mean
* minimum
* otsu
* triangle
* yen
Examples
--------
>>> from skimage.data import text
>>> fig, ax = try_all_threshold(text(), figsize=(10, 6), verbose=False)
"""
def thresh(func):
"""
A wrapper function to return a thresholded image.
"""
def wrapper(im):
return im > func(im)
try:
wrapper.__orifunc__ = func.__orifunc__
except AttributeError:
wrapper.__orifunc__ = func.__module__ + '.' + func.__name__
return wrapper
# Global algorithms.
methods = OrderedDict({'Isodata': thresh(threshold_isodata),
'Li': thresh(threshold_li),
'Mean': thresh(threshold_mean),
'Minimum': thresh(threshold_minimum),
'Otsu': thresh(threshold_otsu),
'Triangle': thresh(threshold_triangle),
'Yen': thresh(threshold_yen)})
return _try_all(image, figsize=figsize,
methods=methods, verbose=verbose)
def threshold_local(image, block_size, method='gaussian', offset=0,
mode='reflect', param=None, cval=0):
"""Compute a threshold mask image based on local pixel neighborhood.
Also known as adaptive or dynamic thresholding. The threshold value is
the weighted mean for the local neighborhood of a pixel subtracted by a
constant. Alternatively the threshold can be determined dynamically by a
given function, using the 'generic' method.
Parameters
----------
image : (N, M) ndarray
Input image.
block_size : int
Odd size of pixel neighborhood which is used to calculate the
threshold value (e.g. 3, 5, 7, ..., 21, ...).
method : {'generic', 'gaussian', 'mean', 'median'}, optional
Method used to determine adaptive threshold for local neighbourhood in
weighted mean image.
* 'generic': use custom function (see `param` parameter)
* 'gaussian': apply gaussian filter (see `param` parameter for custom\
sigma value)
* 'mean': apply arithmetic mean filter
* 'median': apply median rank filter
By default the 'gaussian' method is used.
offset : float, optional
Constant subtracted from weighted mean of neighborhood to calculate
the local threshold value. Default offset is 0.
mode : {'reflect', 'constant', 'nearest', 'mirror', 'wrap'}, optional
The mode parameter determines how the array borders are handled, where
cval is the value when mode is equal to 'constant'.
Default is 'reflect'.
param : {int, function}, optional
Either specify sigma for 'gaussian' method or function object for
'generic' method. This functions takes the flat array of local
neighbourhood as a single argument and returns the calculated
threshold for the centre pixel.
cval : float, optional
Value to fill past edges of input if mode is 'constant'.
Returns
-------
threshold : (N, M) ndarray
Threshold image. All pixels in the input image higher than the
corresponding pixel in the threshold image are considered foreground.
References
----------
.. [1] https://docs.opencv.org/modules/imgproc/doc/miscellaneous_transformations.html?highlight=threshold#adaptivethreshold
Examples
--------
>>> from skimage.data import camera
>>> image = camera()[:50, :50]
>>> binary_image1 = image > threshold_local(image, 15, 'mean')
>>> func = lambda arr: arr.mean()
>>> binary_image2 = image > threshold_local(image, 15, 'generic',
... param=func)
"""
if block_size % 2 == 0:
raise ValueError("The kwarg ``block_size`` must be odd! Given "
"``block_size`` {0} is even.".format(block_size))
assert_nD(image, 2)
thresh_image = np.zeros(image.shape, 'double')
if method == 'generic':
ndi.generic_filter(image, param, block_size,
output=thresh_image, mode=mode, cval=cval)
elif method == 'gaussian':
if param is None:
# automatically determine sigma which covers > 99% of distribution
sigma = (block_size - 1) / 6.0
else:
sigma = param
ndi.gaussian_filter(image, sigma, output=thresh_image, mode=mode,
cval=cval)
elif method == 'mean':
mask = 1. / block_size * np.ones((block_size,))
# separation of filters to speedup convolution
ndi.convolve1d(image, mask, axis=0, output=thresh_image, mode=mode,
cval=cval)
ndi.convolve1d(thresh_image, mask, axis=1, output=thresh_image,
mode=mode, cval=cval)
elif method == 'median':
ndi.median_filter(image, block_size, output=thresh_image, mode=mode,
cval=cval)
else:
raise ValueError("Invalid method specified. Please use `generic`, "
"`gaussian`, `mean`, or `median`.")
return thresh_image - offset
def threshold_otsu(image, nbins=256):
"""Return threshold value based on Otsu's method.
Parameters
----------
image : (N, M) ndarray
Grayscale input image.
nbins : int, optional
Number of bins used to calculate histogram. This value is ignored for
integer arrays.
Returns
-------
threshold : float
Upper threshold value. All pixels with an intensity higher than
this value are assumed to be foreground.
Raises
------
ValueError
If `image` only contains a single grayscale value.
References
----------
.. [1] Wikipedia, https://en.wikipedia.org/wiki/Otsu's_Method
Examples
--------
>>> from skimage.data import camera
>>> image = camera()
>>> thresh = threshold_otsu(image)
>>> binary = image <= thresh
Notes
-----
The input image must be grayscale.
"""
if len(image.shape) > 2 and image.shape[-1] in (3, 4):
msg = "threshold_otsu is expected to work correctly only for " \
"grayscale images; image shape {0} looks like an RGB image"
warn(msg.format(image.shape))
# Check if the image is multi-colored or not
if image.min() == image.max():
raise ValueError("threshold_otsu is expected to work with images "
"having more than one color. The input image seems "
"to have just one color {0}.".format(image.min()))
hist, bin_centers = histogram(image.ravel(), nbins, source_range='image')
hist = hist.astype(float)
# class probabilities for all possible thresholds
weight1 = np.cumsum(hist)
weight2 = np.cumsum(hist[::-1])[::-1]
# class means for all possible thresholds
mean1 = np.cumsum(hist * bin_centers) / weight1
mean2 = (np.cumsum((hist * bin_centers)[::-1]) / weight2[::-1])[::-1]
# Clip ends to align class 1 and class 2 variables:
# The last value of `weight1`/`mean1` should pair with zero values in
# `weight2`/`mean2`, which do not exist.
variance12 = weight1[:-1] * weight2[1:] * (mean1[:-1] - mean2[1:]) ** 2
idx = np.argmax(variance12)
threshold = bin_centers[:-1][idx]
return threshold
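# A compact statement of what the code above maximises (standard Otsu
# notation, nothing beyond the arrays already computed): the between-class
# variance
# sigma_b^2(t) = weight1(t) * weight2(t) * (mean1(t) - mean2(t))**2,
# which ``variance12`` evaluates for every candidate threshold t at once.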
def threshold_yen(image, nbins=256):
"""Return threshold value based on Yen's method.
Parameters
----------
image : (N, M) ndarray
Input image.
nbins : int, optional
Number of bins used to calculate histogram. This value is ignored for
integer arrays.
Returns
-------
threshold : float
Upper threshold value. All pixels with an intensity higher than
this value are assumed to be foreground.
References
----------
.. [1] Yen J.C., Chang F.J., and Chang S. (1995) "A New Criterion
for Automatic Multilevel Thresholding" IEEE Trans. on Image
Processing, 4(3): 370-378. :DOI:`10.1109/83.366472`
.. [2] Sezgin M. and Sankur B. (2004) "Survey over Image Thresholding
Techniques and Quantitative Performance Evaluation" Journal of
Electronic Imaging, 13(1): 146-165, :DOI:`10.1117/1.1631315`
http://www.busim.ee.boun.edu.tr/~sankur/SankurFolder/Threshold_survey.pdf
.. [3] ImageJ AutoThresholder code, http://fiji.sc/wiki/index.php/Auto_Threshold
Examples
--------
>>> from skimage.data import camera
>>> image = camera()
>>> thresh = threshold_yen(image)
>>> binary = image <= thresh
"""
hist, bin_centers = histogram(image.ravel(), nbins, source_range='image')
# On blank images (e.g. filled with 0) with int dtype, `histogram()`
# returns `bin_centers` containing only one value. Speed up with it.
if bin_centers.size == 1:
return bin_centers[0]
# Calculate probability mass function
pmf = hist.astype(np.float32) / hist.sum()
P1 = np.cumsum(pmf) # Cumulative normalized histogram
P1_sq = np.cumsum(pmf ** 2)
# Get cumsum calculated from end of squared array:
P2_sq = np.cumsum(pmf[::-1] ** 2)[::-1]
# P2_sq indices are shifted by +1. Presumably, using P1[:-1] helps avoid
# '-inf' in crit. The ImageJ Yen implementation replaces those values by zero.
crit = np.log(((P1_sq[:-1] * P2_sq[1:]) ** -1) *
(P1[:-1] * (1.0 - P1[:-1])) ** 2)
return bin_centers[crit.argmax()]
def threshold_isodata(image, nbins=256, return_all=False):
"""Return threshold value(s) based on ISODATA method.
Histogram-based threshold, known as Ridler-Calvard method or inter-means.
Threshold values returned satisfy the following equality::
threshold = (image[image <= threshold].mean() +
image[image > threshold].mean()) / 2.0
That is, returned thresholds are intensities that separate the image into
two groups of pixels, where the threshold intensity is midway between the
mean intensities of these groups.
For integer images, the above equality holds to within one; for floating-
point images, the equality holds to within the histogram bin-width.
Parameters
----------
image : (N, M) ndarray
Input image.
nbins : int, optional
Number of bins used to calculate histogram. This value is ignored for
integer arrays.
return_all: bool, optional
If False (default), return only the lowest threshold that satisfies
the above equality. If True, return all valid thresholds.
Returns
-------
threshold : float or int or array
Threshold value(s).
References
----------
.. [1] Ridler, TW & Calvard, S (1978), "Picture thresholding using an
iterative selection method"
IEEE Transactions on Systems, Man and Cybernetics 8: 630-632,
:DOI:`10.1109/TSMC.1978.4310039`
.. [2] Sezgin M. and Sankur B. (2004) "Survey over Image Thresholding
Techniques and Quantitative Performance Evaluation" Journal of
Electronic Imaging, 13(1): 146-165,
http://www.busim.ee.boun.edu.tr/~sankur/SankurFolder/Threshold_survey.pdf
:DOI:`10.1117/1.1631315`
.. [3] ImageJ AutoThresholder code,
http://fiji.sc/wiki/index.php/Auto_Threshold
Examples
--------
>>> from skimage.data import coins
>>> image = coins()
>>> thresh = threshold_isodata(image)
>>> binary = image > thresh
"""
hist, bin_centers = histogram(image.ravel(), nbins, source_range='image')
# image only contains one unique value
if len(bin_centers) == 1:
if return_all:
return bin_centers
else:
return bin_centers[0]
hist = hist.astype(np.float32)
# csuml and csumh contain the count of pixels in that bin or lower, and
# in all bins strictly higher than that bin, respectively
csuml = np.cumsum(hist)
csumh = np.cumsum(hist[::-1])[::-1] - hist
# intensity_sum contains the total pixel intensity from each bin
intensity_sum = hist * bin_centers
# l and h contain average value of all pixels in that bin or lower, and
# in all bins strictly higher than that bin, respectively.
# Note that since exp.histogram does not include empty bins at the low or
# high end of the range, csuml and csumh are strictly > 0, except in the
# last bin of csumh, which is zero by construction.
# So no worries about division by zero in the following lines, except
# for the last bin, but we can ignore that because no valid threshold
# can be in the top bin. So we just patch up csumh[-1] to not cause 0/0
# errors.
csumh[-1] = 1
l = np.cumsum(intensity_sum) / csuml
h = (np.cumsum(intensity_sum[::-1])[::-1] - intensity_sum) / csumh
# isodata finds threshold values that meet the criterion t = (l + h)/2
# where l is the mean of all pixels <= t and h is the mean of all pixels
# > t, as calculated above. So we are looking for places where
# (l + h) / 2 equals the intensity value for which those l and h figures
# were calculated -- which is, of course, the histogram bin centers.
# We only require this equality to be within the precision of the bin
# width, of course.
all_mean = (l + h) / 2.0
bin_width = bin_centers[1] - bin_centers[0]
# Look only at thresholds that are below the actual all_mean value,
# for consistency with the threshold being included in the lower pixel
# group. Otherwise can get thresholds that are not actually fixed-points
# of the isodata algorithm. For float images, this matters less, since
# there really can't be any guarantees anymore anyway.
distances = all_mean - bin_centers
thresholds = bin_centers[(distances >= 0) & (distances < bin_width)]
if return_all:
return thresholds
else:
return thresholds[0]
# Computing a histogram using np.histogram on a uint8 image with bins=256
# doesn't work and results in aliasing problems. We use a fully specified set
# of bins to ensure that each uint8 value falls into its own bin.
_DEFAULT_ENTROPY_BINS = tuple(np.arange(-0.5, 255.51, 1))
def _cross_entropy(image, threshold, bins=_DEFAULT_ENTROPY_BINS):
"""Compute cross-entropy between distributions above and below a threshold.
Parameters
----------
image : array
The input array of values.
threshold : float
The value dividing the foreground and background in ``image``.
bins : int or array of float, optional
The number of bins or the bin edges. (Any valid value to the ``bins``
argument of ``np.histogram`` will work here.) For an exact calculation,
each unique value should have its own bin. The default value for bins
ensures exact handling of uint8 images: ``bins=256`` results in
aliasing problems due to bin width not being equal to 1.
Returns
-------
nu : float
The cross-entropy target value as defined in [1]_.
Notes
-----
See Li and Lee, 1993 [1]_; this is the objective function `threshold_li`
minimizes. This function can be improved but this implementation most
closely matches equation 8 in [1]_ and equations 1-3 in [2]_.
References
----------
.. [1] Li C.H. and Lee C.K. (1993) "Minimum Cross Entropy Thresholding"
Pattern Recognition, 26(4): 617-625
:DOI:`10.1016/0031-3203(93)90115-D`
.. [2] Li C.H. and Tam P.K.S. (1998) "An Iterative Algorithm for Minimum
Cross Entropy Thresholding" Pattern Recognition Letters, 18(8): 771-776
:DOI:`10.1016/S0167-8655(98)00057-9`
"""
histogram, bin_edges = np.histogram(image, bins=bins, density=True)
bin_centers = np.convolve(bin_edges, [0.5, 0.5], mode='valid')
t = np.flatnonzero(bin_centers > threshold)[0]
m0a = np.sum(histogram[:t]) # 0th moment, background
m0b = np.sum(histogram[t:])
m1a = np.sum(histogram[:t] * bin_centers[:t]) # 1st moment, background
m1b = np.sum(histogram[t:] * bin_centers[t:])
mua = m1a / m0a # mean value, background
mub = m1b / m0b
nu = -m1a * np.log(mua) - m1b * np.log(mub)
return nu
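# A minimal sketch of the objective (illustrative values, not from the test
# suite): for a tri-level image, splitting off the low-intensity population
# yields a smaller (better) cross-entropy here than splitting off the high
# one:
# >>> img = np.array([10] * 40 + [90] * 20 + [200] * 40, dtype=np.uint8)
# >>> _cross_entropy(img, 50) < _cross_entropy(img, 150)
# True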
def threshold_li(image, *, tolerance=None):
"""Compute threshold value by Li's iterative Minimum Cross Entropy method.
Parameters
----------
image : ndarray
Input image.
tolerance : float, optional
Finish the computation when the change in the threshold in an iteration
is less than this value. By default, this is half of the range of the
input image, divided by 256.
Returns
-------
threshold : float
Upper threshold value. All pixels with an intensity higher than
this value are assumed to be foreground.
References
----------
.. [1] Li C.H. and Lee C.K. (1993) "Minimum Cross Entropy Thresholding"
Pattern Recognition, 26(4): 617-625
:DOI:`10.1016/0031-3203(93)90115-D`
.. [2] Li C.H. and Tam P.K.S. (1998) "An Iterative Algorithm for Minimum
Cross Entropy Thresholding" Pattern Recognition Letters, 18(8): 771-776
:DOI:`10.1016/S0167-8655(98)00057-9`
.. [3] Sezgin M. and Sankur B. (2004) "Survey over Image Thresholding
Techniques and Quantitative Performance Evaluation" Journal of
Electronic Imaging, 13(1): 146-165
:DOI:`10.1117/1.1631315`
.. [4] ImageJ AutoThresholder code, http://fiji.sc/wiki/index.php/Auto_Threshold
Examples
--------
>>> from skimage.data import camera
>>> image = camera()
>>> thresh = threshold_li(image)
>>> binary = image > thresh
"""
# Remove nan:
image = image[~np.isnan(image)]
if image.size == 0:
return np.nan
# Make sure image has more than one value; otherwise, return that value
# This works even for np.inf
if np.all(image == image.flat[0]):
return image.flat[0]
# At this point, the image only contains np.inf, -np.inf, or valid numbers
image = image[np.isfinite(image)]
# if there are no finite values in the image, return 0. This is because
# at this point we *know* that there are *both* inf and -inf values,
# because inf == inf evaluates to True. We might as well separate them.
if image.size == 0:
return 0.
# Li's algorithm requires positive image (because of log(mean))
image_min = np.min(image)
image -= image_min
image_range = np.max(image)
tolerance = tolerance or 0.5 * image_range / 256
# Initial estimate
t_curr = np.mean(image)
t_next = t_curr + 2 * tolerance
# Stop the iterations when the difference between the
# new and old threshold values is less than the tolerance
while abs(t_next - t_curr) > tolerance:
t_curr = t_next
foreground = (image > t_curr)
mean_fore = np.mean(image[foreground])
mean_back = np.mean(image[~foreground])
t_next = ((mean_back - mean_fore) /
(np.log(mean_back) - np.log(mean_fore)))
threshold = t_next + image_min
return threshold
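# The loop above iterates the fixed-point update of Li & Tam (1998):
# t_{n+1} = (mu_b(t_n) - mu_f(t_n)) / (log(mu_b(t_n)) - log(mu_f(t_n))),
# where mu_b and mu_f are the background and foreground means at the
# current threshold, stopping once successive estimates differ by less
# than ``tolerance``.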
def threshold_minimum(image, nbins=256, max_iter=10000):
"""Return threshold value based on minimum method.
The histogram of the input `image` is computed and smoothed until there are
only two maxima. Then the minimum in between is the threshold value.
Parameters
----------
image : (M, N) ndarray
Input image.
nbins : int, optional
Number of bins used to calculate histogram. This value is ignored for
integer arrays.
max_iter: int, optional
Maximum number of iterations to smooth the histogram.
Returns
-------
threshold : float
Upper threshold value. All pixels with an intensity higher than
this value are assumed to be foreground.
Raises
------
RuntimeError
If unable to find two local maxima in the histogram or if the
smoothing takes more than 1e4 iterations.
References
----------
.. [1] C. A. Glasbey, "An analysis of histogram-based thresholding
algorithms," CVGIP: Graphical Models and Image Processing,
vol. 55, pp. 532-537, 1993.
.. [2] Prewitt, JMS & Mendelsohn, ML (1966), "The analysis of cell
images", Annals of the New York Academy of Sciences 128: 1035-1053
:DOI:`10.1111/j.1749-6632.1965.tb11715.x`
Examples
--------
>>> from skimage.data import camera
>>> image = camera()
>>> thresh = threshold_minimum(image)
>>> binary = image > thresh
"""
def find_local_maxima_idx(hist):
# We can't use scipy.signal.argrelmax
# as it fails on plateaus
maximum_idxs = list()
direction = 1
for i in range(hist.shape[0] - 1):
if direction > 0:
if hist[i + 1] < hist[i]:
direction = -1
maximum_idxs.append(i)
else:
if hist[i + 1] > hist[i]:
direction = 1
return maximum_idxs
hist, bin_centers = histogram(image.ravel(), nbins, source_range='image')
smooth_hist = np.copy(hist).astype(np.float64)
for counter in range(max_iter):
smooth_hist = ndi.uniform_filter1d(smooth_hist, 3)
maximum_idxs = find_local_maxima_idx(smooth_hist)
if len(maximum_idxs) < 3:
break
if len(maximum_idxs) != 2:
raise RuntimeError('Unable to find two maxima in histogram')
elif counter == max_iter - 1:
raise RuntimeError('Maximum iteration reached for histogram'
'smoothing')
# Find lowest point between the maxima
threshold_idx = np.argmin(smooth_hist[maximum_idxs[0]:maximum_idxs[1] + 1])
return bin_centers[maximum_idxs[0] + threshold_idx]
def threshold_mean(image):
"""Return threshold value based on the mean of grayscale values.
Parameters
----------
image : (N, M[, ..., P]) ndarray
Grayscale input image.
Returns
-------
threshold : float
Upper threshold value. All pixels with an intensity higher than
this value are assumed to be foreground.
References
----------
.. [1] C. A. Glasbey, "An analysis of histogram-based thresholding
algorithms," CVGIP: Graphical Models and Image Processing,
vol. 55, pp. 532-537, 1993.
:DOI:`10.1006/cgip.1993.1040`
Examples
--------
>>> from skimage.data import camera
>>> image = camera()
>>> thresh = threshold_mean(image)
>>> binary = image > thresh
"""
return np.mean(image)
def threshold_triangle(image, nbins=256):
"""Return threshold value based on the triangle algorithm.
Parameters
----------
image : (N, M[, ..., P]) ndarray
Grayscale input image.
nbins : int, optional
Number of bins used to calculate histogram. This value is ignored for
integer arrays.
Returns
-------
threshold : float
Upper threshold value. All pixels with an intensity higher than
this value are assumed to be foreground.
References
----------
.. [1] Zack, G. W., Rogers, W. E. and Latt, S. A., 1977,
Automatic Measurement of Sister Chromatid Exchange Frequency,
Journal of Histochemistry and Cytochemistry 25 (7), pp. 741-753
:DOI:`10.1177/25.7.70454`
.. [2] ImageJ AutoThresholder code,
http://fiji.sc/wiki/index.php/Auto_Threshold
Examples
--------
>>> from skimage.data import camera
>>> image = camera()
>>> thresh = threshold_triangle(image)
>>> binary = image > thresh
"""
# nbins is ignored for integer arrays
# so, we recalculate the effective nbins.
hist, bin_centers = histogram(image.ravel(), nbins, source_range='image')
nbins = len(hist)
# Find peak, lowest and highest gray levels.
arg_peak_height = np.argmax(hist)
peak_height = hist[arg_peak_height]
arg_low_level, arg_high_level = np.where(hist>0)[0][[0, -1]]
# Flip is True if left tail is shorter.
flip = arg_peak_height - arg_low_level < arg_high_level - arg_peak_height
if flip:
hist = hist[::-1]
arg_low_level = nbins - arg_high_level - 1
arg_peak_height = nbins - arg_peak_height - 1
# If flip == True, arg_high_level becomes incorrect
# but we don't need it anymore.
del(arg_high_level)
# Set up the coordinate system.
width = arg_peak_height - arg_low_level
x1 = np.arange(width)
y1 = hist[x1 + arg_low_level]
# Normalize.
norm = np.sqrt(peak_height**2 + width**2)
peak_height /= norm
width /= norm
# Maximize the length.
# The ImageJ implementation includes an additional constant when calculating
# the length, but here we omit it as it does not affect the location of the
# minimum.
length = peak_height * x1 - width * y1
arg_level = np.argmax(length) + arg_low_level
if flip:
arg_level = nbins - arg_level - 1
return bin_centers[arg_level]
def _validate_window_size(axis_sizes):
"""Ensure all sizes in ``axis_sizes`` are odd.
Parameters
----------
axis_sizes : iterable of int
Raises
------
ValueError
If any given axis size is even.
"""
for axis_size in axis_sizes:
if axis_size % 2 == 0:
msg = ('Window size for `threshold_sauvola` or '
'`threshold_niblack` must not be even on any dimension. '
'Got {}'.format(axis_sizes))
raise ValueError(msg)
def _mean_std(image, w):
"""Return local mean and standard deviation of each pixel using a
neighborhood defined by a rectangular window size ``w``.
The algorithm uses integral images to speedup computation. This is
used by :func:`threshold_niblack` and :func:`threshold_sauvola`.
Parameters
----------
image : ndarray
Input image.
w : int, or iterable of int
Window size specified as a single odd integer (3, 5, 7, …),
or an iterable of length ``image.ndim`` containing only odd
integers (e.g. ``(1, 5, 5)``).
Returns
-------
m : ndarray of float, same shape as ``image``
Local mean of the image.
s : ndarray of float, same shape as ``image``
Local standard deviation of the image.
References
----------
.. [1] F. Shafait, D. Keysers, and T. M. Breuel, "Efficient
implementation of local adaptive thresholding techniques
using integral images." in Document Recognition and
Retrieval XV, (San Jose, USA), Jan. 2008.
:DOI:`10.1117/12.767755`
"""
if not isinstance(w, Iterable):
w = (w,) * image.ndim
_validate_window_size(w)
pad_width = tuple((k // 2 + 1, k // 2) for k in w)
padded = np.pad(image.astype('float'), pad_width,
mode='reflect')
padded_sq = padded * padded
integral = integral_image(padded)
integral_sq = integral_image(padded_sq)
kern = np.zeros(tuple(k + 1 for k in w))
for indices in itertools.product(*([[0, -1]] * image.ndim)):
kern[indices] = (-1) ** (image.ndim % 2 != np.sum(indices) % 2)
total_window_size = np.prod(w)
sum_full = ndi.correlate(integral, kern, mode='constant')
m = crop(sum_full, pad_width) / total_window_size
sum_sq_full = ndi.correlate(integral_sq, kern, mode='constant')
g2 = crop(sum_sq_full, pad_width) / total_window_size
# Note: we use np.clip because g2 is not guaranteed to be greater than
# m*m when floating point error is considered
s = np.sqrt(np.clip(g2 - m * m, 0, None))
return m, s
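# A minimal sketch (illustrative, not part of the public API) checking the
# integral-image result against a direct computation for an interior pixel,
# whose window never touches the reflected padding:
# >>> img = np.arange(25, dtype=float).reshape(5, 5)
# >>> m, s = _mean_std(img, 3)
# >>> window = img[0:3, 0:3] # 3x3 neighborhood centred on pixel (1, 1)
# >>> bool(np.isclose(m[1, 1], window.mean()))
# True
# >>> bool(np.isclose(s[1, 1], window.std()))
# True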
def threshold_niblack(image, window_size=15, k=0.2):
"""Applies Niblack local threshold to an array.
A threshold T is calculated for every pixel in the image using the
following formula::
T = m(x,y) - k * s(x,y)
where m(x,y) and s(x,y) are the mean and standard deviation of
pixel (x,y) neighborhood defined by a rectangular window with size w
times w centered around the pixel. k is a configurable parameter
that weights the effect of standard deviation.
Parameters
----------
image: ndarray
Input image.
window_size : int, or iterable of int, optional
Window size specified as a single odd integer (3, 5, 7, …),
or an iterable of length ``image.ndim`` containing only odd
integers (e.g. ``(1, 5, 5)``).
k : float, optional
Value of parameter k in threshold formula.
Returns
-------
threshold : (N, M) ndarray
Threshold mask. All pixels with an intensity higher than
this value are assumed to be foreground.
Notes
-----
This algorithm is originally designed for text recognition.
References
----------
.. [1] Niblack, W (1986), An introduction to Digital Image
Processing, Prentice-Hall.
Examples
--------
>>> from skimage import data
>>> image = data.page()
>>> binary_image = threshold_niblack(image, window_size=7, k=0.1)
"""
m, s = _mean_std(image, window_size)
return m - k * s
def threshold_sauvola(image, window_size=15, k=0.2, r=None):
"""Applies Sauvola local threshold to an array. Sauvola is a
modification of Niblack technique.
In the original method a threshold T is calculated for every pixel
in the image using the following formula::
T = m(x,y) * (1 + k * ((s(x,y) / R) - 1))
where m(x,y) and s(x,y) are the mean and standard deviation of
pixel (x,y) neighborhood defined by a rectangular window with size w
times w centered around the pixel. k is a configurable parameter
that weights the effect of standard deviation.
R is the maximum standard deviation of a greyscale image.
Parameters
----------
image: ndarray
Input image.
window_size : int, or iterable of int, optional
Window size specified as a single odd integer (3, 5, 7, …),
or an iterable of length ``image.ndim`` containing only odd
integers (e.g. ``(1, 5, 5)``).
k : float, optional
Value of the positive parameter k.
r : float, optional
Value of R, the dynamic range of standard deviation.
If None, set to the half of the image dtype range.
Returns
-------
threshold : (N, M) ndarray
Threshold mask. All pixels with an intensity higher than
this value are assumed to be foreground.
Notes
-----
This algorithm is originally designed for text recognition.
References
----------
.. [1] J. Sauvola and M. Pietikainen, "Adaptive document image
binarization," Pattern Recognition 33(2),
pp. 225-236, 2000.
:DOI:`10.1016/S0031-3203(99)00055-2`
Examples
--------
>>> from skimage import data
>>> image = data.page()
>>> t_sauvola = threshold_sauvola(image, window_size=15, k=0.2)
>>> binary_image = image > t_sauvola
"""
if r is None:
imin, imax = dtype_limits(image, clip_negative=False)
r = 0.5 * (imax - imin)
m, s = _mean_std(image, window_size)
return m * (1 + k * ((s / r) - 1))
def apply_hysteresis_threshold(image, low, high):
"""Apply hysteresis thresholding to `image`.
This algorithm finds regions where `image` is greater than `high`
OR `image` is greater than `low` *and* that region is connected to
a region greater than `high`.
Parameters
----------
image : array, shape (M,[ N, ..., P])
Grayscale input image.
low : float, or array of same shape as `image`
Lower threshold.
high : float, or array of same shape as `image`
Higher threshold.
Returns
-------
thresholded : array of bool, same shape as `image`
Array in which `True` indicates the locations where `image`
was above the hysteresis threshold.
Examples
--------
>>> image = np.array([1, 2, 3, 2, 1, 2, 1, 3, 2])
>>> apply_hysteresis_threshold(image, 1.5, 2.5).astype(int)
array([0, 1, 1, 1, 0, 0, 0, 1, 1])
References
----------
.. [1] J. Canny. A computational approach to edge detection.
IEEE Transactions on Pattern Analysis and Machine Intelligence.
1986; vol. 8, pp.679-698.
:DOI:`10.1109/TPAMI.1986.4767851`
"""
low = np.clip(low, a_min=None, a_max=high) # ensure low always below high
mask_low = image > low
mask_high = image > high
# Connected components of mask_low
labels_low, num_labels = ndi.label(mask_low)
# Check which connected components contain pixels from mask_high
sums = ndi.sum(mask_high, labels_low, np.arange(num_labels + 1))
connected_to_high = sums > 0
thresholded = connected_to_high[labels_low]
return thresholded
| 34.829435 | 127 | 0.623143 |
dd304376a04005ab5005d7fd322aa5f83eafba4e | 1,937 | py | Python | tests/functional/test_linux_appimage.py | Starbuck5/pyinstaller | 2672294982159e90ddbb6ccff135f8c42dff56bc | [
"Apache-2.0"
] | null | null | null | tests/functional/test_linux_appimage.py | Starbuck5/pyinstaller | 2672294982159e90ddbb6ccff135f8c42dff56bc | [
"Apache-2.0"
] | null | null | null | tests/functional/test_linux_appimage.py | Starbuck5/pyinstaller | 2672294982159e90ddbb6ccff135f8c42dff56bc | [
"Apache-2.0"
] | null | null | null | #-----------------------------------------------------------------------------
# Copyright (c) 2005-2022, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License (version 2
# or later) with exception for distributing the bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
#
# SPDX-License-Identifier: (GPL-2.0-or-later WITH Bootloader-exception)
#-----------------------------------------------------------------------------
"""
GNU/Linux-specific test to check the bootloader from the AppImage.
"""
import os
import pathlib
import stat
import subprocess
import pytest
@pytest.mark.linux
@pytest.mark.parametrize('arch', ['x86_64'])
def test_appimage_loading(tmp_path, pyi_builder_spec, arch):
# Skip the test if appimagetool is not found
appimagetool = pathlib.Path.home() / ('appimagetool-%s.AppImage' % arch)
if not appimagetool.is_file():
pytest.skip('%s not found' % appimagetool)
# Ensure appimagetool is executable
if not os.access(appimagetool, os.X_OK):
st = appimagetool.stat()
appimagetool.chmod(st.st_mode | stat.S_IXUSR)
app_name = 'apptest'
app_path = os.path.join(tmp_path, '%s-%s.AppImage' % (app_name, arch))
# Freeze the app
pyi_builder_spec.test_source('print("OK")', app_name=app_name, pyi_args=["--onedir"])
# Prepare the dist folder for AppImage compliancy
tools_dir = os.path.join(os.path.dirname(__file__), 'data', 'appimage')
script = os.path.join(tools_dir, 'create.sh')
subprocess.check_call(['bash', script, tools_dir, tmp_path, app_name])
# Create the AppImage
app_dir = os.path.join(tmp_path, 'dist', 'AppRun')
subprocess.check_call([appimagetool, "--no-appstream", app_dir, app_path])
# Launch the AppImage
st = os.stat(app_path)
os.chmod(app_path, st.st_mode | stat.S_IXUSR)
subprocess.check_call([app_path])
| 35.218182 | 89 | 0.653588 |
0158468b25aafe936ad636efcadd13e407fbb3db | 707 | py | Python | 07_Testbed_Programming/Code/Off.py | jetbotml/2021SummerLibrary | 71a5bdd39cde128678fd9f14348dd3ba3853ed16 | [
"MIT"
] | null | null | null | 07_Testbed_Programming/Code/Off.py | jetbotml/2021SummerLibrary | 71a5bdd39cde128678fd9f14348dd3ba3853ed16 | [
"MIT"
] | null | null | null | 07_Testbed_Programming/Code/Off.py | jetbotml/2021SummerLibrary | 71a5bdd39cde128678fd9f14348dd3ba3853ed16 | [
"MIT"
] | null | null | null | from __future__ import division
from gpiozero import Button
from gpiozero import Motor
from gpiozero import DistanceSensor
from gpiozero import LED
from gpiozero import Buzzer
from gpiozero import RGBLED
from gpiozero import LightSensor
from gpiozero import LEDBoard
from time import sleep
SleepTime = 1
leds = LEDBoard(25, 24, 9, 10, 15, 18, 17, 22, 27, 23)
FullColorled = RGBLED(red=12, green=6, blue=13)
SensorLight = LightSensor(8)
SensorDistance = DistanceSensor(echo=11, trigger=7)
Leftmotor = Motor(forward=20, backward=21)
Rightmotor = Motor(forward=19, backward=26)
buzzer = Buzzer(4)
Leftbutton = Button(16)
Rightbutton = Button(5)
leds.off()
FullColorled.color = (0, 0, 0) # off
buzzer.off()
| 26.185185 | 54 | 0.772277 |
6b9b7832d8c1472eddfb4c0ce6f8f5181dacd227 | 2,968 | py | Python | base/tests/utils/security/test_caesar_cypher.py | PeterStuck/teacher-app | e71c5b69019450a9ac8694fb461d343ce33e1b35 | [
"CC0-1.0"
] | null | null | null | base/tests/utils/security/test_caesar_cypher.py | PeterStuck/teacher-app | e71c5b69019450a9ac8694fb461d343ce33e1b35 | [
"CC0-1.0"
] | null | null | null | base/tests/utils/security/test_caesar_cypher.py | PeterStuck/teacher-app | e71c5b69019450a9ac8694fb461d343ce33e1b35 | [
"CC0-1.0"
] | null | null | null | from django.test import TestCase
from base.utils.security.caesar_cypher import CaesarCypher, ALL_UPPERCASE_LETTERS
from filler.utils.errors.argument_error import InvalidArgumentError
class TestCaesarCypher(TestCase):
def setUp(self) -> None:
self.cypher = CaesarCypher()
self.cypher.shift = 5
def test_encode(self):
""" Should encode entire word with given shift """
test_word = 'TEST'
encoded_word = self.cypher.encode(word=test_word)
self.assertEqual(encoded_word, 'YJXY')
def test_encode_character(self):
""" Should change revalidation character into shifted char """
test_char = 'A'
encoded_char = self.cypher.encode_character(letter=test_char, collection=ALL_UPPERCASE_LETTERS)
self.assertEqual(encoded_char, 'F')
def test_encode_character_none_collection(self):
""" Passed None as collection into method should result with InvalidArgumentError """
test_char = 'A'
with self.assertRaises(InvalidArgumentError):
self.cypher.encode_character(letter=test_char)
def test_decode(self):
""" Should be able to decode word when shift is known """
encoded_word = self.cypher.decode('YJXY')
self.assertEqual(encoded_word, 'TEST')
def test_decode_character(self):
""" Should decode given character using shift reversed way """
encoded_char = 'F'
self.assertEqual(self.cypher.decode_character(letter=encoded_char, collection=ALL_UPPERCASE_LETTERS), 'A')
def test_decode_character_none_collection(self):
""" If passed collection is None, InvalidArgumentError should be raised """
encoded_char = 'F'
with self.assertRaises(InvalidArgumentError):
self.cypher.decode_character(letter=encoded_char)
def test_add_salting(self):
""" Should return word with encoded salting at start """
salt = 'AAAA'
word = 'TEST'
salted_word = self.cypher.add_salting(word=word, salt=salt)
self.assertEqual(salted_word, 'FFFFTEST')
def test_remove_salting(self):
""" Should remove salting from decoded word """
encoded_word = self.cypher.encode(word='TEST')
salt = 'AAA'
salted_word = self.cypher.add_salting(word=encoded_word, salt=salt)
decoded_word = self.cypher.decode(salted_word)
self.assertEqual(self.cypher.remove_salting(decoded_word, salt), 'TEST')
def test_remove_salting_with_wrong_salt_pattern(self):
""" If given salt pattern does not match the salt from the word, appropriate exception will be thrown."""
encoded_word = self.cypher.encode(word='TEST')
salt = 'AAA'
salted_word = self.cypher.add_salting(word=encoded_word, salt=salt)
decoded_word = self.cypher.decode(salted_word)
with self.assertRaises(InvalidArgumentError):
self.cypher.remove_salting(word=decoded_word, salt='BBB') | 43.647059 | 114 | 0.693396 |
03fc6d038b941b72a65c7d99fe57377402013b54 | 215 | py | Python | swine/component/physics/collider/__init__.py | DeflatedPickle/swine | 28b40fff605f7ed9c006e032c11a5ff9c552ebac | [
"MIT"
] | 4 | 2019-07-14T13:39:43.000Z | 2021-11-08T15:57:17.000Z | swine/component/physics/collider/__init__.py | SwineEngine/swine | 28b40fff605f7ed9c006e032c11a5ff9c552ebac | [
"MIT"
] | 1 | 2018-01-09T14:33:54.000Z | 2018-01-09T14:34:34.000Z | swine/component/physics/collider/__init__.py | SwineProject/swine | 28b40fff605f7ed9c006e032c11a5ff9c552ebac | [
"MIT"
] | 1 | 2018-07-22T20:53:10.000Z | 2018-07-22T20:53:10.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from .base_collider import BaseCollider
from .polygon_collider import PolygonCollider
from .box_collider import BoxCollider
from .circle_collider import CircleCollider
| 26.875 | 45 | 0.804651 |
016bb6eb84add6173b5a99dbc3ee34664b65b2d4 | 1,724 | py | Python | utils/embeddedBot.py | Fantailed/fallback-chatbot | 596ed6566f61ee3491b7964df74efae05b64721f | [
"Apache-2.0"
] | 1 | 2018-11-29T15:35:16.000Z | 2018-11-29T15:35:16.000Z | utils/embeddedBot.py | Fantailed/fallback-chatbot | 596ed6566f61ee3491b7964df74efae05b64721f | [
"Apache-2.0"
] | null | null | null | utils/embeddedBot.py | Fantailed/fallback-chatbot | 596ed6566f61ee3491b7964df74efae05b64721f | [
"Apache-2.0"
] | null | null | null | import sys
from globals import *
# Extend sys.path *before* the programy imports below; appending it
# afterwards is too late when programy only lives inside the virtualenv.
sys.path.append('venv/Lib/site-packages/programy')
sys.path.append('venv/Lib/site-packages/MetOffer-1.3.2.dist-info')
from programy.clients.events.console.client import ConsoleBotClient
from programy.config.file.yaml_file import YamlConfigurationFile
from programy.config.programy import ProgramyConfiguration
from programy.clients.args import CommandLineClientArguments
class EmbeddedBot(ConsoleBotClient):
def __init__(self, config_filename):
os.chdir(BOT_PATH + 'config/windows')
sys.argv = ['embeddedBot.py', '--config', 'config.yaml',
'--cformat', 'yaml', '--logging', 'logging.yaml']
self._config_filename = config_filename
ConsoleBotClient.__init__(self, "Console")
def parse_arguments(self, argument_parser):
client_args = CommandLineClientArguments(self, parser=None)
client_args.parse_args(self)
return client_args
def load_configuration(self, arguments):
client_config = self.get_client_configuration()
self._configuration = ProgramyConfiguration(client_config)
yaml_file = YamlConfigurationFile()
yaml_file.load_from_file(self._config_filename, client_config, ".")
if __name__ == '__main__':
os.chdir(SKILL_PATH)
sys.argv = ['playground.py', '--config', 'config.yaml',
'--cformat', 'yaml', '--logging', 'logging.yaml']
bot = EmbeddedBot(sys.argv[2])
client_context = bot.create_client_context("testUser")
try:
while True:
question = input('>>>')
response = bot.process_question(client_context, question)
print(response)
except KeyboardInterrupt:
pass
| 31.925926 | 75 | 0.691415 |
2b27fab7731739d30a92569b18ade0377eb13870 | 1,734 | py | Python | pycs/info.py | sfarrens/cosmostat | a475315cda06dca346095a1e83cb6ad23979acae | [
"MIT"
] | null | null | null | pycs/info.py | sfarrens/cosmostat | a475315cda06dca346095a1e83cb6ad23979acae | [
"MIT"
] | null | null | null | pycs/info.py | sfarrens/cosmostat | a475315cda06dca346095a1e83cb6ad23979acae | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Module current version
version_major = 0
version_minor = 0
version_micro = 1
# Expected by setup.py: string of form "X.Y.Z"
__version__ = "{0}.{0}.{1}".format(version_major, version_minor, version_micro)
# Expected by setup.py: the status of the project
CLASSIFIERS = ["Development Status :: 1 - Planning",
"Environment :: Console",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Scientific/Engineering"]
# Project descriptions
description = """
PYthon CosmoStat Package
"""
SUMMARY = """
.. container:: summary-carousel
pycs is a Python package developed by **CosmoStat**
"""
long_description = (
"CosmoStat development package"
)
# Main setup parameters
NAME = "python-pycs"
ORGANISATION = "CEA"
MAINTAINER = "Sam Farrens"
MAINTAINER_EMAIL = "sam.farrens@cea.fr"
DESCRIPTION = description
LONG_DESCRIPTION = long_description
EXTRANAME = "CosmoStat webPage"
EXTRAURL = "http://cosmic.cosmostat.org/"
URL = "https://github.com/CosmoStat/cosmostat"
DOWNLOAD_URL = "https://github.com/CosmoStat/cosmostat"
LICENSE = "CeCILL-B"
CLASSIFIERS = CLASSIFIERS
AUTHOR = """
Samuel Farrens <samuel.farrens@cea.fr>
Francois Lanusse <francois.lanusse@cea.fr>
Jean-Luc Starck <jl.starck@cea.fr>
"""
AUTHOR_EMAIL = "sam.farrens@cea.fr"
PLATFORMS = "Linux,OSX"
ISRELEASE = True
VERSION = __version__
PROVIDES = ["pycs"]
CMAKE_VERSION = "3.12.0"
REQUIRES = [
"astropy>=4.0",
"healpy>=1.14",
"lenspack>=1.0.0",
"matplotlib>=3.3",
"modopt>=1.4.4",
"numpy>=1.19",
"pyqtgraph>=0.11",
"scipy>=1.5",
"seaborn>=0.10"
]
PREINSTALL_REQUIRES = [
"pybind11>=2.5",
"pyqt5>=5.12.2"
]
| 24.422535 | 79 | 0.668397 |
f8ef724448d15d0bca38c372abb6aef58a77925c | 9,655 | py | Python | demo/output_multi_image_result copy.py | ArthurWish/mmdetection | bd4c5b04e9d880f7a38131f17d3b43e4a3630c4f | [
"Apache-2.0"
] | null | null | null | demo/output_multi_image_result copy.py | ArthurWish/mmdetection | bd4c5b04e9d880f7a38131f17d3b43e4a3630c4f | [
"Apache-2.0"
] | null | null | null | demo/output_multi_image_result copy.py | ArthurWish/mmdetection | bd4c5b04e9d880f7a38131f17d3b43e4a3630c4f | [
"Apache-2.0"
] | null | null | null | # Copyright (c) OpenMMLab. All rights reserved.
import copy
import warnings
from PIL import Image
from matplotlib import patches
from mmcv.ops.nms import nms
from mmcv.ops import RoIPool
from mmcv.parallel import collate, scatter
import torch
from tqdm import tqdm
import matplotlib.pyplot as plt
import cv2
from mmdet.apis import (async_inference_detector, inference_detector,
init_detector, show_result_pyplot)
import os
import json
import numpy as np
from mmdet.core.post_processing.bbox_nms import multiclass_nms
from mmdet.datasets.pipelines.compose import Compose
from tools.analysis_tools.analyze_results import bbox_map_eval
def NMS(dets, thresh):
# Unpack x1, y1, x2, y2 and the confidence score of each box;
# (x1, y1) and (x2, y2) are the top-left and bottom-right corners.
x1 = dets[:, 0]
y1 = dets[:, 1]
x2 = dets[:, 2]
y2 = dets[:, 3]
scores = dets[:, 4]
# Area of every candidate box.
areas = (x2 - x1 + 1) * (y2 - y1 + 1)
# order holds the indices that sort scores in descending order; these are
# indices into the original array, not the sorted values themselves.
order = scores.argsort()[::-1]
# ::-1 reverses the ascending argsort.
temp = []
while order.size > 0:
i = order[0]
temp.append(i)
# Coordinates of the intersection between the current highest-scoring
# box and every remaining box; numpy broadcasting yields vectors.
xx1 = np.maximum(x1[i], x1[order[1:]])
yy1 = np.maximum(y1[i], y1[order[1:]])
xx2 = np.minimum(x2[i], x2[order[1:]])
yy2 = np.minimum(y2[i], y2[order[1:]])
# Intersection area; w or h comes out negative when the boxes do not
# overlap, so clamp at 0.
w = np.maximum(0.0, xx2 - xx1 + 1)
h = np.maximum(0.0, yy2 - yy1 + 1)
inter = w * h
# Overlap ratio (IoU).
ovr = inter / (areas[i] + areas[order[1:]] - inter)
# Indices of the boxes whose overlap does not exceed the threshold.
inds = np.where(ovr <= thresh)[0]
# Update order; the indices above are offset by 1 relative to the
# original order sequence, so add that 1 back.
order = order[inds + 1]
return dets[temp]
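# A minimal sketch of the suppression above (hand-made boxes, not model
# output): two heavily overlapping boxes collapse to the higher-scoring
# one, while a disjoint box survives:
# >>> boxes = np.array([[0, 0, 10, 10, 0.9],
# ... [1, 1, 11, 11, 0.8],
# ... [50, 50, 60, 60, 0.7]])
# >>> NMS(boxes, thresh=0.5).shape[0]
# 2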
def replace_MultiImageToTensor(pipelines):
"""Replace the MultiImageToTensor transform in a data pipeline to
MultiDefaultFormatBundle, which is normally useful in batch inference.
Args:
pipelines (list[dict]): Data pipeline configs.
Returns:
list: The new pipeline list with all MultiImageToTensor replaced by
MultiDefaultFormatBundle.
"""
pipelines = copy.deepcopy(pipelines)
for i, pipeline in enumerate(pipelines):
if pipeline['type'] == 'MultiScaleFlipAug':
assert 'transforms' in pipeline
pipeline['transforms'] = replace_MultiImageToTensor(
pipeline['transforms'])
elif pipeline['type'] == 'MultiImageToTensor':
warnings.warn(
'"MultiImageToTensor" pipeline is replaced by '
'"MultiDefaultFormatBundle" for batch inference. It is '
'recommended to manually replace it in the test '
'data pipeline in your config file.', UserWarning)
pipelines[i] = {'type': 'MultiDefaultFormatBundle'}
return pipelines
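
# For example (sketch; the 'keys' field below is hypothetical), a test-pipeline
# entry such as
#     {'type': 'MultiImageToTensor', 'keys': ['img']}
# is rewritten by the function above to
#     {'type': 'MultiDefaultFormatBundle'}
# so that batch inference receives properly bundled inputs.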
def predict_img(model, img_path, sub_images = ()):
"""predict image with trained model
Args:
model (nn.Module): trained model with pipeline is multi input format
img_path (str): the folder where sub_images store
sub_images (tuple, optional): the sub_images to predict. Defaults to ().
Returns:
predict_bbox: the result of model predicted bboxes
"""
    imgs = [os.path.join(img_path, sub_image) for sub_image in sub_images]
cfg = model.cfg
device = next(model.parameters()).device
pipeline = replace_MultiImageToTensor(cfg.data.test.pipeline)
test_pipeline = Compose(pipeline)
data = dict(img_info=dict(filename=imgs), img_prefix=None)
data = test_pipeline(data)
data = collate([data], samples_per_gpu=1)
data['img_metas'] = [img_metas.data[0] for img_metas in data['img_metas']]
data['img'] = [[img.data[0] for img in imgs] for imgs in data['img']]
if next(model.parameters()).is_cuda:
# scatter to specified GPU
data = scatter(data, [device])[0]
with torch.no_grad():
result = model(return_loss=False, rescale=True, **data)
    # show the result on the first sub-image (the original code referenced an
    # undefined name `img` here)
    return result, model.show_result(
        imgs[0], result, bbox_color=(0, 0, 255), show=False, score_thr=0.3)
def merge_proposal(img_root, img_root_path, result_d, result_f):
result = np.concatenate((*result_d, *result_f))
    det_result = NMS(result, thresh=0.3)  # merge the two proposal sets via NMS
# img = os.path.join(img_root_path, img_root, 'default.png')
bbox_list = []
label_list = []
with open(img_root_path + 'test.json') as f:
data = json.loads(f.read())
for j in range(len(data["images"])):
if data["images"][j]["file_name"] == img_root:
id = j
break
for i in range(len(data["annotations"])):
if data["images"][id]["id"] == data["annotations"][i]["image_id"]:
bbox = data["annotations"][i]["bbox"]
bbox_list.append(bbox)
label_list.append(np.array(0))
annotation = dict(
bboxes=bbox_list,
labels=label_list
)
    mean_ap = bbox_map_eval([[det_result]], annotation)  # avoid shadowing the builtin `map`
    print(f"mAP: {mean_ap}")
# def merge_proposal(img_root, img_root_path, img_suffix=(), model=()):
# assert img_suffix[0] == 'filled.png'
# # model_filled, model_default = model
# # filled, default = img_suffix
# # img_filled = os.path.join(img_root_path, img_root, filled)
# # img_default = os.path.join(img_root_path, img_root, default)
# result_list = []
# for model_i, img_suffix_i in zip(model, img_suffix):
# img = os.path.join(img_root_path, img_root, img_suffix_i)
# result = inference_detector(model_i, img)
# result_list.append(result)
# # the result shape [[bbox], scores] [x, 5]
# dets = np.concatenate((*result_list[0], *result_list[1]))
# # bboxes = np.concatenate((*result_list[0], *result_list[1]))[:, :4]
# # scores = np.concatenate((*result_list[0], *result_list[1]))[:, -1:]
# # bboxes= torch.from_numpy(bboxes).contiguous()
# # scores = torch.from_numpy(scores).contiguous()
# output = NMS(dets, 0.7)
# return output
def plot_gt_label(img_root, img_root_path, label_file_path, out_file=None, win_name='', thickness=2):
"""plot img with bbox
Args:
img ([type]): img file path
img_root_name ([type]): img root name
label_file_path ([type]): the json file
out_file ([type], optional): [description]. Defaults to None.
win_name (str, optional): [description]. Defaults to ''.
thickness (int, optional): [description]. Defaults to 1.
"""
img = os.path.join(img_root_path, img_root, 'default.png')
img = plt.imread(img)
fig, ax = plt.subplots(1, 1)
ax.imshow(img)
currentAxis = fig.gca()
bboxes = []
coords = []
with open(label_file_path) as f:
data = json.loads(f.read())
for j in range(len(data["images"])):
if data["images"][j]["file_name"] == img_root:
id = j
break
for i in range(len(data["annotations"])):
if data["images"][id]["id"] == data["annotations"][i]["image_id"]:
bbox = data["annotations"][i]["bbox"]
bboxes.append(bbox)
total_obj = len(bboxes)
bboxes = np.array(bboxes)
for i, bbox in enumerate(bboxes):
bbox_int = bbox.astype(np.int32)
coords.append([bbox_int[0], bbox_int[1], bbox_int[2], bbox_int[3]])
for _, coord in enumerate(coords):
rect = patches.Rectangle((coord[0], coord[1]), coord[2], coord[3],
linewidth=thickness, edgecolor='r', facecolor='none')
currentAxis.add_patch(rect)
plt.title(f'total_obj:{total_obj}')
plt.axis('off')
if out_file is not None:
plt.savefig(out_file, pad_inches=0.0, bbox_inches='tight')
return plt.imread(out_file)
def concat_img(img_list: list, img_root, orientation='horizontal', save_dir='demo/result'):
if not os.path.exists(save_dir):
os.makedirs(save_dir)
    h1, w1, _ = img_list[0].shape  # numpy image shape is (height, width, channels)
    img_reshaped = []
    for img in img_list:
        # resize every tile to a square with the first image's height
        img = cv2.resize(img, (h1, h1), interpolation=cv2.INTER_CUBIC)
        img_reshaped.append(img)
if orientation == 'horizontal':
img_concat = np.concatenate((img_reshaped), axis=1)
cv2.imwrite(os.path.join(save_dir, f'{img_root}.png'), img_concat)
if __name__ == '__main__':
img_root_path = './my-dataset/test/'
with open(img_root_path + 'test.json') as f:
coco_data = json.load(f)
filled_config = './configs/my-dataset/siamnet_ga_filled.py'
filled_checkpoint = './work_dirs/siamnet_anchor_ga/img_filled/latest.pth'
default_config = './configs/my-dataset/siamnet_ga_default.py'
default_checkpoint = './work_dirs/siamnet_anchor_ga/img_default/latest.pth'
model_filled = init_detector(filled_config, filled_checkpoint, device='cuda')
model_default = init_detector(default_config, default_checkpoint, device='cuda')
for img in tqdm(coco_data["images"]):
        img_root = img["file_name"]  # bare name: the helpers below join it with img_root_path
        img_dir = os.path.join(img_root_path, img_root)
        result_default, img_pred_default = predict_img(
            model_default, img_dir, ('default.png',))
        result_filled, img_pred_filled = predict_img(
            model_filled, img_dir, ('filled.png',))  # was model_default: use the filled-image model
        merge_proposal(img_root, img_root_path, result_filled, result_default)
        img_origin = plot_gt_label(img_root, img_root_path,
                                   label_file_path=img_root_path + 'test.json',
                                   out_file='demo/result.png')
        img_origin = cv2.cvtColor(img_origin, cv2.COLOR_RGBA2BGR)
        img_origin = img_origin * 255.0  # plt.imread returns floats in [0, 1]
        concat_img([img_pred_default, img_pred_filled, img_origin], img_root)
| 38.162055 | 101 | 0.64029 |
2ef32b9ce3273f0babb8b7875cd83e41aff84dc0 | 63,931 | py | Python | venv/lib/python3.6/site-packages/cffi/recompiler.py | aitoehigie/britecore_flask | eef1873dbe6b2cc21f770bc6dec783007ae4493b | [
"MIT"
] | null | null | null | venv/lib/python3.6/site-packages/cffi/recompiler.py | aitoehigie/britecore_flask | eef1873dbe6b2cc21f770bc6dec783007ae4493b | [
"MIT"
] | 1 | 2021-06-01T23:32:38.000Z | 2021-06-01T23:32:38.000Z | venv/lib/python3.6/site-packages/cffi/recompiler.py | aitoehigie/britecore_flask | eef1873dbe6b2cc21f770bc6dec783007ae4493b | [
"MIT"
] | null | null | null | import os, sys, io
from . import ffiplatform, model
from .error import VerificationError
from .cffi_opcode import *
VERSION_BASE = 0x2601
VERSION_EMBEDDED = 0x2701
VERSION_CHAR16CHAR32 = 0x2801
class GlobalExpr:
def __init__(self, name, address, type_op, size=0, check_value=0):
self.name = name
self.address = address
self.type_op = type_op
self.size = size
self.check_value = check_value
def as_c_expr(self):
return ' { "%s", (void *)%s, %s, (void *)%s },' % (
self.name,
self.address,
self.type_op.as_c_expr(),
self.size,
)
def as_python_expr(self):
return "b'%s%s',%d" % (
self.type_op.as_python_bytes(),
self.name,
self.check_value,
)
class FieldExpr:
def __init__(self, name, field_offset, field_size, fbitsize, field_type_op):
self.name = name
self.field_offset = field_offset
self.field_size = field_size
self.fbitsize = fbitsize
self.field_type_op = field_type_op
def as_c_expr(self):
spaces = " " * len(self.name)
return (
' { "%s", %s,\n' % (self.name, self.field_offset)
+ " %s %s,\n" % (spaces, self.field_size)
+ " %s %s }," % (spaces, self.field_type_op.as_c_expr())
)
def as_python_expr(self):
raise NotImplementedError
def as_field_python_expr(self):
if self.field_type_op.op == OP_NOOP:
size_expr = ""
elif self.field_type_op.op == OP_BITFIELD:
size_expr = format_four_bytes(self.fbitsize)
else:
raise NotImplementedError
return "b'%s%s%s'" % (
self.field_type_op.as_python_bytes(),
size_expr,
self.name,
)
class StructUnionExpr:
def __init__(
self,
name,
type_index,
flags,
size,
alignment,
comment,
first_field_index,
c_fields,
):
self.name = name
self.type_index = type_index
self.flags = flags
self.size = size
self.alignment = alignment
self.comment = comment
self.first_field_index = first_field_index
self.c_fields = c_fields
def as_c_expr(self):
return (
' { "%s", %d, %s,' % (self.name, self.type_index, self.flags)
+ "\n %s, %s, " % (self.size, self.alignment)
+ "%d, %d " % (self.first_field_index, len(self.c_fields))
+ ("/* %s */ " % self.comment if self.comment else "")
+ "},"
)
def as_python_expr(self):
flags = eval(self.flags, G_FLAGS)
fields_expr = [c_field.as_field_python_expr() for c_field in self.c_fields]
return "(b'%s%s%s',%s)" % (
format_four_bytes(self.type_index),
format_four_bytes(flags),
self.name,
",".join(fields_expr),
)
class EnumExpr:
def __init__(self, name, type_index, size, signed, allenums):
self.name = name
self.type_index = type_index
self.size = size
self.signed = signed
self.allenums = allenums
def as_c_expr(self):
return ' { "%s", %d, _cffi_prim_int(%s, %s),\n' ' "%s" },' % (
self.name,
self.type_index,
self.size,
self.signed,
self.allenums,
)
def as_python_expr(self):
prim_index = {
(1, 0): PRIM_UINT8,
(1, 1): PRIM_INT8,
(2, 0): PRIM_UINT16,
(2, 1): PRIM_INT16,
(4, 0): PRIM_UINT32,
(4, 1): PRIM_INT32,
(8, 0): PRIM_UINT64,
(8, 1): PRIM_INT64,
}[self.size, self.signed]
return "b'%s%s%s\\x00%s'" % (
format_four_bytes(self.type_index),
format_four_bytes(prim_index),
self.name,
self.allenums,
)
class TypenameExpr:
def __init__(self, name, type_index):
self.name = name
self.type_index = type_index
def as_c_expr(self):
return ' { "%s", %d },' % (self.name, self.type_index)
def as_python_expr(self):
return "b'%s%s'" % (format_four_bytes(self.type_index), self.name)
# ____________________________________________________________
class Recompiler:
_num_externpy = 0
def __init__(self, ffi, module_name, target_is_python=False):
self.ffi = ffi
self.module_name = module_name
self.target_is_python = target_is_python
self._version = VERSION_BASE
def needs_version(self, ver):
self._version = max(self._version, ver)
def collect_type_table(self):
self._typesdict = {}
self._generate("collecttype")
#
all_decls = sorted(self._typesdict, key=str)
#
# prepare all FUNCTION bytecode sequences first
self.cffi_types = []
for tp in all_decls:
if tp.is_raw_function:
assert self._typesdict[tp] is None
self._typesdict[tp] = len(self.cffi_types)
self.cffi_types.append(tp) # placeholder
for tp1 in tp.args:
assert isinstance(
tp1,
(
model.VoidType,
model.BasePrimitiveType,
model.PointerType,
model.StructOrUnionOrEnum,
model.FunctionPtrType,
),
)
if self._typesdict[tp1] is None:
self._typesdict[tp1] = len(self.cffi_types)
self.cffi_types.append(tp1) # placeholder
self.cffi_types.append("END") # placeholder
#
# prepare all OTHER bytecode sequences
for tp in all_decls:
if not tp.is_raw_function and self._typesdict[tp] is None:
self._typesdict[tp] = len(self.cffi_types)
self.cffi_types.append(tp) # placeholder
if tp.is_array_type and tp.length is not None:
self.cffi_types.append("LEN") # placeholder
assert None not in self._typesdict.values()
#
# collect all structs and unions and enums
self._struct_unions = {}
self._enums = {}
for tp in all_decls:
if isinstance(tp, model.StructOrUnion):
self._struct_unions[tp] = None
elif isinstance(tp, model.EnumType):
self._enums[tp] = None
for i, tp in enumerate(sorted(self._struct_unions, key=lambda tp: tp.name)):
self._struct_unions[tp] = i
for i, tp in enumerate(sorted(self._enums, key=lambda tp: tp.name)):
self._enums[tp] = i
#
# emit all bytecode sequences now
for tp in all_decls:
method = getattr(self, "_emit_bytecode_" + tp.__class__.__name__)
method(tp, self._typesdict[tp])
#
# consistency check
for op in self.cffi_types:
assert isinstance(op, CffiOp)
self.cffi_types = tuple(self.cffi_types) # don't change any more
def _do_collect_type(self, tp):
if not isinstance(tp, model.BaseTypeByIdentity):
if isinstance(tp, tuple):
for x in tp:
self._do_collect_type(x)
return
if tp not in self._typesdict:
self._typesdict[tp] = None
if isinstance(tp, model.FunctionPtrType):
self._do_collect_type(tp.as_raw_function())
elif isinstance(tp, model.StructOrUnion):
if tp.fldtypes is not None and (
tp not in self.ffi._parser._included_declarations
):
for name1, tp1, _, _ in tp.enumfields():
self._do_collect_type(self._field_type(tp, name1, tp1))
else:
for _, x in tp._get_items():
self._do_collect_type(x)
def _generate(self, step_name):
lst = self.ffi._parser._declarations.items()
for name, (tp, quals) in sorted(lst):
kind, realname = name.split(" ", 1)
try:
method = getattr(self, "_generate_cpy_%s_%s" % (kind, step_name))
except AttributeError:
raise VerificationError("not implemented in recompile(): %r" % name)
try:
self._current_quals = quals
method(tp, realname)
except Exception as e:
model.attach_exception_info(e, name)
raise
# ----------
ALL_STEPS = ["global", "field", "struct_union", "enum", "typename"]
def collect_step_tables(self):
# collect the declarations for '_cffi_globals', '_cffi_typenames', etc.
self._lsts = {}
for step_name in self.ALL_STEPS:
self._lsts[step_name] = []
self._seen_struct_unions = set()
self._generate("ctx")
self._add_missing_struct_unions()
#
for step_name in self.ALL_STEPS:
lst = self._lsts[step_name]
if step_name != "field":
lst.sort(key=lambda entry: entry.name)
self._lsts[step_name] = tuple(lst) # don't change any more
#
# check for a possible internal inconsistency: _cffi_struct_unions
# should have been generated with exactly self._struct_unions
lst = self._lsts["struct_union"]
for tp, i in self._struct_unions.items():
assert i < len(lst)
assert lst[i].name == tp.name
assert len(lst) == len(self._struct_unions)
# same with enums
lst = self._lsts["enum"]
for tp, i in self._enums.items():
assert i < len(lst)
assert lst[i].name == tp.name
assert len(lst) == len(self._enums)
# ----------
def _prnt(self, what=""):
self._f.write(what + "\n")
def write_source_to_f(self, f, preamble):
if self.target_is_python:
assert preamble is None
self.write_py_source_to_f(f)
else:
assert preamble is not None
self.write_c_source_to_f(f, preamble)
def _rel_readlines(self, filename):
g = open(os.path.join(os.path.dirname(__file__), filename), "r")
lines = g.readlines()
g.close()
return lines
def write_c_source_to_f(self, f, preamble):
self._f = f
prnt = self._prnt
if self.ffi._embedding is not None:
prnt("#define _CFFI_USE_EMBEDDING")
#
# first the '#include' (actually done by inlining the file's content)
lines = self._rel_readlines("_cffi_include.h")
i = lines.index('#include "parse_c_type.h"\n')
lines[i : i + 1] = self._rel_readlines("parse_c_type.h")
prnt("".join(lines))
#
# if we have ffi._embedding != None, we give it here as a macro
# and include an extra file
base_module_name = self.module_name.split(".")[-1]
if self.ffi._embedding is not None:
prnt('#define _CFFI_MODULE_NAME "%s"' % (self.module_name,))
prnt("static const char _CFFI_PYTHON_STARTUP_CODE[] = {")
self._print_string_literal_in_array(self.ffi._embedding)
prnt("0 };")
prnt("#ifdef PYPY_VERSION")
prnt(
"# define _CFFI_PYTHON_STARTUP_FUNC _cffi_pypyinit_%s"
% (base_module_name,)
)
prnt("#elif PY_MAJOR_VERSION >= 3")
prnt("# define _CFFI_PYTHON_STARTUP_FUNC PyInit_%s" % (base_module_name,))
prnt("#else")
prnt("# define _CFFI_PYTHON_STARTUP_FUNC init%s" % (base_module_name,))
prnt("#endif")
lines = self._rel_readlines("_embedding.h")
i = lines.index('#include "_cffi_errors.h"\n')
lines[i : i + 1] = self._rel_readlines("_cffi_errors.h")
prnt("".join(lines))
self.needs_version(VERSION_EMBEDDED)
#
# then paste the C source given by the user, verbatim.
prnt("/************************************************************/")
prnt()
prnt(preamble)
prnt()
prnt("/************************************************************/")
prnt()
#
# the declaration of '_cffi_types'
prnt("static void *_cffi_types[] = {")
typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()])
for i, op in enumerate(self.cffi_types):
comment = ""
if i in typeindex2type:
comment = " // " + typeindex2type[i]._get_c_name()
prnt("/* %2d */ %s,%s" % (i, op.as_c_expr(), comment))
if not self.cffi_types:
prnt(" 0")
prnt("};")
prnt()
#
# call generate_cpy_xxx_decl(), for every xxx found from
# ffi._parser._declarations. This generates all the functions.
self._seen_constants = set()
self._generate("decl")
#
# the declaration of '_cffi_globals' and '_cffi_typenames'
nums = {}
for step_name in self.ALL_STEPS:
lst = self._lsts[step_name]
nums[step_name] = len(lst)
if nums[step_name] > 0:
prnt(
"static const struct _cffi_%s_s _cffi_%ss[] = {"
% (step_name, step_name)
)
for entry in lst:
prnt(entry.as_c_expr())
prnt("};")
prnt()
#
# the declaration of '_cffi_includes'
if self.ffi._included_ffis:
prnt("static const char * const _cffi_includes[] = {")
for ffi_to_include in self.ffi._included_ffis:
try:
included_module_name, included_source = ffi_to_include._assigned_source[
:2
]
except AttributeError:
raise VerificationError(
"ffi object %r includes %r, but the latter has not "
"been prepared with set_source()" % (self.ffi, ffi_to_include)
)
if included_source is None:
raise VerificationError(
"not implemented yet: ffi.include() of a Python-based "
"ffi inside a C-based ffi"
)
prnt(' "%s",' % (included_module_name,))
prnt(" NULL")
prnt("};")
prnt()
#
# the declaration of '_cffi_type_context'
prnt("static const struct _cffi_type_context_s _cffi_type_context = {")
prnt(" _cffi_types,")
for step_name in self.ALL_STEPS:
if nums[step_name] > 0:
prnt(" _cffi_%ss," % step_name)
else:
prnt(" NULL, /* no %ss */" % step_name)
for step_name in self.ALL_STEPS:
if step_name != "field":
prnt(" %d, /* num_%ss */" % (nums[step_name], step_name))
if self.ffi._included_ffis:
prnt(" _cffi_includes,")
else:
prnt(" NULL, /* no includes */")
prnt(" %d, /* num_types */" % (len(self.cffi_types),))
flags = 0
if self._num_externpy:
flags |= 1 # set to mean that we use extern "Python"
prnt(" %d, /* flags */" % flags)
prnt("};")
prnt()
#
# the init function
prnt("#ifdef __GNUC__")
prnt("# pragma GCC visibility push(default) /* for -fvisibility= */")
prnt("#endif")
prnt()
prnt("#ifdef PYPY_VERSION")
prnt("PyMODINIT_FUNC")
prnt("_cffi_pypyinit_%s(const void *p[])" % (base_module_name,))
prnt("{")
if self._num_externpy:
prnt(" if (((intptr_t)p[0]) >= 0x0A03) {")
prnt(
" _cffi_call_python_org = "
"(void(*)(struct _cffi_externpy_s *, char *))p[1];"
)
prnt(" }")
prnt(" p[0] = (const void *)0x%x;" % self._version)
prnt(" p[1] = &_cffi_type_context;")
prnt("#if PY_MAJOR_VERSION >= 3")
prnt(" return NULL;")
prnt("#endif")
prnt("}")
# on Windows, distutils insists on putting init_cffi_xyz in
# 'export_symbols', so instead of fighting it, just give up and
# give it one
prnt("# ifdef _MSC_VER")
prnt(" PyMODINIT_FUNC")
prnt("# if PY_MAJOR_VERSION >= 3")
prnt(" PyInit_%s(void) { return NULL; }" % (base_module_name,))
prnt("# else")
prnt(" init%s(void) { }" % (base_module_name,))
prnt("# endif")
prnt("# endif")
prnt("#elif PY_MAJOR_VERSION >= 3")
prnt("PyMODINIT_FUNC")
prnt("PyInit_%s(void)" % (base_module_name,))
prnt("{")
prnt(
' return _cffi_init("%s", 0x%x, &_cffi_type_context);'
% (self.module_name, self._version)
)
prnt("}")
prnt("#else")
prnt("PyMODINIT_FUNC")
prnt("init%s(void)" % (base_module_name,))
prnt("{")
prnt(
' _cffi_init("%s", 0x%x, &_cffi_type_context);'
% (self.module_name, self._version)
)
prnt("}")
prnt("#endif")
prnt()
prnt("#ifdef __GNUC__")
prnt("# pragma GCC visibility pop")
prnt("#endif")
self._version = None
def _to_py(self, x):
if isinstance(x, str):
return "b'%s'" % (x,)
if isinstance(x, (list, tuple)):
rep = [self._to_py(item) for item in x]
if len(rep) == 1:
rep.append("")
return "(%s)" % (",".join(rep),)
return x.as_python_expr() # Py2: unicode unexpected; Py3: bytes unexp.
def write_py_source_to_f(self, f):
self._f = f
prnt = self._prnt
#
# header
prnt("# auto-generated file")
prnt("import _cffi_backend")
#
# the 'import' of the included ffis
num_includes = len(self.ffi._included_ffis or ())
for i in range(num_includes):
ffi_to_include = self.ffi._included_ffis[i]
try:
included_module_name, included_source = ffi_to_include._assigned_source[
:2
]
except AttributeError:
raise VerificationError(
"ffi object %r includes %r, but the latter has not "
"been prepared with set_source()" % (self.ffi, ffi_to_include)
)
if included_source is not None:
raise VerificationError(
"not implemented yet: ffi.include() of a C-based "
"ffi inside a Python-based ffi"
)
prnt("from %s import ffi as _ffi%d" % (included_module_name, i))
prnt()
prnt("ffi = _cffi_backend.FFI('%s'," % (self.module_name,))
prnt(" _version = 0x%x," % (self._version,))
self._version = None
#
# the '_types' keyword argument
self.cffi_types = tuple(self.cffi_types) # don't change any more
types_lst = [op.as_python_bytes() for op in self.cffi_types]
prnt(" _types = %s," % (self._to_py("".join(types_lst)),))
typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()])
#
# the keyword arguments from ALL_STEPS
for step_name in self.ALL_STEPS:
lst = self._lsts[step_name]
if len(lst) > 0 and step_name != "field":
prnt(" _%ss = %s," % (step_name, self._to_py(lst)))
#
# the '_includes' keyword argument
if num_includes > 0:
prnt(
" _includes = (%s,),"
% (", ".join(["_ffi%d" % i for i in range(num_includes)]),)
)
#
# the footer
prnt(")")
# ----------
def _gettypenum(self, type):
# a KeyError here is a bug. please report it! :-)
return self._typesdict[type]
def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode):
extraarg = ""
if isinstance(tp, model.BasePrimitiveType) and not tp.is_complex_type():
if tp.is_integer_type() and tp.name != "_Bool":
converter = "_cffi_to_c_int"
extraarg = ", %s" % tp.name
elif isinstance(tp, model.UnknownFloatType):
# don't check with is_float_type(): it may be a 'long
            # double' here, and _cffi_to_c_double would lose precision
converter = "(%s)_cffi_to_c_double" % (tp.get_c_name(""),)
else:
cname = tp.get_c_name("")
converter = "(%s)_cffi_to_c_%s" % (cname, tp.name.replace(" ", "_"))
if cname in ("char16_t", "char32_t"):
self.needs_version(VERSION_CHAR16CHAR32)
errvalue = "-1"
#
elif isinstance(tp, model.PointerType):
self._convert_funcarg_to_c_ptr_or_array(tp, fromvar, tovar, errcode)
return
#
elif isinstance(tp, model.StructOrUnionOrEnum) or isinstance(
tp, model.BasePrimitiveType
):
# a struct (not a struct pointer) as a function argument;
# or, a complex (the same code works)
self._prnt(
" if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)"
% (tovar, self._gettypenum(tp), fromvar)
)
self._prnt(" %s;" % errcode)
return
#
elif isinstance(tp, model.FunctionPtrType):
converter = "(%s)_cffi_to_c_pointer" % tp.get_c_name("")
extraarg = ", _cffi_type(%d)" % self._gettypenum(tp)
errvalue = "NULL"
#
else:
raise NotImplementedError(tp)
#
self._prnt(" %s = %s(%s%s);" % (tovar, converter, fromvar, extraarg))
self._prnt(
" if (%s == (%s)%s && PyErr_Occurred())"
% (tovar, tp.get_c_name(""), errvalue)
)
self._prnt(" %s;" % errcode)
def _extra_local_variables(self, tp, localvars):
if isinstance(tp, model.PointerType):
localvars.add("Py_ssize_t datasize")
def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode):
self._prnt(" datasize = _cffi_prepare_pointer_call_argument(")
self._prnt(
" _cffi_type(%d), %s, (char **)&%s);"
% (self._gettypenum(tp), fromvar, tovar)
)
self._prnt(" if (datasize != 0) {")
self._prnt(" if (datasize < 0)")
self._prnt(" %s;" % errcode)
self._prnt(
" %s = (%s)alloca((size_t)datasize);" % (tovar, tp.get_c_name(""))
)
self._prnt(" memset((void *)%s, 0, (size_t)datasize);" % (tovar,))
self._prnt(
" if (_cffi_convert_array_from_object("
"(char *)%s, _cffi_type(%d), %s) < 0)"
% (tovar, self._gettypenum(tp), fromvar)
)
self._prnt(" %s;" % errcode)
self._prnt(" }")
def _convert_expr_from_c(self, tp, var, context):
if isinstance(tp, model.BasePrimitiveType):
if tp.is_integer_type() and tp.name != "_Bool":
return "_cffi_from_c_int(%s, %s)" % (var, tp.name)
elif isinstance(tp, model.UnknownFloatType):
return "_cffi_from_c_double(%s)" % (var,)
elif tp.name != "long double" and not tp.is_complex_type():
cname = tp.name.replace(" ", "_")
if cname in ("char16_t", "char32_t"):
self.needs_version(VERSION_CHAR16CHAR32)
return "_cffi_from_c_%s(%s)" % (cname, var)
else:
return "_cffi_from_c_deref((char *)&%s, _cffi_type(%d))" % (
var,
self._gettypenum(tp),
)
elif isinstance(tp, (model.PointerType, model.FunctionPtrType)):
return "_cffi_from_c_pointer((char *)%s, _cffi_type(%d))" % (
var,
self._gettypenum(tp),
)
elif isinstance(tp, model.ArrayType):
return "_cffi_from_c_pointer((char *)%s, _cffi_type(%d))" % (
var,
self._gettypenum(model.PointerType(tp.item)),
)
elif isinstance(tp, model.StructOrUnion):
if tp.fldnames is None:
raise TypeError(
"'%s' is used as %s, but is opaque" % (tp._get_c_name(), context)
)
return "_cffi_from_c_struct((char *)&%s, _cffi_type(%d))" % (
var,
self._gettypenum(tp),
)
elif isinstance(tp, model.EnumType):
return "_cffi_from_c_deref((char *)&%s, _cffi_type(%d))" % (
var,
self._gettypenum(tp),
)
else:
raise NotImplementedError(tp)
# ----------
# typedefs
def _typedef_type(self, tp, name):
return self._global_type(tp, "(*(%s *)0)" % (name,))
def _generate_cpy_typedef_collecttype(self, tp, name):
self._do_collect_type(self._typedef_type(tp, name))
def _generate_cpy_typedef_decl(self, tp, name):
pass
def _typedef_ctx(self, tp, name):
type_index = self._typesdict[tp]
self._lsts["typename"].append(TypenameExpr(name, type_index))
def _generate_cpy_typedef_ctx(self, tp, name):
tp = self._typedef_type(tp, name)
self._typedef_ctx(tp, name)
if getattr(tp, "origin", None) == "unknown_type":
self._struct_ctx(tp, tp.name, approxname=None)
elif isinstance(tp, model.NamedPointerType):
self._struct_ctx(
tp.totype, tp.totype.name, approxname=tp.name, named_ptr=tp
)
# ----------
# function declarations
def _generate_cpy_function_collecttype(self, tp, name):
self._do_collect_type(tp.as_raw_function())
if tp.ellipsis and not self.target_is_python:
self._do_collect_type(tp)
def _generate_cpy_function_decl(self, tp, name):
assert not self.target_is_python
assert isinstance(tp, model.FunctionPtrType)
if tp.ellipsis:
# cannot support vararg functions better than this: check for its
# exact type (including the fixed arguments), and build it as a
# constant function pointer (no CPython wrapper)
self._generate_cpy_constant_decl(tp, name)
return
prnt = self._prnt
numargs = len(tp.args)
if numargs == 0:
argname = "noarg"
elif numargs == 1:
argname = "arg0"
else:
argname = "args"
#
# ------------------------------
# the 'd' version of the function, only for addressof(lib, 'func')
arguments = []
call_arguments = []
context = "argument of %s" % name
for i, type in enumerate(tp.args):
arguments.append(type.get_c_name(" x%d" % i, context))
call_arguments.append("x%d" % i)
repr_arguments = ", ".join(arguments)
repr_arguments = repr_arguments or "void"
if tp.abi:
abi = tp.abi + " "
else:
abi = ""
name_and_arguments = "%s_cffi_d_%s(%s)" % (abi, name, repr_arguments)
prnt("static %s" % (tp.result.get_c_name(name_and_arguments),))
prnt("{")
call_arguments = ", ".join(call_arguments)
result_code = "return "
if isinstance(tp.result, model.VoidType):
result_code = ""
prnt(" %s%s(%s);" % (result_code, name, call_arguments))
prnt("}")
#
prnt("#ifndef PYPY_VERSION") # ------------------------------
#
prnt("static PyObject *")
prnt("_cffi_f_%s(PyObject *self, PyObject *%s)" % (name, argname))
prnt("{")
#
context = "argument of %s" % name
for i, type in enumerate(tp.args):
arg = type.get_c_name(" x%d" % i, context)
prnt(" %s;" % arg)
#
localvars = set()
for type in tp.args:
self._extra_local_variables(type, localvars)
for decl in localvars:
prnt(" %s;" % (decl,))
#
if not isinstance(tp.result, model.VoidType):
result_code = "result = "
context = "result of %s" % name
result_decl = " %s;" % tp.result.get_c_name(" result", context)
prnt(result_decl)
else:
result_decl = None
result_code = ""
#
if len(tp.args) > 1:
rng = range(len(tp.args))
for i in rng:
prnt(" PyObject *arg%d;" % i)
prnt()
prnt(
' if (!PyArg_UnpackTuple(args, "%s", %d, %d, %s))'
% (name, len(rng), len(rng), ", ".join(["&arg%d" % i for i in rng]))
)
prnt(" return NULL;")
prnt()
#
for i, type in enumerate(tp.args):
self._convert_funcarg_to_c(type, "arg%d" % i, "x%d" % i, "return NULL")
prnt()
#
prnt(" Py_BEGIN_ALLOW_THREADS")
prnt(" _cffi_restore_errno();")
call_arguments = ["x%d" % i for i in range(len(tp.args))]
call_arguments = ", ".join(call_arguments)
prnt(" { %s%s(%s); }" % (result_code, name, call_arguments))
prnt(" _cffi_save_errno();")
prnt(" Py_END_ALLOW_THREADS")
prnt()
#
prnt(" (void)self; /* unused */")
if numargs == 0:
prnt(" (void)noarg; /* unused */")
if result_code:
prnt(
" return %s;"
% self._convert_expr_from_c(tp.result, "result", "result type")
)
else:
prnt(" Py_INCREF(Py_None);")
prnt(" return Py_None;")
prnt("}")
#
prnt("#else") # ------------------------------
#
# the PyPy version: need to replace struct/union arguments with
# pointers, and if the result is a struct/union, insert a first
# arg that is a pointer to the result. We also do that for
# complex args and return type.
def need_indirection(type):
return isinstance(type, model.StructOrUnion) or (
isinstance(type, model.PrimitiveType) and type.is_complex_type()
)
difference = False
arguments = []
call_arguments = []
context = "argument of %s" % name
for i, type in enumerate(tp.args):
indirection = ""
if need_indirection(type):
indirection = "*"
difference = True
arg = type.get_c_name(" %sx%d" % (indirection, i), context)
arguments.append(arg)
call_arguments.append("%sx%d" % (indirection, i))
tp_result = tp.result
if need_indirection(tp_result):
context = "result of %s" % name
arg = tp_result.get_c_name(" *result", context)
arguments.insert(0, arg)
tp_result = model.void_type
result_decl = None
result_code = "*result = "
difference = True
if difference:
repr_arguments = ", ".join(arguments)
repr_arguments = repr_arguments or "void"
name_and_arguments = "%s_cffi_f_%s(%s)" % (abi, name, repr_arguments)
prnt("static %s" % (tp_result.get_c_name(name_and_arguments),))
prnt("{")
if result_decl:
prnt(result_decl)
call_arguments = ", ".join(call_arguments)
prnt(" { %s%s(%s); }" % (result_code, name, call_arguments))
if result_decl:
prnt(" return result;")
prnt("}")
else:
prnt("# define _cffi_f_%s _cffi_d_%s" % (name, name))
#
prnt("#endif") # ------------------------------
prnt()
def _generate_cpy_function_ctx(self, tp, name):
if tp.ellipsis and not self.target_is_python:
self._generate_cpy_constant_ctx(tp, name)
return
type_index = self._typesdict[tp.as_raw_function()]
numargs = len(tp.args)
if self.target_is_python:
meth_kind = OP_DLOPEN_FUNC
elif numargs == 0:
meth_kind = OP_CPYTHON_BLTN_N # 'METH_NOARGS'
elif numargs == 1:
meth_kind = OP_CPYTHON_BLTN_O # 'METH_O'
else:
meth_kind = OP_CPYTHON_BLTN_V # 'METH_VARARGS'
self._lsts["global"].append(
GlobalExpr(
name,
"_cffi_f_%s" % name,
CffiOp(meth_kind, type_index),
size="_cffi_d_%s" % name,
)
)
# ----------
# named structs or unions
def _field_type(self, tp_struct, field_name, tp_field):
if isinstance(tp_field, model.ArrayType):
actual_length = tp_field.length
if actual_length == "...":
ptr_struct_name = tp_struct.get_c_name("*")
actual_length = "_cffi_array_len(((%s)0)->%s)" % (
ptr_struct_name,
field_name,
)
tp_item = self._field_type(tp_struct, "%s[0]" % field_name, tp_field.item)
tp_field = model.ArrayType(tp_item, actual_length)
return tp_field
def _struct_collecttype(self, tp):
self._do_collect_type(tp)
if self.target_is_python:
# also requires nested anon struct/unions in ABI mode, recursively
for fldtype in tp.anonymous_struct_fields():
self._struct_collecttype(fldtype)
def _struct_decl(self, tp, cname, approxname):
if tp.fldtypes is None:
return
prnt = self._prnt
checkfuncname = "_cffi_checkfld_%s" % (approxname,)
prnt("_CFFI_UNUSED_FN")
prnt("static void %s(%s *p)" % (checkfuncname, cname))
prnt("{")
prnt(" /* only to generate compile-time warnings or errors */")
prnt(" (void)p;")
for fname, ftype, fbitsize, fqual in tp.enumfields():
try:
if ftype.is_integer_type() or fbitsize >= 0:
# accept all integers, but complain on float or double
prnt(
" (void)((p->%s) | 0); /* check that '%s.%s' is "
"an integer */" % (fname, cname, fname)
)
continue
# only accept exactly the type declared, except that '[]'
# is interpreted as a '*' and so will match any array length.
# (It would also match '*', but that's harder to detect...)
while isinstance(ftype, model.ArrayType) and (
ftype.length is None or ftype.length == "..."
):
ftype = ftype.item
fname = fname + "[0]"
prnt(
" { %s = &p->%s; (void)tmp; }"
% (ftype.get_c_name("*tmp", "field %r" % fname, quals=fqual), fname)
)
except VerificationError as e:
prnt(" /* %s */" % str(e)) # cannot verify it, ignore
prnt("}")
prnt("struct _cffi_align_%s { char x; %s y; };" % (approxname, cname))
prnt()
def _struct_ctx(self, tp, cname, approxname, named_ptr=None):
type_index = self._typesdict[tp]
reason_for_not_expanding = None
flags = []
if isinstance(tp, model.UnionType):
flags.append("_CFFI_F_UNION")
if tp.fldtypes is None:
flags.append("_CFFI_F_OPAQUE")
reason_for_not_expanding = "opaque"
if tp not in self.ffi._parser._included_declarations and (
named_ptr is None
or named_ptr not in self.ffi._parser._included_declarations
):
if tp.fldtypes is None:
pass # opaque
elif tp.partial or any(tp.anonymous_struct_fields()):
pass # field layout obtained silently from the C compiler
else:
flags.append("_CFFI_F_CHECK_FIELDS")
if tp.packed:
flags.append("_CFFI_F_PACKED")
else:
flags.append("_CFFI_F_EXTERNAL")
reason_for_not_expanding = "external"
flags = "|".join(flags) or "0"
c_fields = []
if reason_for_not_expanding is None:
expand_anonymous_struct_union = not self.target_is_python
enumfields = list(tp.enumfields(expand_anonymous_struct_union))
for fldname, fldtype, fbitsize, fqual in enumfields:
fldtype = self._field_type(tp, fldname, fldtype)
self._check_not_opaque(fldtype, "field '%s.%s'" % (tp.name, fldname))
# cname is None for _add_missing_struct_unions() only
op = OP_NOOP
if fbitsize >= 0:
op = OP_BITFIELD
size = "%d /* bits */" % fbitsize
elif cname is None or (
isinstance(fldtype, model.ArrayType) and fldtype.length is None
):
size = "(size_t)-1"
else:
size = "sizeof(((%s)0)->%s)" % (
tp.get_c_name("*") if named_ptr is None else named_ptr.name,
fldname,
)
if cname is None or fbitsize >= 0:
offset = "(size_t)-1"
elif named_ptr is not None:
offset = "((char *)&((%s)0)->%s) - (char *)0" % (
named_ptr.name,
fldname,
)
else:
offset = "offsetof(%s, %s)" % (tp.get_c_name(""), fldname)
c_fields.append(
FieldExpr(
fldname,
offset,
size,
fbitsize,
CffiOp(op, self._typesdict[fldtype]),
)
)
first_field_index = len(self._lsts["field"])
self._lsts["field"].extend(c_fields)
#
if cname is None: # unknown name, for _add_missing_struct_unions
size = "(size_t)-2"
align = -2
comment = "unnamed"
else:
if named_ptr is not None:
size = "sizeof(*(%s)0)" % (named_ptr.name,)
align = "-1 /* unknown alignment */"
else:
size = "sizeof(%s)" % (cname,)
align = "offsetof(struct _cffi_align_%s, y)" % (approxname,)
comment = None
else:
size = "(size_t)-1"
align = -1
first_field_index = -1
comment = reason_for_not_expanding
self._lsts["struct_union"].append(
StructUnionExpr(
tp.name,
type_index,
flags,
size,
align,
comment,
first_field_index,
c_fields,
)
)
self._seen_struct_unions.add(tp)
def _check_not_opaque(self, tp, location):
while isinstance(tp, model.ArrayType):
tp = tp.item
if isinstance(tp, model.StructOrUnion) and tp.fldtypes is None:
raise TypeError(
"%s is of an opaque type (not declared in cdef())" % location
)
def _add_missing_struct_unions(self):
# not very nice, but some struct declarations might be missing
# because they don't have any known C name. Check that they are
# not partial (we can't complete or verify them!) and emit them
# anonymously.
lst = list(self._struct_unions.items())
lst.sort(key=lambda tp_order: tp_order[1])
for tp, order in lst:
if tp not in self._seen_struct_unions:
if tp.partial:
raise NotImplementedError(
"internal inconsistency: %r is "
"partial but was not seen at "
"this point" % (tp,)
)
if tp.name.startswith("$") and tp.name[1:].isdigit():
approxname = tp.name[1:]
elif tp.name == "_IO_FILE" and tp.forcename == "FILE":
approxname = "FILE"
self._typedef_ctx(tp, "FILE")
else:
raise NotImplementedError("internal inconsistency: %r" % (tp,))
self._struct_ctx(tp, None, approxname)
def _generate_cpy_struct_collecttype(self, tp, name):
self._struct_collecttype(tp)
_generate_cpy_union_collecttype = _generate_cpy_struct_collecttype
def _struct_names(self, tp):
cname = tp.get_c_name("")
if " " in cname:
return cname, cname.replace(" ", "_")
else:
return cname, "_" + cname
def _generate_cpy_struct_decl(self, tp, name):
self._struct_decl(tp, *self._struct_names(tp))
_generate_cpy_union_decl = _generate_cpy_struct_decl
def _generate_cpy_struct_ctx(self, tp, name):
self._struct_ctx(tp, *self._struct_names(tp))
_generate_cpy_union_ctx = _generate_cpy_struct_ctx
# ----------
# 'anonymous' declarations. These are produced for anonymous structs
# or unions; the 'name' is obtained by a typedef.
def _generate_cpy_anonymous_collecttype(self, tp, name):
if isinstance(tp, model.EnumType):
self._generate_cpy_enum_collecttype(tp, name)
else:
self._struct_collecttype(tp)
def _generate_cpy_anonymous_decl(self, tp, name):
if isinstance(tp, model.EnumType):
self._generate_cpy_enum_decl(tp)
else:
self._struct_decl(tp, name, "typedef_" + name)
def _generate_cpy_anonymous_ctx(self, tp, name):
if isinstance(tp, model.EnumType):
self._enum_ctx(tp, name)
else:
self._struct_ctx(tp, name, "typedef_" + name)
# ----------
# constants, declared with "static const ..."
def _generate_cpy_const(
self, is_int, name, tp=None, category="const", check_value=None
):
if (category, name) in self._seen_constants:
raise VerificationError(
"duplicate declaration of %s '%s'" % (category, name)
)
self._seen_constants.add((category, name))
#
prnt = self._prnt
funcname = "_cffi_%s_%s" % (category, name)
if is_int:
prnt("static int %s(unsigned long long *o)" % funcname)
prnt("{")
prnt(" int n = (%s) <= 0;" % (name,))
prnt(
" *o = (unsigned long long)((%s) | 0);"
" /* check that %s is an integer */" % (name, name)
)
if check_value is not None:
if check_value > 0:
check_value = "%dU" % (check_value,)
prnt(" if (!_cffi_check_int(*o, n, %s))" % (check_value,))
prnt(" n |= 2;")
prnt(" return n;")
prnt("}")
else:
assert check_value is None
prnt("static void %s(char *o)" % funcname)
prnt("{")
prnt(" *(%s)o = %s;" % (tp.get_c_name("*"), name))
prnt("}")
prnt()
def _generate_cpy_constant_collecttype(self, tp, name):
is_int = tp.is_integer_type()
if not is_int or self.target_is_python:
self._do_collect_type(tp)
def _generate_cpy_constant_decl(self, tp, name):
is_int = tp.is_integer_type()
self._generate_cpy_const(is_int, name, tp)
def _generate_cpy_constant_ctx(self, tp, name):
if not self.target_is_python and tp.is_integer_type():
type_op = CffiOp(OP_CONSTANT_INT, -1)
else:
if self.target_is_python:
const_kind = OP_DLOPEN_CONST
else:
const_kind = OP_CONSTANT
type_index = self._typesdict[tp]
type_op = CffiOp(const_kind, type_index)
self._lsts["global"].append(GlobalExpr(name, "_cffi_const_%s" % name, type_op))
# ----------
# enums
def _generate_cpy_enum_collecttype(self, tp, name):
self._do_collect_type(tp)
def _generate_cpy_enum_decl(self, tp, name=None):
for enumerator in tp.enumerators:
self._generate_cpy_const(True, enumerator)
def _enum_ctx(self, tp, cname):
type_index = self._typesdict[tp]
type_op = CffiOp(OP_ENUM, -1)
if self.target_is_python:
tp.check_not_partial()
for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
self._lsts["global"].append(
GlobalExpr(
enumerator,
"_cffi_const_%s" % enumerator,
type_op,
check_value=enumvalue,
)
)
#
if cname is not None and "$" not in cname and not self.target_is_python:
size = "sizeof(%s)" % cname
signed = "((%s)-1) <= 0" % cname
else:
basetp = tp.build_baseinttype(self.ffi, [])
size = self.ffi.sizeof(basetp)
signed = int(int(self.ffi.cast(basetp, -1)) < 0)
allenums = ",".join(tp.enumerators)
self._lsts["enum"].append(EnumExpr(tp.name, type_index, size, signed, allenums))
def _generate_cpy_enum_ctx(self, tp, name):
self._enum_ctx(tp, tp._get_c_name())
# ----------
# macros: for now only for integers
def _generate_cpy_macro_collecttype(self, tp, name):
pass
def _generate_cpy_macro_decl(self, tp, name):
if tp == "...":
check_value = None
else:
check_value = tp # an integer
self._generate_cpy_const(True, name, check_value=check_value)
def _generate_cpy_macro_ctx(self, tp, name):
if tp == "...":
if self.target_is_python:
raise VerificationError(
"cannot use the syntax '...' in '#define %s ...' when "
"using the ABI mode" % (name,)
)
check_value = None
else:
check_value = tp # an integer
type_op = CffiOp(OP_CONSTANT_INT, -1)
self._lsts["global"].append(
GlobalExpr(name, "_cffi_const_%s" % name, type_op, check_value=check_value)
)
# ----------
# global variables
def _global_type(self, tp, global_name):
if isinstance(tp, model.ArrayType):
actual_length = tp.length
if actual_length == "...":
actual_length = "_cffi_array_len(%s)" % (global_name,)
tp_item = self._global_type(tp.item, "%s[0]" % global_name)
tp = model.ArrayType(tp_item, actual_length)
return tp
def _generate_cpy_variable_collecttype(self, tp, name):
self._do_collect_type(self._global_type(tp, name))
def _generate_cpy_variable_decl(self, tp, name):
prnt = self._prnt
tp = self._global_type(tp, name)
if isinstance(tp, model.ArrayType) and tp.length is None:
tp = tp.item
ampersand = ""
else:
ampersand = "&"
# This code assumes that casts from "tp *" to "void *" is a
# no-op, i.e. a function that returns a "tp *" can be called
# as if it returned a "void *". This should be generally true
# on any modern machine. The only exception to that rule (on
# uncommon architectures, and as far as I can tell) might be
# if 'tp' were a function type, but that is not possible here.
# (If 'tp' is a function _pointer_ type, then casts from "fn_t
# **" to "void *" are again no-ops, as far as I can tell.)
decl = "*_cffi_var_%s(void)" % (name,)
prnt("static " + tp.get_c_name(decl, quals=self._current_quals))
prnt("{")
prnt(" return %s(%s);" % (ampersand, name))
prnt("}")
prnt()
def _generate_cpy_variable_ctx(self, tp, name):
tp = self._global_type(tp, name)
type_index = self._typesdict[tp]
if self.target_is_python:
op = OP_GLOBAL_VAR
else:
op = OP_GLOBAL_VAR_F
self._lsts["global"].append(
GlobalExpr(name, "_cffi_var_%s" % name, CffiOp(op, type_index))
)
# ----------
# extern "Python"
def _generate_cpy_extern_python_collecttype(self, tp, name):
assert isinstance(tp, model.FunctionPtrType)
self._do_collect_type(tp)
_generate_cpy_dllexport_python_collecttype = (
_generate_cpy_extern_python_plus_c_collecttype
) = _generate_cpy_extern_python_collecttype
def _extern_python_decl(self, tp, name, tag_and_space):
prnt = self._prnt
if isinstance(tp.result, model.VoidType):
size_of_result = "0"
else:
context = "result of %s" % name
size_of_result = "(int)sizeof(%s)" % (tp.result.get_c_name("", context),)
prnt("static struct _cffi_externpy_s _cffi_externpy__%s =" % name)
prnt(' { "%s.%s", %s };' % (self.module_name, name, size_of_result))
prnt()
#
arguments = []
context = "argument of %s" % name
for i, type in enumerate(tp.args):
arg = type.get_c_name(" a%d" % i, context)
arguments.append(arg)
#
repr_arguments = ", ".join(arguments)
repr_arguments = repr_arguments or "void"
name_and_arguments = "%s(%s)" % (name, repr_arguments)
if tp.abi == "__stdcall":
name_and_arguments = "_cffi_stdcall " + name_and_arguments
#
def may_need_128_bits(tp):
return isinstance(tp, model.PrimitiveType) and tp.name == "long double"
#
size_of_a = max(len(tp.args) * 8, 8)
if may_need_128_bits(tp.result):
size_of_a = max(size_of_a, 16)
if isinstance(tp.result, model.StructOrUnion):
size_of_a = "sizeof(%s) > %d ? sizeof(%s) : %d" % (
tp.result.get_c_name(""),
size_of_a,
tp.result.get_c_name(""),
size_of_a,
)
prnt("%s%s" % (tag_and_space, tp.result.get_c_name(name_and_arguments)))
prnt("{")
prnt(" char a[%s];" % size_of_a)
prnt(" char *p = a;")
for i, type in enumerate(tp.args):
arg = "a%d" % i
if isinstance(type, model.StructOrUnion) or may_need_128_bits(type):
arg = "&" + arg
type = model.PointerType(type)
prnt(" *(%s)(p + %d) = %s;" % (type.get_c_name("*"), i * 8, arg))
prnt(" _cffi_call_python(&_cffi_externpy__%s, p);" % name)
if not isinstance(tp.result, model.VoidType):
prnt(" return *(%s)p;" % (tp.result.get_c_name("*"),))
prnt("}")
prnt()
self._num_externpy += 1
def _generate_cpy_extern_python_decl(self, tp, name):
self._extern_python_decl(tp, name, "static ")
def _generate_cpy_dllexport_python_decl(self, tp, name):
self._extern_python_decl(tp, name, "CFFI_DLLEXPORT ")
def _generate_cpy_extern_python_plus_c_decl(self, tp, name):
self._extern_python_decl(tp, name, "")
def _generate_cpy_extern_python_ctx(self, tp, name):
if self.target_is_python:
raise VerificationError("cannot use 'extern \"Python\"' in the ABI mode")
if tp.ellipsis:
raise NotImplementedError('a vararg function is extern "Python"')
type_index = self._typesdict[tp]
type_op = CffiOp(OP_EXTERN_PYTHON, type_index)
self._lsts["global"].append(
GlobalExpr(name, "&_cffi_externpy__%s" % name, type_op, name)
)
_generate_cpy_dllexport_python_ctx = (
_generate_cpy_extern_python_plus_c_ctx
) = _generate_cpy_extern_python_ctx
def _print_string_literal_in_array(self, s):
prnt = self._prnt
prnt("// # NB. this is not a string because of a size limit in MSVC")
for line in s.splitlines(True):
prnt(("// " + line).rstrip())
printed_line = ""
for c in line:
if len(printed_line) >= 76:
prnt(printed_line)
printed_line = ""
printed_line += "%d," % (ord(c),)
prnt(printed_line)
# ----------
# emitting the opcodes for individual types
def _emit_bytecode_VoidType(self, tp, index):
self.cffi_types[index] = CffiOp(OP_PRIMITIVE, PRIM_VOID)
def _emit_bytecode_PrimitiveType(self, tp, index):
prim_index = PRIMITIVE_TO_INDEX[tp.name]
self.cffi_types[index] = CffiOp(OP_PRIMITIVE, prim_index)
def _emit_bytecode_UnknownIntegerType(self, tp, index):
s = (
"_cffi_prim_int(sizeof(%s), (\n"
" ((%s)-1) | 0 /* check that %s is an integer type */\n"
" ) <= 0)" % (tp.name, tp.name, tp.name)
)
self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s)
def _emit_bytecode_UnknownFloatType(self, tp, index):
s = (
"_cffi_prim_float(sizeof(%s) *\n"
" (((%s)1) / 2) * 2 /* integer => 0, float => 1 */\n"
" )" % (tp.name, tp.name)
)
self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s)
def _emit_bytecode_RawFunctionType(self, tp, index):
self.cffi_types[index] = CffiOp(OP_FUNCTION, self._typesdict[tp.result])
index += 1
for tp1 in tp.args:
realindex = self._typesdict[tp1]
if index != realindex:
if isinstance(tp1, model.PrimitiveType):
self._emit_bytecode_PrimitiveType(tp1, index)
else:
self.cffi_types[index] = CffiOp(OP_NOOP, realindex)
index += 1
flags = int(tp.ellipsis)
if tp.abi is not None:
if tp.abi == "__stdcall":
flags |= 2
else:
raise NotImplementedError("abi=%r" % (tp.abi,))
self.cffi_types[index] = CffiOp(OP_FUNCTION_END, flags)
def _emit_bytecode_PointerType(self, tp, index):
self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[tp.totype])
_emit_bytecode_ConstPointerType = _emit_bytecode_PointerType
_emit_bytecode_NamedPointerType = _emit_bytecode_PointerType
def _emit_bytecode_FunctionPtrType(self, tp, index):
raw = tp.as_raw_function()
self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[raw])
def _emit_bytecode_ArrayType(self, tp, index):
item_index = self._typesdict[tp.item]
if tp.length is None:
self.cffi_types[index] = CffiOp(OP_OPEN_ARRAY, item_index)
elif tp.length == "...":
raise VerificationError(
"type %s badly placed: the '...' array length can only be "
"used on global arrays or on fields of structures"
% (str(tp).replace("/*...*/", "..."),)
)
else:
assert self.cffi_types[index + 1] == "LEN"
self.cffi_types[index] = CffiOp(OP_ARRAY, item_index)
self.cffi_types[index + 1] = CffiOp(None, str(tp.length))
def _emit_bytecode_StructType(self, tp, index):
struct_index = self._struct_unions[tp]
self.cffi_types[index] = CffiOp(OP_STRUCT_UNION, struct_index)
_emit_bytecode_UnionType = _emit_bytecode_StructType
def _emit_bytecode_EnumType(self, tp, index):
enum_index = self._enums[tp]
self.cffi_types[index] = CffiOp(OP_ENUM, enum_index)
if sys.version_info >= (3,):
NativeIO = io.StringIO
else:
class NativeIO(io.BytesIO):
def write(self, s):
if isinstance(s, unicode):
s = s.encode("ascii")
super(NativeIO, self).write(s)
def _make_c_or_py_source(ffi, module_name, preamble, target_file, verbose):
if verbose:
print("generating %s" % (target_file,))
recompiler = Recompiler(ffi, module_name, target_is_python=(preamble is None))
recompiler.collect_type_table()
recompiler.collect_step_tables()
f = NativeIO()
recompiler.write_source_to_f(f, preamble)
output = f.getvalue()
try:
with open(target_file, "r") as f1:
if f1.read(len(output) + 1) != output:
raise IOError
if verbose:
print("(already up-to-date)")
return False # already up-to-date
except IOError:
tmp_file = "%s.~%d" % (target_file, os.getpid())
with open(tmp_file, "w") as f1:
f1.write(output)
try:
os.rename(tmp_file, target_file)
except OSError:
os.unlink(target_file)
os.rename(tmp_file, target_file)
return True
def make_c_source(ffi, module_name, preamble, target_c_file, verbose=False):
assert preamble is not None
return _make_c_or_py_source(ffi, module_name, preamble, target_c_file, verbose)
def make_py_source(ffi, module_name, target_py_file, verbose=False):
return _make_c_or_py_source(ffi, module_name, None, target_py_file, verbose)
def _modname_to_file(outputdir, modname, extension):
parts = modname.split(".")
try:
os.makedirs(os.path.join(outputdir, *parts[:-1]))
except OSError:
pass
parts[-1] += extension
return os.path.join(outputdir, *parts), parts
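
# For example (sketch):
#     _modname_to_file("build", "pkg.mod", ".c")
# creates build/pkg/ if needed and returns ("build/pkg/mod.c", ["pkg", "mod.c"]).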
# Aaargh. Distutils is not tested at all for the purpose of compiling
# DLLs that are not extension modules. Here are some hacks to work
# around that, in the _patch_for_*() functions...
def _patch_meth(patchlist, cls, name, new_meth):
old = getattr(cls, name)
patchlist.append((cls, name, old))
setattr(cls, name, new_meth)
return old
def _unpatch_meths(patchlist):
for cls, name, old_meth in reversed(patchlist):
setattr(cls, name, old_meth)
def _patch_for_embedding(patchlist):
if sys.platform == "win32":
# we must not remove the manifest when building for embedding!
from distutils.msvc9compiler import MSVCCompiler
_patch_meth(
patchlist,
MSVCCompiler,
"_remove_visual_c_ref",
lambda self, manifest_file: manifest_file,
)
if sys.platform == "darwin":
# we must not make a '-bundle', but a '-dynamiclib' instead
from distutils.ccompiler import CCompiler
def my_link_shared_object(self, *args, **kwds):
if "-bundle" in self.linker_so:
self.linker_so = list(self.linker_so)
i = self.linker_so.index("-bundle")
self.linker_so[i] = "-dynamiclib"
return old_link_shared_object(self, *args, **kwds)
old_link_shared_object = _patch_meth(
patchlist, CCompiler, "link_shared_object", my_link_shared_object
)
def _patch_for_target(patchlist, target):
from distutils.command.build_ext import build_ext
# if 'target' is different from '*', we need to patch some internal
# method to just return this 'target' value, instead of having it
# built from module_name
if target.endswith(".*"):
target = target[:-2]
if sys.platform == "win32":
target += ".dll"
elif sys.platform == "darwin":
target += ".dylib"
else:
target += ".so"
_patch_meth(patchlist, build_ext, "get_ext_filename", lambda self, ext_name: target)
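
# Sketch of how this module is normally reached through the documented cffi
# API rather than by calling recompile() directly (the C snippet and the module
# name "_example" are illustrative only):
#
#     from cffi import FFI
#     ffibuilder = FFI()
#     ffibuilder.cdef("int add(int, int);")
#     ffibuilder.set_source("_example",
#                           "int add(int a, int b) { return a + b; }")
#     ffibuilder.compile(verbose=True)   # ends up in recompile() below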
def recompile(
ffi,
module_name,
preamble,
tmpdir=".",
call_c_compiler=True,
c_file=None,
source_extension=".c",
extradir=None,
compiler_verbose=1,
target=None,
debug=None,
**kwds
):
if not isinstance(module_name, str):
module_name = module_name.encode("ascii")
if ffi._windows_unicode:
ffi._apply_windows_unicode(kwds)
if preamble is not None:
embedding = ffi._embedding is not None
if embedding:
ffi._apply_embedding_fix(kwds)
if c_file is None:
c_file, parts = _modname_to_file(tmpdir, module_name, source_extension)
if extradir:
parts = [extradir] + parts
ext_c_file = os.path.join(*parts)
else:
ext_c_file = c_file
#
if target is None:
if embedding:
target = "%s.*" % module_name
else:
target = "*"
#
ext = ffiplatform.get_extension(ext_c_file, module_name, **kwds)
updated = make_c_source(
ffi, module_name, preamble, c_file, verbose=compiler_verbose
)
if call_c_compiler:
patchlist = []
cwd = os.getcwd()
try:
if embedding:
_patch_for_embedding(patchlist)
if target != "*":
_patch_for_target(patchlist, target)
if compiler_verbose:
if tmpdir == ".":
msg = "the current directory is"
else:
msg = "setting the current directory to"
print("%s %r" % (msg, os.path.abspath(tmpdir)))
os.chdir(tmpdir)
outputfilename = ffiplatform.compile(".", ext, compiler_verbose, debug)
finally:
os.chdir(cwd)
_unpatch_meths(patchlist)
return outputfilename
else:
return ext, updated
else:
if c_file is None:
c_file, _ = _modname_to_file(tmpdir, module_name, ".py")
updated = make_py_source(ffi, module_name, c_file, verbose=compiler_verbose)
if call_c_compiler:
return c_file
else:
return None, updated
def _verify(ffi, module_name, preamble, *args, **kwds):
# FOR TESTS ONLY
from testing.udir import udir
import imp
assert module_name not in sys.modules, "module name conflict: %r" % (module_name,)
kwds.setdefault("tmpdir", str(udir))
outputfilename = recompile(ffi, module_name, preamble, *args, **kwds)
module = imp.load_dynamic(module_name, outputfilename)
#
# hack hack hack: copy all *bound methods* from module.ffi back to the
# ffi instance. Then calls like ffi.new() will invoke module.ffi.new().
for name in dir(module.ffi):
if not name.startswith("_"):
attr = getattr(module.ffi, name)
if attr is not getattr(ffi, name, object()):
setattr(ffi, name, attr)
def typeof_disabled(*args, **kwds):
raise NotImplementedError
ffi._typeof = typeof_disabled
for name in dir(ffi):
if not name.startswith("_") and not hasattr(module.ffi, name):
setattr(ffi, name, NotImplemented)
return module.lib
| 37.408426 | 92 | 0.537877 |
84ebc98ba4b40d2e179aa1d69eba9bb0f3f06e67 | 2,462 | py | Python | var/spack/repos/builtin/packages/r-phyloseq/package.py | xiki-tempula/spack | 9d66c05e93ab8a933fc59915040c0e0c86a4aac4 | [
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 9 | 2018-04-18T07:51:40.000Z | 2021-09-10T03:56:57.000Z | var/spack/repos/builtin/packages/r-phyloseq/package.py | xiki-tempula/spack | 9d66c05e93ab8a933fc59915040c0e0c86a4aac4 | [
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 907 | 2018-04-18T11:17:57.000Z | 2022-03-31T13:20:25.000Z | var/spack/repos/builtin/packages/r-phyloseq/package.py | xiki-tempula/spack | 9d66c05e93ab8a933fc59915040c0e0c86a4aac4 | [
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 29 | 2018-11-05T16:14:23.000Z | 2022-02-03T16:07:09.000Z | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RPhyloseq(RPackage):
"""Handling and analysis of high-throughput microbiome census data.
phyloseq provides a set of classes and tools to facilitate the import,
storage, analysis, and graphical display of microbiome census data."""
homepage = "https://bioconductor.org/packages/phyloseq"
git = "https://git.bioconductor.org/packages/phyloseq.git"
version('1.28.0', commit='a86ed1e0a650fdf80bee5a0a5a82aaa5a276178d')
version('1.26.1', commit='a084072bc9e057b90adfbd59e27db2a1ecee151c')
version('1.24.2', commit='829992f88c79de48bb8749678624e2bbd3b66645')
version('1.22.3', commit='c695323f2963636d16acda9f05a583bd58e31344')
version('1.20.0', commit='107d1d5e3437a6e33982c06a548d3cc91df2a7e0')
depends_on('r@3.3.0:', type=('build', 'run'))
depends_on('r-biocgenerics@0.18.0:', type=('build', 'run'))
depends_on('r-ade4@1.7.4:', type=('build', 'run'))
depends_on('r-ape@3.4:', type=('build', 'run'))
depends_on('r-biomformat@1.0.0:', type=('build', 'run'))
depends_on('r-biostrings@2.40.0:', type=('build', 'run'))
depends_on('r-cluster@2.0.4:', type=('build', 'run'))
depends_on('r-data-table@1.9.6:', type=('build', 'run'))
depends_on('r-foreach@1.4.3:', type=('build', 'run'))
depends_on('r-ggplot2@2.1.0:', type=('build', 'run'))
depends_on('r-igraph@1.0.1:', type=('build', 'run'))
depends_on('r-multtest@2.28.0:', type=('build', 'run'))
depends_on('r-plyr@1.8.3:', type=('build', 'run'))
depends_on('r-reshape2@1.4.1:', type=('build', 'run'))
depends_on('r-scales@0.4.0:', type=('build', 'run'))
depends_on('r-vegan@2.3.5:', type=('build', 'run'))
depends_on('r-biobase', type=('build', 'run'))
depends_on('r@3.4.0:', when='@1.22.3:', type=('build', 'run'))
depends_on('r-ape@5.0:', when='@1.22.3:', type=('build', 'run'))
depends_on('r-biobase@2.36.2:', when='@1.22.3:', type=('build', 'run'))
depends_on('r-biocgenerics@0.22.0:', when='@1.22.3:', type=('build', 'run'))
depends_on('r-data-table@1.10.4:', when='@1.22.3:', type=('build', 'run'))
depends_on('r-vegan@2.4:', when='@1.22.3:', type=('build', 'run'))
depends_on('r-vegan@2.5:', when='@1.24.2:', type=('build', 'run'))
| 49.24 | 80 | 0.636474 |
2316de5fe3f67c16d0c74faeca2e76e7be8c4df2 | 683 | py | Python | lang-support/python3/mapper-suffix.py | benguillet/wmr-backend | bc8c1998d58877416358adbfdcd0d7da4e8c369e | [
"Apache-2.0"
] | 1 | 2017-06-02T19:31:08.000Z | 2017-06-02T19:31:08.000Z | lang-support/python3/mapper-suffix.py | benguillet/wmr-backend | bc8c1998d58877416358adbfdcd0d7da4e8c369e | [
"Apache-2.0"
] | null | null | null | lang-support/python3/mapper-suffix.py | benguillet/wmr-backend | bc8c1998d58877416358adbfdcd0d7da4e8c369e | [
"Apache-2.0"
] | null | null | null | # Copyright 2010 WebMapReduce Developers
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from wmr import common
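# 'mapper' is not defined in this file: the user-supplied mapper definition is
# prepended to this suffix before it runs (inferred from the file name).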
for pair in map(common.parse_input, sys.stdin):
mapper(*pair)
| 35.947368 | 74 | 0.770132 |
6715d4b8adc20b9a8b7ff430677cf33c710ae6c0 | 6,108 | py | Python | lib/btstack/3rd-party/micro-ecc/scripts/mult_arm.py | thiamchi/micropython_custom | b2d3ac286268502029a2215adb710c15f21ccae0 | [
"MIT"
] | 7 | 2020-10-02T13:42:54.000Z | 2021-04-01T01:33:42.000Z | lib/btstack/3rd-party/micro-ecc/scripts/mult_arm.py | thiamchi/micropython_custom | b2d3ac286268502029a2215adb710c15f21ccae0 | [
"MIT"
] | 1 | 2021-06-19T19:03:10.000Z | 2021-06-19T19:29:10.000Z | lib/btstack/3rd-party/micro-ecc/scripts/mult_arm.py | thiamchi/micropython_custom | b2d3ac286268502029a2215adb710c15f21ccae0 | [
"MIT"
] | 7 | 2021-01-17T03:35:04.000Z | 2022-03-20T06:02:33.000Z | #!/usr/bin/env python
import sys
if len(sys.argv) < 2:
print "Provide the integer size in 32-bit words"
sys.exit(1)
size = int(sys.argv[1])
full_rows = size // 3
init_size = size % 3
if init_size == 0:
full_rows = full_rows - 1
init_size = 3
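# emit() prints each generated line as a C string literal of the form
# "<line> \n\t", the shape expected inside a GCC inline-assembly block.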
def emit(line, *args):
s = '"' + line + r' \n\t"'
print s % args
rx = [3, 4, 5]
ry = [6, 7, 8]
#### set up registers
emit("add r0, %s", (size - init_size) * 4) # move z
emit("add r2, %s", (size - init_size) * 4) # move y
emit("ldmia r1!, {%s}", ", ".join(["r%s" % (rx[i]) for i in xrange(init_size)]))
emit("ldmia r2!, {%s}", ", ".join(["r%s" % (ry[i]) for i in xrange(init_size)]))
print ""
if init_size == 1:
emit("umull r9, r10, r3, r6")
emit("stmia r0!, {r9, r10}")
else:
#### first two multiplications of initial block
emit("umull r11, r12, r3, r6")
emit("stmia r0!, {r11}")
print ""
emit("mov r10, #0")
emit("umull r11, r9, r3, r7")
emit("adds r12, r11")
emit("adc r9, #0")
emit("umull r11, r14, r4, r6")
emit("adds r12, r11")
emit("adcs r9, r14")
emit("adc r10, #0")
emit("stmia r0!, {r12}")
print ""
#### rest of initial block, with moving accumulator registers
acc = [9, 10, 11, 12, 14]
if init_size == 3:
emit("mov r%s, #0", acc[2])
for i in xrange(0, 3):
emit("umull r%s, r%s, r%s, r%s", acc[3], acc[4], rx[i], ry[2 - i])
emit("adds r%s, r%s", acc[0], acc[3])
emit("adcs r%s, r%s", acc[1], acc[4])
emit("adc r%s, #0", acc[2])
emit("stmia r0!, {r%s}", acc[0])
print ""
acc = acc[1:] + acc[:1]
emit("mov r%s, #0", acc[2])
for i in xrange(0, 2):
emit("umull r%s, r%s, r%s, r%s", acc[3], acc[4], rx[i + 1], ry[2 - i])
emit("adds r%s, r%s", acc[0], acc[3])
emit("adcs r%s, r%s", acc[1], acc[4])
emit("adc r%s, #0", acc[2])
emit("stmia r0!, {r%s}", acc[0])
print ""
acc = acc[1:] + acc[:1]
emit("umull r%s, r%s, r%s, r%s", acc[3], acc[4], rx[init_size-1], ry[init_size-1])
emit("adds r%s, r%s", acc[0], acc[3])
emit("adc r%s, r%s", acc[1], acc[4])
emit("stmia r0!, {r%s}", acc[0])
emit("stmia r0!, {r%s}", acc[1])
print ""
#### reset y and z pointers
emit("sub r0, %s", (2 * init_size + 3) * 4)
emit("sub r2, %s", (init_size + 3) * 4)
#### load y registers
emit("ldmia r2!, {%s}", ", ".join(["r%s" % (ry[i]) for i in xrange(3)]))
#### load additional x registers
if init_size != 3:
emit("ldmia r1!, {%s}", ", ".join(["r%s" % (rx[i]) for i in xrange(init_size, 3)]))
print ""
prev_size = init_size
for row in xrange(full_rows):
emit("umull r11, r12, r3, r6")
emit("stmia r0!, {r11}")
print ""
emit("mov r10, #0")
emit("umull r11, r9, r3, r7")
emit("adds r12, r11")
emit("adc r9, #0")
emit("umull r11, r14, r4, r6")
emit("adds r12, r11")
emit("adcs r9, r14")
emit("adc r10, #0")
emit("stmia r0!, {r12}")
print ""
acc = [9, 10, 11, 12, 14]
emit("mov r%s, #0", acc[2])
for i in xrange(0, 3):
emit("umull r%s, r%s, r%s, r%s", acc[3], acc[4], rx[i], ry[2 - i])
emit("adds r%s, r%s", acc[0], acc[3])
emit("adcs r%s, r%s", acc[1], acc[4])
emit("adc r%s, #0", acc[2])
emit("stmia r0!, {r%s}", acc[0])
print ""
acc = acc[1:] + acc[:1]
#### now we need to start shifting x and loading from z
x_regs = [3, 4, 5]
for r in xrange(0, prev_size):
x_regs = x_regs[1:] + x_regs[:1]
emit("ldmia r1!, {r%s}", x_regs[2])
emit("mov r%s, #0", acc[2])
for i in xrange(0, 3):
emit("umull r%s, r%s, r%s, r%s", acc[3], acc[4], x_regs[i], ry[2 - i])
emit("adds r%s, r%s", acc[0], acc[3])
emit("adcs r%s, r%s", acc[1], acc[4])
emit("adc r%s, #0", acc[2])
emit("ldr r%s, [r0]", acc[3]) # load stored value from initial block, and add to accumulator
emit("adds r%s, r%s", acc[0], acc[3])
emit("adcs r%s, #0", acc[1])
emit("adc r%s, #0", acc[2])
emit("stmia r0!, {r%s}", acc[0])
print ""
acc = acc[1:] + acc[:1]
# done shifting x, start shifting y
y_regs = [6, 7, 8]
for r in xrange(0, prev_size):
y_regs = y_regs[1:] + y_regs[:1]
emit("ldmia r2!, {r%s}", y_regs[2])
emit("mov r%s, #0", acc[2])
for i in xrange(0, 3):
emit("umull r%s, r%s, r%s, r%s", acc[3], acc[4], x_regs[i], y_regs[2 - i])
emit("adds r%s, r%s", acc[0], acc[3])
emit("adcs r%s, r%s", acc[1], acc[4])
emit("adc r%s, #0", acc[2])
emit("ldr r%s, [r0]", acc[3]) # load stored value from initial block, and add to accumulator
emit("adds r%s, r%s", acc[0], acc[3])
emit("adcs r%s, #0", acc[1])
emit("adc r%s, #0", acc[2])
emit("stmia r0!, {r%s}", acc[0])
print ""
acc = acc[1:] + acc[:1]
# done both shifts, do remaining corner
emit("mov r%s, #0", acc[2])
for i in xrange(0, 2):
emit("umull r%s, r%s, r%s, r%s", acc[3], acc[4], x_regs[i + 1], y_regs[2 - i])
emit("adds r%s, r%s", acc[0], acc[3])
emit("adcs r%s, r%s", acc[1], acc[4])
emit("adc r%s, #0", acc[2])
emit("stmia r0!, {r%s}", acc[0])
print ""
acc = acc[1:] + acc[:1]
emit("umull r%s, r%s, r%s, r%s", acc[3], acc[4], x_regs[2], y_regs[2])
emit("adds r%s, r%s", acc[0], acc[3])
emit("adc r%s, r%s", acc[1], acc[4])
emit("stmia r0!, {r%s}", acc[0])
emit("stmia r0!, {r%s}", acc[1])
print ""
prev_size = prev_size + 3
if row < full_rows - 1:
#### reset x, y and z pointers
emit("sub r0, %s", (2 * prev_size + 3) * 4)
emit("sub r1, %s", prev_size * 4)
emit("sub r2, %s", (prev_size + 3) * 4)
#### load x and y registers
emit("ldmia r1!, {%s}", ",".join(["r%s" % (rx[i]) for i in xrange(3)]))
emit("ldmia r2!, {%s}", ",".join(["r%s" % (ry[i]) for i in xrange(3)]))
print ""
| 32.31746 | 100 | 0.48019 |
562ac53f6b9907c6e990785ceaf75d3904bf5d95 | 369 | py | Python | Python3/src/monitorExample.py | emanuelen5/XPlaneConnect | 0d462ac306bc802a3b269227d3b98d2507abcd40 | [
"Unlicense"
] | 457 | 2015-01-02T14:21:11.000Z | 2022-03-27T02:56:47.000Z | Python3/src/monitorExample.py | fseconomy/XPlaneConnect | 11a5f350bd6888873d293bf3c9f59b0fba1331c1 | [
"Unlicense"
] | 211 | 2015-03-24T16:41:33.000Z | 2022-03-27T18:36:11.000Z | Python3/src/monitorExample.py | fseconomy/XPlaneConnect | 11a5f350bd6888873d293bf3c9f59b0fba1331c1 | [
"Unlicense"
] | 258 | 2015-01-01T17:02:27.000Z | 2022-03-31T19:36:03.000Z | import sys
import xpc
def monitor():
with xpc.XPlaneConnect() as client:
while True:
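            # getPOSI() yields [lat, lon, alt, pitch, roll, heading, gear] and
            # getCTRL() yields [elevator, aileron, rudder, throttle, ...]
            # (ordering per the XPlaneConnect docs; stated here as an assumption)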
            posi = client.getPOSI()
            ctrl = client.getCTRL()
print("Loc: (%4f, %4f, %4f) Aileron:%2f Elevator:%2f Rudder:%2f\n"\
% (posi[0], posi[1], posi[2], ctrl[1], ctrl[0], ctrl[2]))
if __name__ == "__main__":
monitor() | 23.0625 | 79 | 0.531165 |
7a8f638690c37b622e3e0b042dcaf00145eb5196 | 9,003 | py | Python | scripts/package_build.py | rsund/duckdb | e4ba94a4f4540fe6d34fb2f6b7abf18e00ad3ca9 | [
"MIT"
] | null | null | null | scripts/package_build.py | rsund/duckdb | e4ba94a4f4540fe6d34fb2f6b7abf18e00ad3ca9 | [
"MIT"
] | null | null | null | scripts/package_build.py | rsund/duckdb | e4ba94a4f4540fe6d34fb2f6b7abf18e00ad3ca9 | [
"MIT"
] | null | null | null | import os
import sys
import shutil
import subprocess
from python_helpers import open_utf8
excluded_objects = ['utf8proc_data.cpp']
def get_libraries(binary_dir, libraries, extensions):
result_libs = []
def find_library_recursive(search_dir, potential_libnames):
flist = os.listdir(search_dir)
for fname in flist:
fpath = os.path.join(search_dir, fname)
if os.path.isdir(fpath):
entry = find_library_recursive(fpath, potential_libnames)
if entry != None:
return entry
elif os.path.isfile(fpath) and fname in potential_libnames:
return search_dir
return None
def find_library(search_dir, libname, result_libs):
if libname == 'Threads::Threads':
result_libs += [(None, 'pthread')]
return
libextensions = ['.a', '.lib']
libprefixes = ['', 'lib']
potential_libnames = []
for ext in libextensions:
for prefix in libprefixes:
potential_libnames.append(prefix + libname + ext)
libdir = find_library_recursive(binary_dir, potential_libnames)
result_libs += [(libdir, libname)]
result_libs += [(os.path.join(binary_dir, 'src'), 'duckdb_static')]
for ext in extensions:
result_libs += [(os.path.join(binary_dir, 'extension', ext), ext + '_extension')]
for libname in libraries:
find_library(binary_dir, libname, result_libs)
return result_libs
def includes(extensions):
scripts_dir = os.path.dirname(os.path.abspath(__file__))
# add includes for duckdb and extensions
includes = []
includes.append(os.path.join(scripts_dir, '..', 'src', 'include'))
includes.append(os.path.join(scripts_dir, '..'))
includes.append(os.path.join(scripts_dir, '..', 'third_party', 'utf8proc', 'include'))
for ext in extensions:
includes.append(os.path.join(scripts_dir, '..', 'extension', ext, 'include'))
return includes
def include_flags(extensions):
return ' ' + ' '.join(['-I' + x for x in includes(extensions)])
def convert_backslashes(x):
return '/'.join(x.split(os.path.sep))
def get_relative_path(source_dir, target_file):
source_dir = convert_backslashes(source_dir)
target_file = convert_backslashes(target_file)
# absolute path: try to convert
if source_dir in target_file:
target_file = target_file.replace(source_dir, "").lstrip('/')
return target_file
def git_commit_hash():
try:
return subprocess.check_output(['git','log','-1','--format=%h']).strip().decode('utf8')
except:
if 'SETUPTOOLS_SCM_PRETEND_HASH' in os.environ:
return os.environ['SETUPTOOLS_SCM_PRETEND_HASH']
else:
return "deadbeeff"
def git_dev_version():
try:
version = subprocess.check_output(['git','describe','--tags','--abbrev=0']).strip().decode('utf8')
long_version = subprocess.check_output(['git','describe','--tags','--long']).strip().decode('utf8')
version_splits = version.lstrip('v').split('.')
dev_version = long_version.split('-')[1]
if int(dev_version) == 0:
# directly on a tag: emit the regular version
return '.'.join(version_splits)
else:
# not on a tag: increment the version by one and add a -devX suffix
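            # e.g. (illustrative) tag v0.2.1 with 5 commits on top yields "0.2.2-dev5"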
version_splits[2] = str(int(version_splits[2]) + 1)
return '.'.join(version_splits) + "-dev" + dev_version
except:
if 'SETUPTOOLS_SCM_PRETEND_VERSION' in os.environ:
return os.environ['SETUPTOOLS_SCM_PRETEND_VERSION']
else:
return "0.0.0"
def include_package(pkg_name, pkg_dir, include_files, include_list, source_list):
import amalgamation
original_path = sys.path
# append the directory
sys.path.append(pkg_dir)
ext_pkg = __import__(pkg_name + '_config')
ext_include_dirs = ext_pkg.include_directories
ext_source_files = ext_pkg.source_files
include_files += amalgamation.list_includes_files(ext_include_dirs)
include_list += ext_include_dirs
source_list += ext_source_files
sys.path = original_path
def build_package(target_dir, extensions, linenumbers = False, unity_count = 32, folder_name = 'duckdb'):
if not os.path.isdir(target_dir):
os.mkdir(target_dir)
scripts_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(scripts_dir)
import amalgamation
prev_wd = os.getcwd()
os.chdir(os.path.join(scripts_dir, '..'))
# obtain the list of source files from the amalgamation
source_list = amalgamation.list_sources()
include_list = amalgamation.list_include_dirs()
include_files = amalgamation.list_includes()
def copy_file(src, target_dir):
# get the path
full_path = src.split(os.path.sep)
current_path = target_dir
for i in range(len(full_path) - 1):
current_path = os.path.join(current_path, full_path[i])
if not os.path.isdir(current_path):
os.mkdir(current_path)
target_name = full_path[-1]
target_file = os.path.join(current_path, target_name)
amalgamation.copy_if_different(src, target_file)
# include the main extension helper
include_files += [os.path.join('src', 'include', 'duckdb', 'main', 'extension_helper.hpp')]
# include the separate extensions
for ext in extensions:
ext_path = os.path.join(scripts_dir, '..', 'extension', ext)
include_package(ext, ext_path, include_files, include_list, source_list)
for src in source_list:
copy_file(src, target_dir)
for inc in include_files:
copy_file(inc, target_dir)
# handle pragma_version.cpp: paste #define DUCKDB_SOURCE_ID and DUCKDB_VERSION there
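    # e.g. (illustrative) after rewriting, the file carries lines such as
    #     #define DUCKDB_SOURCE_ID "1a2b3c4"
    #     #define DUCKDB_VERSION "0.2.2-dev5"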
curdir = os.getcwd()
os.chdir(os.path.join(scripts_dir, '..'))
githash = git_commit_hash()
dev_version = git_dev_version()
os.chdir(curdir)
# open the file and read the current contents
fpath = os.path.join(target_dir, 'src', 'function', 'table', 'version', 'pragma_version.cpp')
with open_utf8(fpath, 'r') as f:
text = f.read()
# now add the DUCKDB_SOURCE_ID define, if it is not there already
found_hash = False
found_dev = False
lines = text.split('\n')
for i in range(len(lines)):
if '#define DUCKDB_SOURCE_ID ' in lines[i]:
lines[i] = '#define DUCKDB_SOURCE_ID "{}"'.format(githash)
found_hash = True
break
if '#define DUCKDB_VERSION ' in lines[i]:
lines[i] = '#define DUCKDB_VERSION "{}"'.format(dev_version)
found_dev = True
break
if not found_hash:
lines = ['#ifndef DUCKDB_SOURCE_ID', '#define DUCKDB_SOURCE_ID "{}"'.format(githash), '#endif'] + lines
if not found_dev:
lines = ['#ifndef DUCKDB_VERSION', '#define DUCKDB_VERSION "{}"'.format(dev_version), '#endif'] + lines
text = '\n'.join(lines)
with open_utf8(fpath, 'w+') as f:
f.write(text)
def file_is_excluded(fname):
for entry in excluded_objects:
if entry in fname:
return True
return False
def generate_unity_build(entries, idx, linenumbers):
ub_file = os.path.join(target_dir, 'amalgamation-{}.cpp'.format(str(idx)))
with open_utf8(ub_file, 'w+') as f:
for entry in entries:
if linenumbers:
f.write('#line 0 "{}"\n'.format(convert_backslashes(entry)))
f.write('#include "{}"\n\n'.format(convert_backslashes(entry)))
return ub_file
def generate_unity_builds(source_list, nsplits, linenumbers):
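        # bundle the src/ sources into roughly `nsplits` amalgamation-*.cpp
        # files (each one only #include-ing its members); non-src sources,
        # e.g. extension files, are passed through individually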
source_list.sort()
files_per_split = len(source_list) / nsplits
new_source_files = []
current_files = []
idx = 1
for entry in source_list:
if not entry.startswith('src'):
new_source_files.append(os.path.join(folder_name, entry))
continue
current_files.append(entry)
if len(current_files) > files_per_split:
new_source_files.append(generate_unity_build(current_files, idx, linenumbers))
current_files = []
idx += 1
if len(current_files) > 0:
new_source_files.append(generate_unity_build(current_files, idx, linenumbers))
current_files = []
idx += 1
return new_source_files
original_sources = source_list
if unity_count > 0:
source_list = generate_unity_builds(source_list, unity_count, linenumbers)
else:
source_list = [os.path.join(folder_name, source) for source in source_list]
os.chdir(prev_wd)
return ([convert_backslashes(x) for x in source_list if not file_is_excluded(x)],
[convert_backslashes(x) for x in include_list],
[convert_backslashes(x) for x in original_sources])
| 37.987342 | 111 | 0.641786 |
4e74731bf709711886846c15de21529d656ded35 | 18,254 | py | Python | src/compiler.py | lkpdn/t4p4s | 9df8eefd891e44b8a59b4b8edfb90150b7e484e8 | [
"Apache-2.0"
] | null | null | null | src/compiler.py | lkpdn/t4p4s | 9df8eefd891e44b8a59b4b8edfb90150b7e484e8 | [
"Apache-2.0"
] | null | null | null | src/compiler.py | lkpdn/t4p4s | 9df8eefd891e44b8a59b4b8edfb90150b7e484e8 | [
"Apache-2.0"
] | null | null | null | # Copyright 2016 Eotvos Lorand University, Budapest, Hungary
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#!/usr/bin/env python
from __future__ import print_function
import argparse
from hlir16.hlir16 import *
from utils.misc import *
import utils.misc
from transform_hlir16 import *
from subprocess import call
import re
import os
import sys
import traceback
import pkgutil
from os.path import isfile, join
generate_code_files = True
show_code = False
cache_dir_name = "build/.cache"
# Inside the compiler, these variables are considered singleton.
args = []
hlir = None
indentation_level = 0
def verbose_print(*txts):
if args['verbose']:
print(*txts)
def translate_line_with_insert(file, line_idx, line, indent_str):
"""Gets a line that contains an insert
and transforms it to a Python code section."""
# since Python code is generated, indentation has to be respected
indentation = re.sub(r'^([ \t]*)#[\[\{\}].*$', r'\1', line)
global indentation_level
pre_indentation_mod = ""
post_indentation_mod = ""
    # #} unindents starting this line
if '#}' in line:
if indentation_level == 0:
addError("Compiler", "Too much unindent in {}:{}".format(file, line_idx))
indentation_level -= 1
pre_indentation_mod = indentation + "file_indentation_level -= 1\n"
# #{ starts a new indentation level from the next line
if '#{' in line:
indentation_level += 1
post_indentation_mod = "\n" + indentation + "file_indentation_level += 1"
# get the #[ (or #{, or #}) part
content = re.sub(r'^[ \t]*#[\[\{\}]([ \t]*[^\n]*)[ \t]*', r'\1', line)
# escape sequences like \n may appear in #[ parts
content = re.sub(r'\\', r'\\\\', content)
# quotes may appear in #[ parts
content = re.sub(r'"', r'\"', content)
def replacer(m):
light = m.group("light")
txt1 = m.group('text1') or ''
expr = m.group('expr')
txt2 = m.group('text2') or ''
# no highlighting
if m.group("type") == '$':
return '{}" + str({}) + "{}'.format(txt1, expr, txt2)
light_param = "," + light if light not in (None, "") else ""
return '\\" T4LIT({}" + str({}) + "{}{}) \\"'.format(txt1, expr, txt2, light_param)
# replace $$[light][text1]{expr}{text2} inserts, where all parts except {expr} are optional
content = re.sub(r'(?P<type>\$\$?)(\[(?P<light>[^\]]+)\])?(\[(?P<text1>[^\]]+)\])?{\s*(?P<expr>[^}]*)\s*}({(?P<text2>[^}]+)})?',
replacer, content)
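    # e.g. (illustrative) '$${x.name}' becomes '\" T4LIT(" + str(x.name) + ") \"'
    # (highlighted), while plain '${x.name}' becomes '" + str(x.name) + "'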
# replace $var inserts
content = re.sub(r'\$([a-zA-Z0-9_]*)', r'" + str(\1) + "', content)
# trim the line
content = content.strip()
    # add a comment that shows where the line was generated
is_nonempty_line = bool(content.strip())
if is_nonempty_line and line_idx is not None:
if args['desugar_info'] == "comment":
content += '" + sugar("{}", {}) + "'.format(os.path.basename(file), line_idx)
if args['desugar_info'] == "pragma":
content = '#line %d \\"%s\\"\\n%s' % (line_idx, "../../" + file, content)
return '{}{}generated_code += indent() + "{}"{}'.format(pre_indentation_mod, indentation, content, post_indentation_mod)
def increase(idx):
if idx is None:
return None
return idx + 1
def add_empty_lines(code_lines):
"""Returns an enumerated list of the lines.
    When an empty top-level line follows an escaped code part,
    an empty `#[` line is inserted into the generated list with None as its line number."""
new_lines = []
is_block_with_sequence = False
last_indent = 0
already_added = False
for idx, line in code_lines:
if "#[" in line:
is_block_with_sequence = True
if not line.strip() and last_indent == 0 and not already_added:
new_lines.append((idx, line))
new_lines.append((None, "#["))
last_indent = 0
already_added = True
else:
if not line.strip():
continue
new_lines.append((increase(idx), line))
last_indent = len(line) - len(line.lstrip())
already_added = False
return new_lines
def add_gen_in_def(code_lines, orig_file):
"""If a function's name starts with 'gen_' in a generated file,
that function produces code.
This is a helper function that initialises and returns the appropriate variable.
Also, if "return" is encountered on a single line,
the requisite return value is inserted."""
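    # Illustrative effect: a definition starting with `def gen_foo():` is
    # rewritten so its body opens with `generated_code = ""` and every exit
    # point (including a bare `return`) returns `generated_code`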
new_lines = []
is_inside_gen = False
for idx, line in code_lines:
if is_inside_gen:
if re.match(r'^[ \t]*return[ \t]*$', line):
line = re.sub(r'^([ \t]*)return[ \t]*$', r'\1return generated_code', line)
is_separator_line = re.match(r'^#[ \t]*([^ \t])\1\1*', line)
is_method_line = re.sub(r'[ \t]*#.*', '', line).strip() != "" and line.lstrip() == line
is_unindented_line = re.match(r'^[^ \t]', line)
if is_separator_line or is_method_line or is_unindented_line:
new_lines.append((None, ' return generated_code'))
new_lines.append((None, ''))
is_inside_gen = False
if line.startswith('def gen_'):
new_lines.append((idx, line))
new_lines.append((None, ' generated_code = ""'))
is_inside_gen = True
continue
new_lines.append((idx, line))
if is_inside_gen:
new_lines.append((None, ' return generated_code'))
new_lines.append((None, ''))
return new_lines
def translate_file_contents(file, code, indent_str=" ", prefix_lines="", add_lines=True):
"""Returns the code transformed into runnable Python code.
    The #[ code inserts, #= generator expressions and ${var} substitutions are translated."""
has_translatable_comment = re.compile(r'^[ \t]*#[\[\{\}][ \t]*.*$')
global indentation_level
indentation_level = 0
new_lines = prefix_lines.splitlines()
new_lines += """
# Autogenerated file (from {0}), do not modify directly.
# Generator: T4P4S (https://github.com/P4ELTE/t4p4s/)
global file_indentation_level
file_indentation_level = 0
# The last element is the innermost (current) style.
file_sugar_style = ['line_comment']
def indent():
global file_indentation_level
return '{1}' * file_indentation_level
class SugarStyle():
def __init__(self, sugar):
global file_sugar_style
file_sugar_style.append(sugar)
def __enter__(self):
global file_sugar_style
return file_sugar_style[-1]
def __exit__(self, type, value, traceback):
global file_sugar_style
file_sugar_style.pop()
def sugar(file, line):
import re
global file_sugar_style
sugar_file = re.sub("[.].*$", "", file)
if file_sugar_style[-1] == 'line_comment':
# if file == "{2}":
# return ' // @' + str(line) + '\\n'
return ' // ' + sugar_file + '@' + str(line) + '\\n'
if file_sugar_style[-1] == 'inline_comment':
# if file == "{2}":
# return '\\n/* @' + str(line) + '*/'
return '\\n/* ' + sugar_file + '@' + str(line) + '*/'
return ''
""".format(file, indent_str, os.path.basename(file)).splitlines()
code_lines = enumerate(code.splitlines())
code_lines = add_gen_in_def(code_lines, file)
if add_lines:
code_lines = add_empty_lines(code_lines)
for idx, code_line in code_lines:
new_line = code_line
if has_translatable_comment.match(code_line):
new_line = translate_line_with_insert(file, idx, code_line, indent_str)
elif re.match(r'^[ \t]*#= .*$', code_line):
new_line = re.sub(r'^([ \t]*)#=(.*)$', r'\1generated_code += str(\2)', code_line)
if args['desugar_info'] == "comment":
sugar_filename = os.path.basename(file)
sugar_filename = re.sub("([.]sugar)?[.]py", "", sugar_filename)
new_line += " # {}@{}".format(sugar_filename, idx)
# won't mark empty lines and continued lines
if new_line.strip() != "" and new_line.strip()[-1] != '\\':
new_line += " ## " + file + " " + str(idx)
new_lines.append(new_line)
if indentation_level != 0:
addError("Compiler", "Non-zero indentation level ({}) at end of file: {}".format(indentation_level, file))
return '\n'.join(new_lines) + "\n"
def generate_code(file, genfile, localvars={}):
"""The file contains Python code with #[ inserts.
The comments (which have to be indented properly)
    contain code to be output;
    their contents are collected in the variable generated_code.
Inside the comments, refer to Python variables as ${variable_name}."""
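    # Minimal illustrative template (hypothetical file contents):
    #     for fld in ["a", "b"]:
    #         #[ int ${fld} = 0;
    # yields generated_code containing "int a = 0;" and "int b = 0;" (each
    # followed by an origin comment under the default 'comment' mode)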
with open(file, "r") as orig_file:
code = orig_file.read()
code = translate_file_contents(file, code)
if generate_code_files:
write_file(genfile, code)
if show_code:
print(file + " -------------------------------------------------")
print(code)
print(file + " *************************************************")
localvars['generated_code'] = ""
try:
exec(code, localvars, localvars)
except Exception as exc:
# exc_type, exc, tb = sys.exc_info()
if hasattr(exc, 'lineno'):
addError("{}:{}:{}".format(genfile, exc.lineno, exc.offset), exc.msg)
else:
# TODO better error output
print("Error: cannot compile file {}".format(genfile), file=sys.stderr)
if not pkgutil.find_loader('backtrace'):
print("Exception: {}".format(str(exc)), file=sys.stderr)
traceback.print_exc(file=sys.stderr)
raise
return re.sub(r'\n{3,}', '\n\n', localvars['generated_code'])
def generate_desugared_py():
"""Some Python source files also use the sugared syntax.
The desugared files are generated here."""
import glob
for fromfile in glob.glob("src/utils/*.sugar.py"):
tofile = re.sub("[.]sugar[.]py$", ".py", fromfile)
with open(fromfile, "r") as orig_file:
code = orig_file.read()
prefix_lines = "generated_code = \"\"\n"
code = translate_file_contents(fromfile, code, prefix_lines=prefix_lines, add_lines=False)
write_file(tofile, code)
def get_hlir():
global hlir
if hlir is not None:
return hlir
hlir = load_hlir()
return hlir
def generate_desugared_c(filename, filepath):
hlir = get_hlir()
genfile = join(args['desugared_path'], re.sub(r'\.([ch])\.py$', r'.\1.gen.py', filename))
outfile = join(args['generated_dir'], re.sub(r'\.([ch])\.py$', r'.\1', filename))
utils.misc.filename = filename
utils.misc.filepath = filepath
utils.misc.genfile = genfile
utils.misc.outfile = outfile
code = generate_code(filepath, genfile, {'hlir16': hlir})
write_file(outfile, code)
def make_dirs():
"""Makes directories if they do not exist"""
if not os.path.isdir(args['compiler_files_dir']):
print("Compiler files path is missing", file=sys.stderr)
sys.exit(1)
if not os.path.isdir(args['desugared_path']):
os.makedirs(args['desugared_path'])
verbose_print(" GEN {0} (desugared compiler files)".format(args['desugared_path']))
if not os.path.isdir(args['generated_dir']):
os.makedirs(args['generated_dir'])
verbose_print(" GEN {0} (generated files)".format(args['generated_dir']))
if cache_dir_name and not os.path.isdir(cache_dir_name):
os.mkdir(cache_dir_name)
def file_contains_exact_text(filename, text):
"""Returns True iff the file exists and it already contains the given text."""
    if not os.path.isfile(filename):
        return False
    with open(filename, "r") as infile:
        intext = infile.read()
    return text == intext
def write_file(filename, text):
"""Writes the given text to the given file."""
if file_contains_exact_text(filename, text):
return
with open(filename, "w") as genfile:
genfile.write(text)
def init_args():
"""Parses the command line arguments and loads them
into the global variable args."""
parser = argparse.ArgumentParser(description='T4P4S compiler')
parser.add_argument('p4_file', help='The source file')
parser.add_argument('-v', '--p4v', help='Use P4-14 (default is P4-16)', required=False, choices=[16, 14], type=int, default=16)
parser.add_argument('-p', '--p4c_path', help='P4C path', required=False)
parser.add_argument('-c', '--compiler_files_dir', help='Source directory of the compiler\'s files', required=False, default=join("src", "hardware_indep"))
parser.add_argument('-g', '--generated_dir', help='Output directory for hardware independent files', required=True)
parser.add_argument('-desugared_path', help='Output directory for the compiler\'s files', required=False, default=join("build", "util", "gen"))
parser.add_argument('-desugar_info', help='Markings in the generated source code', required=False, choices=["comment", "pragma", "none"], default="comment")
parser.add_argument('-verbose', help='Verbosity', required=False, default=False, action='store_const', const=True)
parser.add_argument('-beautify', help='Beautification', required=False, default=False, action='store_const', const=True)
global args
args = vars(parser.parse_args())
# TODO also reload if HLIR has changed
def is_file_fresh(filename):
global p4time
filetime = os.path.getmtime(filename)
return p4time < filetime
def load_json_from_cache(base_p4_file):
if not cache_dir_name:
return None
json_filename = base_p4_file + ".json"
json_filepath = os.path.join(cache_dir_name, json_filename)
if not os.path.isfile(json_filepath):
return None
if not is_file_fresh(json_filepath):
return None
verbose_print("JSON %s (cached)" % json_filename)
return json_filepath
def get_pickled_hlir_file(base_p4_file):
if not cache_dir_name:
return None
if not pkgutil.find_loader('dill'):
return None
pickle_filepath = os.path.join(cache_dir_name, base_p4_file + ".pickled")
if not os.path.isfile(pickle_filepath):
return None
if not is_file_fresh(pickle_filepath):
return None
return pickle_filepath
def load_pickled_hlir(pickle_filepath):
if pickle_filepath is None:
return None
if not pkgutil.find_loader('dill'):
return None
import dill
import pickle
# the standard recursion limit of 1000 can be too restrictive in more complex cases
sys.setrecursionlimit(10000)
    with open(pickle_filepath, 'rb') as inf:
verbose_print("Found serialized HLIR in %s..." % pickle_filepath)
return pickle.load(inf)
def save_pickled_hlir(hlir, base_p4_file):
if not cache_dir_name:
return None
if not pkgutil.find_loader('dill'):
return None
import dill
import pickle
# the standard recursion limit of 1000 can be too restrictive in more complex cases
sys.setrecursionlimit(10000)
    with open(os.path.join(cache_dir_name, base_p4_file + ".pickled"), 'wb') as outf:
pickled_hlir = pickle.dumps(hlir)
outf.write(pickled_hlir)
def load_p4_file(filename):
global hlir
base_p4_file = os.path.basename(args['p4_file'])
pickle_filepath = get_pickled_hlir_file(base_p4_file)
hlir = load_pickled_hlir(pickle_filepath)
if hlir is not None:
return True
to_load = load_json_from_cache(base_p4_file) or args['p4_file']
hlir = load_p4(to_load, args['p4v'], args['p4c_path'], cache_dir_name)
success = type(hlir) is not int
if not success:
return False
verbose_print("HLIR " + filename)
transform_hlir16(hlir)
save_pickled_hlir(hlir, base_p4_file)
return True
def check_file_exists(filename):
if os.path.isfile(filename) is False:
print("FILE NOT FOUND: %s" % filename, file=sys.stderr)
sys.exit(1)
def check_file_extension(filename):
_, ext = os.path.splitext(filename)
if ext not in {'.p4', '.p4_14'}:
print("EXTENSION NOT SUPPORTED: %s" % ext, file=sys.stderr)
sys.exit(1)
def setup_backtrace():
"""If the backtrace module is installed, use it to print better tracebacks."""
if not pkgutil.find_loader('backtrace'):
return
import backtrace
backtrace.hook(
reverse=True,
align=True,
strip_path=True,
enable_on_envvar_only=False,
on_tty=False,
conservative=False,
styles={})
def main():
setup_backtrace()
init_args()
filename = args['p4_file']
global p4time
p4time = os.path.getmtime(filename)
make_dirs()
check_file_exists(filename)
check_file_extension(filename)
success = load_p4_file(filename)
if not success:
print("P4 compilation failed for file %s" % (os.path.basename(__file__)), file=sys.stderr)
sys.exit(1)
base = args['compiler_files_dir']
exts = [".c.py", ".h.py"]
for filename in (f for f in os.listdir(base) if isfile(join(base, f)) for ext in exts if f.endswith(ext)):
verbose_print(" P4", filename)
generate_desugared_py()
generate_desugared_c(filename, join(base, filename))
showErrors()
showWarnings()
global errors
if len(errors) > 0:
sys.exit(1)
if __name__ == '__main__':
main()
| 32.250883 | 160 | 0.626876 |
6e79d5edeee77f39fa5a5db61c63437dcc74162f | 668 | py | Python | full/django-get-started/solution/manage.py | GeekTrainer/workshop-library | 73aa44b7f89c03f2851e63a0944c539198efecb5 | [
"MIT"
] | 106 | 2021-11-18T01:29:46.000Z | 2022-03-31T03:21:08.000Z | full/django-get-started/solution/manage.py | GeekTrainer/workshop-library | 73aa44b7f89c03f2851e63a0944c539198efecb5 | [
"MIT"
] | 39 | 2021-12-17T01:28:54.000Z | 2022-03-31T19:45:40.000Z | full/django-get-started/solution/manage.py | GeekTrainer/workshop-library | 73aa44b7f89c03f2851e63a0944c539198efecb5 | [
"MIT"
] | 70 | 2021-11-12T16:25:09.000Z | 2022-03-31T01:29:04.000Z | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
"""Run administrative tasks."""
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'helloproject.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| 29.043478 | 76 | 0.681138 |
2081bee8132f621073428902782033e2581c8188 | 196,047 | py | Python | kubernetes/client/apis/rbac_authorization_v1beta1_api.py | jraby/kubernetes-client-python | e6e7b710d0b15fbde686bc9dccf00da5951bef84 | [
"Apache-2.0"
] | null | null | null | kubernetes/client/apis/rbac_authorization_v1beta1_api.py | jraby/kubernetes-client-python | e6e7b710d0b15fbde686bc9dccf00da5951bef84 | [
"Apache-2.0"
] | null | null | null | kubernetes/client/apis/rbac_authorization_v1beta1_api.py | jraby/kubernetes-client-python | e6e7b710d0b15fbde686bc9dccf00da5951bef84 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.7.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class RbacAuthorizationV1beta1Api(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
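    # Minimal usage sketch (illustrative; assumes a reachable cluster and a
    # loadable kube config):
    #     from kubernetes import client, config
    #     config.load_kube_config()
    #     api = client.RbacAuthorizationV1beta1Api()
    #     role = client.V1beta1ClusterRole(
    #         metadata=client.V1ObjectMeta(name="example-role"),
    #         rules=[client.V1beta1PolicyRule(
    #             api_groups=[""], resources=["pods"], verbs=["get", "list"])])
    #     api.create_cluster_role(body=role)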
def create_cluster_role(self, body, **kwargs):
"""
create a ClusterRole
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_cluster_role(body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param V1beta1ClusterRole body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1ClusterRole
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.create_cluster_role_with_http_info(body, **kwargs)
else:
(data) = self.create_cluster_role_with_http_info(body, **kwargs)
return data
def create_cluster_role_with_http_info(self, body, **kwargs):
"""
create a ClusterRole
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_cluster_role_with_http_info(body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param V1beta1ClusterRole body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1ClusterRole
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'pretty']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_cluster_role" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_cluster_role`")
collection_formats = {}
resource_path = '/apis/rbac.authorization.k8s.io/v1beta1/clusterroles'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1beta1ClusterRole',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_cluster_role_binding(self, body, **kwargs):
"""
create a ClusterRoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_cluster_role_binding(body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param V1beta1ClusterRoleBinding body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1ClusterRoleBinding
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.create_cluster_role_binding_with_http_info(body, **kwargs)
else:
(data) = self.create_cluster_role_binding_with_http_info(body, **kwargs)
return data
def create_cluster_role_binding_with_http_info(self, body, **kwargs):
"""
create a ClusterRoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_cluster_role_binding_with_http_info(body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param V1beta1ClusterRoleBinding body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1ClusterRoleBinding
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'pretty']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_cluster_role_binding" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_cluster_role_binding`")
collection_formats = {}
resource_path = '/apis/rbac.authorization.k8s.io/v1beta1/clusterrolebindings'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1beta1ClusterRoleBinding',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_namespaced_role(self, namespace, body, **kwargs):
"""
create a Role
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_namespaced_role(namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1beta1Role body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1Role
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.create_namespaced_role_with_http_info(namespace, body, **kwargs)
else:
(data) = self.create_namespaced_role_with_http_info(namespace, body, **kwargs)
return data
def create_namespaced_role_with_http_info(self, namespace, body, **kwargs):
"""
create a Role
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_namespaced_role_with_http_info(namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1beta1Role body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1Role
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['namespace', 'body', 'pretty']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_namespaced_role" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `create_namespaced_role`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_namespaced_role`")
collection_formats = {}
resource_path = '/apis/rbac.authorization.k8s.io/v1beta1/namespaces/{namespace}/roles'.replace('{format}', 'json')
path_params = {}
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1beta1Role',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_namespaced_role_binding(self, namespace, body, **kwargs):
"""
create a RoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_namespaced_role_binding(namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1beta1RoleBinding body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1RoleBinding
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.create_namespaced_role_binding_with_http_info(namespace, body, **kwargs)
else:
(data) = self.create_namespaced_role_binding_with_http_info(namespace, body, **kwargs)
return data
def create_namespaced_role_binding_with_http_info(self, namespace, body, **kwargs):
"""
create a RoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_namespaced_role_binding_with_http_info(namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1beta1RoleBinding body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1RoleBinding
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['namespace', 'body', 'pretty']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_namespaced_role_binding" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `create_namespaced_role_binding`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_namespaced_role_binding`")
collection_formats = {}
resource_path = '/apis/rbac.authorization.k8s.io/v1beta1/namespaces/{namespace}/rolebindings'.replace('{format}', 'json')
path_params = {}
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1beta1RoleBinding',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_cluster_role(self, name, body, **kwargs):
"""
delete a ClusterRole
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_cluster_role(name, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the ClusterRole (required)
:param V1DeleteOptions body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately.
:param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_cluster_role_with_http_info(name, body, **kwargs)
else:
(data) = self.delete_cluster_role_with_http_info(name, body, **kwargs)
return data
def delete_cluster_role_with_http_info(self, name, body, **kwargs):
"""
delete a ClusterRole
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_cluster_role_with_http_info(name, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the ClusterRole (required)
:param V1DeleteOptions body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately.
:param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'body', 'pretty', 'grace_period_seconds', 'orphan_dependents', 'propagation_policy']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_cluster_role" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `delete_cluster_role`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `delete_cluster_role`")
collection_formats = {}
resource_path = '/apis/rbac.authorization.k8s.io/v1beta1/clusterroles/{name}'.replace('{format}', 'json')
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
if 'grace_period_seconds' in params:
query_params['gracePeriodSeconds'] = params['grace_period_seconds']
if 'orphan_dependents' in params:
query_params['orphanDependents'] = params['orphan_dependents']
if 'propagation_policy' in params:
query_params['propagationPolicy'] = params['propagation_policy']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Status',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_cluster_role_binding(self, name, body, **kwargs):
"""
delete a ClusterRoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_cluster_role_binding(name, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the ClusterRoleBinding (required)
:param V1DeleteOptions body: (required)
:param str pretty: If 'true', then the output is pretty printed.
        :param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be a non-negative integer; zero means delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per-object value if not specified.
        :param bool orphan_dependents: Deprecated: please use PropagationPolicy; this field will be deprecated in 1.7. Should the dependent objects be orphaned? If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_cluster_role_binding_with_http_info(name, body, **kwargs)
else:
(data) = self.delete_cluster_role_binding_with_http_info(name, body, **kwargs)
return data
def delete_cluster_role_binding_with_http_info(self, name, body, **kwargs):
"""
delete a ClusterRoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_cluster_role_binding_with_http_info(name, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the ClusterRoleBinding (required)
:param V1DeleteOptions body: (required)
:param str pretty: If 'true', then the output is pretty printed.
        :param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be a non-negative integer; zero means delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per-object value if not specified.
        :param bool orphan_dependents: Deprecated: please use PropagationPolicy; this field will be deprecated in 1.7. Should the dependent objects be orphaned? If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'body', 'pretty', 'grace_period_seconds', 'orphan_dependents', 'propagation_policy']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_cluster_role_binding" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `delete_cluster_role_binding`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `delete_cluster_role_binding`")
collection_formats = {}
resource_path = '/apis/rbac.authorization.k8s.io/v1beta1/clusterrolebindings/{name}'.replace('{format}', 'json')
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
if 'grace_period_seconds' in params:
query_params['gracePeriodSeconds'] = params['grace_period_seconds']
if 'orphan_dependents' in params:
query_params['orphanDependents'] = params['orphan_dependents']
if 'propagation_policy' in params:
query_params['propagationPolicy'] = params['propagation_policy']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Status',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_collection_cluster_role(self, **kwargs):
"""
delete collection of ClusterRole
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_collection_cluster_role(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str pretty: If 'true', then the output is pretty printed.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_collection_cluster_role_with_http_info(**kwargs)
else:
(data) = self.delete_collection_cluster_role_with_http_info(**kwargs)
return data
def delete_collection_cluster_role_with_http_info(self, **kwargs):
"""
delete collection of ClusterRole
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_collection_cluster_role_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str pretty: If 'true', then the output is pretty printed.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['pretty', 'field_selector', 'include_uninitialized', 'label_selector', 'resource_version', 'timeout_seconds', 'watch']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_collection_cluster_role" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
resource_path = '/apis/rbac.authorization.k8s.io/v1beta1/clusterroles'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
if 'field_selector' in params:
query_params['fieldSelector'] = params['field_selector']
if 'include_uninitialized' in params:
query_params['includeUninitialized'] = params['include_uninitialized']
if 'label_selector' in params:
query_params['labelSelector'] = params['label_selector']
if 'resource_version' in params:
query_params['resourceVersion'] = params['resource_version']
if 'timeout_seconds' in params:
query_params['timeoutSeconds'] = params['timeout_seconds']
if 'watch' in params:
query_params['watch'] = params['watch']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Status',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
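    # Usage sketch: removing a whole set of ClusterRoles in one call by label,
    # instead of deleting them one by one. Same client assumptions as the
    # sketch above; the label "team=example" is hypothetical.
    #
    #     status = api.delete_collection_cluster_role(
    #         label_selector="team=example")
    #     print(status.status)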
def delete_collection_cluster_role_binding(self, **kwargs):
"""
delete collection of ClusterRoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_collection_cluster_role_binding(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str pretty: If 'true', then the output is pretty printed.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_collection_cluster_role_binding_with_http_info(**kwargs)
else:
(data) = self.delete_collection_cluster_role_binding_with_http_info(**kwargs)
return data
def delete_collection_cluster_role_binding_with_http_info(self, **kwargs):
"""
delete collection of ClusterRoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_collection_cluster_role_binding_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str pretty: If 'true', then the output is pretty printed.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['pretty', 'field_selector', 'include_uninitialized', 'label_selector', 'resource_version', 'timeout_seconds', 'watch']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_collection_cluster_role_binding" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
resource_path = '/apis/rbac.authorization.k8s.io/v1beta1/clusterrolebindings'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
if 'field_selector' in params:
query_params['fieldSelector'] = params['field_selector']
if 'include_uninitialized' in params:
query_params['includeUninitialized'] = params['include_uninitialized']
if 'label_selector' in params:
query_params['labelSelector'] = params['label_selector']
if 'resource_version' in params:
query_params['resourceVersion'] = params['resource_version']
if 'timeout_seconds' in params:
query_params['timeoutSeconds'] = params['timeout_seconds']
if 'watch' in params:
query_params['watch'] = params['watch']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Status',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_collection_namespaced_role(self, namespace, **kwargs):
"""
delete collection of Role
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_collection_namespaced_role(namespace, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_collection_namespaced_role_with_http_info(namespace, **kwargs)
else:
(data) = self.delete_collection_namespaced_role_with_http_info(namespace, **kwargs)
return data
def delete_collection_namespaced_role_with_http_info(self, namespace, **kwargs):
"""
delete collection of Role
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_collection_namespaced_role_with_http_info(namespace, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['namespace', 'pretty', 'field_selector', 'include_uninitialized', 'label_selector', 'resource_version', 'timeout_seconds', 'watch']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_collection_namespaced_role" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `delete_collection_namespaced_role`")
collection_formats = {}
resource_path = '/apis/rbac.authorization.k8s.io/v1beta1/namespaces/{namespace}/roles'.replace('{format}', 'json')
path_params = {}
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
if 'field_selector' in params:
query_params['fieldSelector'] = params['field_selector']
if 'include_uninitialized' in params:
query_params['includeUninitialized'] = params['include_uninitialized']
if 'label_selector' in params:
query_params['labelSelector'] = params['label_selector']
if 'resource_version' in params:
query_params['resourceVersion'] = params['resource_version']
if 'timeout_seconds' in params:
query_params['timeoutSeconds'] = params['timeout_seconds']
if 'watch' in params:
query_params['watch'] = params['watch']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Status',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
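    # Usage sketch: the namespaced collection variant differs only in the
    # required `namespace` positional argument; "demo" is a hypothetical
    # namespace.
    #
    #     status = api.delete_collection_namespaced_role(
    #         "demo", label_selector="team=example")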
def delete_collection_namespaced_role_binding(self, namespace, **kwargs):
"""
delete collection of RoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_collection_namespaced_role_binding(namespace, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_collection_namespaced_role_binding_with_http_info(namespace, **kwargs)
else:
(data) = self.delete_collection_namespaced_role_binding_with_http_info(namespace, **kwargs)
return data
def delete_collection_namespaced_role_binding_with_http_info(self, namespace, **kwargs):
"""
delete collection of RoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_collection_namespaced_role_binding_with_http_info(namespace, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['namespace', 'pretty', 'field_selector', 'include_uninitialized', 'label_selector', 'resource_version', 'timeout_seconds', 'watch']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_collection_namespaced_role_binding" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `delete_collection_namespaced_role_binding`")
collection_formats = {}
resource_path = '/apis/rbac.authorization.k8s.io/v1beta1/namespaces/{namespace}/rolebindings'.replace('{format}', 'json')
path_params = {}
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
if 'field_selector' in params:
query_params['fieldSelector'] = params['field_selector']
if 'include_uninitialized' in params:
query_params['includeUninitialized'] = params['include_uninitialized']
if 'label_selector' in params:
query_params['labelSelector'] = params['label_selector']
if 'resource_version' in params:
query_params['resourceVersion'] = params['resource_version']
if 'timeout_seconds' in params:
query_params['timeoutSeconds'] = params['timeout_seconds']
if 'watch' in params:
query_params['watch'] = params['watch']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Status',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_namespaced_role(self, name, namespace, body, **kwargs):
"""
delete a Role
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_namespaced_role(name, namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the Role (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1DeleteOptions body: (required)
:param str pretty: If 'true', then the output is pretty printed.
        :param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be a non-negative integer; zero means delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per-object value if not specified.
        :param bool orphan_dependents: Deprecated: please use PropagationPolicy; this field will be deprecated in 1.7. Should the dependent objects be orphaned? If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_namespaced_role_with_http_info(name, namespace, body, **kwargs)
else:
(data) = self.delete_namespaced_role_with_http_info(name, namespace, body, **kwargs)
return data
def delete_namespaced_role_with_http_info(self, name, namespace, body, **kwargs):
"""
delete a Role
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_namespaced_role_with_http_info(name, namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the Role (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1DeleteOptions body: (required)
:param str pretty: If 'true', then the output is pretty printed.
        :param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be a non-negative integer; zero means delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per-object value if not specified.
        :param bool orphan_dependents: Deprecated: please use PropagationPolicy; this field will be deprecated in 1.7. Should the dependent objects be orphaned? If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'body', 'pretty', 'grace_period_seconds', 'orphan_dependents', 'propagation_policy']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_namespaced_role" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `delete_namespaced_role`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `delete_namespaced_role`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `delete_namespaced_role`")
collection_formats = {}
resource_path = '/apis/rbac.authorization.k8s.io/v1beta1/namespaces/{namespace}/roles/{name}'.replace('{format}', 'json')
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
if 'grace_period_seconds' in params:
query_params['gracePeriodSeconds'] = params['grace_period_seconds']
if 'orphan_dependents' in params:
query_params['orphanDependents'] = params['orphan_dependents']
if 'propagation_policy' in params:
query_params['propagationPolicy'] = params['propagation_policy']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Status',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
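    # Usage sketch: deleting one Role inside a namespace. Unlike the
    # collection calls, a V1DeleteOptions body is required here; the name and
    # namespace shown are hypothetical.
    #
    #     status = api.delete_namespaced_role(
    #         name="example-role", namespace="demo",
    #         body=client.V1DeleteOptions())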
def delete_namespaced_role_binding(self, name, namespace, body, **kwargs):
"""
delete a RoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_namespaced_role_binding(name, namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the RoleBinding (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1DeleteOptions body: (required)
:param str pretty: If 'true', then the output is pretty printed.
        :param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be a non-negative integer; zero means delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per-object value if not specified.
        :param bool orphan_dependents: Deprecated: please use PropagationPolicy; this field will be deprecated in 1.7. Should the dependent objects be orphaned? If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_namespaced_role_binding_with_http_info(name, namespace, body, **kwargs)
else:
(data) = self.delete_namespaced_role_binding_with_http_info(name, namespace, body, **kwargs)
return data
def delete_namespaced_role_binding_with_http_info(self, name, namespace, body, **kwargs):
"""
delete a RoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_namespaced_role_binding_with_http_info(name, namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the RoleBinding (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1DeleteOptions body: (required)
:param str pretty: If 'true', then the output is pretty printed.
        :param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be a non-negative integer; zero means delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per-object value if not specified.
        :param bool orphan_dependents: Deprecated: please use PropagationPolicy; this field will be deprecated in 1.7. Should the dependent objects be orphaned? If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'body', 'pretty', 'grace_period_seconds', 'orphan_dependents', 'propagation_policy']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_namespaced_role_binding" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `delete_namespaced_role_binding`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `delete_namespaced_role_binding`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `delete_namespaced_role_binding`")
collection_formats = {}
resource_path = '/apis/rbac.authorization.k8s.io/v1beta1/namespaces/{namespace}/rolebindings/{name}'.replace('{format}', 'json')
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
if 'grace_period_seconds' in params:
query_params['gracePeriodSeconds'] = params['grace_period_seconds']
if 'orphan_dependents' in params:
query_params['orphanDependents'] = params['orphan_dependents']
if 'propagation_policy' in params:
query_params['propagationPolicy'] = params['propagation_policy']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Status',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_api_resources(self, **kwargs):
"""
get available resources
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_api_resources(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: V1APIResourceList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_api_resources_with_http_info(**kwargs)
else:
(data) = self.get_api_resources_with_http_info(**kwargs)
return data
def get_api_resources_with_http_info(self, **kwargs):
"""
get available resources
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_api_resources_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: V1APIResourceList
If the method is called asynchronously,
returns the request thread.
"""
all_params = []
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_api_resources" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
resource_path = '/apis/rbac.authorization.k8s.io/v1beta1/'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1APIResourceList',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
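    # Usage sketch: discovering which RBAC resources this group/version
    # serves, which is useful for probing cluster capabilities before issuing
    # other calls. V1APIResourceList exposes a `resources` list.
    #
    #     resources = api.get_api_resources()
    #     for r in resources.resources:
    #         print(r.name, r.namespaced, r.kind)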
def list_cluster_role(self, **kwargs):
"""
list or watch objects of kind ClusterRole
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_cluster_role(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str pretty: If 'true', then the output is pretty printed.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1beta1ClusterRoleList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.list_cluster_role_with_http_info(**kwargs)
else:
(data) = self.list_cluster_role_with_http_info(**kwargs)
return data
def list_cluster_role_with_http_info(self, **kwargs):
"""
list or watch objects of kind ClusterRole
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_cluster_role_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str pretty: If 'true', then the output is pretty printed.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1beta1ClusterRoleList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['pretty', 'field_selector', 'include_uninitialized', 'label_selector', 'resource_version', 'timeout_seconds', 'watch']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_cluster_role" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
resource_path = '/apis/rbac.authorization.k8s.io/v1beta1/clusterroles'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
if 'field_selector' in params:
query_params['fieldSelector'] = params['field_selector']
if 'include_uninitialized' in params:
query_params['includeUninitialized'] = params['include_uninitialized']
if 'label_selector' in params:
query_params['labelSelector'] = params['label_selector']
if 'resource_version' in params:
query_params['resourceVersion'] = params['resource_version']
if 'timeout_seconds' in params:
query_params['timeoutSeconds'] = params['timeout_seconds']
if 'watch' in params:
query_params['watch'] = params['watch']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1beta1ClusterRoleList',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
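    # Usage sketch: listing ClusterRoles and iterating the result. The
    # returned V1beta1ClusterRoleList exposes `.items`; the label selector is
    # hypothetical and may be omitted to list everything.
    #
    #     roles = api.list_cluster_role(label_selector="team=example")
    #     for role in roles.items:
    #         print(role.metadata.name)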
def list_cluster_role_binding(self, **kwargs):
"""
list or watch objects of kind ClusterRoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_cluster_role_binding(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str pretty: If 'true', then the output is pretty printed.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1beta1ClusterRoleBindingList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.list_cluster_role_binding_with_http_info(**kwargs)
else:
(data) = self.list_cluster_role_binding_with_http_info(**kwargs)
return data
def list_cluster_role_binding_with_http_info(self, **kwargs):
"""
list or watch objects of kind ClusterRoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_cluster_role_binding_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str pretty: If 'true', then the output is pretty printed.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1beta1ClusterRoleBindingList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['pretty', 'field_selector', 'include_uninitialized', 'label_selector', 'resource_version', 'timeout_seconds', 'watch']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_cluster_role_binding" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
resource_path = '/apis/rbac.authorization.k8s.io/v1beta1/clusterrolebindings'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
if 'field_selector' in params:
query_params['fieldSelector'] = params['field_selector']
if 'include_uninitialized' in params:
query_params['includeUninitialized'] = params['include_uninitialized']
if 'label_selector' in params:
query_params['labelSelector'] = params['label_selector']
if 'resource_version' in params:
query_params['resourceVersion'] = params['resource_version']
if 'timeout_seconds' in params:
query_params['timeoutSeconds'] = params['timeout_seconds']
if 'watch' in params:
query_params['watch'] = params['watch']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1beta1ClusterRoleBindingList',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
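    # Usage sketch: because the Accept header above advertises
    # `;stream=watch`, the list methods can also drive a watch. A minimal
    # sketch using the client's watch helper, assuming `kubernetes.watch` is
    # available alongside this module.
    #
    #     from kubernetes import watch
    #     w = watch.Watch()
    #     for event in w.stream(api.list_cluster_role_binding,
    #                           timeout_seconds=10):
    #         print(event['type'], event['object'].metadata.name)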
def list_namespaced_role(self, namespace, **kwargs):
"""
list or watch objects of kind Role
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_namespaced_role(namespace, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1beta1RoleList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.list_namespaced_role_with_http_info(namespace, **kwargs)
else:
(data) = self.list_namespaced_role_with_http_info(namespace, **kwargs)
return data
def list_namespaced_role_with_http_info(self, namespace, **kwargs):
"""
list or watch objects of kind Role
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_namespaced_role_with_http_info(namespace, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1beta1RoleList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['namespace', 'pretty', 'field_selector', 'include_uninitialized', 'label_selector', 'resource_version', 'timeout_seconds', 'watch']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_namespaced_role" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `list_namespaced_role`")
collection_formats = {}
resource_path = '/apis/rbac.authorization.k8s.io/v1beta1/namespaces/{namespace}/roles'.replace('{format}', 'json')
path_params = {}
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
if 'field_selector' in params:
query_params['fieldSelector'] = params['field_selector']
if 'include_uninitialized' in params:
query_params['includeUninitialized'] = params['include_uninitialized']
if 'label_selector' in params:
query_params['labelSelector'] = params['label_selector']
if 'resource_version' in params:
query_params['resourceVersion'] = params['resource_version']
if 'timeout_seconds' in params:
query_params['timeoutSeconds'] = params['timeout_seconds']
if 'watch' in params:
query_params['watch'] = params['watch']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1beta1RoleList',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
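# Usage sketch (illustrative, not generated): list the Roles in a single
# namespace. Assumes `api` is a configured RbacAuthorizationV1beta1Api
# instance as in the earlier example; the namespace name is hypothetical.
#
#   roles = api.list_namespaced_role("kube-system")
#   print("found %d roles" % len(roles.items))
#   for role in roles.items:
#       print(role.metadata.name, len(role.rules or []))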
def list_namespaced_role_binding(self, namespace, **kwargs):
"""
list or watch objects of kind RoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_namespaced_role_binding(namespace, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1beta1RoleBindingList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.list_namespaced_role_binding_with_http_info(namespace, **kwargs)
else:
(data) = self.list_namespaced_role_binding_with_http_info(namespace, **kwargs)
return data
def list_namespaced_role_binding_with_http_info(self, namespace, **kwargs):
"""
list or watch objects of kind RoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_namespaced_role_binding_with_http_info(namespace, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1beta1RoleBindingList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['namespace', 'pretty', 'field_selector', 'include_uninitialized', 'label_selector', 'resource_version', 'timeout_seconds', 'watch']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_namespaced_role_binding" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `list_namespaced_role_binding`")
collection_formats = {}
resource_path = '/apis/rbac.authorization.k8s.io/v1beta1/namespaces/{namespace}/rolebindings'.replace('{format}', 'json')
path_params = {}
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
if 'field_selector' in params:
query_params['fieldSelector'] = params['field_selector']
if 'include_uninitialized' in params:
query_params['includeUninitialized'] = params['include_uninitialized']
if 'label_selector' in params:
query_params['labelSelector'] = params['label_selector']
if 'resource_version' in params:
query_params['resourceVersion'] = params['resource_version']
if 'timeout_seconds' in params:
query_params['timeoutSeconds'] = params['timeout_seconds']
if 'watch' in params:
query_params['watch'] = params['watch']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1beta1RoleBindingList',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
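# Usage sketch (illustrative): the asynchronous calling convention documented
# above. Passing `callback` makes the method return immediately with the
# request thread and deliver the deserialized V1beta1RoleBindingList to the
# callback. Assumes a configured `api` instance; the namespace is hypothetical.
#
#   def on_role_bindings(role_binding_list):
#       for rb in role_binding_list.items:
#           print(rb.metadata.namespace, rb.metadata.name)
#
#   request_thread = api.list_namespaced_role_binding(
#       "default", callback=on_role_bindings)
#   # ... do other work; the callback fires when the response arrives.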
def list_role_binding_for_all_namespaces(self, **kwargs):
"""
list or watch objects of kind RoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_role_binding_for_all_namespaces(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param str pretty: If 'true', then the output is pretty printed.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1beta1RoleBindingList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.list_role_binding_for_all_namespaces_with_http_info(**kwargs)
else:
(data) = self.list_role_binding_for_all_namespaces_with_http_info(**kwargs)
return data
def list_role_binding_for_all_namespaces_with_http_info(self, **kwargs):
"""
list or watch objects of kind RoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_role_binding_for_all_namespaces_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param str pretty: If 'true', then the output is pretty printed.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1beta1RoleBindingList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['field_selector', 'include_uninitialized', 'label_selector', 'pretty', 'resource_version', 'timeout_seconds', 'watch']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_role_binding_for_all_namespaces" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
resource_path = '/apis/rbac.authorization.k8s.io/v1beta1/rolebindings'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'field_selector' in params:
query_params['fieldSelector'] = params['field_selector']
if 'include_uninitialized' in params:
query_params['includeUninitialized'] = params['include_uninitialized']
if 'label_selector' in params:
query_params['labelSelector'] = params['label_selector']
if 'pretty' in params:
query_params['pretty'] = params['pretty']
if 'resource_version' in params:
query_params['resourceVersion'] = params['resource_version']
if 'timeout_seconds' in params:
query_params['timeoutSeconds'] = params['timeout_seconds']
if 'watch' in params:
query_params['watch'] = params['watch']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1beta1RoleBindingList',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
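# Usage sketch (illustrative): list RoleBindings across every namespace and
# group them by namespace. Assumes a configured `api` instance.
#
#   from collections import defaultdict
#
#   by_namespace = defaultdict(list)
#   for rb in api.list_role_binding_for_all_namespaces().items:
#       by_namespace[rb.metadata.namespace].append(rb.metadata.name)
#   for ns, names in sorted(by_namespace.items()):
#       print(ns, names)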
def list_role_for_all_namespaces(self, **kwargs):
"""
list or watch objects of kind Role
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_role_for_all_namespaces(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param str pretty: If 'true', then the output is pretty printed.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1beta1RoleList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.list_role_for_all_namespaces_with_http_info(**kwargs)
else:
(data) = self.list_role_for_all_namespaces_with_http_info(**kwargs)
return data
def list_role_for_all_namespaces_with_http_info(self, **kwargs):
"""
list or watch objects of kind Role
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_role_for_all_namespaces_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param str pretty: If 'true', then the output is pretty printed.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1beta1RoleList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['field_selector', 'include_uninitialized', 'label_selector', 'pretty', 'resource_version', 'timeout_seconds', 'watch']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_role_for_all_namespaces" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
resource_path = '/apis/rbac.authorization.k8s.io/v1beta1/roles'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'field_selector' in params:
query_params['fieldSelector'] = params['field_selector']
if 'include_uninitialized' in params:
query_params['includeUninitialized'] = params['include_uninitialized']
if 'label_selector' in params:
query_params['labelSelector'] = params['label_selector']
if 'pretty' in params:
query_params['pretty'] = params['pretty']
if 'resource_version' in params:
query_params['resourceVersion'] = params['resource_version']
if 'timeout_seconds' in params:
query_params['timeoutSeconds'] = params['timeout_seconds']
if 'watch' in params:
query_params['watch'] = params['watch']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1beta1RoleList',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
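# Usage sketch (illustrative): following Role changes cluster-wide. Rather
# than passing watch=True directly, the kubernetes.watch helper (if the
# surrounding package provides it) can wrap this list method and yield
# decoded ADDED/MODIFIED/DELETED events. Treat the import path as an
# assumption about the package layout.
#
#   from kubernetes import watch
#
#   w = watch.Watch()
#   for event in w.stream(api.list_role_for_all_namespaces, timeout_seconds=60):
#       role = event["object"]
#       print(event["type"], role.metadata.namespace, role.metadata.name)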
def patch_cluster_role(self, name, body, **kwargs):
"""
partially update the specified ClusterRole
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.patch_cluster_role(name, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the ClusterRole (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1ClusterRole
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.patch_cluster_role_with_http_info(name, body, **kwargs)
else:
(data) = self.patch_cluster_role_with_http_info(name, body, **kwargs)
return data
def patch_cluster_role_with_http_info(self, name, body, **kwargs):
"""
partially update the specified ClusterRole
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.patch_cluster_role_with_http_info(name, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the ClusterRole (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1ClusterRole
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'body', 'pretty']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_cluster_role" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `patch_cluster_role`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_cluster_role`")
collection_formats = {}
resource_path = '/apis/rbac.authorization.k8s.io/v1beta1/clusterroles/{name}'.replace('{format}', 'json')
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1beta1ClusterRole',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
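# Usage sketch (illustrative): partially update a ClusterRole. The body is
# passed through as-is, so its shape must match the patch Content-Type the
# client negotiates from the list above (JSON Patch, merge patch, or
# strategic merge patch); which one is chosen depends on the client version,
# so verify before relying on it. Shown here is an RFC 6902 JSON Patch that
# adds a label; it assumes the role already has a metadata.labels map, and
# the role name and label are hypothetical.
#
#   patch = [{"op": "add",
#             "path": "/metadata/labels/audited",
#             "value": "true"}]
#   updated = api.patch_cluster_role("view-extended", patch)
#   print(updated.metadata.labels)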
def patch_cluster_role_binding(self, name, body, **kwargs):
"""
partially update the specified ClusterRoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.patch_cluster_role_binding(name, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the ClusterRoleBinding (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1ClusterRoleBinding
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.patch_cluster_role_binding_with_http_info(name, body, **kwargs)
else:
(data) = self.patch_cluster_role_binding_with_http_info(name, body, **kwargs)
return data
def patch_cluster_role_binding_with_http_info(self, name, body, **kwargs):
"""
partially update the specified ClusterRoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.patch_cluster_role_binding_with_http_info(name, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the ClusterRoleBinding (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1ClusterRoleBinding
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'body', 'pretty']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_cluster_role_binding" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `patch_cluster_role_binding`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_cluster_role_binding`")
collection_formats = {}
resource_path = '/apis/rbac.authorization.k8s.io/v1beta1/clusterrolebindings/{name}'.replace('{format}', 'json')
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1beta1ClusterRoleBinding',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
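# Usage sketch (illustrative): append a subject to an existing
# ClusterRoleBinding with a JSON Patch "add to end of array" operation
# (a path ending in "/-"). The names are hypothetical, and the same
# Content-Type caveat as for patch_cluster_role applies.
#
#   patch = [{"op": "add",
#             "path": "/subjects/-",
#             "value": {"kind": "ServiceAccount",
#                       "name": "deployer",
#                       "namespace": "ci"}}]
#   api.patch_cluster_role_binding("deployers", patch)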
def patch_namespaced_role(self, name, namespace, body, **kwargs):
"""
partially update the specified Role
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.patch_namespaced_role(name, namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the Role (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1Role
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.patch_namespaced_role_with_http_info(name, namespace, body, **kwargs)
else:
(data) = self.patch_namespaced_role_with_http_info(name, namespace, body, **kwargs)
return data
def patch_namespaced_role_with_http_info(self, name, namespace, body, **kwargs):
"""
partially update the specified Role
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.patch_namespaced_role_with_http_info(name, namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the Role (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1Role
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'body', 'pretty']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_namespaced_role" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `patch_namespaced_role`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `patch_namespaced_role`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_namespaced_role`")
collection_formats = {}
resource_path = '/apis/rbac.authorization.k8s.io/v1beta1/namespaces/{namespace}/roles/{name}'.replace('{format}', 'json')
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1beta1Role',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
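# Usage sketch (illustrative): a plain dict body works when the client sends
# a merge or strategic-merge patch; here it relabels a Role in one namespace.
# The name, namespace, and label are hypothetical.
#
#   body = {"metadata": {"labels": {"managed-by": "ops"}}}
#   api.patch_namespaced_role("log-reader", "logging", body)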
def patch_namespaced_role_binding(self, name, namespace, body, **kwargs):
"""
partially update the specified RoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.patch_namespaced_role_binding(name, namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the RoleBinding (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1RoleBinding
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.patch_namespaced_role_binding_with_http_info(name, namespace, body, **kwargs)
else:
(data) = self.patch_namespaced_role_binding_with_http_info(name, namespace, body, **kwargs)
return data
def patch_namespaced_role_binding_with_http_info(self, name, namespace, body, **kwargs):
"""
partially update the specified RoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.patch_namespaced_role_binding_with_http_info(name, namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the RoleBinding (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1RoleBinding
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'body', 'pretty']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_namespaced_role_binding" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `patch_namespaced_role_binding`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `patch_namespaced_role_binding`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_namespaced_role_binding`")
collection_formats = {}
resource_path = '/apis/rbac.authorization.k8s.io/v1beta1/namespaces/{namespace}/rolebindings/{name}'.replace('{format}', 'json')
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1beta1RoleBinding',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def read_cluster_role(self, name, **kwargs):
"""
read the specified ClusterRole
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.read_cluster_role(name, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the ClusterRole (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1ClusterRole
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.read_cluster_role_with_http_info(name, **kwargs)
else:
(data) = self.read_cluster_role_with_http_info(name, **kwargs)
return data
def read_cluster_role_with_http_info(self, name, **kwargs):
"""
read the specified ClusterRole
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.read_cluster_role_with_http_info(name, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the ClusterRole (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1ClusterRole
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'pretty']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method read_cluster_role" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `read_cluster_role`")
collection_formats = {}
resource_path = '/apis/rbac.authorization.k8s.io/v1beta1/clusterroles/{name}'.replace('{format}', 'json')
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1beta1ClusterRole',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
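# Usage sketch (illustrative): fetch a single ClusterRole and inspect its
# policy rules. Assumes a configured `api` instance; "view" exists in most
# clusters but is still an assumption here.
#
#   role = api.read_cluster_role("view")
#   for rule in role.rules or []:
#       print(rule.api_groups, rule.resources, rule.verbs)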
def read_cluster_role_binding(self, name, **kwargs):
"""
read the specified ClusterRoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.read_cluster_role_binding(name, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the ClusterRoleBinding (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1ClusterRoleBinding
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.read_cluster_role_binding_with_http_info(name, **kwargs)
else:
(data) = self.read_cluster_role_binding_with_http_info(name, **kwargs)
return data
def read_cluster_role_binding_with_http_info(self, name, **kwargs):
"""
read the specified ClusterRoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.read_cluster_role_binding_with_http_info(name, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the ClusterRoleBinding (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1ClusterRoleBinding
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'pretty']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method read_cluster_role_binding" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `read_cluster_role_binding`")
collection_formats = {}
resource_path = '/apis/rbac.authorization.k8s.io/v1beta1/clusterrolebindings/{name}'.replace('{format}', 'json')
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1beta1ClusterRoleBinding',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def read_namespaced_role(self, name, namespace, **kwargs):
"""
read the specified Role
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.read_namespaced_role(name, namespace, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the Role (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1Role
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.read_namespaced_role_with_http_info(name, namespace, **kwargs)
else:
(data) = self.read_namespaced_role_with_http_info(name, namespace, **kwargs)
return data
def read_namespaced_role_with_http_info(self, name, namespace, **kwargs):
"""
read the specified Role
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.read_namespaced_role_with_http_info(name, namespace, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the Role (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1Role
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'pretty']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method read_namespaced_role" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `read_namespaced_role`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `read_namespaced_role`")
collection_formats = {}
resource_path = '/apis/rbac.authorization.k8s.io/v1beta1/namespaces/{namespace}/roles/{name}'.replace('{format}', 'json')
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1beta1Role',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
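# Usage sketch (illustrative): read a Role and treat "not found" separately
# from other failures. Assumes the generated package exposes ApiException
# under its rest module; in the official client that is kubernetes.client.rest.
#
#   from kubernetes.client.rest import ApiException
#
#   try:
#       role = api.read_namespaced_role("log-reader", "logging")
#   except ApiException as exc:
#       if exc.status == 404:
#           role = None
#       else:
#           raise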
def read_namespaced_role_binding(self, name, namespace, **kwargs):
"""
read the specified RoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.read_namespaced_role_binding(name, namespace, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the RoleBinding (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1RoleBinding
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.read_namespaced_role_binding_with_http_info(name, namespace, **kwargs)
else:
(data) = self.read_namespaced_role_binding_with_http_info(name, namespace, **kwargs)
return data
def read_namespaced_role_binding_with_http_info(self, name, namespace, **kwargs):
"""
read the specified RoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.read_namespaced_role_binding_with_http_info(name, namespace, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the RoleBinding (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1RoleBinding
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'pretty']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method read_namespaced_role_binding" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `read_namespaced_role_binding`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `read_namespaced_role_binding`")
collection_formats = {}
resource_path = '/apis/rbac.authorization.k8s.io/v1beta1/namespaces/{namespace}/rolebindings/{name}'.replace('{format}', 'json')
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1beta1RoleBinding',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def replace_cluster_role(self, name, body, **kwargs):
"""
replace the specified ClusterRole
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.replace_cluster_role(name, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the ClusterRole (required)
:param V1beta1ClusterRole body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1ClusterRole
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.replace_cluster_role_with_http_info(name, body, **kwargs)
else:
(data) = self.replace_cluster_role_with_http_info(name, body, **kwargs)
return data
def replace_cluster_role_with_http_info(self, name, body, **kwargs):
"""
replace the specified ClusterRole
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.replace_cluster_role_with_http_info(name, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the ClusterRole (required)
:param V1beta1ClusterRole body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1ClusterRole
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'body', 'pretty']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method replace_cluster_role" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `replace_cluster_role`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `replace_cluster_role`")
collection_formats = {}
resource_path = '/apis/rbac.authorization.k8s.io/v1beta1/clusterroles/{name}'.replace('{format}', 'json')
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1beta1ClusterRole',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def replace_cluster_role_binding(self, name, body, **kwargs):
"""
replace the specified ClusterRoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.replace_cluster_role_binding(name, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the ClusterRoleBinding (required)
:param V1beta1ClusterRoleBinding body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1ClusterRoleBinding
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.replace_cluster_role_binding_with_http_info(name, body, **kwargs)
else:
(data) = self.replace_cluster_role_binding_with_http_info(name, body, **kwargs)
return data
def replace_cluster_role_binding_with_http_info(self, name, body, **kwargs):
"""
replace the specified ClusterRoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.replace_cluster_role_binding_with_http_info(name, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the ClusterRoleBinding (required)
:param V1beta1ClusterRoleBinding body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1ClusterRoleBinding
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'body', 'pretty']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method replace_cluster_role_binding" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `replace_cluster_role_binding`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `replace_cluster_role_binding`")
collection_formats = {}
resource_path = '/apis/rbac.authorization.k8s.io/v1beta1/clusterrolebindings/{name}'.replace('{format}', 'json')
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1beta1ClusterRoleBinding',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def replace_namespaced_role(self, name, namespace, body, **kwargs):
"""
replace the specified Role
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.replace_namespaced_role(name, namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the Role (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1beta1Role body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1Role
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.replace_namespaced_role_with_http_info(name, namespace, body, **kwargs)
else:
(data) = self.replace_namespaced_role_with_http_info(name, namespace, body, **kwargs)
return data
def replace_namespaced_role_with_http_info(self, name, namespace, body, **kwargs):
"""
replace the specified Role
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.replace_namespaced_role_with_http_info(name, namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the Role (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1beta1Role body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1Role
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'body', 'pretty']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method replace_namespaced_role" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `replace_namespaced_role`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `replace_namespaced_role`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `replace_namespaced_role`")
collection_formats = {}
resource_path = '/apis/rbac.authorization.k8s.io/v1beta1/namespaces/{namespace}/roles/{name}'.replace('{format}', 'json')
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1beta1Role',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def replace_namespaced_role_binding(self, name, namespace, body, **kwargs):
"""
replace the specified RoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.replace_namespaced_role_binding(name, namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the RoleBinding (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1beta1RoleBinding body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1RoleBinding
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.replace_namespaced_role_binding_with_http_info(name, namespace, body, **kwargs)
else:
(data) = self.replace_namespaced_role_binding_with_http_info(name, namespace, body, **kwargs)
return data
def replace_namespaced_role_binding_with_http_info(self, name, namespace, body, **kwargs):
"""
replace the specified RoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.replace_namespaced_role_binding_with_http_info(name, namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the RoleBinding (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1beta1RoleBinding body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1RoleBinding
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'body', 'pretty']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method replace_namespaced_role_binding" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `replace_namespaced_role_binding`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `replace_namespaced_role_binding`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `replace_namespaced_role_binding`")
collection_formats = {}
resource_path = '/apis/rbac.authorization.k8s.io/v1beta1/namespaces/{namespace}/rolebindings/{name}'.replace('{format}', 'json')
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1beta1RoleBinding',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
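    # --- Hedged usage sketch (editor's addition, not generated code) ---
    # The async pattern shared by the methods above; `api` stands for an
    # instance of this API class and the resource names are hypothetical:
    #
    #     def callback_function(response):
    #         pprint(response)  # e.g. a V1beta1RoleBinding on success
    #
    #     thread = api.replace_namespaced_role_binding(
    #         'example-binding', 'default', body, callback=callback_function)
    #     # the call returns immediately; the callback fires on completion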
| 51.469415 | 457 | 0.612572 |
721a38af4f9d1dbd2117b957553385e36931c997 | 19,837 | py | Python | third_party/WebKit/Source/devtools/scripts/compile_frontend.py | wenfeifei/miniblink49 | 2ed562ff70130485148d94b0e5f4c343da0c2ba4 | [
"Apache-2.0"
] | 5,964 | 2016-09-27T03:46:29.000Z | 2022-03-31T16:25:27.000Z | third_party/WebKit/Source/devtools/scripts/compile_frontend.py | w4454962/miniblink49 | b294b6eacb3333659bf7b94d670d96edeeba14c0 | [
"Apache-2.0"
] | 459 | 2016-09-29T00:51:38.000Z | 2022-03-07T14:37:46.000Z | third_party/WebKit/Source/devtools/scripts/compile_frontend.py | w4454962/miniblink49 | b294b6eacb3333659bf7b94d670d96edeeba14c0 | [
"Apache-2.0"
] | 1,006 | 2016-09-27T05:17:27.000Z | 2022-03-30T02:46:51.000Z | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from modular_build import read_file, write_file
import os
import os.path as path
import generate_injected_script_externs
import generate_protocol_externs
import modular_build
import re
import shutil
import subprocess
import sys
import tempfile
try:
import simplejson as json
except ImportError:
import json
if len(sys.argv) == 2 and sys.argv[1] == '--help':
print("Usage: %s [module_names]" % path.basename(sys.argv[0]))
print(" module_names list of modules for which the Closure compilation should run.")
print(" If absent, the entire frontend will be compiled.")
sys.exit(0)
is_cygwin = sys.platform == 'cygwin'
def popen(arguments):
return subprocess.Popen(arguments, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
def to_platform_path(filepath):
if not is_cygwin:
return filepath
return re.sub(r'^/cygdrive/(\w)', '\\1:', filepath)
def to_platform_path_exact(filepath):
if not is_cygwin:
return filepath
output, _ = popen(['cygpath', '-w', filepath]).communicate()
# pylint: disable=E1103
return output.strip().replace('\\', '\\\\')
scripts_path = path.dirname(path.abspath(__file__))
devtools_path = path.dirname(scripts_path)
inspector_path = path.join(path.dirname(devtools_path), 'core', 'inspector')
devtools_frontend_path = path.join(devtools_path, 'front_end')
patched_es6_externs_file = to_platform_path(path.join(devtools_frontend_path, 'es6.js'))
global_externs_file = to_platform_path(path.join(devtools_frontend_path, 'externs.js'))
protocol_externs_file = path.join(devtools_frontend_path, 'protocol_externs.js')
injected_script_source_name = path.join(inspector_path, 'InjectedScriptSource.js')
injected_script_externs_file = path.join(inspector_path, 'injected_script_externs.js')
jsmodule_name_prefix = 'jsmodule_'
runtime_module_name = '_runtime'
type_checked_jsdoc_tags_list = ['param', 'return', 'type', 'enum']
type_checked_jsdoc_tags_or = '|'.join(type_checked_jsdoc_tags_list)
# Basic regex for invalid JsDoc types: an object type name ([A-Z][A-Za-z0-9.]+[A-Za-z0-9]) not preceded by '!', '?', ':' (this, new), or '.' (object property).
invalid_type_regex = re.compile(r'@(?:' + type_checked_jsdoc_tags_or + r')\s*\{.*(?<![!?:.A-Za-z0-9])([A-Z][A-Za-z0-9.]+[A-Za-z0-9])[^/]*\}')
invalid_type_designator_regex = re.compile(r'@(?:' + type_checked_jsdoc_tags_or + r')\s*.*(?<![{: ])([?!])=?\}')
invalid_non_object_type_regex = re.compile(r'@(?:' + type_checked_jsdoc_tags_or + r')\s*\{.*(![a-z]+)[^/]*\}')
error_warning_regex = re.compile(r'WARNING|ERROR')
loaded_css_regex = re.compile(r'(?:registerRequiredCSS|WebInspector\.View\.createStyleElement)\s*\(\s*"(.+)"\s*\)')
java_build_regex = re.compile(r'^\w+ version "(\d+)\.(\d+)')
errors_found = False
generate_protocol_externs.generate_protocol_externs(protocol_externs_file, path.join(devtools_path, 'protocol.json'))
# Based on http://stackoverflow.com/questions/377017/test-if-executable-exists-in-python.
def which(program):
def is_exe(fpath):
return path.isfile(fpath) and os.access(fpath, os.X_OK)
fpath, fname = path.split(program)
if fpath:
if is_exe(program):
return program
else:
for part in os.environ["PATH"].split(os.pathsep):
part = part.strip('"')
exe_file = path.join(part, program)
if is_exe(exe_file):
return exe_file
return None
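# Hedged examples (editor's addition) of how which() behaves: a bare program
# name scans $PATH, while a path argument is checked directly.
#
#     which('java')               # -> e.g. '/usr/bin/java' if executable
#     which('/opt/jdk/bin/java')  # -> the same path if it is executable
#     which('no-such-binary')     # -> None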
def log_error(message):
print 'ERROR: ' + message
def error_excepthook(exctype, value, traceback):
print 'ERROR:'
sys.__excepthook__(exctype, value, traceback)
sys.excepthook = error_excepthook
application_descriptors = ['inspector.json', 'toolbox.json']
loader = modular_build.DescriptorLoader(devtools_frontend_path)
descriptors = loader.load_applications(application_descriptors)
modules_by_name = descriptors.modules
def hasErrors(output):
    return re.search(error_warning_regex, output) is not None
def verify_jsdoc_extra(additional_files):
files = [to_platform_path(file) for file in descriptors.all_compiled_files() + additional_files]
file_list = tempfile.NamedTemporaryFile(mode='wt', delete=False)
try:
file_list.write('\n'.join(files))
finally:
file_list.close()
return popen(java_exec + ['-jar', jsdoc_validator_jar, '--files-list-name', to_platform_path_exact(file_list.name)]), file_list
def verify_jsdoc(additional_files):
def file_list():
return descriptors.all_compiled_files() + additional_files
errors_found = False
for full_file_name in file_list():
lineIndex = 0
with open(full_file_name, 'r') as sourceFile:
for line in sourceFile:
line = line.rstrip()
lineIndex += 1
if not line:
continue
if verify_jsdoc_line(full_file_name, lineIndex, line):
errors_found = True
return errors_found
known_css = {}
def verify_jsdoc_line(fileName, lineIndex, line):
    def print_error(message, errorPosition):
        print '%s:%s: ERROR - %s%s%s%s%s%s' % (fileName, lineIndex, message, os.linesep, line, os.linesep, ' ' * errorPosition + '^', os.linesep)
    # known_css lives at module scope (above) so the stylesheet-existence
    # cache persists across calls instead of being rebuilt for every line.
errors_found = False
match = re.search(invalid_type_regex, line)
if match:
print_error('Type "%s" nullability not marked explicitly with "?" (nullable) or "!" (non-nullable)' % match.group(1), match.start(1))
errors_found = True
match = re.search(invalid_non_object_type_regex, line)
if match:
print_error('Non-object type explicitly marked with "!" (non-nullable), which is the default and should be omitted', match.start(1))
errors_found = True
match = re.search(invalid_type_designator_regex, line)
if match:
print_error('Type nullability indicator misplaced, should precede type', match.start(1))
errors_found = True
match = re.search(loaded_css_regex, line)
if match:
file = path.join(devtools_frontend_path, match.group(1))
exists = known_css.get(file)
if exists is None:
exists = path.isfile(file)
known_css[file] = exists
if not exists:
print_error('Dynamically loaded CSS stylesheet is missing in the source tree', match.start(1))
errors_found = True
return errors_found
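# Hypothetical inputs (editor's addition) illustrating the three checks above:
#
#     verify_jsdoc_line('a.js', 1, ' * @param {Element} el')
#         # -> nullability of "Element" not marked with "?" or "!"
#     verify_jsdoc_line('a.js', 2, ' * @return {!boolean}')
#         # -> non-object type explicitly marked "!"
#     verify_jsdoc_line('a.js', 3, ' * @param {string?} s')
#         # -> nullability indicator misplaced, should precede type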
def find_java():
required_major = 1
required_minor = 7
exec_command = None
has_server_jvm = True
java_path = which('java')
if not java_path:
java_path = which('java.exe')
if not java_path:
print 'NOTE: No Java executable found in $PATH.'
sys.exit(1)
is_ok = False
java_version_out, _ = popen([java_path, '-version']).communicate()
# pylint: disable=E1103
match = re.search(java_build_regex, java_version_out)
if match:
major = int(match.group(1))
minor = int(match.group(2))
is_ok = major >= required_major and minor >= required_minor
if is_ok:
exec_command = [java_path, '-Xms1024m', '-server', '-XX:+TieredCompilation']
check_server_proc = popen(exec_command + ['-version'])
check_server_proc.communicate()
if check_server_proc.returncode != 0:
            # Not all Java installs have server JVMs. list.remove() mutates in
            # place and returns None, so do not reassign exec_command here.
            exec_command.remove('-server')
            has_server_jvm = False
if not is_ok:
print 'NOTE: Java executable version %d.%d or above not found in $PATH.' % (required_major, required_minor)
sys.exit(1)
print 'Java executable: %s%s' % (java_path, '' if has_server_jvm else ' (no server JVM)')
return exec_command
java_exec = find_java()
closure_compiler_jar = to_platform_path(path.join(scripts_path, 'closure', 'compiler.jar'))
closure_runner_jar = to_platform_path(path.join(scripts_path, 'compiler-runner', 'closure-runner.jar'))
jsdoc_validator_jar = to_platform_path(path.join(scripts_path, 'jsdoc-validator', 'jsdoc-validator.jar'))
modules_dir = tempfile.mkdtemp()
common_closure_args = [
'--summary_detail_level', '3',
'--jscomp_error', 'visibility',
'--compilation_level', 'SIMPLE_OPTIMIZATIONS',
'--warning_level', 'VERBOSE',
'--language_in=ES6_STRICT',
'--language_out=ES5_STRICT',
'--accept_const_keyword',
'--extra_annotation_name', 'suppressReceiverCheck',
'--extra_annotation_name', 'suppressGlobalPropertiesCheck',
'--module_output_path_prefix', to_platform_path_exact(modules_dir + path.sep)
]
worker_modules_by_name = {}
dependents_by_module_name = {}
for module_name in descriptors.application:
module = descriptors.modules[module_name]
if descriptors.application[module_name].get('type', None) == 'worker':
worker_modules_by_name[module_name] = module
for dep in module.get('dependencies', []):
list = dependents_by_module_name.get(dep)
if not list:
list = []
dependents_by_module_name[dep] = list
list.append(module_name)
def check_conditional_dependencies():
    # Write to the module-level flag rather than an accidental local shadow.
    global errors_found
    for name in modules_by_name:
for dep_name in modules_by_name[name].get('dependencies', []):
dependency = modules_by_name[dep_name]
if dependency.get('experiment') or dependency.get('condition'):
log_error('Module "%s" may not depend on the conditional module "%s"' % (name, dep_name))
errors_found = True
check_conditional_dependencies()
def verify_worker_modules():
    # Write to the module-level flag rather than an accidental local shadow.
    global errors_found
    for name in modules_by_name:
for dependency in modules_by_name[name].get('dependencies', []):
if dependency in worker_modules_by_name:
log_error('Module "%s" may not depend on the worker module "%s"' % (name, dependency))
errors_found = True
verify_worker_modules()
def check_duplicate_files():
def check_module(module, seen_files, seen_modules):
name = module['name']
seen_modules[name] = True
for dep_name in module.get('dependencies', []):
if not dep_name in seen_modules:
check_module(modules_by_name[dep_name], seen_files, seen_modules)
for source in module.get('scripts', []):
referencing_module = seen_files.get(source)
if referencing_module:
log_error('Duplicate use of %s in "%s" (previously seen in "%s")' % (source, name, referencing_module))
seen_files[source] = name
for module_name in worker_modules_by_name:
check_module(worker_modules_by_name[module_name], {}, {})
print 'Checking duplicate files across modules...'
check_duplicate_files()
def module_arg(module_name):
return ' --module ' + jsmodule_name_prefix + module_name
def modules_to_check():
if len(sys.argv) == 1:
return descriptors.sorted_modules()
print 'Compiling only these modules: %s' % sys.argv[1:]
return [module for module in descriptors.sorted_modules() if module in set(sys.argv[1:])]
def dump_module(name, recursively, processed_modules):
if name in processed_modules:
return ''
processed_modules[name] = True
module = modules_by_name[name]
skipped_scripts = set(module.get('skip_compilation', []))
command = ''
dependencies = module.get('dependencies', [])
if recursively:
for dependency in dependencies:
command += dump_module(dependency, recursively, processed_modules)
command += module_arg(name) + ':'
filtered_scripts = descriptors.module_compiled_files(name)
command += str(len(filtered_scripts))
firstDependency = True
for dependency in dependencies + [runtime_module_name]:
if firstDependency:
command += ':'
else:
command += ','
firstDependency = False
command += jsmodule_name_prefix + dependency
for script in filtered_scripts:
command += ' --js ' + to_platform_path(path.join(devtools_frontend_path, name, script))
return command
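# Editor's sketch (hypothetical module names) of the fragment dump_module
# emits for a module "foo" with two scripts depending on "bar"; "bar" itself
# is dumped first when recursively=True:
#
#     --module jsmodule_foo:2:jsmodule_bar,jsmodule__runtime
#         --js .../front_end/foo/a.js --js .../front_end/foo/b.js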
print 'Compiling frontend...'
compiler_args_file = tempfile.NamedTemporaryFile(mode='wt', delete=False)
try:
platform_protocol_externs_file = to_platform_path(protocol_externs_file)
runtime_js_path = to_platform_path(path.join(devtools_frontend_path, 'Runtime.js'))
checked_modules = modules_to_check()
for name in checked_modules:
closure_args = ' '.join(common_closure_args)
closure_args += ' --externs ' + to_platform_path(patched_es6_externs_file)
closure_args += ' --externs ' + to_platform_path(global_externs_file)
closure_args += ' --externs ' + platform_protocol_externs_file
runtime_module = module_arg(runtime_module_name) + ':1 --js ' + runtime_js_path
closure_args += runtime_module + dump_module(name, True, {})
compiler_args_file.write('%s %s%s' % (name, closure_args, os.linesep))
finally:
compiler_args_file.close()
modular_compiler_proc = popen(java_exec + ['-jar', closure_runner_jar, '--compiler-args-file', to_platform_path_exact(compiler_args_file.name)])
def unclosure_injected_script(sourceFileName, outFileName):
source = read_file(sourceFileName)
def replace_function(matchobj):
return re.sub(r'@param', 'param', matchobj.group(1) or '') + '\n//' + matchobj.group(2)
# Comment out the closure function and its jsdocs
source = re.sub(r'(/\*\*(?:[\s\n]*\*\s*@param[^\n]+\n)+\s*\*/\s*)?\n(\(function)', replace_function, source, count=1)
# Comment out its return statement
source = re.sub(r'\n(\s*return\s+[^;]+;\s*\n\}\)\s*)$', '\n/*\\1*/', source)
# Replace the "var Object" override with a "self.Object" one
source = re.sub(r'\nvar Object =', '\nself.Object =', source, count=1)
write_file(outFileName, source)
injectedScriptSourceTmpFile = to_platform_path(path.join(inspector_path, 'InjectedScriptSourceTmp.js'))
unclosure_injected_script(injected_script_source_name, injectedScriptSourceTmpFile)
print 'Compiling InjectedScriptSource.js...'
spawned_compiler_command = java_exec + [
'-jar',
closure_compiler_jar
] + common_closure_args
command = spawned_compiler_command + [
'--externs', to_platform_path_exact(injected_script_externs_file),
'--externs', to_platform_path_exact(protocol_externs_file),
'--module', jsmodule_name_prefix + 'injected_script' + ':1',
'--js', to_platform_path(injectedScriptSourceTmpFile)
]
injectedScriptCompileProc = popen(command)
print 'Compiling devtools.js...'
command = spawned_compiler_command + [
'--externs', to_platform_path(global_externs_file),
'--externs', to_platform_path(path.join(devtools_frontend_path, 'host', 'InspectorFrontendHostAPI.js')),
'--module', jsmodule_name_prefix + 'devtools_js' + ':1',
'--js', to_platform_path(path.join(devtools_frontend_path, 'devtools.js'))
]
devtoolsJSCompileProc = popen(command)
print 'Verifying JSDoc comments...'
additional_jsdoc_check_files = [injectedScriptSourceTmpFile]
errors_found |= verify_jsdoc(additional_jsdoc_check_files)
jsdocValidatorProc, jsdocValidatorFileList = verify_jsdoc_extra(additional_jsdoc_check_files)
print 'Validating InjectedScriptSource.js...'
injectedscript_check_script_path = path.join(scripts_path, "check_injected_script_source.py")
validateInjectedScriptProc = popen([sys.executable, injectedscript_check_script_path, injected_script_source_name])
print
(jsdocValidatorOut, _) = jsdocValidatorProc.communicate()
if jsdocValidatorOut:
print ('JSDoc validator output:%s%s' % (os.linesep, jsdocValidatorOut))
errors_found = True
os.remove(jsdocValidatorFileList.name)
(moduleCompileOut, _) = modular_compiler_proc.communicate()
print 'Modular compilation output:'
start_module_regex = re.compile(r'^@@ START_MODULE:(.+) @@$')
end_module_regex = re.compile(r'^@@ END_MODULE @@$')
in_module = False
skipped_modules = {}
error_count = 0
def skip_dependents(module_name):
for skipped_module in dependents_by_module_name.get(module_name, []):
skipped_modules[skipped_module] = True
has_module_output = False
# pylint: disable=E1103
for line in moduleCompileOut.splitlines():
if not in_module:
match = re.search(start_module_regex, line)
if not match:
continue
in_module = True
has_module_output = True
module_error_count = 0
module_output = []
module_name = match.group(1)
skip_module = skipped_modules.get(module_name)
if skip_module:
skip_dependents(module_name)
else:
match = re.search(end_module_regex, line)
if not match:
if not skip_module:
module_output.append(line)
if hasErrors(line):
error_count += 1
module_error_count += 1
skip_dependents(module_name)
continue
in_module = False
if skip_module:
print 'Skipping module %s...' % module_name
elif not module_error_count:
print 'Module %s compiled successfully: %s' % (module_name, module_output[0])
else:
print 'Module %s compile failed: %s errors%s' % (module_name, module_error_count, os.linesep)
print os.linesep.join(module_output)
if not has_module_output:
print moduleCompileOut
if error_count:
print 'Total Closure errors: %d%s' % (error_count, os.linesep)
errors_found = True
(injectedScriptCompileOut, _) = injectedScriptCompileProc.communicate()
print 'InjectedScriptSource.js compilation output:%s' % os.linesep, injectedScriptCompileOut
errors_found |= hasErrors(injectedScriptCompileOut)
(devtoolsJSCompileOut, _) = devtoolsJSCompileProc.communicate()
print 'devtools.js compilation output:%s' % os.linesep, devtoolsJSCompileOut
errors_found |= hasErrors(devtoolsJSCompileOut)
(validateInjectedScriptOut, _) = validateInjectedScriptProc.communicate()
print 'Validate InjectedScriptSource.js output:%s' % os.linesep, (validateInjectedScriptOut if validateInjectedScriptOut else '<empty>')
errors_found |= hasErrors(validateInjectedScriptOut)
if errors_found:
print 'ERRORS DETECTED'
os.remove(injectedScriptSourceTmpFile)
os.remove(compiler_args_file.name)
os.remove(protocol_externs_file)
shutil.rmtree(modules_dir, True)
| 38.668616 | 159 | 0.702475 |
334a5c13275a6676b290f2fe215acc3e07db5278 | 4,043 | py | Python | setup.py | ayushev/python-optiga-trust | 51ca647fde05cf6dce325baa6a9dd72bce3efa07 | [
"MIT"
] | null | null | null | setup.py | ayushev/python-optiga-trust | 51ca647fde05cf6dce325baa6a9dd72bce3efa07 | [
"MIT"
] | null | null | null | setup.py | ayushev/python-optiga-trust | 51ca647fde05cf6dce325baa6a9dd72bce3efa07 | [
"MIT"
] | 1 | 2020-08-24T12:19:48.000Z | 2020-08-24T12:19:48.000Z | from setuptools import setup, find_packages
from setuptools.command.install import install
import platform
import sys
import os
import shutil
def __get_arch_os():
platforms = {
'linux': 'Linux',
'linux1': 'Linux',
'linux2': 'Linux',
'darwin': 'OSX',
'cygwin': 'Windows',
'msys': 'Windows',
'win32': 'Windows',
}
if sys.platform not in platforms:
return sys.platform
return platform.architecture()[0], platforms[sys.platform]
def __get_lib_postfix():
targets = {
'Linux': {
'32bit': 'x86',
'64bit': 'x86_64'
},
'Windows': {
'32bit': 'ms32',
'64bit': 'ms64',
}
}
arch_os = __get_arch_os()
if arch_os[1] not in targets:
raise Exception('Platform not supported')
return targets[arch_os[1]][arch_os[0]]
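# Hedged examples (editor's addition) of the (arch, os) -> postfix mapping:
#
#     ('64bit', 'Linux')   -> 'x86_64'
#     ('32bit', 'Windows') -> 'ms32'
#     macOS resolves to 'OSX' in __get_arch_os() but is not in `targets`,
#     so __get_lib_postfix() raises 'Platform not supported' there.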
def __copy_rules(target):
rules = 'src/optiga-trust-x/pal/libusb/include/90-optigatrust.rules'
if not os.path.exists(target):
raise FileNotFoundError
if not os.path.exists(target + os.path.sep + os.path.basename(rules)):
shutil.copy(rules, target)
def _install_rules():
if sys.platform.startswith('linux'):
try:
__copy_rules('/etc/udev/rules.d')
except PermissionError:
print('Install udev rules failed, install as sudo or manually')
        except Exception:
            print('Install udev rules failed')
def __readme():
with open('README.md', 'r', encoding='utf-8') as f:
readme = f.read()
return readme
class OptigaTrustInstall(install):
def run(self):
self.do_egg_install()
_install_rules()
__name = 'optigatrust'
__desc = 'The ctypes Python wrapper for the Infineon OPTIGA(TM) Trust family of security solutions'
__url = 'https://github.com/infineon/python-optiga-trust'
__author = 'Infineon Technologies AG'
__author_email = 'DSSTechnicalSupport@infineon.com'
__license = 'MIT'
__keywords = 'ECDHE ECDSA RSA ECC X509 NISTP256 NIST384 OPTIGA TRUST TRUSTX TRUSTM'
__classifiers = [
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 3.7',
'Operating System :: Microsoft :: Windows',
'Operating System :: Microsoft :: Windows :: Windows 8',
'Operating System :: Microsoft :: Windows :: Windows 8.1',
'Operating System :: Microsoft :: Windows :: Windows 10',
'Operating System :: POSIX :: Linux'
]
with open(os.path.join(__name, "version.py")) as init_root:
for line in init_root:
if line.startswith("__version_info__"):
__version_tuple__ = eval(line.split("=")[1])
__version = ".".join([str(x) for x in __version_tuple__])
# Parameters for setup
__packages = [
'optigatrust',
'optigatrust.enums',
'optigatrust.csrc.lib'
]
__package_data = {
'optigatrust.csrc.lib': ['*.dll', '*.so', '*.ini'],
'optigatrust.enums': ['*.xml'],
'optigatrust.rules': [
'csrc/optiga-trust-x/pal/libusb/include/90-optigatrust.rules'
]
}
__package_dir = {
"optigatrust": "optigatrust",
}
if __name__ == '__main__':
setup(
name=__name,
version=__version,
description=__desc,
long_description=__readme(),
long_description_content_type='text/markdown',
url=__url,
author=__author,
author_email=__author_email,
keywords=__keywords,
license=__license,
classifiers=__classifiers,
include_package_data=True,
packages=__packages,
package_dir=__package_dir,
package_data=__package_data,
setup_requires=['setuptools>=40', 'wheel'],
        # A package must not list itself in its own install_requires.
        install_requires=['asn1crypto', 'jinja2', 'cryptography'],
python_requires='>=3.5',
)
| 27.882759 | 100 | 0.604502 |
17075df6e17f147ba0c3d78bba28717deda4ed67 | 620 | py | Python | utils/migrations/0003_newsletter.py | manishwins/Greenline | 28085358eb6d8e500075067747f242e457117c44 | [
"Apache-2.0"
] | null | null | null | utils/migrations/0003_newsletter.py | manishwins/Greenline | 28085358eb6d8e500075067747f242e457117c44 | [
"Apache-2.0"
] | null | null | null | utils/migrations/0003_newsletter.py | manishwins/Greenline | 28085358eb6d8e500075067747f242e457117c44 | [
"Apache-2.0"
] | null | null | null | # Generated by Django 3.1.7 on 2021-04-28 05:12
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('utils', '0002_city_country_state'),
]
operations = [
migrations.CreateModel(
name='NewsLetter',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('email', models.CharField(blank=True, max_length=255, null=True)),
],
options={
'db_table': 'news_letter',
},
),
]
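    # Editor's note (standard Django workflow, commands assumed):
    #
    #     python manage.py migrate utils          # apply this migration
    #     python manage.py migrate utils 0002     # roll it back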
| 25.833333 | 114 | 0.554839 |
8d142c143f00c1e12fe8cfa96e14352cfad86826 | 649 | py | Python | lib/SceneGraphParser/test1.py | xzhEmma/Text2Scene | 0e67877543d69ccac0db08204815cf0091b96778 | [
"MIT",
"Unlicense"
] | null | null | null | lib/SceneGraphParser/test1.py | xzhEmma/Text2Scene | 0e67877543d69ccac0db08204815cf0091b96778 | [
"MIT",
"Unlicense"
] | null | null | null | lib/SceneGraphParser/test1.py | xzhEmma/Text2Scene | 0e67877543d69ccac0db08204815cf0091b96778 | [
"MIT",
"Unlicense"
] | null | null | null | import sng_parser
import urllib.request
import urllib.parse
import json
def translation(content):
url = 'http://fanyi.youdao.com/translate?smartresult=dict&smartresult=rule'
    data = {'i': content, 'doctype': 'json'}  # build the request dict that carries the text to translate
data = urllib.parse.urlencode(data).encode('utf-8')
response = urllib.request.urlopen(url, data)
html = response.read().decode('utf-8')
target = json.loads(html)
print("翻译的结果为 : %s" % (target['translateResult'][0][0]['tgt']))
sentence = 'A black Honda motorcycle parked in front of a garage'
translation(sentence)
print(sng_parser.parse(sentence)["relations"])
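# Editor's note - a hedged sketch of the output shape: sng_parser.parse()
# returns a dict graph, roughly
#
#     {'entities': [{'head': 'motorcycle', ...}, {'head': 'garage', ...}],
#      'relations': [{'subject': 0, 'object': 1, 'relation': 'parked in front of'}]}
#
# so the line above prints only the 'relations' list.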
| 30.904762 | 80 | 0.684129 |
e4c6dea2da56a1651f9de450002cad01220ae8b7 | 5,582 | py | Python | configs/RS-data/Faster-RCNN/SENet/se-resnet101-backbone-R32.py | SuzaKrish/mmdetection | 31c16891d7493252262e738bcbf05326dba866b2 | [
"Apache-2.0"
] | null | null | null | configs/RS-data/Faster-RCNN/SENet/se-resnet101-backbone-R32.py | SuzaKrish/mmdetection | 31c16891d7493252262e738bcbf05326dba866b2 | [
"Apache-2.0"
] | null | null | null | configs/RS-data/Faster-RCNN/SENet/se-resnet101-backbone-R32.py | SuzaKrish/mmdetection | 31c16891d7493252262e738bcbf05326dba866b2 | [
"Apache-2.0"
] | null | null | null | # model settings
model = dict(
type='FasterRCNN',
pretrained='torchvision://resnet101',
backbone=dict(
type='ResNet',
depth=101,
num_stages=4,
out_indices=(0, 1, 2, 3),
frozen_stages=1,
norm_cfg=dict(type='BN', requires_grad=True),
style='pytorch',
attention=dict(
type='SENet',
inplanes=256,
reduction=32,
bias=True)),
neck=dict(
type='FPN',
in_channels=[256, 512, 1024, 2048],
out_channels=256,
num_outs=5),
rpn_head=dict(
type='RPNHead',
in_channels=256,
feat_channels=256,
anchor_scales=[8],
anchor_ratios=[0.5, 1.0, 2.0],
anchor_strides=[4, 8, 16, 32, 64],
target_means=[.0, .0, .0, .0],
target_stds=[1.0, 1.0, 1.0, 1.0],
loss_cls=dict(
type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)),
bbox_roi_extractor=dict(
type='SingleRoIExtractor',
roi_layer=dict(type='RoIAlign', out_size=7, sample_num=2),
out_channels=256,
featmap_strides=[4, 8, 16, 32]),
bbox_head=dict(
type='SharedFCBBoxHead',
num_fcs=2,
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=21,
target_means=[0., 0., 0., 0.],
target_stds=[0.1, 0.1, 0.2, 0.2],
reg_class_agnostic=False,
loss_cls=dict(
type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)))
# model training and testing settings
train_cfg = dict(
rpn=dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.7,
neg_iou_thr=0.3,
min_pos_iou=0.3,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=256,
pos_fraction=0.5,
neg_pos_ub=-1,
add_gt_as_proposals=False),
allowed_border=0,
pos_weight=-1,
debug=False),
rpn_proposal=dict(
nms_across_levels=False,
nms_pre=2000,
nms_post=2000,
max_num=2000,
nms_thr=0.7,
min_bbox_size=0),
rcnn=dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.5,
neg_iou_thr=0.5,
min_pos_iou=0.5,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
add_gt_as_proposals=True),
pos_weight=-1,
debug=False))
test_cfg = dict(
rpn=dict(
nms_across_levels=False,
nms_pre=1000,
nms_post=1000,
max_num=1000,
nms_thr=0.7,
min_bbox_size=0),
rcnn=dict(
score_thr=0.05, nms=dict(type='nms', iou_thr=0.5), max_per_img=-1)
# soft-nms is also supported for rcnn testing
# e.g., nms=dict(type='soft_nms', iou_thr=0.5, min_score=0.05)
)
# dataset settings
dataset_type = 'DIORDataset'
data_root = 'data/'
img_norm_cfg = dict(
mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
train_pipeline = [
dict(type='LoadImageFromFile'),
dict(type='LoadAnnotations', with_bbox=True),
dict(type='Resize', img_scale=(800, 800), keep_ratio=True),
dict(type='RandomFlip', flip_ratio=0.5),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='DefaultFormatBundle'),
dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']),
]
test_pipeline = [
dict(type='LoadImageFromFile'),
dict(
type='MultiScaleFlipAug',
img_scale=(800, 800),
flip=False,
transforms=[
dict(type='Resize', keep_ratio=True),
dict(type='RandomFlip'),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='ImageToTensor', keys=['img']),
dict(type='Collect', keys=['img']),
])
]
data = dict(
imgs_per_gpu=2,
workers_per_gpu=2,
train=dict(
type=dataset_type,
ann_file= data_root + 'DIOR/ImageSets/Main/trainval.txt',
img_prefix= data_root + 'DIOR/',
pipeline=train_pipeline),
val=dict(
type=dataset_type,
ann_file=data_root + 'DIOR/ImageSets/Main/test.txt',
img_prefix=data_root + 'DIOR/',
pipeline=test_pipeline),
test=dict(
type=dataset_type,
ann_file=data_root + 'DIOR/ImageSets/Main/test.txt',
img_prefix=data_root + 'DIOR/',
pipeline=test_pipeline))
# optimizer
optimizer = dict(type='SGD', lr=0.0025, momentum=0.9, weight_decay=0.0001)
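# Editor's note (hedged): per mmdetection's linear scaling rule the reference
# lr is 0.02 for 8 GPUs x 2 imgs/gpu, so lr=0.0025 matches a single GPU with
# imgs_per_gpu=2: 0.02 * (1 * 2) / (8 * 2) = 0.0025.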
optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2))
# learning policy
lr_config = dict(
policy='step',
warmup='linear',
warmup_iters=500,
warmup_ratio=1.0 / 3,
step=[8, 11])
#lr_config = dict(policy='step', step=[3])
checkpoint_config = dict(interval=1)
evaluation = dict(interval=12)
# yapf:disable
log_config = dict(
interval=50,
hooks=[
dict(type='TextLoggerHook'),
# dict(type='TensorboardLoggerHook')
])
# yapf:enable
# runtime settings
find_unused_parameters=True
total_epochs = 12
dist_params = dict(backend='nccl')
log_level = 'INFO'
work_dir = './work_dirs/faster_rcnn_r50_fpn_1x'
load_from = None
resume_from = None
workflow = [('train', 1)]
| 30.336957 | 78 | 0.587603 |
cc17ac03cdd682bb817b1ab899e83c1fe075507d | 845 | py | Python | repo/script.module.liveresolver/lib/liveresolver/resolvers/streamup.py | Hades01/Addons | 710da97ac850197498a3cd64be1811c593610add | [
"Apache-2.0"
] | 3 | 2020-03-03T13:21:44.000Z | 2021-07-21T09:53:31.000Z | repo/script.module.liveresolver/lib/liveresolver/resolvers/streamup.py | Hades01/Addons | 710da97ac850197498a3cd64be1811c593610add | [
"Apache-2.0"
] | null | null | null | repo/script.module.liveresolver/lib/liveresolver/resolvers/streamup.py | Hades01/Addons | 710da97ac850197498a3cd64be1811c593610add | [
"Apache-2.0"
] | 2 | 2020-04-01T22:11:12.000Z | 2020-05-07T23:54:52.000Z | # -*- coding: utf-8 -*-
import re,urlparse,urllib
from liveresolver.modules import client
from liveresolver.modules.log_utils import log
def resolve(url):
    try:
        # Extract the channel id from the page URL.
        id = re.findall('streamup.com/([^$/]+)', url)[0]
        playUrl = 'https://streamup.com/%s/embeds/video?startMuted=true' % id
        url = 'https://api.streamup.com/v1/channels/' + id
        result = client.request(url, referer=playUrl)
        slug = re.findall('.*?["\']slug["\']\s*:\s*["\']([^"\']+)["\'].*', result)[0]
        url2 = 'https://lancer.streamup.com/api/channels/%s/playlists' % slug
        result = client.request(url2, referer=playUrl)
        # Pick the HLS playlist and append the headers the player needs.
        url = re.findall('.*(http[^"\']+\.m3u8[^"\']*).*', result)[0]
        url += '|%s' % urllib.urlencode({'Referer': playUrl, 'User-agent': client.agent()})
        return url
    except:
        return
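# Hedged usage sketch (editor's addition, channel name hypothetical):
#
#     link = resolve('http://streamup.com/somechannel')
#     # -> 'https://.../playlist.m3u8|Referer=...&User-agent=...' or None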
| 29.137931 | 85 | 0.575148 |
f1ce2f4e8cf9da450cf2a67fc53c6997e31ba75c | 14,525 | py | Python | tools/perf/page_sets/webrtc_cases.py | zealoussnow/chromium | fd8a8914ca0183f0add65ae55f04e287543c7d4a | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 14,668 | 2015-01-01T01:57:10.000Z | 2022-03-31T23:33:32.000Z | tools/perf/page_sets/webrtc_cases.py | zealoussnow/chromium | fd8a8914ca0183f0add65ae55f04e287543c7d4a | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 113 | 2015-05-04T09:58:14.000Z | 2022-01-31T19:35:03.000Z | tools/perf/page_sets/webrtc_cases.py | zealoussnow/chromium | fd8a8914ca0183f0add65ae55f04e287543c7d4a | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 5,941 | 2015-01-02T11:32:21.000Z | 2022-03-31T16:35:46.000Z | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from page_sets import press_story
from telemetry import story
class WebrtcPage(press_story.PressStory):
def __init__(self, url, page_set, name, tags, extra_browser_args=None):
assert url.startswith('file://webrtc_cases/')
self.URL = url
self.NAME = name
super(WebrtcPage, self).__init__(page_set,
tags=tags,
extra_browser_args=extra_browser_args)
class GetUserMedia(WebrtcPage):
"""Why: Acquires a high definition (720p) local stream."""
def __init__(self, page_set, tags):
super(GetUserMedia, self).__init__(
url='file://webrtc_cases/resolution.html',
name='hd_local_stream_10s',
page_set=page_set, tags=tags)
def ExecuteTest(self, action_runner):
action_runner.ClickElement('button[id="hd"]')
action_runner.Wait(10)
class DataChannel(WebrtcPage):
"""Why: Transfer as much data as possible through a data channel in 10s."""
def __init__(self, page_set, tags):
super(DataChannel, self).__init__(
url='file://webrtc_cases/datatransfer.html',
name='10s_datachannel_transfer',
page_set=page_set, tags=tags)
def ExecuteTest(self, action_runner):
action_runner.ExecuteJavaScript('megsToSend.value = 100;')
action_runner.ClickElement('button[id="sendTheData"]')
action_runner.Wait(10)
def ParseTestResults(self, action_runner):
self.AddJavaScriptMeasurement(
'data_transferred',
'sizeInBytes_biggerIsBetter',
'receiveProgress.value',
description='Amount of data transferred by data channel in 10 seconds')
self.AddJavaScriptMeasurement(
'data_throughput',
'bytesPerSecond',
'currentThroughput',
description='Throughput of the data transfer.')
class CanvasCapturePeerConnection(WebrtcPage):
"""Why: Sets up a canvas capture stream connection to a peer connection."""
def __init__(self, page_set, tags):
super(CanvasCapturePeerConnection, self).__init__(
url='file://webrtc_cases/canvas-capture.html',
name='canvas_capture_peer_connection',
page_set=page_set, tags=tags)
def ExecuteTest(self, action_runner):
with action_runner.CreateInteraction('Action_Canvas_PeerConnection',
repeatable=False):
action_runner.ClickElement('button[id="startButton"]')
action_runner.Wait(10)
class VideoCodecConstraints(WebrtcPage):
"""Why: Sets up a video codec to a peer connection."""
def __init__(self, page_set, video_codec, tags):
super(VideoCodecConstraints, self).__init__(
url='file://webrtc_cases/codec_constraints.html',
name='codec_constraints_%s' % video_codec.lower(),
page_set=page_set, tags=tags)
self.video_codec = video_codec
def ExecuteTest(self, action_runner):
with action_runner.CreateInteraction('Action_Codec_Constraints',
repeatable=False):
action_runner.ClickElement('input[id="%s"]' % self.video_codec)
action_runner.ClickElement('button[id="startButton"]')
action_runner.WaitForElement('button[id="callButton"]:enabled')
action_runner.ClickElement('button[id="callButton"]')
action_runner.Wait(20)
action_runner.ClickElement('button[id="hangupButton"]')
class MultiplePeerConnections(WebrtcPage):
"""Why: Sets up several peer connections in the same page."""
def __init__(self, page_set, tags):
super(MultiplePeerConnections, self).__init__(
url='file://webrtc_cases/multiple-peerconnections.html',
name='multiple_peerconnections',
page_set=page_set, tags=tags)
def ExecuteTest(self, action_runner):
with action_runner.CreateInteraction('Action_Create_PeerConnection',
repeatable=False):
# Set the number of peer connections to create to 10.
action_runner.ExecuteJavaScript(
'document.getElementById("num-peerconnections").value=10')
action_runner.ExecuteJavaScript(
'document.getElementById("cpuoveruse-detection").checked=false')
action_runner.ClickElement('button[id="start-test"]')
action_runner.Wait(20)
class PausePlayPeerConnections(WebrtcPage):
"""Why: Ensures frequent pause and plays of peer connection streams work."""
def __init__(self, page_set, tags):
super(PausePlayPeerConnections, self).__init__(
url='file://webrtc_cases/pause-play.html',
name='pause_play_peerconnections',
page_set=page_set, tags=tags)
def ExecuteTest(self, action_runner):
action_runner.ExecuteJavaScript(
'startTest({test_runtime_s}, {num_peerconnections},'
'{iteration_delay_ms}, "video");'.format(
test_runtime_s=20, num_peerconnections=10, iteration_delay_ms=20))
action_runner.Wait(20)
class InsertableStreamsAudioProcessing(WebrtcPage):
"""Why: processes/transforms audio using insertable streams."""
def __init__(self, page_set, tags):
super(InsertableStreamsAudioProcessing, self).__init__(
url='file://webrtc_cases/audio-processing.html',
name='insertable_streams_audio_processing',
page_set=page_set,
tags=tags,
extra_browser_args=(
'--enable-blink-features=WebCodecs,MediaStreamInsertableStreams'))
self.supported = None
def RunNavigateSteps(self, action_runner):
self.supported = action_runner.EvaluateJavaScript('''(function () {
try {
new MediaStreamTrackGenerator('audio');
return true;
} catch (e) {
return false;
}
})()''')
if self.supported:
super(InsertableStreamsAudioProcessing,
self).RunNavigateSteps(action_runner)
def ExecuteTest(self, action_runner):
self.AddMeasurement(
'supported', 'count_biggerIsBetter', 1 if self.supported else 0,
'Boolean flag indicating if this benchmark is supported by the browser.'
)
if not self.supported:
return
action_runner.WaitForJavaScriptCondition('!!audio')
action_runner.ExecuteJavaScript('start()')
action_runner.Wait(10)
class InsertableStreamsVideoProcessing(WebrtcPage):
"""Why: processes/transforms video in various ways."""
def __init__(self, page_set, source, transform, sink, tags):
super(InsertableStreamsVideoProcessing, self).__init__(
url='file://webrtc_cases/video-processing.html',
name=('insertable_streams_video_processing_%s_%s_%s' %
(source, transform, sink)),
page_set=page_set,
tags=tags,
extra_browser_args=(
'--enable-blink-features=WebCodecs,MediaStreamInsertableStreams'))
self.source = source
self.transform = transform
self.sink = sink
self.supported = None
def RunNavigateSteps(self, action_runner):
self.supported = action_runner.EvaluateJavaScript(
"typeof MediaStreamTrackProcessor !== 'undefined' &&"
"typeof MediaStreamTrackGenerator !== 'undefined'")
if self.supported:
super(InsertableStreamsVideoProcessing,
self).RunNavigateSteps(action_runner)
def ExecuteTest(self, action_runner):
self.AddMeasurement(
'supported', 'count_biggerIsBetter', 1 if self.supported else 0,
'Boolean flag indicating if this benchmark is supported by the browser.'
)
if not self.supported:
return
with action_runner.CreateInteraction('Start_Pipeline', repeatable=True):
action_runner.WaitForElement('select[id="sourceSelector"]:enabled')
action_runner.ExecuteJavaScript(
'document.getElementById("sourceSelector").value="%s";' % self.source)
action_runner.WaitForElement('select[id="transformSelector"]:enabled')
action_runner.ExecuteJavaScript(
'document.getElementById("transformSelector").value="%s";' %
self.transform)
action_runner.WaitForElement('select[id="sinkSelector"]:enabled')
action_runner.ExecuteJavaScript(
'document.getElementById("sinkSelector").value="%s";' % self.sink)
action_runner.ExecuteJavaScript(
'document.getElementById("sourceSelector").dispatchEvent('
' new InputEvent("input", {}));')
action_runner.WaitForElement('.sinkVideo')
action_runner.Wait(10)
self.AddJavaScriptMeasurement(
'sink_decoded_frames',
'count_biggerIsBetter',
'document.querySelector(".sinkVideo").webkitDecodedFrameCount',
description='Number of frames received at the sink video.')
class NegotiateTiming(WebrtcPage):
"""Why: Measure how long renegotiation takes with large SDP blobs."""
def __init__(self, page_set, tags):
super(NegotiateTiming,
self).__init__(url='file://webrtc_cases/negotiate-timing.html',
name='negotiate-timing',
page_set=page_set,
tags=tags)
def ExecuteTest(self, action_runner):
action_runner.ExecuteJavaScript('start()')
action_runner.WaitForJavaScriptCondition('!callButton.disabled')
action_runner.ExecuteJavaScript('call()')
action_runner.WaitForJavaScriptCondition('!renegotiateButton.disabled')
# Due to suspicion of renegotiate activating too early:
action_runner.Wait(1)
# Negotiate 50 transceivers, then negotiate back to 1, simulating Meet "pin"
action_runner.ExecuteJavaScript('videoSectionsField.value = 50')
action_runner.ExecuteJavaScript('renegotiate()')
action_runner.WaitForJavaScriptCondition('!renegotiateButton.disabled')
action_runner.ExecuteJavaScript('videoSectionsField.value = 1')
action_runner.ExecuteJavaScript('renegotiate()')
action_runner.WaitForJavaScriptCondition('!renegotiateButton.disabled')
# Negotiate back up to 50, simulating Meet "unpin". This is what gets measured.
action_runner.ExecuteJavaScript('videoSectionsField.value = 50')
action_runner.ExecuteJavaScript('renegotiate()')
action_runner.WaitForJavaScriptCondition('!renegotiateButton.disabled')
result = action_runner.EvaluateJavaScript('result')
self.AddMeasurement('callerSetLocalDescription',
'ms',
result['callerSetLocalDescription'],
description='Time for caller SetLocalDescription')
self.AddMeasurement('calleeSetLocalDescription',
'ms',
result['calleeSetLocalDescription'],
description='Time for callee SetLocalDescription')
self.AddMeasurement('callerSetRemoteDescription',
'ms',
result['callerSetRemoteDescription'],
description='Time for caller SetRemoteDescription')
self.AddMeasurement('calleeSetRemoteDescription',
'ms',
result['calleeSetRemoteDescription'],
description='Time for callee SetRemoteDescription')
self.AddMeasurement('callerCreateOffer',
'ms',
result['callerCreateOffer'],
description='Time for caller CreateOffer')
self.AddMeasurement('calleeCreateAnswer',
'ms',
result['calleeCreateAnswer'],
description='Time for callee CreateAnswer')
self.AddMeasurement('elapsedTime',
'ms',
result['elapsedTime'],
description='Time for overall offer/answer handshake')
self.AddMeasurement(
'audioImpairment',
'count',
result['audioImpairment'],
description='Number of late audio samples concealed during negotiation')
class WebrtcPageSet(story.StorySet):
def __init__(self):
super(WebrtcPageSet, self).__init__(
cloud_storage_bucket=story.PUBLIC_BUCKET)
self.AddStory(PausePlayPeerConnections(self, tags=['pauseplay']))
self.AddStory(MultiplePeerConnections(self, tags=['stress']))
self.AddStory(DataChannel(self, tags=['datachannel']))
self.AddStory(GetUserMedia(self, tags=['getusermedia']))
self.AddStory(CanvasCapturePeerConnection(self, tags=['smoothness']))
self.AddStory(VideoCodecConstraints(self, 'H264', tags=['videoConstraints']))
self.AddStory(VideoCodecConstraints(self, 'VP8', tags=['videoConstraints']))
self.AddStory(VideoCodecConstraints(self, 'VP9', tags=['videoConstraints']))
self.AddStory(
InsertableStreamsAudioProcessing(self, tags=['insertableStreams']))
self.AddStory(
InsertableStreamsVideoProcessing(self,
'camera',
'webgl',
'video',
tags=['insertableStreams']))
self.AddStory(
InsertableStreamsVideoProcessing(self,
'video',
'webgl',
'video',
tags=['insertableStreams']))
self.AddStory(
InsertableStreamsVideoProcessing(self,
'pc',
'webgl',
'video',
tags=['insertableStreams']))
self.AddStory(
InsertableStreamsVideoProcessing(self,
'camera',
'canvas2d',
'video',
tags=['insertableStreams']))
self.AddStory(
InsertableStreamsVideoProcessing(self,
'camera',
'noop',
'video',
tags=['insertableStreams']))
self.AddStory(
InsertableStreamsVideoProcessing(self,
'camera',
'webgl',
'pc',
tags=['insertableStreams']))
self.AddStory(NegotiateTiming(self, tags=['sdp']))
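# A hedged sketch, not in the original story set: further source/transform/
# sink permutations could be registered the same way, reusing selector values
# already driven by InsertableStreamsVideoProcessing.ExecuteTest above
# ('video', 'canvas2d' and 'pc' all appear in the combinations registered):
#
#   self.AddStory(
#       InsertableStreamsVideoProcessing(self,
#                                        'video',
#                                        'canvas2d',
#                                        'pc',
#                                        tags=['insertableStreams']))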
| 41.979769 | 83 | 0.637522 |
76524b0aca9bf97c7490dfe5890b0259037fe95a | 4,698 | py | Python | lib/python3.8/site-packages/ansible_collections/arista/eos/plugins/module_utils/network/eos/providers/cli/config/bgp/address_family.py | cjsteel/python3-venv-ansible-2.10.5 | c95395c4cae844dc66fddde9b4343966f4b2ecd5 | [
"Apache-1.1"
] | null | null | null | lib/python3.8/site-packages/ansible_collections/arista/eos/plugins/module_utils/network/eos/providers/cli/config/bgp/address_family.py | cjsteel/python3-venv-ansible-2.10.5 | c95395c4cae844dc66fddde9b4343966f4b2ecd5 | [
"Apache-1.1"
] | null | null | null | lib/python3.8/site-packages/ansible_collections/arista/eos/plugins/module_utils/network/eos/providers/cli/config/bgp/address_family.py | cjsteel/python3-venv-ansible-2.10.5 | c95395c4cae844dc66fddde9b4343966f4b2ecd5 | [
"Apache-1.1"
] | null | null | null | #
# (c) 2019, Ansible by Red Hat, inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import re
from ansible.module_utils.six import iteritems
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common.utils import (
to_list,
)
from ansible_collections.arista.eos.plugins.module_utils.network.eos.providers.providers import (
CliProvider,
)
from ansible_collections.arista.eos.plugins.module_utils.network.eos.providers.cli.config.bgp.neighbors import (
AFNeighbors,
)
class AddressFamily(CliProvider):
def render(self, config=None):
commands = list()
safe_list = list()
router_context = "router bgp %s" % self.get_value("config.bgp_as")
context_config = None
for item in self.get_value("config.address_family"):
context = "address-family %s" % item["afi"]
context_commands = list()
if config:
context_path = [router_context, context]
context_config = self.get_config_context(
config, context_path, indent=2
)
for key, value in iteritems(item):
if value is not None:
meth = getattr(self, "_render_%s" % key, None)
if meth:
resp = meth(item, context_config)
if resp:
context_commands.extend(to_list(resp))
if context_commands:
commands.append(context)
commands.extend(context_commands)
commands.append("exit")
safe_list.append(context)
if self.params["operation"] == "replace":
if config:
resp = self._negate_config(config, safe_list)
commands.extend(resp)
return commands
def _negate_config(self, config, safe_list=None):
commands = list()
matches = re.findall(r"(address-family .+)$", config, re.M)
for item in set(matches).difference(safe_list):
commands.append("no %s" % item)
return commands
def _render_auto_summary(self, item, config=None):
cmd = "auto-summary"
if item["auto_summary"] is False:
cmd = "no %s" % cmd
if not config or cmd not in config:
return cmd
def _render_synchronization(self, item, config=None):
cmd = "synchronization"
if item["synchronization"] is False:
cmd = "no %s" % cmd
if not config or cmd not in config:
return cmd
def _render_networks(self, item, config=None):
commands = list()
safe_list = list()
for entry in item["networks"]:
network = entry["prefix"]
if entry["masklen"]:
network = "%s/%s" % (entry["prefix"], entry["masklen"])
safe_list.append(network)
cmd = "network %s" % network
if entry["route_map"]:
cmd += " route-map %s" % entry["route_map"]
if not config or cmd not in config:
commands.append(cmd)
if self.params["operation"] == "replace":
if config:
matches = re.findall(r"network (\S+)", config, re.M)
for entry in set(matches).difference(safe_list):
commands.append("no network %s" % entry)
return commands
def _render_redistribute(self, item, config=None):
commands = list()
safe_list = list()
for entry in item["redistribute"]:
option = entry["protocol"]
cmd = "redistribute %s" % entry["protocol"]
if entry["route_map"]:
cmd += " route-map %s" % entry["route_map"]
if not config or cmd not in config:
commands.append(cmd)
safe_list.append(option)
if self.params["operation"] == "replace":
if config:
matches = re.findall(
r"redistribute (\S+)(?:\s*)(\d*)", config, re.M
)
for i in range(0, len(matches)):
matches[i] = " ".join(matches[i]).strip()
for entry in set(matches).difference(safe_list):
commands.append("no redistribute %s" % entry)
return commands
def _render_neighbors(self, item, config):
""" generate bgp neighbor configuration
"""
return AFNeighbors(self.params).render(
config, nbr_list=item["neighbors"]
)
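# Illustrative sketch (not part of the Ansible collection): given one
# address-family entry such as
#
#   {"afi": "ipv4",
#    "networks": [{"prefix": "10.0.0.0", "masklen": 8, "route_map": "RM1"}]}
#
# render() dispatches to the _render_* helpers above and assembles the
# matching CLI block:
#
#   address-family ipv4
#   network 10.0.0.0/8 route-map RM1
#   exit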
| 32.625 | 112 | 0.560877 |
a821a048d7270959660b47f1a39bfdee9a00d50e | 4,382 | py | Python | src/arch/arm/ArmTLB.py | He-Liu-ooo/Computer-Architecture-THUEE-2022-spring- | 9d36aaacbc7eea357608524113bec97bae2ea229 | [
"BSD-3-Clause"
] | 4 | 2020-12-25T03:12:00.000Z | 2022-01-07T03:35:35.000Z | src/arch/arm/ArmTLB.py | He-Liu-ooo/Computer-Architecture-THUEE-2022-spring- | 9d36aaacbc7eea357608524113bec97bae2ea229 | [
"BSD-3-Clause"
] | 3 | 2021-03-26T20:33:59.000Z | 2022-01-24T22:54:03.000Z | src/arch/arm/ArmTLB.py | He-Liu-ooo/Computer-Architecture-THUEE-2022-spring- | 9d36aaacbc7eea357608524113bec97bae2ea229 | [
"BSD-3-Clause"
] | 3 | 2021-03-27T16:36:19.000Z | 2022-03-28T18:32:57.000Z | # -*- mode:python -*-
# Copyright (c) 2009, 2013, 2015 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from m5.SimObject import SimObject
from m5.params import *
from m5.proxy import *
from m5.objects.BaseTLB import BaseTLB
from m5.objects.ClockedObject import ClockedObject
# Basic stage 1 translation objects
class ArmTableWalker(ClockedObject):
type = 'ArmTableWalker'
cxx_class = 'ArmISA::TableWalker'
cxx_header = "arch/arm/table_walker.hh"
is_stage2 = Param.Bool(False, "Is this object for stage 2 translation?")
num_squash_per_cycle = Param.Unsigned(2,
"Number of outstanding walks that can be squashed per cycle")
# The port to the memory system. This port is ultimately belonging
# to the Stage2MMU, and shared by the two table walkers, but we
# access it through the ITB and DTB walked objects in the CPU for
# symmetry with the other ISAs.
port = RequestPort("Port used by the two table walkers")
sys = Param.System(Parent.any, "system object parameter")
class ArmTLB(BaseTLB):
type = 'ArmTLB'
cxx_class = 'ArmISA::TLB'
cxx_header = "arch/arm/tlb.hh"
sys = Param.System(Parent.any, "system object parameter")
size = Param.Int(64, "TLB size")
walker = Param.ArmTableWalker(ArmTableWalker(), "HW Table walker")
is_stage2 = Param.Bool(False, "Is this a stage 2 TLB?")
# Stage 2 translation objects, only used when virtualisation is being used
class ArmStage2TableWalker(ArmTableWalker):
is_stage2 = True
class ArmStage2TLB(ArmTLB):
size = 32
walker = ArmStage2TableWalker()
is_stage2 = True
class ArmStage2MMU(SimObject):
type = 'ArmStage2MMU'
cxx_class = 'ArmISA::Stage2MMU'
cxx_header = 'arch/arm/stage2_mmu.hh'
tlb = Param.ArmTLB("Stage 1 TLB")
stage2_tlb = Param.ArmTLB("Stage 2 TLB")
sys = Param.System(Parent.any, "system object parameter")
class ArmStage2IMMU(ArmStage2MMU):
# We rely on the itb being a parameter of the CPU, and get the
# appropriate object that way
tlb = Parent.any
stage2_tlb = ArmStage2TLB()
class ArmStage2DMMU(ArmStage2MMU):
# We rely on the dtb being a parameter of the CPU, and get the
# appropriate object that way
tlb = Parent.any
stage2_tlb = ArmStage2TLB()
class ArmITB(ArmTLB):
stage2_mmu = ArmStage2IMMU()
class ArmDTB(ArmTLB):
stage2_mmu = ArmStage2DMMU()
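# Minimal configuration sketch (an assumption, not part of gem5 itself): a
# config script would attach these SimObjects to a CPU and may override any
# Param declared above, e.g.:
#
#   cpu.itb = ArmITB(size=128)                 # grow the instruction TLB
#   cpu.dtb = ArmDTB()                         # keep the default 64 entries
#   cpu.dtb.walker.num_squash_per_cycle = 4    # tune the table walker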
| 41.733333 | 77 | 0.75194 |
fae60d1e2c493919852844c43eb132afcf8fb599 | 2,778 | py | Python | mergeMetadata/modules/utils/utilities.py | isabella232/ALM-SF-DX-Python-Tools | 8a29c1d056653999542e9d7048641d5fb5c36bed | [
"Apache-2.0"
] | null | null | null | mergeMetadata/modules/utils/utilities.py | isabella232/ALM-SF-DX-Python-Tools | 8a29c1d056653999542e9d7048641d5fb5c36bed | [
"Apache-2.0"
] | 1 | 2021-02-23T18:15:06.000Z | 2021-02-23T18:15:06.000Z | mergeMetadata/modules/utils/utilities.py | isabella232/ALM-SF-DX-Python-Tools | 8a29c1d056653999542e9d7048641d5fb5c36bed | [
"Apache-2.0"
] | null | null | null | import os
from modules.utils import XMLNS
from modules.utils.exceptions import NoFullNameError
MAP_COMPOSED_FULLNAME = {
'actionOverrides' : { 'main' : 'actionName', 'secondary' : 'formFactor' },
'layoutAssignments' : { 'main' : 'layout', 'secondary' : 'recordType' }
}
MAP_FULLNAME = {
'applicationVisibilities' : 'application',
'categoryGroupVisibilities' : 'dataCategoryGroup',
'classAccesses' : 'apexClass',
'customMetadataTypeAccesses' : 'name',
'customPermissions' : 'name',
'customSettingAccesses' : 'name',
'externalDataSourceAccesses' : 'externalDataSource',
'fieldPermissions' : 'field',
'flowAccesses' : 'flow',
'loginHours' : 'weekdayStart',
'loginIpRanges' : 'startAddress',
'objectPermissions' : 'object',
'pageAccesses' : 'apexPage',
'profileActionOverride' : 'actionName',
'recordTypeVisibilities' : 'recordType',
'sharingRecalculations' : 'className',
'tabVisibilities' : 'tab',
'userPermissions' : 'name'
}
def checkFolder( folderPath ):
if not os.path.isdir( folderPath ):
os.makedirs( folderPath )
def getFullName( tagName, childElement ):
if tagName in MAP_COMPOSED_FULLNAME:
fullName = getComposedName( tagName, childElement )
elif tagName in MAP_FULLNAME:
fullName = searchFullNameTag( MAP_FULLNAME[ tagName ], childElement )
else:
fullName = searchFullNameTag( 'fullName', childElement )
return fullName
def searchFullNameTag( fullNameTag, childElement ):
fullName = ''
for subChildElement in childElement.getchildren():
tagName = subChildElement.tag.split( XMLNS )[ 1 ]
if fullNameTag == tagName:
fullName = subChildElement.text
if not fullName:
raise NoFullNameError( childElement.tag )
return fullName
def getComposedName( tagName, childElement ):
fullName = ''
mainId = MAP_COMPOSED_FULLNAME[ tagName ][ 'main' ]
secondId = MAP_COMPOSED_FULLNAME[ tagName ][ 'secondary' ]
mainName = ''
secondName = ''
for subChildElement in childElement.getchildren():
tagName = subChildElement.tag.split( XMLNS )[ 1 ]
if mainId == tagName:
mainName = subChildElement.text
if secondId == tagName:
secondName = subChildElement.text
fullName = mainName + secondName
if not fullName:
raise NoFullNameError( childElement.tag )
return fullName
def xmlEncodeText( textValue ):
textValue = textValue.replace( "&", "&" );
textValue = textValue.replace( "<", "<" );
textValue = textValue.replace( ">", ">" );
textValue = textValue.replace( "\"", """ );
textValue = textValue.replace( "'", "'" );
return textValue
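# Quick illustrative examples (not in the original module):
#
#   xmlEncodeText('a < b & "c"')  ->  'a &lt; b &amp; &quot;c&quot;'
#
# getFullName() resolves the identifying child per section name: plain
# sections read their <fullName> child, sections listed in MAP_FULLNAME read
# the mapped tag (e.g. <field> for fieldPermissions), and composed sections
# such as layoutAssignments concatenate the <layout> and <recordType> texts.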
| 34.725 | 78 | 0.656947 |
0130af1a2ad5ebce044e8298c3db2e6a0fbf34a6 | 2,871 | py | Python | embedding-calculator/src/services/facescan/plugins/agegender/agegender.py | em3ndez/CompreFace | 503153ebbe158d091155d924576610d7d7a88e4d | [
"Apache-2.0"
] | 1 | 2021-09-06T12:40:42.000Z | 2021-09-06T12:40:42.000Z | embedding-calculator/src/services/facescan/plugins/agegender/agegender.py | em3ndez/CompreFace | 503153ebbe158d091155d924576610d7d7a88e4d | [
"Apache-2.0"
] | null | null | null | embedding-calculator/src/services/facescan/plugins/agegender/agegender.py | em3ndez/CompreFace | 503153ebbe158d091155d924576610d7d7a88e4d | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2020 the original author or authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing
# permissions and limitations under the License.
from typing import Tuple, Union
import numpy as np
import tensorflow as tf
from cached_property import cached_property
from src.services.imgtools.types import Array3D
from src.services.facescan.plugins import base, managers
from src.services.facescan.plugins.agegender import helpers
from src.services.dto import plugin_result
class BaseAgeGender(base.BasePlugin):
LABELS: Tuple[Tuple[int, int], ...]
@cached_property
def _model(self):
labels = self.LABELS
model_dir = self.ml_model.path
IMAGE_SIZE = managers.plugin_manager.detector.IMAGE_SIZE
g = tf.Graph()
with g.as_default():
sess = tf.Session(config=tf.ConfigProto(allow_soft_placement=True))
images = tf.placeholder(tf.float32, [None, IMAGE_SIZE, IMAGE_SIZE, 3])
logits = helpers.inception_v3(len(labels), images)
tf.global_variables_initializer()
checkpoint = tf.train.get_checkpoint_state(model_dir)
saver = tf.train.Saver()
saver.restore(sess, checkpoint.model_checkpoint_path)
softmax_output = tf.nn.softmax(logits)
def get_value(img: Array3D) -> Tuple[Union[str, Tuple], float]:
img = np.expand_dims(helpers.prewhiten(img), 0)
output = sess.run(softmax_output, feed_dict={images: img})[0]
best_i = int(np.argmax(output))
return labels[best_i], output[best_i]
return get_value
class AgeDetector(BaseAgeGender):
slug = 'age'
LABELS = ((0, 2), (4, 6), (8, 12), (15, 20), (25, 32), (38, 43), (48, 53), (60, 100))
ml_models = (
('22801', '1PxK72O-NROEz8pUGDDFRDYF4AABbvWiC'),
)
def __call__(self, face: plugin_result.FaceDTO):
value, probability = self._model(face._face_img)
return plugin_result.AgeDTO(age=value, age_probability=probability)
class GenderDetector(BaseAgeGender):
slug = 'gender'
LABELS = ('male', 'female')
ml_models = (
('21936', '1j9B76U3b4_F9e8-OKlNdOBQKa2ziGe_-'),
)
def __call__(self, face: plugin_result.FaceDTO):
value, probability = self._model(face._face_img)
return plugin_result.GenderDTO(gender=value, gender_probability=probability)
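# Rough usage sketch (assumption -- plugin wiring and model download are
# handled elsewhere in CompreFace): once the ml_model weights are in place,
# a detector instance is simply called with a detected face DTO:
#
#   age = AgeDetector()(face)        # -> AgeDTO(age=(25, 32), age_probability=...)
#   gender = GenderDetector()(face)  # -> GenderDTO(gender='male', ...)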
| 36.341772 | 89 | 0.678858 |
8e9f56d9b92233327c33f6a2ecbea3ca6a4e0652 | 1,102 | py | Python | tests/core/tests/fields_tests.py | PlantandFoodResearch/django-import-export | b4a1bd44a04df53d5fb40ba349749c759a72f18f | [
"BSD-2-Clause"
] | null | null | null | tests/core/tests/fields_tests.py | PlantandFoodResearch/django-import-export | b4a1bd44a04df53d5fb40ba349749c759a72f18f | [
"BSD-2-Clause"
] | 1 | 2020-04-09T04:45:36.000Z | 2020-04-09T04:45:36.000Z | tests/core/tests/fields_tests.py | PlantandFoodResearch/django-import-export | b4a1bd44a04df53d5fb40ba349749c759a72f18f | [
"BSD-2-Clause"
] | null | null | null | from __future__ import unicode_literals
from datetime import date
from django.test import TestCase
from import_export import fields
class Obj:
def __init__(self, name, date=None):
self.name = name
self.date = date
class FieldTest(TestCase):
def setUp(self):
self.field = fields.Field(column_name='name', attribute='name')
self.row = {
'name': 'Foo',
}
self.obj = Obj(name='Foo', date=date(2012, 8, 13))
def test_clean(self):
self.assertEqual(self.field.clean(self.row, self.obj),
self.row['name'])
def test_export(self):
self.assertEqual(self.field.export(self.obj),
self.row['name'])
def test_save(self):
self.row['name'] = 'foo'
self.field.save(self.obj, self.row)
self.assertEqual(self.obj.name, 'foo')
def test_following_attribute(self):
field = fields.Field(attribute='other_obj__name')
obj2 = Obj(name="bar")
self.obj.other_obj = obj2
self.assertEqual(field.export(self.obj), "bar")
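# Illustrative note (not part of the original tests): the double underscore
# in attribute='other_obj__name' makes Field traverse nested attributes, so
# export() resolves self.obj.other_obj.name here -- the same spelling Django
# uses for related lookups.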
| 25.045455 | 71 | 0.604356 |
d6bd86eb1ec397b3e91d44cd97cc4cf30a113764 | 1,224 | py | Python | load_balancer.py | dgomes/load-balancer | fca48a8bf5e035d5a493b2febaece8eaad0ef453 | [
"MIT"
] | null | null | null | load_balancer.py | dgomes/load-balancer | fca48a8bf5e035d5a493b2febaece8eaad0ef453 | [
"MIT"
] | null | null | null | load_balancer.py | dgomes/load-balancer | fca48a8bf5e035d5a493b2febaece8eaad0ef453 | [
"MIT"
] | null | null | null | # coding: utf-8
import socket
import logging
# configure logger output format
logging.basicConfig(level=logging.DEBUG,format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',datefmt='%m-%d %H:%M:%S')
def main(addr, servers):
# create a logger
logger = logging.getLogger('Load Balancer')
# connect to the back end servers
client_sockets = []
for server in servers:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(server)
client_sockets.append(s)
# open a socket for the clients
server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
logger.info('Bind address %s', addr)
server_socket.bind(addr)
server_socket.listen(1)
while True:
conn, addr = server_socket.accept()
logger.info('New Connection %s', addr)
request = conn.recv(4096)
logger.debug("%s", request)
client_sockets[0].send(request)
reply = client_sockets[0].recv(4096)
logger.debug("%s", reply)
conn.send(reply)
conn.close()
return 0
if __name__ == '__main__':
main(('127.0.0.1', 8080),[('localhost', 5000)])
#,('localhost', 5001),('localhost', 5002), ('localhost', 5003)])
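# A minimal round-robin sketch (not in the original file): as written, main()
# always forwards to client_sockets[0]; cycling over the backends would make
# it actually balance, e.g.:
#
#   import itertools
#   backends = itertools.cycle(client_sockets)
#   ...
#   backend = next(backends)      # pick the next back end per request
#   backend.send(request)
#   reply = backend.recv(4096)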
| 29.853659 | 126 | 0.638889 |
a65c326385777a1cb463be0caa8c5db71401bbe8 | 6,641 | py | Python | BasicImageManipulation/code/Task4and5.py | coderkhaleesi/Computer-Vision-Traditional-Techniques | ac24334e2f1762ad8ae450b6a01c9086474c132c | [
"MIT"
] | null | null | null | BasicImageManipulation/code/Task4and5.py | coderkhaleesi/Computer-Vision-Traditional-Techniques | ac24334e2f1762ad8ae450b6a01c9086474c132c | [
"MIT"
] | null | null | null | BasicImageManipulation/code/Task4and5.py | coderkhaleesi/Computer-Vision-Traditional-Techniques | ac24334e2f1762ad8ae450b6a01c9086474c132c | [
"MIT"
] | null | null | null | import numpy as np
import matplotlib.pyplot as plt
import cv2
#please make sure to keep the face images in the same folder as code
#before running this
#Task 4
#Part 1
I = cv2.imread('face 02 7043565.jpg', 1);
I = cv2.cvtColor(I, cv2.COLOR_BGR2RGB)
I = np.clip(I, 0, 255)
J = I[50:600, 200:800]
#cropped the image
J = cv2.cvtColor(J, cv2.COLOR_RGB2GRAY)
cropped_resized_gray = cv2.resize(J, (256, 256), interpolation = cv2.INTER_AREA)
cropped_resized_gray = np.clip(cropped_resized_gray, 0, 255)
fig, ax = plt.subplots(1, 2)
fig.suptitle('Display img and resized gray img')
ax[0].imshow(I)
ax[0].set(title="The original img")
ax[1].imshow(cropped_resized_gray, cmap=plt.cm.gray)
ax[1].set(title="Cropped resized gray img")
plt.show()
#Part 2
noise = 15*np.random.randn(256,256)
gray_with_noise = cropped_resized_gray + noise
plt.imshow(gray_with_noise, cmap=plt.cm.gray)
#Part 3
hist_without_noise = cv2.calcHist([cropped_resized_gray], [0], None, [256], [0,256])
hist_with_noise = cv2.calcHist([np.clip(np.uint8(gray_with_noise), 0, 255)], [0], None, [256], [0,256])
fig, ax = plt.subplots(1, 2)
fig.suptitle('Added Gaussian noise')
ax[0].plot(hist_without_noise)
ax[0].set(title="Without Noise")
ax[1].plot(hist_with_noise)
ax[1].set(title="With Noise")
plt.show()
#Part 4
#First let's create a Gaussian kernel
kernel_1 = cv2.getGaussianKernel(5, 1) #size, std is 1
kernel_5 = cv2.getGaussianKernel(5, 5) #std=5
kernel_10 = cv2.getGaussianKernel(5, 10) #std=10
kernel_15 = cv2.getGaussianKernel(5, 15) #std=15
window = np.outer(kernel_1, kernel_1.T) #the 5x5 kernel
#The convolution function
def conv(img, kernel):
img_row, img_col = img.shape
kernel_row, kernel_col = kernel.shape
final_img = np.zeros(img.shape)
pad_h = int((kernel_row-1)/2)
pad_w = int((kernel_col-1)/2)
padded_img = np.zeros((img_row+2*pad_h, img_col+2*pad_w))
padded_img[pad_h:padded_img.shape[0]-pad_h,pad_w:padded_img.shape[1]-pad_w] = img
for i in range(img_row):
for j in range(img_col):
final_img[i, j] = np.sum(kernel*padded_img[i:i+kernel_row, j:j+kernel_col])
return final_img
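# Quick sanity check for conv() (illustrative, not in the original script):
# with a 3x3 averaging kernel the centre of a constant image is preserved,
# while the zero-padded border darkens:
#
#   k = np.ones((3, 3)) / 9.0
#   out = conv(np.ones((5, 5)), k)
#   # out[2, 2] == 1.0; out[0, 0] == 4/9 (only 4 of the 9 taps land inside)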
#Part 5
output_img = conv(gray_with_noise, window)
fig, ax = plt.subplots(1, 2)
fig.suptitle('Applying custom Gaussian filter')
ax[0].imshow(gray_with_noise, cmap=plt.cm.gray)
ax[0].set(title="Gray img with noise")
ax[1].imshow(output_img, cmap=plt.cm.gray)
ax[1].set(title="Gray img after filter applied")
plt.show()
fig, ax = plt.subplots(2, 2)
ax[0,0].imshow(output_img, cmap=plt.cm.gray)
ax[0,0].set(title="Gaussian Filter with std 1")
ax[0,1].imshow(conv(gray_with_noise, np.outer(kernel_5, kernel_5.T)), cmap=plt.cm.gray)
ax[0,1].set(title="Gaussian Filter with std 1")
ax[1,0].imshow(conv(gray_with_noise, np.outer(kernel_10, kernel_10.T)), cmap=plt.cm.gray)
ax[1,0].set(title="Gaussian Filter with std 10")
ax[1,1].imshow(conv(gray_with_noise, np.outer(kernel_15, kernel_15.T)), cmap=plt.cm.gray)
ax[1,1].set(title="Gaussian Filter with std 15")
plt.tight_layout()
plt.show()
#Part 6
output_img = conv(gray_with_noise, np.outer(kernel_15, kernel_15.T))
output_with_built_in = cv2.GaussianBlur(gray_with_noise, (5,5), 15)
#https://docs.opencv.org/2.4/modules/imgproc/doc/filtering.html#void%20GaussianBlur(InputArray%20src,%20OutputArray%20dst,%20Size%20ksize,%20double%20sigmaX,%20double%20sigmaY,%20int%20borderType)
fig, ax = plt.subplots(1, 2)
fig.suptitle('Comparing custom filter with built-in filter')
ax[0].imshow(output_img, cmap=plt.cm.gray)
ax[0].set(title="Gray img after Custom filter")
ax[1].imshow(output_with_built_in, cmap=plt.cm.gray)
ax[1].set(title="Gray img after Built-in Filter")
plt.show()
hist_my_func = cv2.calcHist([np.uint8(output_img)], [0], None, [256], [0,256])
hist_built_in = cv2.calcHist([np.uint8(output_with_built_in)], [0], None, [256], [0,256])
fig, ax = plt.subplots(1, 2)
fig.suptitle('Compare histograms of filtered images')
ax[0].plot(hist_my_func)
ax[0].set(title="Hist custom function")
ax[1].plot(hist_built_in)
ax[1].set(title="Hist built-in function")
plt.show()
#Task 5
sobel_filter = np.array([[-1, 0, 1],[-2, 0, 2],[-1, 0, 1]])
output_sobel_vertical = conv(cropped_resized_gray, sobel_filter)
output_sobel_horizontal = conv(cropped_resized_gray, sobel_filter.T)  # transposed kernel gives the horizontal Sobel response
fig, ax = plt.subplots(1, 3)
fig.suptitle('')
ax[0].imshow(cropped_resized_gray, cmap=plt.cm.gray)
ax[0].set(title="Original image")
ax[1].imshow(output_sobel_vertical, cmap=plt.cm.gray)
ax[1].set(title="After vertical sobel")
ax[2].imshow(output_sobel_horizontal, cmap=plt.cm.gray)
ax[2].set(title="After horizontal sobel")
plt.show()
#Comparison with in-built func
sobelx = cv2.Sobel(cropped_resized_gray,cv2.CV_64F,1,0,ksize=3)
sobely = cv2.Sobel(cropped_resized_gray,cv2.CV_64F,0,1,ksize=3)
fig, ax = plt.subplots(1, 3)
fig.suptitle('In-built Sobel Filter')
ax[0].imshow(cropped_resized_gray, cmap=plt.cm.gray)
ax[0].set(title="Original image")
ax[1].imshow(sobelx, cmap=plt.cm.gray)
ax[1].set(title="After vertical sobel")
ax[2].imshow(sobely, cmap=plt.cm.gray)
ax[2].set(title="After horizontal sobel")
plt.show()
#On more images
I = cv2.imread('face 03 7043565.jpg', 1);
I = cv2.cvtColor(I, cv2.COLOR_BGR2RGB)
I = np.clip(I, 0, 255)
J = I[50:600, 200:800]
#cropped the image
J = cv2.cvtColor(J, cv2.COLOR_RGB2GRAY)
cropped_resized_gray = cv2.resize(J, (256, 256), interpolation = cv2.INTER_AREA)
cropped_resized_gray = np.clip(cropped_resized_gray, 0, 255)
output_sobel_vertical = conv(cropped_resized_gray, sobel_filter)
output_sobel_horizontal = conv(cropped_resized_gray, sobel_filter.T)  # transposed kernel gives the horizontal Sobel response
fig, ax = plt.subplots(1, 3)
fig.suptitle('')
ax[0].imshow(cropped_resized_gray, cmap=plt.cm.gray)
ax[0].set(title="Original image")
ax[1].imshow(output_sobel_vertical, cmap=plt.cm.gray)
ax[1].set(title="After vertical sobel")
ax[2].imshow(output_sobel_horizontal, cmap=plt.cm.gray)
ax[2].set(title="After horizontal sobel")
plt.show()
sobelx = cv2.Sobel(cropped_resized_gray,cv2.CV_64F,1,0,ksize=3)
sobely = cv2.Sobel(cropped_resized_gray,cv2.CV_64F,0,1,ksize=3)
fig, ax = plt.subplots(1, 3)
fig.suptitle('In-built Sobel Filter')
ax[0].imshow(cropped_resized_gray, cmap=plt.cm.gray)
ax[0].set(title="Original image")
ax[1].imshow(sobelx, cmap=plt.cm.gray)
ax[1].set(title="After vertical sobel")
ax[2].imshow(sobely, cmap=plt.cm.gray)
ax[2].set(title="After horizontal sobel")
plt.show()
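# Possible extension (illustrative, not in the original script): combine the
# two in-built Sobel responses above into one gradient-magnitude edge map.
magnitude = np.hypot(sobelx, sobely)
plt.imshow(magnitude, cmap=plt.cm.gray)
plt.title("Sobel gradient magnitude")
plt.show()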
| 29.914414 | 197 | 0.70652 |
15ade09ea9986ac1337436691bde0d628f9b6cda | 585 | py | Python | arelle/examples/plugin/packagedImportTest/importTestImported11.py | DataFinnovation/Arelle | d4bf45f56fc9249f75ab22e6217dbe55f0510841 | [
"Apache-2.0"
] | 292 | 2015-01-27T03:31:51.000Z | 2022-03-26T07:00:05.000Z | arelle/examples/plugin/packagedImportTest/importTestImported11.py | DataFinnovation/Arelle | d4bf45f56fc9249f75ab22e6217dbe55f0510841 | [
"Apache-2.0"
] | 94 | 2015-04-18T23:03:00.000Z | 2022-03-28T17:24:55.000Z | arelle/examples/plugin/packagedImportTest/importTestImported11.py | DataFinnovation/Arelle | d4bf45f56fc9249f75ab22e6217dbe55f0510841 | [
"Apache-2.0"
] | 200 | 2015-01-13T03:55:47.000Z | 2022-03-29T12:38:56.000Z | '''
pluginPackages test case
(c) Copyright 2012 Mark V Systems Limited, All rights reserved.
'''
def foo():
print ("imported packaged plug-in relative imported 1.1")
__pluginInfo__ = {
'name': 'Package Relative Import 1.1',
'version': '0.9',
'description': "This is a packaged relative imported plugin.",
'license': 'Apache-2',
'author': 'Mark V Systems',
'copyright': '(c) Copyright 2015 Mark V Systems Limited, All rights reserved.',
# classes of mount points (required)
'Import.Packaged.Entry7': foo,
# imported plugins
}
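# Illustrative note (an assumption about the host, not verified Arelle code):
# the plug-in loader reads __pluginInfo__ and calls the registered mount
# points, so resolving this module's entry amounts to:
#
#   __pluginInfo__['Import.Packaged.Entry7']()   # runs foo() above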
| 27.857143 | 84 | 0.646154 |
ddeea2756d4c5d64b46cda43cec9f8cfa106bbba | 81 | py | Python | tests/periodicities/Hour/Cycle_Hour_400_H_5.py | shaido987/pyaf | b9afd089557bed6b90b246d3712c481ae26a1957 | [
"BSD-3-Clause"
] | 377 | 2016-10-13T20:52:44.000Z | 2022-03-29T18:04:14.000Z | tests/periodicities/Hour/Cycle_Hour_400_H_5.py | ysdede/pyaf | b5541b8249d5a1cfdc01f27fdfd99b6580ed680b | [
"BSD-3-Clause"
] | 160 | 2016-10-13T16:11:53.000Z | 2022-03-28T04:21:34.000Z | tests/periodicities/Hour/Cycle_Hour_400_H_5.py | ysdede/pyaf | b5541b8249d5a1cfdc01f27fdfd99b6580ed680b | [
"BSD-3-Clause"
] | 63 | 2017-03-09T14:51:18.000Z | 2022-03-27T20:52:57.000Z | import tests.periodicities.period_test as per
per.buildModel((5 , 'H' , 400));
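# Illustrative gloss (inferred only from this file's path,
# tests/periodicities/Hour/Cycle_Hour_400_H_5.py): the tuple appears to pack
# the parameters echoed in the filename -- cycle length 5, frequency 'H'
# (hourly) and 400 samples -- so a sibling variant would read, e.g.:
#
#   per.buildModel((12, 'H', 400))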
| 16.2 | 45 | 0.716049 |
e1aff8fc1e669176b13827b97cf124612e50e361 | 66 | py | Python | 13/00/0.py | pylangstudy/201710 | 139cad34d40f23beac85800633ec2ed63d530bfd | [
"CC0-1.0"
] | null | null | null | 13/00/0.py | pylangstudy/201710 | 139cad34d40f23beac85800633ec2ed63d530bfd | [
"CC0-1.0"
] | 25 | 2017-10-03T00:12:53.000Z | 2017-10-29T23:58:17.000Z | 13/00/0.py | pylangstudy/201710 | 139cad34d40f23beac85800633ec2ed63d530bfd | [
"CC0-1.0"
] | null | null | null | import fileinput
for line in fileinput.input():
print(line)
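# Note (illustrative, not in the original): each line keeps its trailing
# '\n', so print() adds a second newline; print(line, end='') would echo
# the input unchanged.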
| 11 | 30 | 0.712121 |
94e8ba0b2d8559da06bd9ceccf30e82fdb4ab099 | 180,821 | py | Python | psx/_dump_/51/_dump_ida_/make_psx.py | maoa3/scalpel | 2e7381b516cded28996d290438acc618d00b2aa7 | [
"Unlicense"
] | 15 | 2018-06-28T01:11:25.000Z | 2021-09-27T15:57:18.000Z | psx/_dump_/51/_dump_ida_/make_psx.py | maoa3/scalpel | 2e7381b516cded28996d290438acc618d00b2aa7 | [
"Unlicense"
] | 7 | 2018-06-29T04:08:23.000Z | 2019-10-17T13:57:22.000Z | psx/_dump_/51/_dump_ida_/make_psx.py | maoa3/scalpel | 2e7381b516cded28996d290438acc618d00b2aa7 | [
"Unlicense"
] | 7 | 2018-06-28T01:11:34.000Z | 2020-05-23T09:21:48.000Z | set_name(0x8007DBFC, "GetTpY__FUs", SN_NOWARN)
set_name(0x8007DC18, "GetTpX__FUs", SN_NOWARN)
set_name(0x8007DC24, "Remove96__Fv", SN_NOWARN)
set_name(0x8007DC5C, "AppMain", SN_NOWARN)
set_name(0x8007DD04, "MAIN_RestartGameTask__Fv", SN_NOWARN)
set_name(0x8007DD30, "GameTask__FP4TASK", SN_NOWARN)
set_name(0x8007DE24, "MAIN_MainLoop__Fv", SN_NOWARN)
set_name(0x8007DE6C, "CheckMaxArgs__Fv", SN_NOWARN)
set_name(0x8007DEA0, "GPUQ_InitModule__Fv", SN_NOWARN)
set_name(0x8007DEAC, "GPUQ_FlushQ__Fv", SN_NOWARN)
set_name(0x8007E020, "GPUQ_LoadImage__FP4RECTli", SN_NOWARN)
set_name(0x8007E0D4, "GPUQ_DiscardHandle__Fl", SN_NOWARN)
set_name(0x8007E174, "GPUQ_LoadClutAddr__FiiiPv", SN_NOWARN)
set_name(0x8007E210, "GPUQ_MoveImage__FP4RECTii", SN_NOWARN)
set_name(0x8007E2B0, "PRIM_Open__FiiiP10SCREEN_ENVUl", SN_NOWARN)
set_name(0x8007E3CC, "InitPrimBuffer__FP11PRIM_BUFFERii", SN_NOWARN)
set_name(0x8007E4A8, "PRIM_Clip__FP4RECTi", SN_NOWARN)
set_name(0x8007E5D0, "PRIM_GetCurrentScreen__Fv", SN_NOWARN)
set_name(0x8007E5DC, "PRIM_FullScreen__Fi", SN_NOWARN)
set_name(0x8007E618, "PRIM_Flush__Fv", SN_NOWARN)
set_name(0x8007E82C, "PRIM_GetCurrentOtList__Fv", SN_NOWARN)
set_name(0x8007E838, "ClearPbOnDrawSync", SN_NOWARN)
set_name(0x8007E874, "ClearedYet__Fv", SN_NOWARN)
set_name(0x8007E880, "PrimDrawSycnCallBack", SN_NOWARN)
set_name(0x8007E8A0, "SendDispEnv__Fv", SN_NOWARN)
set_name(0x8007E8C4, "PRIM_GetNextPolyF4__Fv", SN_NOWARN)
set_name(0x8007E8DC, "PRIM_GetNextPolyFt4__Fv", SN_NOWARN)
set_name(0x8007E8F4, "PRIM_GetNextPolyGt4__Fv", SN_NOWARN)
set_name(0x8007E90C, "PRIM_GetNextPolyG4__Fv", SN_NOWARN)
set_name(0x8007E924, "PRIM_GetNextPolyF3__Fv", SN_NOWARN)
set_name(0x8007E93C, "PRIM_GetNextDrArea__Fv", SN_NOWARN)
set_name(0x8007E954, "ClipRect__FRC4RECTR4RECT", SN_NOWARN)
set_name(0x8007EA68, "IsColiding__FRC4RECTT0", SN_NOWARN)
set_name(0x8007EAD0, "VID_AfterDisplay__Fv", SN_NOWARN)
set_name(0x8007EAF0, "VID_ScrOn__Fv", SN_NOWARN)
set_name(0x8007EB18, "VID_DoThisNextSync__FPFv_v", SN_NOWARN)
set_name(0x8007EB70, "VID_NextSyncRoutHasExecuted__Fv", SN_NOWARN)
set_name(0x8007EB7C, "VID_GetTick__Fv", SN_NOWARN)
set_name(0x8007EB88, "VID_DispEnvSend", SN_NOWARN)
set_name(0x8007EBC4, "VID_SetXYOff__Fii", SN_NOWARN)
set_name(0x8007EBD4, "VID_GetXOff__Fv", SN_NOWARN)
set_name(0x8007EBE0, "VID_GetYOff__Fv", SN_NOWARN)
set_name(0x8007EBEC, "VID_SetDBuffer__Fb", SN_NOWARN)
set_name(0x8007ED5C, "MyFilter__FUlUlPCc", SN_NOWARN)
set_name(0x8007ED64, "SlowMemMove__FPvT0Ul", SN_NOWARN)
set_name(0x8007ED84, "GetTpY__FUs_addr_8007ED84", SN_NOWARN)
set_name(0x8007EDA0, "GetTpX__FUs_addr_8007EDA0", SN_NOWARN)
set_name(0x8007EDAC, "SYSI_GetFs__Fv", SN_NOWARN)
set_name(0x8007EDB8, "SYSI_GetOverlayFs__Fv", SN_NOWARN)
set_name(0x8007EDC4, "SortOutFileSystem__Fv", SN_NOWARN)
set_name(0x8007EF00, "MemCb__FlPvUlPCcii", SN_NOWARN)
set_name(0x8007EF20, "Spanker__Fv", SN_NOWARN)
set_name(0x8007EF60, "GaryLiddon__Fv", SN_NOWARN)
set_name(0x8007EF68, "ReadPad__Fi", SN_NOWARN)
set_name(0x8007F0D8, "DummyPoll__Fv", SN_NOWARN)
set_name(0x8007F0E0, "DaveOwens__Fv", SN_NOWARN)
set_name(0x8007F108, "GetCur__C4CPad", SN_NOWARN)
set_name(0x8007F130, "CheckActive__4CPad", SN_NOWARN)
set_name(0x8007F13C, "GetTpY__FUs_addr_8007F13C", SN_NOWARN)
set_name(0x8007F158, "GetTpX__FUs_addr_8007F158", SN_NOWARN)
set_name(0x8007F164, "TimSwann__Fv", SN_NOWARN)
set_name(0x8007F16C, "__6FileIOUl", SN_NOWARN)
set_name(0x8007F1BC, "___6FileIO", SN_NOWARN)
set_name(0x8007F210, "Read__6FileIOPCcUl", SN_NOWARN)
set_name(0x8007F378, "FileLen__6FileIOPCc", SN_NOWARN)
set_name(0x8007F3DC, "FileNotFound__6FileIOPCc", SN_NOWARN)
set_name(0x8007F3FC, "StreamFile__6FileIOPCciPFPUciib_bii", SN_NOWARN)
set_name(0x8007F4DC, "ReadAtAddr__6FileIOPCcPUci", SN_NOWARN)
set_name(0x8007F5A0, "DumpOldPath__6FileIO", SN_NOWARN)
set_name(0x8007F604, "SetSearchPath__6FileIOPCc", SN_NOWARN)
set_name(0x8007F6E0, "FindFile__6FileIOPCcPc", SN_NOWARN)
set_name(0x8007F7F4, "CopyPathItem__6FileIOPcPCc", SN_NOWARN)
set_name(0x8007F89C, "LockSearchPath__6FileIO", SN_NOWARN)
set_name(0x8007F8F4, "UnlockSearchPath__6FileIO", SN_NOWARN)
set_name(0x8007F94C, "SearchPathExists__6FileIO", SN_NOWARN)
set_name(0x8007F960, "Save__6FileIOPCcPUci", SN_NOWARN)
set_name(0x8007F99C, "__4PCIOUl", SN_NOWARN)
set_name(0x8007FA04, "___4PCIO", SN_NOWARN)
set_name(0x8007FA5C, "FileExists__4PCIOPCc", SN_NOWARN)
set_name(0x8007FAA0, "LoReadFileAtAddr__4PCIOPCcPUci", SN_NOWARN)
set_name(0x8007FB64, "GetFileLength__4PCIOPCc", SN_NOWARN)
set_name(0x8007FC1C, "LoSave__4PCIOPCcPUci", SN_NOWARN)
set_name(0x8007FCF0, "LoStreamFile__4PCIOPCciPFPUciib_bii", SN_NOWARN)
set_name(0x8007FF00, "__6SysObj", SN_NOWARN)
set_name(0x8007FF18, "__nw__6SysObji", SN_NOWARN)
set_name(0x8007FF44, "__nw__6SysObjiUl", SN_NOWARN)
set_name(0x8007FFC0, "__dl__6SysObjPv", SN_NOWARN)
set_name(0x8008002C, "__5DatIOUl", SN_NOWARN)
set_name(0x80080068, "___5DatIO", SN_NOWARN)
set_name(0x800800C0, "FileExists__5DatIOPCc", SN_NOWARN)
set_name(0x80080100, "LoReadFileAtAddr__5DatIOPCcPUci", SN_NOWARN)
set_name(0x800801C0, "GetFileLength__5DatIOPCc", SN_NOWARN)
set_name(0x80080274, "LoSave__5DatIOPCcPUci", SN_NOWARN)
set_name(0x8008031C, "LoStreamFile__5DatIOPCciPFPUciib_bii", SN_NOWARN)
set_name(0x80080528, "__7TextDat", SN_NOWARN)
set_name(0x80080568, "___7TextDat", SN_NOWARN)
set_name(0x800805B0, "Use__7TextDat", SN_NOWARN)
set_name(0x800807A4, "TpLoadCallBack__FPUciib", SN_NOWARN)
set_name(0x8008084C, "StreamLoadTP__7TextDat", SN_NOWARN)
set_name(0x80080904, "FinishedUsing__7TextDat", SN_NOWARN)
set_name(0x80080974, "MakeBlockOffsetTab__7TextDat", SN_NOWARN)
set_name(0x800809E4, "MakeOffsetTab__C9CBlockHdr", SN_NOWARN)
set_name(0x80080B10, "SetUVTp__7TextDatP9FRAME_HDRP8POLY_FT4ii", SN_NOWARN)
set_name(0x80080C10, "PrintMonster__7TextDatiiibi", SN_NOWARN)
set_name(0x8008101C, "PrepareFt4__7TextDatP8POLY_FT4iiiii", SN_NOWARN)
set_name(0x80081288, "GetDecompBufffer__7TextDati", SN_NOWARN)
set_name(0x800813E8, "SetUVTpGT4__7TextDatP9FRAME_HDRP8POLY_GT4ii", SN_NOWARN)
set_name(0x800814E8, "PrepareGt4__7TextDatP8POLY_GT4iiiii", SN_NOWARN)
set_name(0x80081740, "SetUVTpGT3__7TextDatP9FRAME_HDRP8POLY_GT3", SN_NOWARN)
set_name(0x800817C4, "PrepareGt3__7TextDatP8POLY_GT3iii", SN_NOWARN)
set_name(0x8008198C, "PrintFt4__7TextDatiiiiii", SN_NOWARN)
set_name(0x80081AE0, "PrintGt4__7TextDatiiiiii", SN_NOWARN)
set_name(0x80081C34, "PrintGt3__7TextDatiiii", SN_NOWARN)
set_name(0x80081D18, "DecompFrame__7TextDatP9FRAME_HDR", SN_NOWARN)
set_name(0x80081E70, "MakeCreatureOffsetTab__7TextDat", SN_NOWARN)
set_name(0x80081FB0, "MakePalOffsetTab__7TextDat", SN_NOWARN)
set_name(0x800820AC, "InitData__7TextDat", SN_NOWARN)
set_name(0x800820D8, "DumpData__7TextDat", SN_NOWARN)
set_name(0x80082220, "GM_UseTexData__Fi", SN_NOWARN)
set_name(0x80082340, "GM_FinishedUsing__FP7TextDat", SN_NOWARN)
set_name(0x80082394, "SetPal__7TextDatP9FRAME_HDRP8POLY_FT4", SN_NOWARN)
set_name(0x80082458, "GetFrNum__7TextDatiiii", SN_NOWARN)
set_name(0x800824AC, "IsDirAliased__7TextDatiii", SN_NOWARN)
set_name(0x80082504, "DoDecompRequests__7TextDat", SN_NOWARN)
set_name(0x80082628, "FindDecompArea__7TextDatR4RECT", SN_NOWARN)
set_name(0x80082700, "GetFileInfo__7TextDati", SN_NOWARN)
set_name(0x80082750, "GetSize__C15CCreatureAction", SN_NOWARN)
set_name(0x80082778, "GetFrNum__C15CCreatureActionii", SN_NOWARN)
set_name(0x80082820, "InitDirRemap__15CCreatureAction", SN_NOWARN)
set_name(0x800828E0, "GetFrNum__C12CCreatureHdriii", SN_NOWARN)
set_name(0x80082924, "GetAction__C12CCreatureHdri", SN_NOWARN)
set_name(0x800829B4, "InitActionDirRemaps__12CCreatureHdr", SN_NOWARN)
set_name(0x80082A24, "GetSize__C12CCreatureHdr", SN_NOWARN)
set_name(0x80082A90, "LoadDat__C13CTextFileInfo", SN_NOWARN)
set_name(0x80082AE0, "LoadHdr__C13CTextFileInfo", SN_NOWARN)
set_name(0x80082B08, "GetFile__C13CTextFileInfoPc", SN_NOWARN)
set_name(0x80082BA4, "HasFile__C13CTextFileInfoPc", SN_NOWARN)
set_name(0x80082C0C, "Un64__FPUcT0l", SN_NOWARN)
set_name(0x80082CE0, "__7CScreen", SN_NOWARN)
set_name(0x80082D14, "Load__7CScreeniii", SN_NOWARN)
set_name(0x80082FB4, "Unload__7CScreen", SN_NOWARN)
set_name(0x80082FD8, "Display__7CScreeniiii", SN_NOWARN)
set_name(0x800832B8, "SetRect__5CPartR7TextDatR4RECT", SN_NOWARN)
set_name(0x80083334, "GetBoundingBox__6CBlockR7TextDatR4RECT", SN_NOWARN)
set_name(0x80083490, "_GLOBAL__D_DatPool", SN_NOWARN)
set_name(0x800834E8, "_GLOBAL__I_DatPool", SN_NOWARN)
set_name(0x8008353C, "PRIM_GetPrim__FPP8POLY_GT3", SN_NOWARN)
set_name(0x800835B8, "PRIM_GetPrim__FPP8POLY_GT4", SN_NOWARN)
set_name(0x80083634, "PRIM_GetPrim__FPP8POLY_FT4", SN_NOWARN)
set_name(0x800836B0, "CanXferFrame__C7TextDat", SN_NOWARN)
set_name(0x800836D8, "CanXferPal__C7TextDat", SN_NOWARN)
set_name(0x80083700, "IsLoaded__C7TextDat", SN_NOWARN)
set_name(0x8008370C, "GetTexNum__C7TextDat", SN_NOWARN)
set_name(0x80083718, "GetCreature__7TextDati", SN_NOWARN)
set_name(0x80083790, "GetNumOfCreatures__7TextDat", SN_NOWARN)
set_name(0x800837A4, "SetFileInfo__7TextDatPC13CTextFileInfoi", SN_NOWARN)
set_name(0x800837B0, "GetNumOfFrames__7TextDat", SN_NOWARN)
set_name(0x800837C4, "GetPal__7TextDati", SN_NOWARN)
set_name(0x800837E0, "GetFr__7TextDati", SN_NOWARN)
set_name(0x800837FC, "GetName__C13CTextFileInfo", SN_NOWARN)
set_name(0x80083808, "HasDat__C13CTextFileInfo", SN_NOWARN)
set_name(0x80083830, "HasTp__C13CTextFileInfo", SN_NOWARN)
set_name(0x80083858, "GetSize__C6CBlock", SN_NOWARN)
set_name(0x8008386C, "__4CdIOUl", SN_NOWARN)
set_name(0x800838B0, "___4CdIO", SN_NOWARN)
set_name(0x80083908, "FileExists__4CdIOPCc", SN_NOWARN)
set_name(0x8008392C, "LoReadFileAtAddr__4CdIOPCcPUci", SN_NOWARN)
set_name(0x800839B0, "GetFileLength__4CdIOPCc", SN_NOWARN)
set_name(0x800839D4, "LoSave__4CdIOPCcPUci", SN_NOWARN)
set_name(0x80083AB4, "LoStreamCallBack__Fi", SN_NOWARN)
set_name(0x80083AC4, "CD_GetCdlFILE__FPCcP7CdlFILE", SN_NOWARN)
set_name(0x80083C10, "LoStreamFile__4CdIOPCciPFPUciib_bii", SN_NOWARN)
set_name(0x80083E9C, "LoAsyncStreamFile__4CdIOPCciPFPUciib_bii", SN_NOWARN)
set_name(0x80083FFC, "BL_InitEAC__Fv", SN_NOWARN)
set_name(0x800840E8, "BL_ReadFile__FPcUl", SN_NOWARN)
set_name(0x80084214, "BL_AsyncReadFile__FPcUl", SN_NOWARN)
set_name(0x80084388, "BL_LoadDirectory__Fv", SN_NOWARN)
set_name(0x800844B0, "BL_LoadStreamDir__Fv", SN_NOWARN)
set_name(0x80084790, "BL_MakeFilePosTab__FPUcUl", SN_NOWARN)
set_name(0x80084890, "BL_FindStreamFile__FPcc", SN_NOWARN)
set_name(0x80084A5C, "BL_FileExists__FPcc", SN_NOWARN)
set_name(0x80084A80, "BL_FileLength__FPcc", SN_NOWARN)
set_name(0x80084AB4, "BL_LoadFileAtAddr__FPcPUcc", SN_NOWARN)
set_name(0x80084B9C, "BL_AsyncLoadDone__Fv", SN_NOWARN)
set_name(0x80084BA8, "BL_WaitForAsyncFinish__Fv", SN_NOWARN)
set_name(0x80084BF4, "BL_AsyncLoadCallBack__Fi", SN_NOWARN)
set_name(0x80084C24, "BL_LoadFileAsync__FPcc", SN_NOWARN)
set_name(0x80084D9C, "BL_AsyncLoadFileAtAddr__FPcPUcc", SN_NOWARN)
set_name(0x80084E64, "BL_OpenStreamFile__FPcc", SN_NOWARN)
set_name(0x80084E90, "BL_CloseStreamFile__FP6STRHDR", SN_NOWARN)
set_name(0x80084EC8, "LZNP_Decode__FPUcT0", SN_NOWARN)
set_name(0x80084F9C, "Tmalloc__Fi", SN_NOWARN)
set_name(0x800850C0, "Tfree__FPv", SN_NOWARN)
set_name(0x80085170, "InitTmalloc__Fv", SN_NOWARN)
set_name(0x80085198, "strupr__FPc", SN_NOWARN)
set_name(0x800851EC, "PauseTask__FP4TASK", SN_NOWARN)
set_name(0x80085238, "GetPausePad__Fv", SN_NOWARN)
set_name(0x8008532C, "TryPadForPause__Fi", SN_NOWARN)
set_name(0x80085358, "DoPause__14CPauseMessagesi", SN_NOWARN)
set_name(0x8008559C, "DoPausedMessage__14CPauseMessages", SN_NOWARN)
set_name(0x800858B4, "DoQuitMessage__14CPauseMessages", SN_NOWARN)
set_name(0x800859D4, "AreYouSureMessage__14CPauseMessages", SN_NOWARN)
set_name(0x80085AD8, "PA_SetPauseOk__Fb", SN_NOWARN)
set_name(0x80085AE8, "PA_GetPauseOk__Fv", SN_NOWARN)
set_name(0x80085AF4, "MY_PausePrint__17CTempPauseMessageiPciP4RECT", SN_NOWARN)
set_name(0x80085CC4, "InitPrintQuitMessage__17CTempPauseMessage", SN_NOWARN)
set_name(0x80085CCC, "PrintQuitMessage__17CTempPauseMessagei", SN_NOWARN)
set_name(0x80085DE8, "LeavePrintQuitMessage__17CTempPauseMessagei", SN_NOWARN)
set_name(0x80085DF0, "InitPrintAreYouSure__17CTempPauseMessage", SN_NOWARN)
set_name(0x80085DF8, "PrintAreYouSure__17CTempPauseMessagei", SN_NOWARN)
set_name(0x80085F14, "LeavePrintAreYouSure__17CTempPauseMessagei", SN_NOWARN)
set_name(0x80085F1C, "InitPrintPaused__17CTempPauseMessage", SN_NOWARN)
set_name(0x80085F24, "ShowInActive__17CTempPauseMessage", SN_NOWARN)
set_name(0x80086004, "PrintPaused__17CTempPauseMessage", SN_NOWARN)
set_name(0x80086154, "LeavePrintPaused__17CTempPauseMessage", SN_NOWARN)
set_name(0x8008615C, "___17CTempPauseMessage", SN_NOWARN)
set_name(0x80086184, "_GLOBAL__D_DoPause__14CPauseMessagesi", SN_NOWARN)
set_name(0x800861AC, "_GLOBAL__I_DoPause__14CPauseMessagesi", SN_NOWARN)
set_name(0x800861D4, "__17CTempPauseMessage", SN_NOWARN)
set_name(0x80086218, "___14CPauseMessages", SN_NOWARN)
set_name(0x8008624C, "__14CPauseMessages", SN_NOWARN)
set_name(0x80086260, "SetRGB__6DialogUcUcUc", SN_NOWARN)
set_name(0x80086280, "SetBack__6Dialogi", SN_NOWARN)
set_name(0x80086288, "SetBorder__6Dialogi", SN_NOWARN)
set_name(0x80086290, "___6Dialog", SN_NOWARN)
set_name(0x800862B8, "__6Dialog", SN_NOWARN)
set_name(0x80086314, "GetDown__C4CPad", SN_NOWARN)
set_name(0x8008633C, "CheckActive__4CPad_addr_8008633C", SN_NOWARN)
set_name(0x80086348, "ReadPadStream__Fv", SN_NOWARN)
set_name(0x80086460, "PAD_Handler__Fv", SN_NOWARN)
set_name(0x80086628, "PAD_GetPad__FiUc", SN_NOWARN)
set_name(0x800866C4, "NewVal__4CPadUs", SN_NOWARN)
set_name(0x800867FC, "BothNewVal__4CPadUsUs", SN_NOWARN)
set_name(0x80086958, "Trans__4CPadUs", SN_NOWARN)
set_name(0x80086A7C, "_GLOBAL__I_Pad0", SN_NOWARN)
set_name(0x80086AB4, "SetPadType__4CPadUc", SN_NOWARN)
set_name(0x80086ABC, "CheckActive__4CPad_addr_80086ABC", SN_NOWARN)
set_name(0x80086AC8, "SetActive__4CPadUc", SN_NOWARN)
set_name(0x80086AD0, "SetBothFlag__4CPadUc", SN_NOWARN)
set_name(0x80086AD8, "__4CPadi", SN_NOWARN)
set_name(0x80086B0C, "Flush__4CPad", SN_NOWARN)
set_name(0x80086B30, "Set__7FontTab", SN_NOWARN)
set_name(0x80086BCC, "InitPrinty__Fv", SN_NOWARN)
set_name(0x80086C6C, "SetTextDat__5CFontP7TextDat", SN_NOWARN)
set_name(0x80086C74, "PrintChar__5CFontUsUsUcUcUcUc", SN_NOWARN)
set_name(0x80086DF8, "Print__5CFontiiPc8TXT_JUSTP4RECTUcUcUc", SN_NOWARN)
set_name(0x80087418, "GetStrWidth__5CFontPc", SN_NOWARN)
set_name(0x800874B4, "SetChar__5CFontiUs", SN_NOWARN)
set_name(0x80087534, "SetOTpos__5CFonti", SN_NOWARN)
set_name(0x80087540, "GetCharWidth__5CFontUc", SN_NOWARN)
set_name(0x800875D0, "ClearFont__5CFont", SN_NOWARN)
set_name(0x800875F4, "IsDefined__5CFontUc", SN_NOWARN)
set_name(0x80087614, "GetCharFrameNum__5CFontUc", SN_NOWARN)
set_name(0x8008762C, "Init__5CFont", SN_NOWARN)
set_name(0x80087660, "GetFr__7TextDati_addr_80087660", SN_NOWARN)
set_name(0x8008767C, "TrimCol__Fs", SN_NOWARN)
set_name(0x800876B4, "DialogPrint__Fiiiiiiiiii", SN_NOWARN)
set_name(0x80088034, "GetDropShadowG4__FUcUcUcUcUcUcUcUcUcUcUcUc", SN_NOWARN)
set_name(0x8008816C, "DropShadows__Fiiii", SN_NOWARN)
set_name(0x80088410, "InitDialog__Fv", SN_NOWARN)
set_name(0x80088548, "GetSizes__6Dialog", SN_NOWARN)
set_name(0x800887CC, "Back__6Dialogiiii", SN_NOWARN)
set_name(0x8008998C, "Line__6Dialogiii", SN_NOWARN)
set_name(0x80089BA4, "GetPal__7TextDati_addr_80089BA4", SN_NOWARN)
set_name(0x80089BC0, "GetFr__7TextDati_addr_80089BC0", SN_NOWARN)
set_name(0x80089BDC, "ATT_DoAttract__Fv", SN_NOWARN)
set_name(0x80089D2C, "CreatePlayersFromFeData__FR9FE_CREATE", SN_NOWARN)
set_name(0x80089DF8, "UpdateSel__FPUsUsPUc", SN_NOWARN)
set_name(0x80089E38, "CycleSelCols__Fv", SN_NOWARN)
set_name(0x80089FF0, "FindTownCreature__7CBlocksi", SN_NOWARN)
set_name(0x8008A064, "FindCreature__7CBlocksi", SN_NOWARN)
set_name(0x8008A0B8, "__7CBlocksiiiii", SN_NOWARN)
set_name(0x8008A20C, "SetTownersGraphics__7CBlocks", SN_NOWARN)
set_name(0x8008A244, "SetMonsterGraphics__7CBlocksii", SN_NOWARN)
set_name(0x8008A30C, "___7CBlocks", SN_NOWARN)
set_name(0x8008A394, "DumpGt4s__7CBlocks", SN_NOWARN)
set_name(0x8008A3FC, "DumpRects__7CBlocks", SN_NOWARN)
set_name(0x8008A464, "SetGraphics__7CBlocksPP7TextDatPii", SN_NOWARN)
set_name(0x8008A4C0, "DumpGraphics__7CBlocksPP7TextDatPi", SN_NOWARN)
set_name(0x8008A510, "PrintBlockOutline__7CBlocksiiiii", SN_NOWARN)
set_name(0x8008A85C, "Load__7CBlocksi", SN_NOWARN)
set_name(0x8008A908, "MakeRectTable__7CBlocks", SN_NOWARN)
set_name(0x8008A9DC, "MakeGt4Table__7CBlocks", SN_NOWARN)
set_name(0x8008AAE4, "MakeGt4__7CBlocksP8POLY_GT4P9FRAME_HDR", SN_NOWARN)
set_name(0x8008AC24, "GetBlock__7CBlocksi", SN_NOWARN)
set_name(0x8008AC9C, "Print__7CBlocks", SN_NOWARN)
set_name(0x8008ACC4, "SetXY__7CBlocksii", SN_NOWARN)
set_name(0x8008ACEC, "GetXY__7CBlocksPiT1", SN_NOWARN)
set_name(0x8008AD04, "PrintMap__7CBlocksii", SN_NOWARN)
set_name(0x8008C1F4, "PrintGameSprites__7CBlocksiiiii", SN_NOWARN)
set_name(0x8008C364, "PrintGameSprites__7CBlocksP8map_infoiiiiiii", SN_NOWARN)
set_name(0x8008D168, "PrintSprites__7CBlocksP8map_infoiiiiiii", SN_NOWARN)
set_name(0x8008D89C, "PrintSprites__7CBlocksiiiii", SN_NOWARN)
set_name(0x8008DA0C, "ScrToWorldX__7CBlocksii", SN_NOWARN)
set_name(0x8008DA20, "ScrToWorldY__7CBlocksii", SN_NOWARN)
set_name(0x8008DA34, "SetScrollTarget__7CBlocksii", SN_NOWARN)
set_name(0x8008DAF8, "DoScroll__7CBlocks", SN_NOWARN)
set_name(0x8008DB7C, "SetPlayerPosBlocks__7CBlocksiii", SN_NOWARN)
set_name(0x8008DC1C, "GetScrXY__7CBlocksR4RECTiiii", SN_NOWARN)
set_name(0x8008DCF0, "ShadScaleSkew__7CBlocksP8POLY_FT4", SN_NOWARN)
set_name(0x8008DD70, "WorldToScrX__7CBlocksii", SN_NOWARN)
set_name(0x8008DD78, "WorldToScrY__7CBlocksii", SN_NOWARN)
set_name(0x8008DD8C, "BL_GetCurrentBlocks__Fv", SN_NOWARN)
set_name(0x8008DD98, "PRIM_GetPrim__FPP8POLY_FT4_addr_8008DD98", SN_NOWARN)
set_name(0x8008DE14, "GetHighlightCol__FiPiUsUsUs", SN_NOWARN)
set_name(0x8008DE5C, "PRIM_GetCopy__FP8POLY_FT4", SN_NOWARN)
set_name(0x8008DE98, "GetHighlightCol__FiPcUsUsUs", SN_NOWARN)
set_name(0x8008DEE0, "PRIM_GetPrim__FPP8POLY_GT4_addr_8008DEE0", SN_NOWARN)
set_name(0x8008DF5C, "PRIM_GetPrim__FPP7LINE_F2", SN_NOWARN)
set_name(0x8008DFD8, "PRIM_CopyPrim__FP8POLY_FT4T0", SN_NOWARN)
set_name(0x8008E000, "GetCreature__14TownToCreaturei", SN_NOWARN)
set_name(0x8008E01C, "SetItemGraphics__7CBlocksi", SN_NOWARN)
set_name(0x8008E044, "SetObjGraphics__7CBlocksi", SN_NOWARN)
set_name(0x8008E06C, "DumpItems__7CBlocks", SN_NOWARN)
set_name(0x8008E090, "DumpObjs__7CBlocks", SN_NOWARN)
set_name(0x8008E0B4, "DumpMonsters__7CBlocks", SN_NOWARN)
set_name(0x8008E0DC, "GetNumOfBlocks__7CBlocks", SN_NOWARN)
set_name(0x8008E0E8, "CopyToGt4__9LittleGt4P8POLY_GT4", SN_NOWARN)
set_name(0x8008E180, "InitFromGt4__9LittleGt4P8POLY_GT4ii", SN_NOWARN)
set_name(0x8008E210, "GetNumOfFrames__7TextDatii", SN_NOWARN)
set_name(0x8008E248, "GetCreature__7TextDati_addr_8008E248", SN_NOWARN)
set_name(0x8008E2C0, "GetNumOfCreatures__7TextDat_addr_8008E2C0", SN_NOWARN)
set_name(0x8008E2D4, "SetFileInfo__7TextDatPC13CTextFileInfoi_addr_8008E2D4", SN_NOWARN)
set_name(0x8008E2E0, "GetPal__7TextDati_addr_8008E2E0", SN_NOWARN)
set_name(0x8008E2FC, "GetFr__7TextDati_addr_8008E2FC", SN_NOWARN)
set_name(0x8008E318, "OVR_IsMemcardOverlayBlank__Fv", SN_NOWARN)
set_name(0x8008E344, "OVR_LoadPregame__Fv", SN_NOWARN)
set_name(0x8008E36C, "OVR_LoadFrontend__Fv", SN_NOWARN)
set_name(0x8008E394, "OVR_LoadGame__Fv", SN_NOWARN)
set_name(0x8008E3BC, "OVR_LoadFmv__Fv", SN_NOWARN)
set_name(0x8008E3E4, "OVR_LoadMemcard__Fv", SN_NOWARN)
set_name(0x8008E410, "ClearOutOverlays__Fv", SN_NOWARN)
set_name(0x8008E468, "ClearOut__7Overlay", SN_NOWARN)
set_name(0x8008E52C, "Load__7Overlay", SN_NOWARN)
set_name(0x8008E59C, "OVR_GetCurrentOverlay__Fv", SN_NOWARN)
set_name(0x8008E5A8, "LoadOver__FR7Overlay", SN_NOWARN)
set_name(0x8008E5FC, "_GLOBAL__I_OVR_Open__Fv", SN_NOWARN)
set_name(0x8008E76C, "GetOverType__7Overlay", SN_NOWARN)
set_name(0x8008E778, "StevesDummyPoll__Fv", SN_NOWARN)
set_name(0x8008E780, "Lambo__Fv", SN_NOWARN)
set_name(0x8008E788, "__7CPlayerbi", SN_NOWARN)
set_name(0x8008E86C, "___7CPlayer", SN_NOWARN)
set_name(0x8008E8C4, "Load__7CPlayeri", SN_NOWARN)
set_name(0x8008E920, "SetBlockXY__7CPlayerR7CBlocksR12PlayerStruct", SN_NOWARN)
set_name(0x8008EA6C, "SetScrollTarget__7CPlayerR12PlayerStructR7CBlocks", SN_NOWARN)
set_name(0x8008EE98, "GetNumOfSpellAnims__FR12PlayerStruct", SN_NOWARN)
set_name(0x8008EF18, "Print__7CPlayerR12PlayerStructR7CBlocks", SN_NOWARN)
set_name(0x8008F3F0, "FindAction__7CPlayerR12PlayerStruct", SN_NOWARN)
set_name(0x8008F46C, "FindActionEnum__7CPlayerR12PlayerStruct", SN_NOWARN)
set_name(0x8008F4E8, "Init__7CPlayer", SN_NOWARN)
set_name(0x8008F4F0, "Dump__7CPlayer", SN_NOWARN)
set_name(0x8008F4F8, "PRIM_GetPrim__FPP8POLY_FT4_addr_8008F4F8", SN_NOWARN)
set_name(0x8008F574, "PRIM_GetCopy__FP8POLY_FT4_addr_8008F574", SN_NOWARN)
set_name(0x8008F5B0, "PRIM_CopyPrim__FP8POLY_FT4T0_addr_8008F5B0", SN_NOWARN)
set_name(0x8008F5D8, "GetPlrOt__7CBlocksi", SN_NOWARN)
set_name(0x8008F5EC, "SetDecompArea__7TextDatiiii", SN_NOWARN)
set_name(0x8008F604, "GetNumOfFrames__7TextDatii_addr_8008F604", SN_NOWARN)
set_name(0x8008F63C, "GetNumOfActions__7TextDati", SN_NOWARN)
set_name(0x8008F660, "GetCreature__7TextDati_addr_8008F660", SN_NOWARN)
set_name(0x8008F6D8, "GetNumOfCreatures__7TextDat_addr_8008F6D8", SN_NOWARN)
set_name(0x8008F6EC, "SetFileInfo__7TextDatPC13CTextFileInfoi_addr_8008F6EC", SN_NOWARN)
set_name(0x8008F6F8, "PROF_Open__Fv", SN_NOWARN)
set_name(0x8008F738, "PROF_State__Fv", SN_NOWARN)
set_name(0x8008F744, "PROF_On__Fv", SN_NOWARN)
set_name(0x8008F754, "PROF_Off__Fv", SN_NOWARN)
set_name(0x8008F760, "PROF_CpuEnd__Fv", SN_NOWARN)
set_name(0x8008F790, "PROF_CpuStart__Fv", SN_NOWARN)
set_name(0x8008F7B4, "PROF_DrawStart__Fv", SN_NOWARN)
set_name(0x8008F7D8, "PROF_DrawEnd__Fv", SN_NOWARN)
set_name(0x8008F808, "PROF_Draw__FPUl", SN_NOWARN)
set_name(0x8008F9FC, "PROF_Restart__Fv", SN_NOWARN)
set_name(0x8008FA1C, "PSX_WndProc__FUilUl", SN_NOWARN)
set_name(0x8008FD10, "PSX_PostWndProc__FUilUl", SN_NOWARN)
set_name(0x8008FDC0, "GoBackLevel__Fv", SN_NOWARN)
set_name(0x8008FE34, "GoWarpLevel__Fv", SN_NOWARN)
set_name(0x8008FE70, "PostLoadGame__Fv", SN_NOWARN)
set_name(0x8008FF0C, "GoLoadGame__Fv", SN_NOWARN)
set_name(0x8008FF6C, "PostNewLevel__Fv", SN_NOWARN)
set_name(0x80090008, "GoNewLevel__Fv", SN_NOWARN)
set_name(0x80090060, "PostGoBackLevel__Fv", SN_NOWARN)
set_name(0x800900F8, "GoForwardLevel__Fv", SN_NOWARN)
set_name(0x8009014C, "PostGoForwardLevel__Fv", SN_NOWARN)
set_name(0x800901E4, "GoNewGame__Fv", SN_NOWARN)
set_name(0x80090238, "PostNewGame__Fv", SN_NOWARN)
set_name(0x80090270, "LevelToLevelInit__Fv", SN_NOWARN)
set_name(0x800902B8, "GetPal__6GPaneli", SN_NOWARN)
set_name(0x800902FC, "__6GPaneli", SN_NOWARN)
set_name(0x80090354, "DrawFlask__6GPanelP7PanelXYP12PlayerStruct", SN_NOWARN)
set_name(0x80090788, "DrawSpeedBar__6GPanelP7PanelXYP12PlayerStruct", SN_NOWARN)
set_name(0x80090C0C, "DrawSpell__6GPanelP7PanelXYP12PlayerStruct", SN_NOWARN)
set_name(0x80090DAC, "DrawMsgWindow__6GPanelP7PanelXYP12PlayerStruct", SN_NOWARN)
set_name(0x80090DF8, "DrawDurThingy__6GPaneliiP10ItemStructi", SN_NOWARN)
set_name(0x800911B4, "DrawDurIcon__6GPanelP7PanelXYP12PlayerStruct", SN_NOWARN)
set_name(0x800912A8, "Print__6GPanelP7PanelXYP12PlayerStruct", SN_NOWARN)
set_name(0x800913AC, "GetPal__7TextDati_addr_800913AC", SN_NOWARN)
set_name(0x800913C8, "GetFr__7TextDati_addr_800913C8", SN_NOWARN)
set_name(0x800913E4, "PrintCDWaitTask__FP4TASK", SN_NOWARN)
set_name(0x8009149C, "InitCDWaitIcon__Fv", SN_NOWARN)
set_name(0x800914D0, "STR_Debug__FP6SFXHDRPce", SN_NOWARN)
set_name(0x800914E4, "STR_SystemTask__FP4TASK", SN_NOWARN)
set_name(0x80091524, "STR_AllocBuffer__Fv", SN_NOWARN)
set_name(0x80091578, "STR_Init__Fv", SN_NOWARN)
set_name(0x800916A0, "STR_InitStream__Fv", SN_NOWARN)
set_name(0x800917D8, "STR_PlaySound__FUscic", SN_NOWARN)
set_name(0x80091914, "STR_setvolume__FP6SFXHDR", SN_NOWARN)
set_name(0x8009196C, "STR_PlaySFX__FP6SFXHDR", SN_NOWARN)
set_name(0x80091A78, "STR_pauseall__Fv", SN_NOWARN)
set_name(0x80091AC8, "STR_resumeall__Fv", SN_NOWARN)
set_name(0x80091B18, "STR_CloseStream__FP6SFXHDR", SN_NOWARN)
set_name(0x80091B9C, "STR_SoundCommand__FP6SFXHDRi", SN_NOWARN)
set_name(0x80091CA8, "STR_Command__FP6SFXHDR", SN_NOWARN)
set_name(0x80091E54, "STR_DMAControl__FP6SFXHDR", SN_NOWARN)
set_name(0x80091F1C, "STR_PlayStream__FP6SFXHDRPUci", SN_NOWARN)
set_name(0x800920F8, "STR_AsyncWeeTASK__FP4TASK", SN_NOWARN)
set_name(0x800923F8, "STR_AsyncTASK__FP4TASK", SN_NOWARN)
set_name(0x8009282C, "STR_StreamMainTask__FP6SFXHDRc", SN_NOWARN)
set_name(0x80092934, "SND_Monitor__FP4TASK", SN_NOWARN)
set_name(0x800929C0, "SPU_OnceOnlyInit__Fv", SN_NOWARN)
set_name(0x800929F8, "SPU_Init__Fv", SN_NOWARN)
set_name(0x80092ADC, "SND_FindChannel__Fv", SN_NOWARN)
set_name(0x80092B48, "SND_ClearBank__Fv", SN_NOWARN)
set_name(0x80092BC0, "SndLoadCallBack__FPUciib", SN_NOWARN)
set_name(0x80092C38, "SND_LoadBank__Fi", SN_NOWARN)
set_name(0x80092D6C, "SND_FindSFX__FUs", SN_NOWARN)
set_name(0x80092DC0, "SND_StopSnd__Fi", SN_NOWARN)
set_name(0x80092DE4, "SND_IsSfxPlaying__Fi", SN_NOWARN)
set_name(0x80092E20, "SND_RemapSnd__Fi", SN_NOWARN)
set_name(0x80092E94, "SND_PlaySnd__FUsiii", SN_NOWARN)
set_name(0x80093050, "AS_CallBack0__Fi", SN_NOWARN)
set_name(0x80093064, "AS_CallBack1__Fi", SN_NOWARN)
set_name(0x80093078, "AS_WasLastBlock__FiP6STRHDRP6SFXHDR", SN_NOWARN)
set_name(0x80093154, "AS_OpenStream__FP6STRHDRP6SFXHDR", SN_NOWARN)
set_name(0x800931F4, "AS_GetBlock__FP6SFXHDR", SN_NOWARN)
set_name(0x80093200, "AS_CloseStream__FP6STRHDRP6SFXHDR", SN_NOWARN)
set_name(0x8009322C, "AS_LoopStream__FiP6STRHDRP6SFXHDR", SN_NOWARN)
set_name(0x8009334C, "SCR_Open__Fv", SN_NOWARN)
set_name(0x8009337C, "SCR_NeedHighlightPal__FUsUsi", SN_NOWARN)
set_name(0x800933B0, "Init__13PalCollectionPC7InitPos", SN_NOWARN)
set_name(0x80093440, "FindPal__13PalCollectionUsUsi", SN_NOWARN)
set_name(0x8009351C, "NewPal__13PalCollectionUsUsi", SN_NOWARN)
set_name(0x8009359C, "MakePal__8PalEntryUsUsi", SN_NOWARN)
set_name(0x8009363C, "GetHighlightPal__13PalCollectionUsUsi", SN_NOWARN)
set_name(0x800936D0, "UpdatePals__13PalCollection", SN_NOWARN)
set_name(0x80093744, "SCR_Handler__Fv", SN_NOWARN)
set_name(0x8009376C, "GetNumOfObjs__t10Collection2Z8PalEntryi20", SN_NOWARN)
set_name(0x80093774, "GetObj__t10Collection2Z8PalEntryi20", SN_NOWARN)
set_name(0x800937B0, "Init__t10Collection2Z8PalEntryi20", SN_NOWARN)
set_name(0x80093814, "MoveFromUsedToUnused__t10Collection2Z8PalEntryi20P8PalEntry", SN_NOWARN)
set_name(0x8009386C, "MoveFromUnusedToUsed__t10Collection2Z8PalEntryi20P8PalEntry", SN_NOWARN)
set_name(0x800938C4, "Set__8PalEntryUsUsi", SN_NOWARN)
set_name(0x800938D8, "Set__8PalEntryRC7InitPos", SN_NOWARN)
set_name(0x80093904, "SetJustUsed__8PalEntryb", SN_NOWARN)
set_name(0x8009390C, "Init__8PalEntry", SN_NOWARN)
set_name(0x80093914, "GetClut__C8PalEntry", SN_NOWARN)
set_name(0x80093920, "IsEqual__C8PalEntryUsUsi", SN_NOWARN)
set_name(0x80093958, "GetNext__Ct11TLinkedList1Z8PalEntry", SN_NOWARN)
set_name(0x80093964, "AddToList__t11TLinkedList1Z8PalEntryPP8PalEntry", SN_NOWARN)
set_name(0x80093984, "DetachFromList__t11TLinkedList1Z8PalEntryPP8PalEntry", SN_NOWARN)
set_name(0x800939D0, "stub__FPcPv", SN_NOWARN)
set_name(0x800939D8, "new_eprint__FPcT0i", SN_NOWARN)
set_name(0x80093A0C, "TonysGameTask__FP4TASK", SN_NOWARN)
set_name(0x80093A94, "SetAmbientLight__Fv", SN_NOWARN)
set_name(0x80093BA0, "print_demo_task__FP4TASK", SN_NOWARN)
set_name(0x80093D8C, "TonysDummyPoll__Fv", SN_NOWARN)
set_name(0x80093DB0, "load_demo_pad_data__FUl", SN_NOWARN)
set_name(0x80093E10, "save_demo_pad_data__FUl", SN_NOWARN)
set_name(0x80093E70, "set_pad_record_play__Fi", SN_NOWARN)
set_name(0x80093EE4, "start_demo__Fv", SN_NOWARN)
set_name(0x80093F80, "SetQuest__Fv", SN_NOWARN)
set_name(0x80093FA8, "CurrCheatStr__Fv", SN_NOWARN)
set_name(0x80093FC8, "tony__Fv", SN_NOWARN)
set_name(0x80094000, "GLUE_SetMonsterList__Fi", SN_NOWARN)
set_name(0x8009400C, "GLUE_GetMonsterList__Fv", SN_NOWARN)
set_name(0x80094018, "GLUE_SuspendGame__Fv", SN_NOWARN)
set_name(0x8009406C, "GLUE_ResumeGame__Fv", SN_NOWARN)
set_name(0x800940C0, "GLUE_PreTown__Fv", SN_NOWARN)
set_name(0x80094114, "GLUE_PreDun__Fv", SN_NOWARN)
set_name(0x80094128, "GLUE_Finished__Fv", SN_NOWARN)
set_name(0x80094134, "GLUE_SetFinished__Fb", SN_NOWARN)
set_name(0x80094140, "GLUE_StartBg__Fibi", SN_NOWARN)
set_name(0x800941C4, "GLUE_SetShowGameScreenFlag__Fb", SN_NOWARN)
set_name(0x800941D4, "GLUE_SetHomingScrollFlag__Fb", SN_NOWARN)
set_name(0x800941E4, "GLUE_SetShowPanelFlag__Fb", SN_NOWARN)
set_name(0x800941F4, "DoShowPanelGFX__FP6GPanelT0", SN_NOWARN)
set_name(0x800942CC, "BgTask__FP4TASK", SN_NOWARN)
set_name(0x8009482C, "FindPlayerChar__FPc", SN_NOWARN)
set_name(0x800948C4, "FindPlayerChar__Fiii", SN_NOWARN)
set_name(0x80094920, "FindPlayerChar__FP12PlayerStruct", SN_NOWARN)
set_name(0x80094950, "FindPlayerChar__FP12PlayerStructb", SN_NOWARN)
set_name(0x800949B0, "MakeSurePlayerDressedProperly__FR7CPlayerR12PlayerStructb", SN_NOWARN)
set_name(0x80094A30, "GLUE_GetCurrentList__Fi", SN_NOWARN)
set_name(0x80094ADC, "GLUE_StartGameExit__Fv", SN_NOWARN)
set_name(0x80094B48, "GLUE_Init__Fv", SN_NOWARN)
set_name(0x80094B78, "GetTexId__7CPlayer", SN_NOWARN)
set_name(0x80094B84, "SetTown__7CBlocksb", SN_NOWARN)
set_name(0x80094B8C, "MoveToScrollTarget__7CBlocks", SN_NOWARN)
set_name(0x80094BA0, "SetDemoKeys__FPi", SN_NOWARN)
set_name(0x80094C78, "RestoreDemoKeys__FPi", SN_NOWARN)
set_name(0x80094D08, "get_action_str__Fii", SN_NOWARN)
set_name(0x80094D80, "get_key_pad__Fi", SN_NOWARN)
set_name(0x80094DB8, "checkvalid__Fv", SN_NOWARN)
set_name(0x80094E1C, "RemoveCtrlScreen__Fv", SN_NOWARN)
set_name(0x80094E84, "Init_ctrl_pos__Fv", SN_NOWARN)
set_name(0x80094F3C, "remove_padval__Fi", SN_NOWARN)
set_name(0x80094F7C, "remove_comboval__Fi", SN_NOWARN)
set_name(0x80094FBC, "set_buttons__Fii", SN_NOWARN)
set_name(0x80095110, "restore_controller_settings__Fv", SN_NOWARN)
set_name(0x80095158, "only_one_button__Fi", SN_NOWARN)
set_name(0x80095184, "main_ctrl_setup__Fv", SN_NOWARN)
set_name(0x80095634, "PrintCtrlString__FiiUcic", SN_NOWARN)
set_name(0x80095B30, "DrawCtrlSetup__Fv", SN_NOWARN)
set_name(0x80095FEC, "_GLOBAL__D_ctrlflag", SN_NOWARN)
set_name(0x80096014, "_GLOBAL__I_ctrlflag", SN_NOWARN)
set_name(0x8009603C, "GetTick__C4CPad", SN_NOWARN)
set_name(0x80096064, "GetDown__C4CPad_addr_80096064", SN_NOWARN)
set_name(0x8009608C, "GetUp__C4CPad", SN_NOWARN)
set_name(0x800960B4, "SetPadTickMask__4CPadUs", SN_NOWARN)
set_name(0x800960BC, "SetPadTick__4CPadUs", SN_NOWARN)
set_name(0x800960C4, "SetRGB__6DialogUcUcUc_addr_800960C4", SN_NOWARN)
set_name(0x800960E4, "SetBorder__6Dialogi_addr_800960E4", SN_NOWARN)
set_name(0x800960EC, "SetOTpos__6Dialogi", SN_NOWARN)
set_name(0x800960F8, "___6Dialog_addr_800960F8", SN_NOWARN)
set_name(0x80096120, "__6Dialog_addr_80096120", SN_NOWARN)
set_name(0x8009617C, "switchnight__FP4TASK", SN_NOWARN)
set_name(0x800961C8, "city_lights__FP4TASK", SN_NOWARN)
set_name(0x8009631C, "color_cycle__FP4TASK", SN_NOWARN)
set_name(0x80096460, "ReInitDFL__Fv", SN_NOWARN)
set_name(0x80096498, "DrawFlameLogo__Fv", SN_NOWARN)
set_name(0x80096858, "TitleScreen__FP7CScreen", SN_NOWARN)
set_name(0x800968AC, "TryCreaturePrint__Fiiiiiii", SN_NOWARN)
set_name(0x80096B10, "TryWater__FiiP8POLY_GT4i", SN_NOWARN)
set_name(0x80096CC4, "nightgfx__FibiP8POLY_GT4i", SN_NOWARN)
set_name(0x80096D50, "PRIM_GetCopy__FP8POLY_FT4_addr_80096D50", SN_NOWARN)
set_name(0x80096D8C, "PRIM_CopyPrim__FP8POLY_FT4T0_addr_80096D8C", SN_NOWARN)
set_name(0x80096DB4, "PRIM_GetPrim__FPP8POLY_FT4_addr_80096DB4", SN_NOWARN)
set_name(0x80096E30, "GetNumOfActions__7TextDati_addr_80096E30", SN_NOWARN)
set_name(0x80096E54, "GetCreature__7TextDati_addr_80096E54", SN_NOWARN)
set_name(0x80096ECC, "GetNumOfCreatures__7TextDat_addr_80096ECC", SN_NOWARN)
set_name(0x80096EE0, "DaveLDummyPoll__Fv", SN_NOWARN)
set_name(0x80096EE8, "DaveL__Fv", SN_NOWARN)
set_name(0x80096F10, "DoReflection__FP8POLY_FT4iii", SN_NOWARN)
set_name(0x800971FC, "mteleportfx__Fv", SN_NOWARN)
set_name(0x800974FC, "invistimer__Fv", SN_NOWARN)
set_name(0x800975D4, "setUVparams__FP8POLY_FT4P9FRAME_HDR", SN_NOWARN)
set_name(0x80097664, "drawparticle__Fiiiiii", SN_NOWARN)
set_name(0x8009785C, "drawpolyF4__Fiiiiii", SN_NOWARN)
set_name(0x80097990, "drawpolyG4__Fiiiiiiii", SN_NOWARN)
set_name(0x80097B60, "particlejump__Fv", SN_NOWARN)
set_name(0x80097D10, "particleglow__Fv", SN_NOWARN)
set_name(0x80097E04, "doparticlejump__Fv", SN_NOWARN)
set_name(0x80097E44, "StartPartJump__Fiiiiii", SN_NOWARN)
set_name(0x80097FAC, "doparticlechain__Fiiiiiiiiiiii", SN_NOWARN)
set_name(0x800983B4, "ParticleBlob__FP13MissileStructiiii", SN_NOWARN)
set_name(0x8009844C, "ParticleMissile__FP13MissileStructiiii", SN_NOWARN)
set_name(0x8009850C, "Teleportfx__Fiiiiiiii", SN_NOWARN)
set_name(0x8009880C, "ResurrectFX__Fiiii", SN_NOWARN)
set_name(0x80098A34, "ParticleExp__FP13MissileStructiiii", SN_NOWARN)
set_name(0x80098AD0, "GetPlrPos__11SPELLFX_DATP12PlayerStruct", SN_NOWARN)
set_name(0x80098BF4, "healFX__Fv", SN_NOWARN)
set_name(0x80098D30, "HealStart__Fi", SN_NOWARN)
set_name(0x80098D64, "HealotherStart__Fi", SN_NOWARN)
set_name(0x80098D9C, "TeleStart__Fi", SN_NOWARN)
set_name(0x80098DF8, "PhaseStart__Fi", SN_NOWARN)
set_name(0x80098E2C, "PhaseEnd__Fi", SN_NOWARN)
set_name(0x80098E58, "ApocInit__11SPELLFX_DATP12PlayerStruct", SN_NOWARN)
set_name(0x80099034, "ApocaStart__Fi", SN_NOWARN)
set_name(0x8009908C, "DaveLTask__FP4TASK", SN_NOWARN)
set_name(0x80099128, "PRIM_GetPrim__FPP7POLY_G4", SN_NOWARN)
set_name(0x800991A4, "PRIM_GetPrim__FPP7POLY_F4", SN_NOWARN)
set_name(0x80099220, "PRIM_GetPrim__FPP8POLY_FT4_addr_80099220", SN_NOWARN)
set_name(0x8009929C, "GetPlayer__7CPlayeri", SN_NOWARN)
set_name(0x800992EC, "GetLastOtPos__C7CPlayer", SN_NOWARN)
set_name(0x800992F8, "GetFr__7TextDati_addr_800992F8", SN_NOWARN)
set_name(0x80099314, "DrawArrow__Fii", SN_NOWARN)
set_name(0x80099518, "show_spell_dir__Fi", SN_NOWARN)
set_name(0x8009984C, "release_spell__Fi", SN_NOWARN)
set_name(0x800998C0, "select_belt_item__Fi", SN_NOWARN)
set_name(0x800998C8, "any_belt_items__Fv", SN_NOWARN)
set_name(0x80099930, "get_last_inv__Fv", SN_NOWARN)
set_name(0x80099A60, "get_next_inv__Fv", SN_NOWARN)
set_name(0x80099B98, "pad_func_up__Fi", SN_NOWARN)
set_name(0x80099BC4, "pad_func_down__Fi", SN_NOWARN)
set_name(0x80099BF0, "pad_func_left__Fi", SN_NOWARN)
set_name(0x80099BF8, "pad_func_right__Fi", SN_NOWARN)
set_name(0x80099C00, "pad_func_select__Fi", SN_NOWARN)
set_name(0x80099CBC, "pad_func_Attack__Fi", SN_NOWARN)
set_name(0x8009A27C, "pad_func_Action__Fi", SN_NOWARN)
set_name(0x8009A5C8, "InitTargetCursor__Fi", SN_NOWARN)
set_name(0x8009A6D0, "RemoveTargetCursor__Fi", SN_NOWARN)
set_name(0x8009A760, "pad_func_Cast_Spell__Fi", SN_NOWARN)
set_name(0x8009AB60, "pad_func_Use_Item__Fi", SN_NOWARN)
set_name(0x8009AC20, "pad_func_Chr__Fi", SN_NOWARN)
set_name(0x8009AD28, "pad_func_Inv__Fi", SN_NOWARN)
set_name(0x8009AE20, "pad_func_SplBook__Fi", SN_NOWARN)
set_name(0x8009AF0C, "pad_func_QLog__Fi", SN_NOWARN)
set_name(0x8009AF90, "pad_func_SpellBook__Fi", SN_NOWARN)
set_name(0x8009B028, "pad_func_AutoMap__Fi", SN_NOWARN)
set_name(0x8009B0E4, "pad_func_Quick_Spell__Fi", SN_NOWARN)
set_name(0x8009B160, "check_inv__FiPci", SN_NOWARN)
set_name(0x8009B328, "pad_func_Quick_Use_Health__Fi", SN_NOWARN)
set_name(0x8009B350, "pad_func_Quick_Use_Mana__Fi", SN_NOWARN)
set_name(0x8009B378, "get_max_find_size__FPici", SN_NOWARN)
set_name(0x8009B4B8, "sort_gold__Fi", SN_NOWARN)
set_name(0x8009B5BC, "DrawObjSelector__Fi", SN_NOWARN)
set_name(0x8009BE50, "DrawObjTask__FP4TASK", SN_NOWARN)
set_name(0x8009BF2C, "add_area_find_object__Fciii", SN_NOWARN)
set_name(0x8009C038, "CheckRangeObject__Fiici", SN_NOWARN)
set_name(0x8009C3F8, "CheckArea__FiiicUci", SN_NOWARN)
set_name(0x8009C76C, "PlacePlayer__FiiiUc", SN_NOWARN)
set_name(0x8009C990, "_GLOBAL__D_gplayer", SN_NOWARN)
set_name(0x8009C9B8, "_GLOBAL__I_gplayer", SN_NOWARN)
set_name(0x8009C9E0, "SetRGB__6DialogUcUcUc_addr_8009C9E0", SN_NOWARN)
set_name(0x8009CA00, "SetBack__6Dialogi_addr_8009CA00", SN_NOWARN)
set_name(0x8009CA08, "SetBorder__6Dialogi_addr_8009CA08", SN_NOWARN)
set_name(0x8009CA10, "___6Dialog_addr_8009CA10", SN_NOWARN)
set_name(0x8009CA38, "__6Dialog_addr_8009CA38", SN_NOWARN)
set_name(0x8009CA94, "GetTick__C4CPad_addr_8009CA94", SN_NOWARN)
set_name(0x8009CABC, "GetDown__C4CPad_addr_8009CABC", SN_NOWARN)
set_name(0x8009CAE4, "GetCur__C4CPad_addr_8009CAE4", SN_NOWARN)
set_name(0x8009CB0C, "SetPadTickMask__4CPadUs_addr_8009CB0C", SN_NOWARN)
set_name(0x8009CB14, "SetPadTick__4CPadUs_addr_8009CB14", SN_NOWARN)
set_name(0x8009CB1C, "DEC_AddAsDecRequestor__FP7TextDat", SN_NOWARN)
set_name(0x8009CB98, "DEC_RemoveAsDecRequestor__FP7TextDat", SN_NOWARN)
set_name(0x8009CBF0, "DEC_DoDecompRequests__Fv", SN_NOWARN)
set_name(0x8009CC4C, "FindThisTd__FP7TextDat", SN_NOWARN)
set_name(0x8009CC84, "FindEmptyIndex__Fv", SN_NOWARN)
set_name(0x8009CCBC, "UPDATEPROGRESS__Fi", SN_NOWARN)
set_name(0x8009CD1C, "IsGameLoading__Fv", SN_NOWARN)
set_name(0x8009CD28, "PutUpCutScreenTSK__FP4TASK", SN_NOWARN)
set_name(0x8009D19C, "PutUpCutScreen__Fi", SN_NOWARN)
set_name(0x8009D25C, "TakeDownCutScreen__Fv", SN_NOWARN)
set_name(0x8009D2E8, "FinishProgress__Fv", SN_NOWARN)
set_name(0x8009D34C, "PRIM_GetPrim__FPP7POLY_G4_addr_8009D34C", SN_NOWARN)
set_name(0x8009D3C8, "_GLOBAL__D_UPDATEPROGRESS__Fi", SN_NOWARN)
set_name(0x8009D400, "_GLOBAL__I_UPDATEPROGRESS__Fi", SN_NOWARN)
set_name(0x8009D438, "SetRGB__6DialogUcUcUc_addr_8009D438", SN_NOWARN)
set_name(0x8009D458, "SetBack__6Dialogi_addr_8009D458", SN_NOWARN)
set_name(0x8009D460, "SetBorder__6Dialogi_addr_8009D460", SN_NOWARN)
set_name(0x8009D468, "___6Dialog_addr_8009D468", SN_NOWARN)
set_name(0x8009D490, "__6Dialog_addr_8009D490", SN_NOWARN)
set_name(0x8009D4EC, "___7CScreen", SN_NOWARN)
set_name(0x8009D50C, "init_mem_card__FPFii_vUc", SN_NOWARN)
set_name(0x8009D744, "memcard_event__Fii", SN_NOWARN)
set_name(0x8009D74C, "init_card__Fib", SN_NOWARN)
set_name(0x8009D80C, "ping_card__Fi", SN_NOWARN)
set_name(0x8009D8A0, "CardUpdateTask__FP4TASK", SN_NOWARN)
set_name(0x8009D8D8, "MemcardON__Fv", SN_NOWARN)
set_name(0x8009D948, "MemcardOFF__Fv", SN_NOWARN)
set_name(0x8009D998, "CheckSavedOptions__Fv", SN_NOWARN)
set_name(0x8009DA20, "card_removed__Fi", SN_NOWARN)
set_name(0x8009DA48, "read_card_block__Fii", SN_NOWARN)
set_name(0x8009DA90, "test_hw_event__Fv", SN_NOWARN)
set_name(0x8009DB10, "PrintSelectBack__FbT0", SN_NOWARN)
set_name(0x8009DC90, "DrawDialogBox__FiiP4RECTiiii", SN_NOWARN)
set_name(0x8009DD74, "DrawSpinner__FiiUcUcUciiibiT8", SN_NOWARN)
set_name(0x8009E268, "DrawMenu__Fi", SN_NOWARN)
set_name(0x8009EEF8, "who_pressed__Fi", SN_NOWARN)
set_name(0x8009EF80, "ShowCharacterFiles__Fv", SN_NOWARN)
set_name(0x8009F588, "MemcardPad__Fv", SN_NOWARN)
set_name(0x8009FBF0, "SoundPad__Fv", SN_NOWARN)
set_name(0x800A0428, "CentrePad__Fv", SN_NOWARN)
set_name(0x800A08A4, "CalcVolumes__Fv", SN_NOWARN)
set_name(0x800A09DC, "SetLoadedVolumes__Fv", SN_NOWARN)
set_name(0x800A0A64, "GetVolumes__Fv", SN_NOWARN)
set_name(0x800A0B00, "PrintInfoMenu__Fv", SN_NOWARN)
set_name(0x800A0CA8, "SeedPad__Fv", SN_NOWARN)
set_name(0x800A0F2C, "DrawOptions__FP4TASK", SN_NOWARN)
set_name(0x800A17EC, "ToggleOptions__Fv", SN_NOWARN)
set_name(0x800A18A4, "FormatPad__Fv", SN_NOWARN)
set_name(0x800A1BC4, "ActivateMemcard__Fv", SN_NOWARN)
set_name(0x800A1C48, "PRIM_GetPrim__FPP7POLY_G4_addr_800A1C48", SN_NOWARN)
set_name(0x800A1CC4, "GetTick__C4CPad_addr_800A1CC4", SN_NOWARN)
set_name(0x800A1CEC, "GetDown__C4CPad_addr_800A1CEC", SN_NOWARN)
set_name(0x800A1D14, "GetUp__C4CPad_addr_800A1D14", SN_NOWARN)
set_name(0x800A1D3C, "SetPadTickMask__4CPadUs_addr_800A1D3C", SN_NOWARN)
set_name(0x800A1D44, "SetPadTick__4CPadUs_addr_800A1D44", SN_NOWARN)
set_name(0x800A1D4C, "Flush__4CPad_addr_800A1D4C", SN_NOWARN)
set_name(0x800A1D70, "SetRGB__6DialogUcUcUc_addr_800A1D70", SN_NOWARN)
set_name(0x800A1D90, "SetBack__6Dialogi_addr_800A1D90", SN_NOWARN)
set_name(0x800A1D98, "SetBorder__6Dialogi_addr_800A1D98", SN_NOWARN)
set_name(0x800A1DA0, "___6Dialog_addr_800A1DA0", SN_NOWARN)
set_name(0x800A1DC8, "__6Dialog_addr_800A1DC8", SN_NOWARN)
set_name(0x800A1E24, "GetFr__7TextDati_addr_800A1E24", SN_NOWARN)
set_name(0x800A1E40, "BirdDistanceOK__Fiiii", SN_NOWARN)
set_name(0x800A1E98, "AlterBirdPos__FP10BIRDSTRUCTUc", SN_NOWARN)
set_name(0x800A1FDC, "BirdWorld__FP10BIRDSTRUCTii", SN_NOWARN)
set_name(0x800A2058, "BirdScared__FP10BIRDSTRUCT", SN_NOWARN)
set_name(0x800A21E4, "GetPerch__FP10BIRDSTRUCT", SN_NOWARN)
set_name(0x800A2238, "BIRD_StartHop__FP10BIRDSTRUCT", SN_NOWARN)
set_name(0x800A23A0, "BIRD_DoHop__FP10BIRDSTRUCT", SN_NOWARN)
set_name(0x800A24A4, "BIRD_StartPerch__FP10BIRDSTRUCT", SN_NOWARN)
set_name(0x800A2510, "BIRD_DoPerch__FP10BIRDSTRUCT", SN_NOWARN)
set_name(0x800A2594, "BIRD_DoScatter__FP10BIRDSTRUCT", SN_NOWARN)
set_name(0x800A2640, "CheckDirOk__FP10BIRDSTRUCT", SN_NOWARN)
set_name(0x800A2750, "BIRD_StartScatter__FP10BIRDSTRUCT", SN_NOWARN)
set_name(0x800A27FC, "BIRD_StartFly__FP10BIRDSTRUCT", SN_NOWARN)
set_name(0x800A28A0, "BIRD_DoFly__FP10BIRDSTRUCT", SN_NOWARN)
set_name(0x800A2B4C, "BIRD_StartLanding__FP10BIRDSTRUCT", SN_NOWARN)
set_name(0x800A2BA4, "BIRD_DoLanding__FP10BIRDSTRUCT", SN_NOWARN)
set_name(0x800A2C10, "PlaceFlock__FP10BIRDSTRUCT", SN_NOWARN)
set_name(0x800A2CFC, "ProcessFlock__FP10BIRDSTRUCT", SN_NOWARN)
set_name(0x800A2E2C, "InitBird__Fv", SN_NOWARN)
set_name(0x800A2F04, "ProcessBird__Fv", SN_NOWARN)
set_name(0x800A305C, "GetBirdFrame__FP10BIRDSTRUCT", SN_NOWARN)
set_name(0x800A30F4, "bscale__FP8POLY_FT4i", SN_NOWARN)
set_name(0x800A3224, "doshadow__FP10BIRDSTRUCTii", SN_NOWARN)
set_name(0x800A3330, "DrawLBird__Fv", SN_NOWARN)
set_name(0x800A353C, "PRIM_GetPrim__FPP8POLY_FT4_addr_800A353C", SN_NOWARN)
set_name(0x800A35B8, "PlayFMV__FPcii", SN_NOWARN)
set_name(0x800A368C, "play_movie", SN_NOWARN)
set_name(0x800A3748, "DisplayMonsterTypes__Fv", SN_NOWARN)
set_name(0x800A3750, "LoadKanjiFont__FPc", SN_NOWARN)
set_name(0x800A3840, "LoadKanjiIndex__FPc", SN_NOWARN)
set_name(0x800A3950, "FreeKanji__Fv", SN_NOWARN)
set_name(0x800A39D8, "LoadKanji__F10LANG_DB_NO", SN_NOWARN)
set_name(0x800A3AAC, "getb__FUs", SN_NOWARN)
set_name(0x800A3B1C, "_get_font__FPUsUsUs", SN_NOWARN)
set_name(0x800A3BEC, "KPrintChar__FUsUsUcUcUs", SN_NOWARN)
set_name(0x800A3D18, "writeblock__FP5block", SN_NOWARN)
set_name(0x800A3E00, "PAK_DoPak__FPUcPCUci", SN_NOWARN)
set_name(0x800A4040, "PAK_DoUnpak__FPUcPCUc", SN_NOWARN)
set_name(0x800A40E0, "fputc__5blockUc", SN_NOWARN)
set_name(0x800A4108, "HelpPad__Fv", SN_NOWARN)
set_name(0x800A4228, "InitHelp__Fv", SN_NOWARN)
set_name(0x800A426C, "GetControlKey__FiPb", SN_NOWARN)
set_name(0x800A4314, "CheckStr__FPcT0i", SN_NOWARN)
set_name(0x800A4470, "DisplayHelp__Fv", SN_NOWARN)
set_name(0x800A484C, "DrawHelp__Fv", SN_NOWARN)
set_name(0x800A48E8, "_GLOBAL__D_DrawHelp__Fv", SN_NOWARN)
set_name(0x800A4910, "_GLOBAL__I_DrawHelp__Fv", SN_NOWARN)
set_name(0x800A4938, "SetRGB__6DialogUcUcUc_addr_800A4938", SN_NOWARN)
set_name(0x800A4958, "SetBorder__6Dialogi_addr_800A4958", SN_NOWARN)
set_name(0x800A4960, "___6Dialog_addr_800A4960", SN_NOWARN)
set_name(0x800A4988, "__6Dialog_addr_800A4988", SN_NOWARN)
set_name(0x800A49E4, "GetTick__C4CPad_addr_800A49E4", SN_NOWARN)
set_name(0x800A4A0C, "GetDown__C4CPad_addr_800A4A0C", SN_NOWARN)
set_name(0x800A4A34, "SetPadTickMask__4CPadUs_addr_800A4A34", SN_NOWARN)
set_name(0x800A4A3C, "SetPadTick__4CPadUs_addr_800A4A3C", SN_NOWARN)
set_name(0x8002FEEC, "TrimCol__Fs_addr_8002FEEC", SN_NOWARN)
set_name(0x8002FF24, "DrawSpellCel__FllUclUc", SN_NOWARN)
set_name(0x80030A44, "SetSpellTrans__Fc", SN_NOWARN)
set_name(0x80030A50, "DrawSpellBookTSK__FP4TASK", SN_NOWARN)
set_name(0x80030AF8, "DrawSpeedSpellTSK__FP4TASK", SN_NOWARN)
set_name(0x80030B9C, "ToggleSpell__Fi", SN_NOWARN)
set_name(0x80030C50, "DrawSpellList__Fv", SN_NOWARN)
set_name(0x80031814, "SetSpell__Fi", SN_NOWARN)
set_name(0x800318E8, "AddPanelString__FPCci", SN_NOWARN)
set_name(0x80031998, "ClearPanel__Fv", SN_NOWARN)
set_name(0x800319C8, "InitPanelStr__Fv", SN_NOWARN)
set_name(0x800319E8, "InitControlPan__Fv", SN_NOWARN)
set_name(0x80031C08, "DrawCtrlPan__Fv", SN_NOWARN)
set_name(0x80031C34, "DoAutoMap__Fv", SN_NOWARN)
set_name(0x80031CA8, "CheckPanelInfo__Fv", SN_NOWARN)
set_name(0x800323C8, "FreeControlPan__Fv", SN_NOWARN)
set_name(0x800324D8, "CPrintString__FiPci", SN_NOWARN)
set_name(0x800325F4, "PrintInfo__Fv", SN_NOWARN)
set_name(0x800328B0, "DrawInfoBox__FP4RECT", SN_NOWARN)
set_name(0x80032F6C, "MY_PlrStringXY__Fv", SN_NOWARN)
set_name(0x800334BC, "ADD_PlrStringXY__FPCcc", SN_NOWARN)
set_name(0x80033564, "DrawPlus__Fii", SN_NOWARN)
set_name(0x800336CC, "ChrCheckValidButton__Fi", SN_NOWARN)
set_name(0x800337A8, "DrawArrows__Fv", SN_NOWARN)
set_name(0x800338A0, "BuildChr__Fv", SN_NOWARN)
set_name(0x80034B78, "DrawChr__Fv", SN_NOWARN)
set_name(0x80035018, "DrawChrTSK__FP4TASK", SN_NOWARN)
set_name(0x800350FC, "DrawLevelUpIcon__Fi", SN_NOWARN)
set_name(0x80035190, "CheckChrBtns__Fv", SN_NOWARN)
set_name(0x800354FC, "DrawDurIcon4Item__FPC10ItemStructii", SN_NOWARN)
set_name(0x80035580, "RedBack__Fv", SN_NOWARN)
set_name(0x80035668, "PrintSBookStr__FiiUcPCcUc", SN_NOWARN)
set_name(0x80035768, "GetSBookTrans__FiUc", SN_NOWARN)
set_name(0x80035980, "DrawSpellBook__Fv", SN_NOWARN)
set_name(0x80036364, "CheckSBook__Fv", SN_NOWARN)
set_name(0x80036598, "get_pieces_str__Fi", SN_NOWARN)
set_name(0x800365CC, "_GLOBAL__D_DrawLevelUpFlag", SN_NOWARN)
set_name(0x800365F4, "_GLOBAL__I_DrawLevelUpFlag", SN_NOWARN)
set_name(0x80036630, "GetTick__C4CPad_addr_80036630", SN_NOWARN)
set_name(0x80036658, "GetDown__C4CPad_addr_80036658", SN_NOWARN)
set_name(0x80036680, "SetPadTickMask__4CPadUs_addr_80036680", SN_NOWARN)
set_name(0x80036688, "SetPadTick__4CPadUs_addr_80036688", SN_NOWARN)
set_name(0x80036690, "SetRGB__6DialogUcUcUc_addr_80036690", SN_NOWARN)
set_name(0x800366B0, "SetBack__6Dialogi_addr_800366B0", SN_NOWARN)
set_name(0x800366B8, "SetBorder__6Dialogi_addr_800366B8", SN_NOWARN)
set_name(0x800366C0, "___6Dialog_addr_800366C0", SN_NOWARN)
set_name(0x800366E8, "__6Dialog_addr_800366E8", SN_NOWARN)
set_name(0x80036744, "GetPal__7TextDati_addr_80036744", SN_NOWARN)
set_name(0x80036760, "GetFr__7TextDati_addr_80036760", SN_NOWARN)
set_name(0x8003677C, "InitCursor__Fv", SN_NOWARN)
set_name(0x80036784, "FreeCursor__Fv", SN_NOWARN)
set_name(0x8003678C, "SetICursor__Fi", SN_NOWARN)
set_name(0x800367E8, "SetCursor__Fi", SN_NOWARN)
set_name(0x8003684C, "NewCursor__Fi", SN_NOWARN)
set_name(0x8003686C, "InitLevelCursor__Fv", SN_NOWARN)
set_name(0x800368CC, "CheckTown__Fv", SN_NOWARN)
set_name(0x80036B58, "CheckRportal__Fv", SN_NOWARN)
set_name(0x80036DB8, "CheckCursMove__Fv", SN_NOWARN)
set_name(0x80036DC0, "InitDead__Fv", SN_NOWARN)
set_name(0x80036FBC, "AddDead__Fiici", SN_NOWARN)
set_name(0x80037004, "FreeGameMem__Fv", SN_NOWARN)
set_name(0x8003703C, "start_game__FUi", SN_NOWARN)
set_name(0x80037098, "free_game__Fv", SN_NOWARN)
set_name(0x8003710C, "LittleStart__FUcUc", SN_NOWARN)
set_name(0x800371D0, "StartGame__FUcUc", SN_NOWARN)
set_name(0x800373B8, "run_game_loop__FUi", SN_NOWARN)
set_name(0x80037528, "TryIconCurs__Fv", SN_NOWARN)
set_name(0x80037904, "DisableInputWndProc__FUlUilUl", SN_NOWARN)
set_name(0x8003790C, "GM_Game__FUlUilUl", SN_NOWARN)
set_name(0x800379BC, "LoadLvlGFX__Fv", SN_NOWARN)
set_name(0x80037A74, "LoadMegaTiles__FPCc", SN_NOWARN)
set_name(0x80037B04, "LoadAllGFX__Fv", SN_NOWARN)
set_name(0x80037B24, "CreateLevel__Fi", SN_NOWARN)
set_name(0x80037C1C, "LoCreateLevel__FPv", SN_NOWARN)
set_name(0x80037DA4, "ClearOutDungeonMap__Fv", SN_NOWARN)
set_name(0x80037E80, "LoadGameLevel__FUci", SN_NOWARN)
set_name(0x800387DC, "game_logic__Fv", SN_NOWARN)
set_name(0x800388E8, "timeout_cursor__FUc", SN_NOWARN)
set_name(0x80038990, "game_loop__FUc", SN_NOWARN)
set_name(0x800389C8, "alloc_plr__Fv", SN_NOWARN)
set_name(0x800389D0, "plr_encrypt__FUc", SN_NOWARN)
set_name(0x800389D8, "assert_fail__FiPCcT1", SN_NOWARN)
set_name(0x800389F8, "assert_fail__FiPCc", SN_NOWARN)
set_name(0x80038A18, "app_fatal", SN_NOWARN)
set_name(0x80038A48, "DoMemCardFromFrontEnd__Fv", SN_NOWARN)
set_name(0x80038A70, "DoMemCardFromInGame__Fv", SN_NOWARN)
set_name(0x80038A98, "GetActiveTowner__Fi", SN_NOWARN)
set_name(0x80038AEC, "SetTownerGPtrs__FPUcPPUc", SN_NOWARN)
set_name(0x80038B0C, "NewTownerAnim__FiPUcii", SN_NOWARN)
set_name(0x80038B54, "InitTownerInfo__FilUciiici", SN_NOWARN)
set_name(0x80038CB4, "InitQstSnds__Fi", SN_NOWARN)
set_name(0x80038D6C, "InitSmith__Fv", SN_NOWARN)
set_name(0x80038E98, "InitBarOwner__Fv", SN_NOWARN)
set_name(0x80038FCC, "InitTownDead__Fv", SN_NOWARN)
set_name(0x800390FC, "InitWitch__Fv", SN_NOWARN)
set_name(0x8003922C, "InitBarmaid__Fv", SN_NOWARN)
set_name(0x8003935C, "InitBoy__Fv", SN_NOWARN)
set_name(0x80039494, "InitHealer__Fv", SN_NOWARN)
set_name(0x800395C4, "InitTeller__Fv", SN_NOWARN)
set_name(0x800396F4, "InitDrunk__Fv", SN_NOWARN)
set_name(0x80039824, "InitCows__Fv", SN_NOWARN)
set_name(0x80039AE8, "InitTowners__Fv", SN_NOWARN)
set_name(0x80039B74, "FreeTownerGFX__Fv", SN_NOWARN)
set_name(0x80039C18, "TownCtrlMsg__Fi", SN_NOWARN)
set_name(0x80039D48, "TownBlackSmith__Fv", SN_NOWARN)
set_name(0x80039D7C, "TownBarOwner__Fv", SN_NOWARN)
set_name(0x80039DB0, "TownDead__Fv", SN_NOWARN)
set_name(0x80039E98, "TownHealer__Fv", SN_NOWARN)
set_name(0x80039EC0, "TownStory__Fv", SN_NOWARN)
set_name(0x80039EE8, "TownDrunk__Fv", SN_NOWARN)
set_name(0x80039F10, "TownBoy__Fv", SN_NOWARN)
set_name(0x80039F38, "TownWitch__Fv", SN_NOWARN)
set_name(0x80039F60, "TownBarMaid__Fv", SN_NOWARN)
set_name(0x80039F88, "TownCow__Fv", SN_NOWARN)
set_name(0x80039FB0, "ProcessTowners__Fv", SN_NOWARN)
set_name(0x8003A200, "PlrHasItem__FiiRi", SN_NOWARN)
set_name(0x8003A2D4, "CowSFX__Fi", SN_NOWARN)
set_name(0x8003A3F0, "TownerTalk__Fii", SN_NOWARN)
set_name(0x8003A430, "TalkToTowner__Fii", SN_NOWARN)
set_name(0x8003B904, "effect_is_playing__Fi", SN_NOWARN)
set_name(0x8003B90C, "stream_stop__Fv", SN_NOWARN)
set_name(0x8003B960, "stream_play__FP4TSFXll", SN_NOWARN)
set_name(0x8003BA50, "stream_update__Fv", SN_NOWARN)
set_name(0x8003BA58, "sfx_stop__Fv", SN_NOWARN)
set_name(0x8003BA74, "InitMonsterSND__Fi", SN_NOWARN)
set_name(0x8003BACC, "FreeMonsterSnd__Fv", SN_NOWARN)
set_name(0x8003BAD4, "calc_snd_position__FiiPlT2", SN_NOWARN)
set_name(0x8003BBD8, "PlaySFX_priv__FP4TSFXUcii", SN_NOWARN)
set_name(0x8003BCD4, "PlayEffect__Fii", SN_NOWARN)
set_name(0x8003BE18, "RndSFX__Fi", SN_NOWARN)
set_name(0x8003BEB8, "PlaySFX__Fi", SN_NOWARN)
set_name(0x8003BEF8, "PlaySfxLoc__Fiii", SN_NOWARN)
set_name(0x8003BF4C, "sound_stop__Fv", SN_NOWARN)
set_name(0x8003BFE4, "sound_update__Fv", SN_NOWARN)
set_name(0x8003C018, "priv_sound_init__FUc", SN_NOWARN)
set_name(0x8003C05C, "sound_init__Fv", SN_NOWARN)
set_name(0x8003C104, "GetDirection__Fiiii", SN_NOWARN)
set_name(0x8003C1A8, "SetRndSeed__Fl", SN_NOWARN)
set_name(0x8003C1B8, "GetRndSeed__Fv", SN_NOWARN)
set_name(0x8003C200, "random__Fil", SN_NOWARN)
set_name(0x8003C26C, "DiabloAllocPtr__FUl", SN_NOWARN)
set_name(0x8003C2B8, "mem_free_dbg__FPv", SN_NOWARN)
set_name(0x8003C308, "LoadFileInMem__FPCcPUl", SN_NOWARN)
set_name(0x8003C310, "PlayInGameMovie__FPCc", SN_NOWARN)
set_name(0x8003C318, "Enter__9CCritSect", SN_NOWARN)
set_name(0x8003C320, "InitDiabloMsg__Fc", SN_NOWARN)
set_name(0x8003C3B4, "ClrDiabloMsg__Fv", SN_NOWARN)
set_name(0x8003C3E0, "DrawDiabloMsg__Fv", SN_NOWARN)
set_name(0x8003C4EC, "interface_msg_pump__Fv", SN_NOWARN)
set_name(0x8003C4F4, "ShowProgress__FUi", SN_NOWARN)
set_name(0x8003CA2C, "InitAllItemsUseable__Fv", SN_NOWARN)
set_name(0x8003CA64, "InitItemGFX__Fv", SN_NOWARN)
set_name(0x8003CA90, "ItemPlace__Fii", SN_NOWARN)
set_name(0x8003CB58, "AddInitItems__Fv", SN_NOWARN)
set_name(0x8003CD70, "InitItems__Fv", SN_NOWARN)
set_name(0x8003CF48, "CalcPlrItemVals__FiUc", SN_NOWARN)
set_name(0x8003DA1C, "CalcPlrScrolls__Fi", SN_NOWARN)
set_name(0x8003DD9C, "CalcPlrStaff__FP12PlayerStruct", SN_NOWARN)
set_name(0x8003DE38, "CalcSelfItems__Fi", SN_NOWARN)
set_name(0x8003DF98, "ItemMinStats__FPC12PlayerStructPC10ItemStruct", SN_NOWARN)
set_name(0x8003DFE4, "CalcPlrItemMin__Fi", SN_NOWARN)
set_name(0x8003E0C4, "CalcPlrBookVals__Fi", SN_NOWARN)
set_name(0x8003E358, "CalcPlrInv__FiUc", SN_NOWARN)
set_name(0x8003E41C, "SetPlrHandItem__FP10ItemStructi", SN_NOWARN)
set_name(0x8003E534, "GetPlrHandSeed__FP10ItemStruct", SN_NOWARN)
set_name(0x8003E560, "GetGoldSeed__FiP10ItemStruct", SN_NOWARN)
set_name(0x8003E6DC, "SetPlrHandSeed__FP10ItemStructi", SN_NOWARN)
set_name(0x8003E6E4, "SetPlrHandGoldCurs__FP10ItemStruct", SN_NOWARN)
set_name(0x8003E714, "CreatePlrItems__Fi", SN_NOWARN)
set_name(0x8003EB50, "ItemSpaceOk__Fii", SN_NOWARN)
set_name(0x8003EE28, "GetItemSpace__Fiic", SN_NOWARN)
set_name(0x8003F054, "GetSuperItemSpace__Fiic", SN_NOWARN)
set_name(0x8003F1BC, "GetSuperItemLoc__FiiRiT2", SN_NOWARN)
set_name(0x8003F284, "CalcItemValue__Fi", SN_NOWARN)
set_name(0x8003F33C, "GetBookSpell__Fii", SN_NOWARN)
set_name(0x8003F5A4, "GetStaffPower__FiiiUc", SN_NOWARN)
set_name(0x8003F794, "GetStaffSpell__FiiUc", SN_NOWARN)
set_name(0x8003FA48, "GetItemAttrs__Fiii", SN_NOWARN)
set_name(0x8003FF94, "RndPL__Fii", SN_NOWARN)
set_name(0x8003FFCC, "PLVal__Fiiiii", SN_NOWARN)
set_name(0x80040040, "SaveItemPower__Fiiiiiii", SN_NOWARN)
set_name(0x8004176C, "GetItemPower__FiiilUc", SN_NOWARN)
set_name(0x80041BD4, "GetItemBonus__FiiiiUc", SN_NOWARN)
set_name(0x80041CD0, "SetupItem__Fi", SN_NOWARN)
set_name(0x80041DD8, "RndItem__Fi", SN_NOWARN)
set_name(0x8004201C, "RndUItem__Fi", SN_NOWARN)
set_name(0x8004225C, "RndAllItems__Fv", SN_NOWARN)
set_name(0x800423D0, "RndTypeItems__Fii", SN_NOWARN)
set_name(0x800424D0, "CheckUnique__FiiiUc", SN_NOWARN)
set_name(0x80042680, "GetUniqueItem__Fii", SN_NOWARN)
set_name(0x80042928, "SpawnUnique__Fiii", SN_NOWARN)
set_name(0x80042A60, "ItemRndDur__Fi", SN_NOWARN)
set_name(0x80042AF0, "SetupAllItems__FiiiiiUcUcUc", SN_NOWARN)
set_name(0x80042DFC, "SpawnItem__FiiiUc", SN_NOWARN)
set_name(0x80043044, "CreateItem__Fiii", SN_NOWARN)
set_name(0x80043174, "CreateRndItem__FiiUcUcUc", SN_NOWARN)
set_name(0x800432BC, "SetupAllUseful__Fiii", SN_NOWARN)
set_name(0x80043394, "CreateRndUseful__FiiiUc", SN_NOWARN)
set_name(0x80043454, "CreateTypeItem__FiiUciiUcUc", SN_NOWARN)
set_name(0x80043598, "RecreateEar__FiUsiUciiiiii", SN_NOWARN)
set_name(0x80043784, "SpawnQuestItem__Fiiiii", SN_NOWARN)
set_name(0x800439F8, "SpawnRock__Fv", SN_NOWARN)
set_name(0x80043BB8, "RespawnItem__FiUc", SN_NOWARN)
set_name(0x80043D70, "DeleteItem__Fii", SN_NOWARN)
set_name(0x80043DC4, "ItemDoppel__Fv", SN_NOWARN)
set_name(0x80043E8C, "ProcessItems__Fv", SN_NOWARN)
set_name(0x80044094, "FreeItemGFX__Fv", SN_NOWARN)
set_name(0x8004409C, "GetItemStr__Fi", SN_NOWARN)
set_name(0x80044244, "CheckIdentify__Fii", SN_NOWARN)
set_name(0x80044334, "RepairItem__FP10ItemStructi", SN_NOWARN)
set_name(0x80044404, "DoRepair__Fii", SN_NOWARN)
set_name(0x800444C8, "RechargeItem__FP10ItemStructi", SN_NOWARN)
set_name(0x80044538, "DoRecharge__Fii", SN_NOWARN)
set_name(0x80044638, "PrintItemOil__Fc", SN_NOWARN)
set_name(0x8004472C, "PrintItemPower__FcPC10ItemStruct", SN_NOWARN)
set_name(0x80044DE8, "PrintUString__FiiUcPcc", SN_NOWARN)
set_name(0x80044DF0, "PrintItemMisc__FPC10ItemStruct", SN_NOWARN)
set_name(0x80044F7C, "PrintItemDetails__FPC10ItemStruct", SN_NOWARN)
set_name(0x800452EC, "PrintItemDur__FPC10ItemStruct", SN_NOWARN)
set_name(0x800455FC, "CastScroll__Fii", SN_NOWARN)
set_name(0x80045614, "UseItem__Fiii", SN_NOWARN)
set_name(0x80045C2C, "StoreStatOk__FP10ItemStruct", SN_NOWARN)
set_name(0x80045CC0, "PremiumItemOk__Fi", SN_NOWARN)
set_name(0x80045D3C, "RndPremiumItem__Fii", SN_NOWARN)
set_name(0x80045E44, "SpawnOnePremium__Fii", SN_NOWARN)
set_name(0x80046064, "SpawnPremium__Fi", SN_NOWARN)
set_name(0x800462A8, "WitchBookLevel__Fi", SN_NOWARN)
set_name(0x800463F8, "SpawnStoreGold__Fv", SN_NOWARN)
set_name(0x8004647C, "RecalcStoreStats__Fv", SN_NOWARN)
set_name(0x8004661C, "ItemNoFlippy__Fv", SN_NOWARN)
set_name(0x80046680, "CreateSpellBook__FiiiUcUc", SN_NOWARN)
set_name(0x80046810, "CreateMagicArmor__FiiiiUcUc", SN_NOWARN)
set_name(0x8004698C, "CreateMagicWeapon__FiiiiUcUc", SN_NOWARN)
set_name(0x80046B08, "DrawUniqueInfo__Fv", SN_NOWARN)
set_name(0x80046C7C, "MakeItemStr__FP10ItemStructUsUs", SN_NOWARN)
set_name(0x80046E7C, "veclen2__Fii", SN_NOWARN)
set_name(0x80046EE4, "set_light_bands__Fv", SN_NOWARN)
set_name(0x80046F30, "SetLightFX__FiisssUcUcUc", SN_NOWARN)
set_name(0x80046F9C, "DoLighting__Fiiii", SN_NOWARN)
set_name(0x80047C4C, "DoUnLight__Fv", SN_NOWARN)
set_name(0x80047E94, "DoUnVision__Fiii", SN_NOWARN)
set_name(0x80047F58, "DoVision__FiiiUcUc", SN_NOWARN)
set_name(0x80048468, "FreeLightTable__Fv", SN_NOWARN)
set_name(0x80048470, "InitLightTable__Fv", SN_NOWARN)
set_name(0x80048478, "MakeLightTable__Fv", SN_NOWARN)
set_name(0x80048480, "InitLightMax__Fv", SN_NOWARN)
set_name(0x800484A4, "InitLighting__Fv", SN_NOWARN)
set_name(0x800484E8, "AddLight__Fiii", SN_NOWARN)
set_name(0x80048554, "AddUnLight__Fi", SN_NOWARN)
set_name(0x80048584, "ChangeLightRadius__Fii", SN_NOWARN)
set_name(0x800485B0, "ChangeLightXY__Fiii", SN_NOWARN)
set_name(0x800485EC, "light_fix__Fi", SN_NOWARN)
set_name(0x800485F4, "ChangeLightOff__Fiii", SN_NOWARN)
set_name(0x80048628, "ChangeLight__Fiiii", SN_NOWARN)
set_name(0x80048660, "ChangeLightColour__Fii", SN_NOWARN)
set_name(0x80048688, "LightLevel__Fi", SN_NOWARN)
set_name(0x800486A0, "ProcessLightList__Fv", SN_NOWARN)
set_name(0x800487C4, "SavePreLighting__Fv", SN_NOWARN)
set_name(0x800487CC, "InitVision__Fv", SN_NOWARN)
set_name(0x8004881C, "AddVision__FiiiUc", SN_NOWARN)
set_name(0x80048898, "ChangeVisionRadius__Fii", SN_NOWARN)
set_name(0x8004894C, "ChangeVisionXY__Fiii", SN_NOWARN)
set_name(0x800489CC, "ProcessVisionList__Fv", SN_NOWARN)
set_name(0x80048BCC, "FreeQuestText__Fv", SN_NOWARN)
set_name(0x80048BD4, "InitQuestText__Fv", SN_NOWARN)
set_name(0x80048BE0, "CalcTextSpeed__FPCc", SN_NOWARN)
set_name(0x80048D30, "InitQTextMsg__Fi", SN_NOWARN)
set_name(0x80048F88, "DrawQTextBack__Fv", SN_NOWARN)
set_name(0x80049110, "DrawQTextTSK__FP4TASK", SN_NOWARN)
set_name(0x80049274, "DrawQText__Fv", SN_NOWARN)
set_name(0x8004967C, "_GLOBAL__D_QBack", SN_NOWARN)
set_name(0x800496A4, "_GLOBAL__I_QBack", SN_NOWARN)
set_name(0x800496CC, "SetRGB__6DialogUcUcUc_addr_800496CC", SN_NOWARN)
set_name(0x800496EC, "SetBorder__6Dialogi_addr_800496EC", SN_NOWARN)
set_name(0x800496F4, "___6Dialog_addr_800496F4", SN_NOWARN)
set_name(0x8004971C, "__6Dialog_addr_8004971C", SN_NOWARN)
set_name(0x80049778, "GetDown__C4CPad_addr_80049778", SN_NOWARN)
set_name(0x800497A0, "nullmissile__Fiiiiiicii", SN_NOWARN)
set_name(0x800497A8, "FuncNULL__FP13MissileStructiii", SN_NOWARN)
set_name(0x800497B0, "delta_init__Fv", SN_NOWARN)
set_name(0x80049808, "delta_kill_monster__FiUcUcUc", SN_NOWARN)
set_name(0x800498A4, "delta_monster_hp__FilUc", SN_NOWARN)
set_name(0x80049928, "delta_sync_golem__FPC9TCmdGolemiUc", SN_NOWARN)
set_name(0x800499B8, "delta_leave_sync__FUc", SN_NOWARN)
set_name(0x80049CE4, "delta_sync_object__FiUcUc", SN_NOWARN)
set_name(0x80049D44, "delta_get_item__FPC9TCmdGItemUc", SN_NOWARN)
set_name(0x80049F08, "delta_put_item__FPC9TCmdPItemiiUc", SN_NOWARN)
set_name(0x8004A090, "delta_portal_inited__Fi", SN_NOWARN)
set_name(0x8004A0B4, "delta_quest_inited__Fi", SN_NOWARN)
set_name(0x8004A0D8, "DeltaAddItem__Fi", SN_NOWARN)
set_name(0x8004A2EC, "DeltaExportData__FPc", SN_NOWARN)
set_name(0x8004A384, "DeltaImportData__FPc", SN_NOWARN)
set_name(0x8004A428, "DeltaSaveLevel__Fv", SN_NOWARN)
set_name(0x8004A524, "NetSendCmd__FUcUc", SN_NOWARN)
set_name(0x8004A54C, "NetSendCmdGolem__FUcUcUcUclUc", SN_NOWARN)
set_name(0x8004A598, "NetSendCmdLoc__FUcUcUcUc", SN_NOWARN)
set_name(0x8004A5C8, "NetSendCmdLocParam1__FUcUcUcUcUs", SN_NOWARN)
set_name(0x8004A600, "NetSendCmdLocParam2__FUcUcUcUcUsUs", SN_NOWARN)
set_name(0x8004A640, "NetSendCmdLocParam3__FUcUcUcUcUsUsUs", SN_NOWARN)
set_name(0x8004A688, "NetSendCmdParam1__FUcUcUs", SN_NOWARN)
set_name(0x8004A6B4, "NetSendCmdParam2__FUcUcUsUs", SN_NOWARN)
set_name(0x8004A6E4, "NetSendCmdParam3__FUcUcUsUsUs", SN_NOWARN)
set_name(0x8004A71C, "NetSendCmdQuest__FUcUc", SN_NOWARN)
set_name(0x8004A790, "NetSendCmdGItem__FUcUcUcUcUc", SN_NOWARN)
set_name(0x8004A8C4, "NetSendCmdGItem2__FUcUcUcUcPC9TCmdGItem", SN_NOWARN)
set_name(0x8004A940, "NetSendCmdReq2__FUcUcUcPC9TCmdGItem", SN_NOWARN)
set_name(0x8004A998, "NetSendCmdExtra__FPC9TCmdGItem", SN_NOWARN)
set_name(0x8004AA00, "NetSendCmdPItem__FUcUcUcUc", SN_NOWARN)
set_name(0x8004AB08, "NetSendCmdChItem__FUcUc", SN_NOWARN)
set_name(0x8004ABAC, "NetSendCmdDelItem__FUcUc", SN_NOWARN)
set_name(0x8004ABDC, "NetSendCmdDItem__FUci", SN_NOWARN)
set_name(0x8004ACF0, "i_own_level__Fi", SN_NOWARN)
set_name(0x8004ACF8, "NetSendCmdDamage__FUcUcUl", SN_NOWARN)
set_name(0x8004AD2C, "delta_open_portal__FiUcUcUcUcUc", SN_NOWARN)
set_name(0x8004AD88, "delta_close_portal__Fi", SN_NOWARN)
set_name(0x8004ADC8, "check_update_plr__Fi", SN_NOWARN)
set_name(0x8004ADD0, "On_WALKXY__FPC4TCmdi", SN_NOWARN)
set_name(0x8004AE50, "On_ADDSTR__FPC4TCmdi", SN_NOWARN)
set_name(0x8004AE80, "On_ADDMAG__FPC4TCmdi", SN_NOWARN)
set_name(0x8004AEB0, "On_ADDDEX__FPC4TCmdi", SN_NOWARN)
set_name(0x8004AEE0, "On_ADDVIT__FPC4TCmdi", SN_NOWARN)
set_name(0x8004AF10, "On_SBSPELL__FPC4TCmdi", SN_NOWARN)
set_name(0x8004AF84, "On_GOTOGETITEM__FPC4TCmdi", SN_NOWARN)
set_name(0x8004B00C, "On_REQUESTGITEM__FPC4TCmdi", SN_NOWARN)
set_name(0x8004B14C, "On_GETITEM__FPC4TCmdi", SN_NOWARN)
set_name(0x8004B31C, "On_GOTOAGETITEM__FPC4TCmdi", SN_NOWARN)
set_name(0x8004B3A4, "On_REQUESTAGITEM__FPC4TCmdi", SN_NOWARN)
set_name(0x8004B4D8, "On_AGETITEM__FPC4TCmdi", SN_NOWARN)
set_name(0x8004B6A0, "On_ITEMEXTRA__FPC4TCmdi", SN_NOWARN)
set_name(0x8004B6EC, "On_PUTITEM__FPC4TCmdi", SN_NOWARN)
set_name(0x8004B804, "On_SYNCPUTITEM__FPC4TCmdi", SN_NOWARN)
set_name(0x8004B904, "On_RESPAWNITEM__FPC4TCmdi", SN_NOWARN)
set_name(0x8004B95C, "On_SATTACKXY__FPC4TCmdi", SN_NOWARN)
set_name(0x8004B9E8, "On_SPELLXYD__FPC4TCmdi", SN_NOWARN)
set_name(0x8004BAD0, "On_SPELLXY__FPC4TCmdi", SN_NOWARN)
set_name(0x8004BBA8, "On_TSPELLXY__FPC4TCmdi", SN_NOWARN)
set_name(0x8004BC84, "On_OPOBJXY__FPC4TCmdi", SN_NOWARN)
set_name(0x8004BD64, "On_DISARMXY__FPC4TCmdi", SN_NOWARN)
set_name(0x8004BE44, "On_OPOBJT__FPC4TCmdi", SN_NOWARN)
set_name(0x8004BE90, "On_ATTACKID__FPC4TCmdi", SN_NOWARN)
set_name(0x8004BFC4, "On_SPELLID__FPC4TCmdi", SN_NOWARN)
set_name(0x8004C08C, "On_SPELLPID__FPC4TCmdi", SN_NOWARN)
set_name(0x8004C14C, "On_TSPELLID__FPC4TCmdi", SN_NOWARN)
set_name(0x8004C210, "On_TSPELLPID__FPC4TCmdi", SN_NOWARN)
set_name(0x8004C2D4, "On_KNOCKBACK__FPC4TCmdi", SN_NOWARN)
set_name(0x8004C31C, "On_RESURRECT__FPC4TCmdi", SN_NOWARN)
set_name(0x8004C354, "On_HEALOTHER__FPC4TCmdi", SN_NOWARN)
set_name(0x8004C37C, "On_TALKXY__FPC4TCmdi", SN_NOWARN)
set_name(0x8004C404, "On_NEWLVL__FPC4TCmdi", SN_NOWARN)
set_name(0x8004C434, "On_WARP__FPC4TCmdi", SN_NOWARN)
set_name(0x8004C528, "On_MONSTDEATH__FPC4TCmdi", SN_NOWARN)
set_name(0x8004C594, "On_KILLGOLEM__FPC4TCmdi", SN_NOWARN)
set_name(0x8004C600, "On_AWAKEGOLEM__FPC4TCmdi", SN_NOWARN)
set_name(0x8004C718, "On_MONSTDAMAGE__FPC4TCmdi", SN_NOWARN)
set_name(0x8004C804, "On_PLRDEAD__FPC4TCmdi", SN_NOWARN)
set_name(0x8004C84C, "On_PLRDAMAGE__FPC4TCmdi", SN_NOWARN)
set_name(0x8004CA08, "On_OPENDOOR__FPC4TCmdi", SN_NOWARN)
set_name(0x8004CA84, "On_CLOSEDOOR__FPC4TCmdi", SN_NOWARN)
set_name(0x8004CB00, "On_OPERATEOBJ__FPC4TCmdi", SN_NOWARN)
set_name(0x8004CB7C, "On_PLROPOBJ__FPC4TCmdi", SN_NOWARN)
set_name(0x8004CBF8, "On_BREAKOBJ__FPC4TCmdi", SN_NOWARN)
set_name(0x8004CC70, "On_CHANGEPLRITEMS__FPC4TCmdi", SN_NOWARN)
set_name(0x8004CC78, "On_DELPLRITEMS__FPC4TCmdi", SN_NOWARN)
set_name(0x8004CC80, "On_PLRLEVEL__FPC4TCmdi", SN_NOWARN)
set_name(0x8004CC88, "On_DROPITEM__FPC4TCmdi", SN_NOWARN)
set_name(0x8004CCE0, "On_PLAYER_JOINLEVEL__FPC4TCmdi", SN_NOWARN)
set_name(0x8004CF58, "On_ACTIVATEPORTAL__FPC4TCmdi", SN_NOWARN)
set_name(0x8004D0E8, "On_DEACTIVATEPORTAL__FPC4TCmdi", SN_NOWARN)
set_name(0x8004D138, "On_RETOWN__FPC4TCmdi", SN_NOWARN)
set_name(0x8004D180, "On_SETSTR__FPC4TCmdi", SN_NOWARN)
set_name(0x8004D1C0, "On_SETDEX__FPC4TCmdi", SN_NOWARN)
set_name(0x8004D200, "On_SETMAG__FPC4TCmdi", SN_NOWARN)
set_name(0x8004D240, "On_SETVIT__FPC4TCmdi", SN_NOWARN)
set_name(0x8004D280, "On_SYNCQUEST__FPC4TCmdi", SN_NOWARN)
set_name(0x8004D2C8, "On_ENDSHIELD__FPC4TCmdi", SN_NOWARN)
set_name(0x8004D3A4, "ParseCmd__FiPC4TCmd", SN_NOWARN)
set_name(0x8004D7C4, "GetDLevel__Fib", SN_NOWARN)
set_name(0x8004D80C, "ReleaseDLevel__FP6DLevel", SN_NOWARN)
set_name(0x8004D838, "MSG_ClearOutCompMap__Fv", SN_NOWARN)
set_name(0x8004D860, "_GLOBAL__D_deltaload", SN_NOWARN)
set_name(0x8004D888, "_GLOBAL__I_deltaload", SN_NOWARN)
set_name(0x8004D8E8, "__10CrunchComp", SN_NOWARN)
set_name(0x8004D920, "__7PakComp", SN_NOWARN)
set_name(0x8004D958, "__6NoComp", SN_NOWARN)
set_name(0x8004D990, "GetSize__14CompressedLevs", SN_NOWARN)
set_name(0x8004D9CC, "__9CompClass", SN_NOWARN)
set_name(0x8004D9E0, "DoDecomp__C10CrunchCompPUcPCUcii", SN_NOWARN)
set_name(0x8004DA08, "DoComp__C10CrunchCompPUcPCUci", SN_NOWARN)
set_name(0x8004DA30, "DoDecomp__C7PakCompPUcPCUcii", SN_NOWARN)
set_name(0x8004DA54, "DoComp__C7PakCompPUcPCUci", SN_NOWARN)
set_name(0x8004DA7C, "DoDecomp__C6NoCompPUcPCUcii", SN_NOWARN)
set_name(0x8004DAA8, "DoComp__C6NoCompPUcPCUci", SN_NOWARN)
set_name(0x8004DAE0, "NetSendLoPri__FPCUcUc", SN_NOWARN)
set_name(0x8004DB0C, "InitLevelType__Fi", SN_NOWARN)
set_name(0x8004DB58, "SetupLocalCoords__Fv", SN_NOWARN)
set_name(0x8004DCE8, "InitNewSeed__Fl", SN_NOWARN)
set_name(0x8004DD5C, "NetInit__FUcPUc", SN_NOWARN)
set_name(0x8004DFB0, "PostAddL1Door__Fiiii", SN_NOWARN)
set_name(0x8004E0E8, "PostAddL2Door__Fiiii", SN_NOWARN)
set_name(0x8004E234, "PostAddArmorStand__Fi", SN_NOWARN)
set_name(0x8004E2BC, "PostTorchLocOK__Fii", SN_NOWARN)
set_name(0x8004E2FC, "PostAddObjLight__Fii", SN_NOWARN)
set_name(0x8004E3A0, "PostObjObjAddSwitch__Fiiii", SN_NOWARN)
set_name(0x8004E430, "InitObjectGFX__Fv", SN_NOWARN)
set_name(0x8004E64C, "FreeObjectGFX__Fv", SN_NOWARN)
set_name(0x8004E658, "DeleteObject__Fii", SN_NOWARN)
set_name(0x8004E710, "SetupObject__Fiiii", SN_NOWARN)
set_name(0x8004E994, "SetObjMapRange__Fiiiiii", SN_NOWARN)
set_name(0x8004E9F4, "SetBookMsg__Fii", SN_NOWARN)
set_name(0x8004EA1C, "AddObject__Fiii", SN_NOWARN)
set_name(0x8004EB28, "PostAddObject__Fiii", SN_NOWARN)
set_name(0x8004EC34, "Obj_Light__Fii", SN_NOWARN)
set_name(0x8004EE44, "Obj_Circle__Fi", SN_NOWARN)
set_name(0x8004F180, "Obj_StopAnim__Fi", SN_NOWARN)
set_name(0x8004F1E4, "DrawExpl__Fiiiiiccc", SN_NOWARN)
set_name(0x8004F4C0, "DrawObjExpl__FP12ObjectStructiii", SN_NOWARN)
set_name(0x8004F530, "Obj_Door__Fi", SN_NOWARN)
set_name(0x8004F6C4, "Obj_Sarc__Fi", SN_NOWARN)
set_name(0x8004F710, "ActivateTrapLine__Fii", SN_NOWARN)
set_name(0x8004F834, "Obj_FlameTrap__Fi", SN_NOWARN)
set_name(0x8004FB04, "Obj_Trap__Fi", SN_NOWARN)
set_name(0x8004FE54, "Obj_BCrossDamage__Fi", SN_NOWARN)
set_name(0x800500E4, "ProcessObjects__Fv", SN_NOWARN)
set_name(0x800503C0, "ObjSetMicro__Fiii", SN_NOWARN)
set_name(0x800503F8, "ObjSetMini__Fiii", SN_NOWARN)
set_name(0x800504E0, "ObjL1Special__Fiiii", SN_NOWARN)
set_name(0x800504E8, "ObjL2Special__Fiiii", SN_NOWARN)
set_name(0x800504F0, "DoorSet__Fiii", SN_NOWARN)
set_name(0x80050770, "RedoPlayerVision__Fv", SN_NOWARN)
set_name(0x80050814, "OperateL1RDoor__FiiUc", SN_NOWARN)
set_name(0x80050BB8, "OperateL1LDoor__FiiUc", SN_NOWARN)
set_name(0x80050F90, "OperateL2RDoor__FiiUc", SN_NOWARN)
set_name(0x80051328, "OperateL2LDoor__FiiUc", SN_NOWARN)
set_name(0x800516C0, "OperateL3RDoor__FiiUc", SN_NOWARN)
set_name(0x800519C8, "OperateL3LDoor__FiiUc", SN_NOWARN)
set_name(0x80051CD0, "MonstCheckDoors__Fi", SN_NOWARN)
set_name(0x800521CC, "PostAddL1Objs__Fiiii", SN_NOWARN)
set_name(0x80052304, "PostAddL2Objs__Fiiii", SN_NOWARN)
set_name(0x80052418, "ObjChangeMap__Fiiii", SN_NOWARN)
set_name(0x800525D0, "DRLG_MRectTrans__Fiiii", SN_NOWARN)
set_name(0x8005267C, "ObjChangeMapResync__Fiiii", SN_NOWARN)
set_name(0x80052800, "OperateL1Door__FiiUc", SN_NOWARN)
set_name(0x8005295C, "OperateLever__Fii", SN_NOWARN)
set_name(0x80052B48, "OperateBook__Fii", SN_NOWARN)
set_name(0x80053070, "OperateBookLever__Fii", SN_NOWARN)
set_name(0x80053600, "OperateSChambBk__Fii", SN_NOWARN)
set_name(0x80053840, "OperateChest__FiiUc", SN_NOWARN)
set_name(0x80053C10, "OperateMushPatch__Fii", SN_NOWARN)
set_name(0x80053DDC, "OperateInnSignChest__Fii", SN_NOWARN)
set_name(0x80053F90, "OperateSlainHero__FiiUc", SN_NOWARN)
set_name(0x800541E4, "OperateTrapLvr__Fi", SN_NOWARN)
set_name(0x800543B4, "OperateSarc__FiiUc", SN_NOWARN)
set_name(0x8005456C, "OperateL2Door__FiiUc", SN_NOWARN)
set_name(0x800546C8, "OperateL3Door__FiiUc", SN_NOWARN)
set_name(0x80054824, "LoadMapObjs__FPUcii", SN_NOWARN)
set_name(0x8005492C, "OperatePedistal__Fii", SN_NOWARN)
set_name(0x80054E44, "TryDisarm__Fii", SN_NOWARN)
set_name(0x80055008, "ItemMiscIdIdx__Fi", SN_NOWARN)
set_name(0x80055078, "OperateShrine__Fiii", SN_NOWARN)
set_name(0x80057648, "OperateSkelBook__FiiUc", SN_NOWARN)
set_name(0x800577C4, "OperateBookCase__FiiUc", SN_NOWARN)
set_name(0x800579C8, "OperateDecap__FiiUc", SN_NOWARN)
set_name(0x80057AB0, "OperateArmorStand__FiiUc", SN_NOWARN)
set_name(0x80057C20, "FindValidShrine__Fi", SN_NOWARN)
set_name(0x80057D10, "OperateGoatShrine__Fiii", SN_NOWARN)
set_name(0x80057DB8, "OperateCauldron__Fiii", SN_NOWARN)
set_name(0x80057E5C, "OperateFountains__Fii", SN_NOWARN)
set_name(0x80058408, "OperateWeaponRack__FiiUc", SN_NOWARN)
set_name(0x800585B4, "OperateStoryBook__Fii", SN_NOWARN)
set_name(0x800586A4, "OperateLazStand__Fii", SN_NOWARN)
set_name(0x80058808, "OperateObject__FiiUc", SN_NOWARN)
set_name(0x80058C40, "SyncOpL1Door__Fiii", SN_NOWARN)
set_name(0x80058D54, "SyncOpL2Door__Fiii", SN_NOWARN)
set_name(0x80058E68, "SyncOpL3Door__Fiii", SN_NOWARN)
set_name(0x80058F7C, "SyncOpObject__Fiii", SN_NOWARN)
set_name(0x8005917C, "BreakCrux__Fi", SN_NOWARN)
set_name(0x8005936C, "BreakBarrel__FiiiUcUc", SN_NOWARN)
set_name(0x800598C0, "BreakObject__Fii", SN_NOWARN)
set_name(0x80059A20, "SyncBreakObj__Fii", SN_NOWARN)
set_name(0x80059A7C, "SyncL1Doors__Fi", SN_NOWARN)
set_name(0x80059B94, "SyncCrux__Fi", SN_NOWARN)
set_name(0x80059CCC, "SyncLever__Fi", SN_NOWARN)
set_name(0x80059D48, "SyncQSTLever__Fi", SN_NOWARN)
set_name(0x80059E54, "SyncPedistal__Fi", SN_NOWARN)
set_name(0x80059FB0, "SyncL2Doors__Fi", SN_NOWARN)
set_name(0x8005A118, "SyncL3Doors__Fi", SN_NOWARN)
set_name(0x8005A244, "SyncObjectAnim__Fi", SN_NOWARN)
set_name(0x8005A384, "GetObjectStr__Fi", SN_NOWARN)
set_name(0x8005A7A0, "RestoreObjectLight__Fv", SN_NOWARN)
set_name(0x8005A9BC, "GetNumOfFrames__7TextDatii_addr_8005A9BC", SN_NOWARN)
set_name(0x8005A9F4, "GetCreature__7TextDati_addr_8005A9F4", SN_NOWARN)
set_name(0x8005AA6C, "GetNumOfCreatures__7TextDat_addr_8005AA6C", SN_NOWARN)
set_name(0x8005AA80, "FindPath__FPFiii_UciiiiiPc", SN_NOWARN)
set_name(0x8005AA88, "game_2_ui_class__FPC12PlayerStruct", SN_NOWARN)
set_name(0x8005AAB4, "game_2_ui_player__FPC12PlayerStructP11_uiheroinfoUc", SN_NOWARN)
set_name(0x8005AB68, "SetupLocalPlayer__Fv", SN_NOWARN)
set_name(0x8005AB88, "ismyplr__FP12PlayerStruct", SN_NOWARN)
set_name(0x8005ABCC, "plrind__FP12PlayerStruct", SN_NOWARN)
set_name(0x8005ABE0, "InitPlayerGFX__FP12PlayerStruct", SN_NOWARN)
set_name(0x8005AC00, "FreePlayerGFX__FP12PlayerStruct", SN_NOWARN)
set_name(0x8005AC08, "NewPlrAnim__FP12PlayerStructiii", SN_NOWARN)
set_name(0x8005AC24, "ClearPlrPVars__FP12PlayerStruct", SN_NOWARN)
set_name(0x8005AC40, "SetPlrAnims__FP12PlayerStruct", SN_NOWARN)
set_name(0x8005AE7C, "CreatePlayer__FP12PlayerStructc", SN_NOWARN)
set_name(0x8005B298, "CalcStatDiff__FP12PlayerStruct", SN_NOWARN)
set_name(0x8005B300, "NextPlrLevel__FP12PlayerStruct", SN_NOWARN)
set_name(0x8005B470, "AddPlrExperience__FP12PlayerStructil", SN_NOWARN)
set_name(0x8005B67C, "AddPlrMonstExper__Filc", SN_NOWARN)
set_name(0x8005B700, "InitPlayer__FP12PlayerStructUc", SN_NOWARN)
set_name(0x8005BAA0, "InitMultiView__Fv", SN_NOWARN)
set_name(0x8005BAA8, "CheckLeighSolid__Fii", SN_NOWARN)
set_name(0x8005BB40, "SolidLoc__Fii", SN_NOWARN)
set_name(0x8005BBC8, "PlrClrTrans__Fii", SN_NOWARN)
set_name(0x8005BC5C, "PlrDoTrans__Fii", SN_NOWARN)
set_name(0x8005BD50, "SetPlayerOld__FP12PlayerStruct", SN_NOWARN)
set_name(0x8005BD64, "StartStand__FP12PlayerStructi", SN_NOWARN)
set_name(0x8005BDF0, "StartWalkStand__FP12PlayerStruct", SN_NOWARN)
set_name(0x8005BE54, "PM_ChangeLightOff__FP12PlayerStruct", SN_NOWARN)
set_name(0x8005BE90, "PM_ChangeOffset__FP12PlayerStruct", SN_NOWARN)
set_name(0x8005BEBC, "StartAttack__FP12PlayerStructi", SN_NOWARN)
set_name(0x8005BFF4, "StartPlrBlock__FP12PlayerStructi", SN_NOWARN)
set_name(0x8005C08C, "StartSpell__FP12PlayerStructiii", SN_NOWARN)
set_name(0x8005C228, "RemovePlrFromMap__FP12PlayerStruct", SN_NOWARN)
set_name(0x8005C348, "StartPlrHit__FP12PlayerStructiUc", SN_NOWARN)
set_name(0x8005C468, "RespawnDeadItem__FP10ItemStructii", SN_NOWARN)
set_name(0x8005C604, "PlrDeadItem__FP12PlayerStructP10ItemStructii", SN_NOWARN)
set_name(0x8005C7CC, "StartPlayerKill__FP12PlayerStructi", SN_NOWARN)
set_name(0x8005CB14, "DropHalfPlayersGold__FP12PlayerStruct", SN_NOWARN)
set_name(0x8005CF5C, "StartPlrKill__FP12PlayerStructi", SN_NOWARN)
set_name(0x8005D0A8, "SyncPlrKill__FP12PlayerStructi", SN_NOWARN)
set_name(0x8005D0C8, "RemovePlrMissiles__FP12PlayerStruct", SN_NOWARN)
set_name(0x8005D3B0, "InitLevelChange__FP12PlayerStruct", SN_NOWARN)
set_name(0x8005D474, "StartNewLvl__FP12PlayerStructii", SN_NOWARN)
set_name(0x8005D5B8, "RestartTownLvl__FP12PlayerStruct", SN_NOWARN)
set_name(0x8005D648, "StartWarpLvl__FP12PlayerStructi", SN_NOWARN)
set_name(0x8005D704, "PM_DoStand__FP12PlayerStruct", SN_NOWARN)
set_name(0x8005D70C, "ChkPlrOffsets__Fiiii", SN_NOWARN)
set_name(0x8005D794, "PM_DoWalk__FP12PlayerStruct", SN_NOWARN)
set_name(0x8005DB00, "WeaponDur__FP12PlayerStructi", SN_NOWARN)
set_name(0x8005DCA0, "PlrHitMonst__FP12PlayerStructi", SN_NOWARN)
set_name(0x8005E2D0, "PlrHitPlr__FP12PlayerStructc", SN_NOWARN)
set_name(0x8005E680, "PlrHitObj__FP12PlayerStructii", SN_NOWARN)
set_name(0x8005E710, "PM_DoAttack__FP12PlayerStruct", SN_NOWARN)
set_name(0x8005EA9C, "PM_DoRangeAttack__FP12PlayerStruct", SN_NOWARN)
set_name(0x8005EB9C, "ShieldDur__FP12PlayerStruct", SN_NOWARN)
set_name(0x8005EC60, "PM_DoBlock__FP12PlayerStruct", SN_NOWARN)
set_name(0x8005ED00, "do_spell_anim__FiiiP12PlayerStruct", SN_NOWARN)
set_name(0x8005FCB8, "PM_DoSpell__FP12PlayerStruct", SN_NOWARN)
set_name(0x80060040, "ArmorDur__FP12PlayerStruct", SN_NOWARN)
set_name(0x80060140, "PM_DoGotHit__FP12PlayerStruct", SN_NOWARN)
set_name(0x800601BC, "PM_DoDeath__FP12PlayerStruct", SN_NOWARN)
set_name(0x8006030C, "PM_DoNewLvl__FP12PlayerStruct", SN_NOWARN)
set_name(0x80060314, "CheckNewPath__FP12PlayerStruct", SN_NOWARN)
set_name(0x80060754, "PlrDeathModeOK__Fi", SN_NOWARN)
set_name(0x800607BC, "ValidatePlayer__Fv", SN_NOWARN)
set_name(0x80060CA4, "CheckCheatStats__FP12PlayerStruct", SN_NOWARN)
set_name(0x80060D40, "ProcessPlayers__Fv", SN_NOWARN)
set_name(0x80061074, "ClrPlrPath__FP12PlayerStruct", SN_NOWARN)
set_name(0x8006109C, "PosOkPlayer__FP12PlayerStructii", SN_NOWARN)
set_name(0x80061244, "MakePlrPath__FP12PlayerStructiiUc", SN_NOWARN)
set_name(0x8006124C, "CheckPlrSpell__Fv", SN_NOWARN)
set_name(0x8006165C, "SyncInitPlrPos__FP12PlayerStruct", SN_NOWARN)
set_name(0x80061784, "SyncInitPlr__FP12PlayerStruct", SN_NOWARN)
set_name(0x800617B4, "CheckStats__Fi", SN_NOWARN)
set_name(0x80061950, "ModifyPlrStr__Fii", SN_NOWARN)
set_name(0x80061A6C, "ModifyPlrMag__Fii", SN_NOWARN)
set_name(0x80061B58, "ModifyPlrDex__Fii", SN_NOWARN)
set_name(0x80061C3C, "ModifyPlrVit__Fii", SN_NOWARN)
set_name(0x80061D18, "SetPlayerHitPoints__FP12PlayerStructi", SN_NOWARN)
set_name(0x80061D5C, "SetPlrStr__Fii", SN_NOWARN)
set_name(0x80061E38, "SetPlrMag__Fii", SN_NOWARN)
set_name(0x80061EA8, "SetPlrDex__Fii", SN_NOWARN)
set_name(0x80061F84, "SetPlrVit__Fii", SN_NOWARN)
set_name(0x80061FF0, "InitDungMsgs__FP12PlayerStruct", SN_NOWARN)
set_name(0x80061FF8, "PlayDungMsgs__Fv", SN_NOWARN)
set_name(0x80062328, "CreatePlrItems__FP12PlayerStruct", SN_NOWARN)
set_name(0x80062350, "WorldToOffset__FP12PlayerStructii", SN_NOWARN)
set_name(0x80062394, "SetSpdbarGoldCurs__FP12PlayerStructi", SN_NOWARN)
set_name(0x800623C8, "GetSpellLevel__FP12PlayerStructi", SN_NOWARN)
set_name(0x800623FC, "BreakObject__FP12PlayerStructi", SN_NOWARN)
set_name(0x80062430, "CalcPlrInv__FP12PlayerStructUc", SN_NOWARN)
set_name(0x80062464, "RemoveSpdBarItem__FP12PlayerStructi", SN_NOWARN)
set_name(0x80062498, "M_StartKill__FiP12PlayerStruct", SN_NOWARN)
set_name(0x800624D0, "SetGoldCurs__FP12PlayerStructi", SN_NOWARN)
set_name(0x80062504, "HealStart__FP12PlayerStruct", SN_NOWARN)
set_name(0x8006252C, "HealotherStart__FP12PlayerStruct", SN_NOWARN)
set_name(0x80062554, "CalculateGold__FP12PlayerStruct", SN_NOWARN)
set_name(0x8006257C, "M_StartHit__FiP12PlayerStructi", SN_NOWARN)
set_name(0x800625C4, "TeleStart__FP12PlayerStruct", SN_NOWARN)
set_name(0x800625EC, "PhaseStart__FP12PlayerStruct", SN_NOWARN)
set_name(0x80062614, "RemoveInvItem__FP12PlayerStructi", SN_NOWARN)
set_name(0x80062648, "PhaseEnd__FP12PlayerStruct", SN_NOWARN)
set_name(0x80062670, "OperateObject__FP12PlayerStructiUc", SN_NOWARN)
set_name(0x800626B4, "TryDisarm__FP12PlayerStructi", SN_NOWARN)
set_name(0x800626E8, "TalkToTowner__FP12PlayerStructi", SN_NOWARN)
set_name(0x8006271C, "PosOkPlayer__Fiii", SN_NOWARN)
set_name(0x80062768, "CalcStatDiff__Fi", SN_NOWARN)
set_name(0x800627B4, "StartNewLvl__Fiii", SN_NOWARN)
set_name(0x80062800, "CreatePlayer__Fic", SN_NOWARN)
set_name(0x80062854, "StartStand__Fii", SN_NOWARN)
set_name(0x800628A0, "SetPlayerHitPoints__Fii", SN_NOWARN)
set_name(0x800628EC, "MakePlrPath__FiiiUc", SN_NOWARN)
set_name(0x8006293C, "StartWarpLvl__Fii", SN_NOWARN)
set_name(0x80062988, "SyncPlrKill__Fii", SN_NOWARN)
set_name(0x800629D4, "StartPlrKill__Fii", SN_NOWARN)
set_name(0x80062A20, "NewPlrAnim__Fiiii", SN_NOWARN)
set_name(0x80062A6C, "AddPlrExperience__Fiil", SN_NOWARN)
set_name(0x80062AB8, "StartPlrBlock__Fii", SN_NOWARN)
set_name(0x80062B04, "StartPlrHit__FiiUc", SN_NOWARN)
set_name(0x80062B54, "StartSpell__Fiiii", SN_NOWARN)
set_name(0x80062BA0, "InitPlayer__FiUc", SN_NOWARN)
set_name(0x80062BF0, "PM_ChangeLightOff__Fi", SN_NOWARN)
set_name(0x80062C3C, "CheckNewPath__Fi", SN_NOWARN)
set_name(0x80062C88, "FreePlayerGFX__Fi", SN_NOWARN)
set_name(0x80062CD4, "InitDungMsgs__Fi", SN_NOWARN)
set_name(0x80062D20, "InitPlayerGFX__Fi", SN_NOWARN)
set_name(0x80062D6C, "SyncInitPlrPos__Fi", SN_NOWARN)
set_name(0x80062DB8, "SetPlrAnims__Fi", SN_NOWARN)
set_name(0x80062E04, "ClrPlrPath__Fi", SN_NOWARN)
set_name(0x80062E50, "SyncInitPlr__Fi", SN_NOWARN)
set_name(0x80062E9C, "RestartTownLvl__Fi", SN_NOWARN)
set_name(0x80062EE8, "SetPlayerOld__Fi", SN_NOWARN)
set_name(0x80062F34, "GetGoldSeed__FP12PlayerStructP10ItemStruct", SN_NOWARN)
set_name(0x80062F68, "PRIM_GetPrim__FPP8POLY_FT4_addr_80062F68", SN_NOWARN)
set_name(0x80062FE4, "GetPlayer__7CPlayeri_addr_80062FE4", SN_NOWARN)
set_name(0x80063034, "GetLastOtPos__C7CPlayer_addr_80063034", SN_NOWARN)
set_name(0x80063040, "GetLastScrY__C7CPlayer", SN_NOWARN)
set_name(0x8006304C, "GetLastScrX__C7CPlayer", SN_NOWARN)
set_name(0x80063058, "TSK_Lava2Water__FP4TASK", SN_NOWARN)
set_name(0x800632A4, "CheckQuests__Fv", SN_NOWARN)
set_name(0x80063764, "ForceQuests__Fv", SN_NOWARN)
set_name(0x80063908, "QuestStatus__Fi", SN_NOWARN)
set_name(0x8006399C, "CheckQuestKill__FiUc", SN_NOWARN)
set_name(0x80063F7C, "SetReturnLvlPos__Fv", SN_NOWARN)
set_name(0x8006408C, "GetReturnLvlPos__Fv", SN_NOWARN)
set_name(0x800640E0, "ResyncMPQuests__Fv", SN_NOWARN)
set_name(0x8006421C, "ResyncQuests__Fv", SN_NOWARN)
set_name(0x8006477C, "PrintQLString__FiiUcPcc", SN_NOWARN)
set_name(0x800649A8, "DrawQuestLog__Fv", SN_NOWARN)
set_name(0x80064B70, "DrawQuestLogTSK__FP4TASK", SN_NOWARN)
set_name(0x80064C08, "StartQuestlog__Fv", SN_NOWARN)
set_name(0x80064D20, "QuestlogUp__Fv", SN_NOWARN)
set_name(0x80064D74, "QuestlogDown__Fv", SN_NOWARN)
set_name(0x80064DE0, "RemoveQLog__Fv", SN_NOWARN)
set_name(0x80064E58, "QuestlogEnter__Fv", SN_NOWARN)
set_name(0x80064F1C, "QuestlogESC__Fv", SN_NOWARN)
set_name(0x80064F44, "SetMultiQuest__FiiUci", SN_NOWARN)
set_name(0x80064FC4, "_GLOBAL__D_questlog", SN_NOWARN)
set_name(0x80064FEC, "_GLOBAL__I_questlog", SN_NOWARN)
set_name(0x80065014, "GetBlockTexDat__7CBlocks", SN_NOWARN)
set_name(0x80065020, "SetRGB__6DialogUcUcUc_addr_80065020", SN_NOWARN)
set_name(0x80065040, "SetBack__6Dialogi_addr_80065040", SN_NOWARN)
set_name(0x80065048, "SetBorder__6Dialogi_addr_80065048", SN_NOWARN)
set_name(0x80065050, "___6Dialog_addr_80065050", SN_NOWARN)
set_name(0x80065078, "__6Dialog_addr_80065078", SN_NOWARN)
set_name(0x800650D4, "GetPal__7TextDati_addr_800650D4", SN_NOWARN)
set_name(0x800650F0, "GetFr__7TextDati_addr_800650F0", SN_NOWARN)
set_name(0x8006510C, "DrawView__Fii", SN_NOWARN)
set_name(0x800652D4, "DrawAndBlit__Fv", SN_NOWARN)
set_name(0x80065400, "FreeStoreMem__Fv", SN_NOWARN)
set_name(0x80065408, "DrawSTextBack__Fv", SN_NOWARN)
set_name(0x80065478, "PrintSString__FiiUcPcci", SN_NOWARN)
set_name(0x8006586C, "DrawSLine__Fi", SN_NOWARN)
set_name(0x80065900, "ClearSText__Fii", SN_NOWARN)
set_name(0x80065998, "AddSLine__Fi", SN_NOWARN)
set_name(0x800659E8, "AddSTextVal__Fii", SN_NOWARN)
set_name(0x80065A10, "AddSText__FiiUcPccUc", SN_NOWARN)
set_name(0x80065AC4, "PrintStoreItem__FPC10ItemStructic", SN_NOWARN)
set_name(0x80065F4C, "StoreAutoPlace__Fv", SN_NOWARN)
set_name(0x8006656C, "S_StartSmith__Fv", SN_NOWARN)
set_name(0x800666F4, "S_ScrollSBuy__Fi", SN_NOWARN)
set_name(0x800668AC, "S_StartSBuy__Fv", SN_NOWARN)
set_name(0x800669DC, "S_ScrollSPBuy__Fi", SN_NOWARN)
set_name(0x80066BFC, "S_StartSPBuy__Fv", SN_NOWARN)
set_name(0x80066D4C, "SmithSellOk__Fi", SN_NOWARN)
set_name(0x80066E30, "S_ScrollSSell__Fi", SN_NOWARN)
set_name(0x80067058, "S_StartSSell__Fv", SN_NOWARN)
set_name(0x80067488, "SmithRepairOk__Fi", SN_NOWARN)
set_name(0x8006752C, "AddStoreHoldRepair__FP10ItemStructi", SN_NOWARN)
set_name(0x8006770C, "S_StartSRepair__Fv", SN_NOWARN)
set_name(0x80067BDC, "S_StartWitch__Fv", SN_NOWARN)
set_name(0x80067D1C, "S_ScrollWBuy__Fi", SN_NOWARN)
set_name(0x80067EF4, "S_StartWBuy__Fv", SN_NOWARN)
set_name(0x80068020, "WitchSellOk__Fi", SN_NOWARN)
set_name(0x80068144, "S_StartWSell__Fv", SN_NOWARN)
set_name(0x8006879C, "WitchRechargeOk__Fi", SN_NOWARN)
set_name(0x80068824, "AddStoreHoldRecharge__FG10ItemStructi", SN_NOWARN)
set_name(0x800689A4, "S_StartWRecharge__Fv", SN_NOWARN)
set_name(0x80068DC4, "S_StartNoMoney__Fv", SN_NOWARN)
set_name(0x80068E2C, "S_StartNoRoom__Fv", SN_NOWARN)
set_name(0x80068E8C, "S_StartConfirm__Fv", SN_NOWARN)
set_name(0x80069204, "S_StartBoy__Fv", SN_NOWARN)
set_name(0x80069394, "S_StartBBoy__Fv", SN_NOWARN)
set_name(0x8006951C, "S_StartHealer__Fv", SN_NOWARN)
set_name(0x800696F0, "S_ScrollHBuy__Fi", SN_NOWARN)
set_name(0x8006985C, "S_StartHBuy__Fv", SN_NOWARN)
set_name(0x8006997C, "S_StartStory__Fv", SN_NOWARN)
set_name(0x80069A6C, "IdItemOk__FP10ItemStruct", SN_NOWARN)
set_name(0x80069AA0, "AddStoreHoldId__FG10ItemStructi", SN_NOWARN)
set_name(0x80069B74, "S_StartSIdentify__Fv", SN_NOWARN)
set_name(0x8006A5D4, "S_StartIdShow__Fv", SN_NOWARN)
set_name(0x8006A7A8, "S_StartTalk__Fv", SN_NOWARN)
set_name(0x8006A9D8, "S_StartTavern__Fv", SN_NOWARN)
set_name(0x8006AAD0, "S_StartBarMaid__Fv", SN_NOWARN)
set_name(0x8006ABA4, "S_StartDrunk__Fv", SN_NOWARN)
set_name(0x8006AC78, "StartStore__Fc", SN_NOWARN)
set_name(0x8006AF60, "DrawSText__Fv", SN_NOWARN)
set_name(0x8006AFA0, "DrawSTextTSK__FP4TASK", SN_NOWARN)
set_name(0x8006B068, "DoThatDrawSText__Fv", SN_NOWARN)
set_name(0x8006B214, "STextESC__Fv", SN_NOWARN)
set_name(0x8006B388, "STextUp__Fv", SN_NOWARN)
set_name(0x8006B520, "STextDown__Fv", SN_NOWARN)
set_name(0x8006B6D0, "S_SmithEnter__Fv", SN_NOWARN)
set_name(0x8006B7A4, "SetGoldCurs__Fii", SN_NOWARN)
set_name(0x8006B820, "SetSpdbarGoldCurs__Fii", SN_NOWARN)
set_name(0x8006B89C, "TakePlrsMoney__Fl", SN_NOWARN)
set_name(0x8006BCE8, "SmithBuyItem__Fv", SN_NOWARN)
set_name(0x8006BEDC, "S_SBuyEnter__Fv", SN_NOWARN)
set_name(0x8006C100, "SmithBuyPItem__Fv", SN_NOWARN)
set_name(0x8006C288, "S_SPBuyEnter__Fv", SN_NOWARN)
set_name(0x8006C4B8, "StoreGoldFit__Fi", SN_NOWARN)
set_name(0x8006C770, "PlaceStoreGold__Fl", SN_NOWARN)
set_name(0x8006C9D4, "StoreSellItem__Fv", SN_NOWARN)
set_name(0x8006CCC8, "S_SSellEnter__Fv", SN_NOWARN)
set_name(0x8006CDCC, "SmithRepairItem__Fv", SN_NOWARN)
set_name(0x8006D03C, "S_SRepairEnter__Fv", SN_NOWARN)
set_name(0x8006D198, "S_WitchEnter__Fv", SN_NOWARN)
set_name(0x8006D248, "WitchBuyItem__Fv", SN_NOWARN)
set_name(0x8006D448, "S_WBuyEnter__Fv", SN_NOWARN)
set_name(0x8006D634, "S_WSellEnter__Fv", SN_NOWARN)
set_name(0x8006D738, "WitchRechargeItem__Fv", SN_NOWARN)
set_name(0x8006D8B0, "S_WRechargeEnter__Fv", SN_NOWARN)
set_name(0x8006DA0C, "S_BoyEnter__Fv", SN_NOWARN)
set_name(0x8006DB44, "BoyBuyItem__Fv", SN_NOWARN)
set_name(0x8006DBC8, "HealerBuyItem__Fv", SN_NOWARN)
set_name(0x8006DE6C, "S_BBuyEnter__Fv", SN_NOWARN)
set_name(0x8006E054, "StoryIdItem__Fv", SN_NOWARN)
set_name(0x8006E3A0, "S_ConfirmEnter__Fv", SN_NOWARN)
set_name(0x8006E4BC, "S_HealerEnter__Fv", SN_NOWARN)
set_name(0x8006E554, "S_HBuyEnter__Fv", SN_NOWARN)
set_name(0x8006E760, "S_StoryEnter__Fv", SN_NOWARN)
set_name(0x8006E7F8, "S_SIDEnter__Fv", SN_NOWARN)
set_name(0x8006E974, "S_TalkEnter__Fv", SN_NOWARN)
set_name(0x8006EB6C, "S_TavernEnter__Fv", SN_NOWARN)
set_name(0x8006EBDC, "S_BarmaidEnter__Fv", SN_NOWARN)
set_name(0x8006EC4C, "S_DrunkEnter__Fv", SN_NOWARN)
set_name(0x8006ECBC, "STextEnter__Fv", SN_NOWARN)
set_name(0x8006EE80, "CheckStoreBtn__Fv", SN_NOWARN)
set_name(0x8006EF6C, "ReleaseStoreBtn__Fv", SN_NOWARN)
set_name(0x8006EF80, "_GLOBAL__D_pSTextBoxCels", SN_NOWARN)
set_name(0x8006EFA8, "_GLOBAL__I_pSTextBoxCels", SN_NOWARN)
set_name(0x8006EFD0, "GetDown__C4CPad_addr_8006EFD0", SN_NOWARN)
set_name(0x8006EFF8, "SetRGB__6DialogUcUcUc_addr_8006EFF8", SN_NOWARN)
set_name(0x8006F018, "SetBorder__6Dialogi_addr_8006F018", SN_NOWARN)
set_name(0x8006F020, "___6Dialog_addr_8006F020", SN_NOWARN)
set_name(0x8006F048, "__6Dialog_addr_8006F048", SN_NOWARN)
set_name(0x8006F0A4, "T_DrawView__Fii", SN_NOWARN)
set_name(0x8006F254, "T_FillSector__FPUcT0iiiib", SN_NOWARN)
set_name(0x8006F44C, "T_FillTile__FPUciii", SN_NOWARN)
set_name(0x8006F53C, "T_Pass3__Fv", SN_NOWARN)
set_name(0x8006F8F0, "CreateTown__Fi", SN_NOWARN)
set_name(0x8006FA58, "GRL_LoadFileInMemSig__FPCcPUl", SN_NOWARN)
set_name(0x8006FB3C, "GRL_StripDir__FPcPCc", SN_NOWARN)
set_name(0x8006FBD4, "InitVPTriggers__Fv", SN_NOWARN)
set_name(0x8006FC1C, "ForceTownTrig__Fv", SN_NOWARN)
set_name(0x8006FF34, "ForceL1Trig__Fv", SN_NOWARN)
set_name(0x800701E4, "ForceL2Trig__Fv", SN_NOWARN)
set_name(0x80070644, "ForceL3Trig__Fv", SN_NOWARN)
set_name(0x80070AC0, "ForceL4Trig__Fv", SN_NOWARN)
set_name(0x80070FCC, "Freeupstairs__Fv", SN_NOWARN)
set_name(0x8007108C, "ForceSKingTrig__Fv", SN_NOWARN)
set_name(0x80071180, "ForceSChambTrig__Fv", SN_NOWARN)
set_name(0x80071274, "ForcePWaterTrig__Fv", SN_NOWARN)
set_name(0x80071368, "CheckTrigForce__Fv", SN_NOWARN)
set_name(0x80071670, "FadeGameOut__Fv", SN_NOWARN)
set_name(0x8007170C, "IsTrigger__Fii", SN_NOWARN)
set_name(0x80071770, "CheckTriggers__Fi", SN_NOWARN)
set_name(0x80071C8C, "GetManaAmount__Fii", SN_NOWARN)
set_name(0x80071F54, "UseMana__Fii", SN_NOWARN)
set_name(0x80072098, "CheckSpell__FiicUc", SN_NOWARN)
set_name(0x80072138, "CastSpell__Fiiiiiiii", SN_NOWARN)
set_name(0x800723E4, "DoResurrect__Fii", SN_NOWARN)
set_name(0x80072698, "DoHealOther__Fii", SN_NOWARN)
set_name(0x800728FC, "snd_update__FUc", SN_NOWARN)
set_name(0x80072904, "snd_get_volume__FPCcPl", SN_NOWARN)
set_name(0x8007296C, "snd_stop_snd__FP4TSnd", SN_NOWARN)
set_name(0x8007298C, "snd_play_snd__FP4TSFXll", SN_NOWARN)
set_name(0x800729EC, "snd_play_msnd__FUsll", SN_NOWARN)
set_name(0x80072A7C, "snd_init__FUl", SN_NOWARN)
set_name(0x80072ACC, "music_stop__Fv", SN_NOWARN)
set_name(0x80072B10, "music_fade__Fv", SN_NOWARN)
set_name(0x80072B50, "music_start__Fi", SN_NOWARN)
set_name(0x80072BD4, "music_hold__Fv", SN_NOWARN)
set_name(0x80072C34, "music_release__Fv", SN_NOWARN)
set_name(0x80072C84, "snd_playing__Fi", SN_NOWARN)
set_name(0x80072CA4, "ClrCursor__Fi", SN_NOWARN)
set_name(0x80072D04, "flyabout__7GamePad", SN_NOWARN)
set_name(0x800731C0, "CloseInvChr__Fv", SN_NOWARN)
set_name(0x80073210, "LeftOf__Fi", SN_NOWARN)
set_name(0x80073228, "RightOf__Fi", SN_NOWARN)
set_name(0x80073244, "WorldToOffset__Fiii", SN_NOWARN)
set_name(0x800732F0, "pad_UpIsUpRight__Fic", SN_NOWARN)
set_name(0x800733B4, "__7GamePadi", SN_NOWARN)
set_name(0x800734A8, "SetMoveStyle__7GamePadc", SN_NOWARN)
set_name(0x800734B0, "SetDownButton__7GamePadiPFi_v", SN_NOWARN)
set_name(0x800734F4, "SetComboDownButton__7GamePadiPFi_v", SN_NOWARN)
set_name(0x80073538, "SetAllButtons__7GamePadP11KEY_ASSIGNS", SN_NOWARN)
set_name(0x80073798, "GetAllButtons__7GamePadP11KEY_ASSIGNS", SN_NOWARN)
set_name(0x80073948, "GetActionButton__7GamePadPFi_v", SN_NOWARN)
set_name(0x800739A4, "SetUpAction__7GamePadPFi_vT1", SN_NOWARN)
set_name(0x800739E0, "RunFunc__7GamePadi", SN_NOWARN)
set_name(0x80073AA4, "ButtonDown__7GamePadi", SN_NOWARN)
set_name(0x80073EB0, "TestButtons__7GamePad", SN_NOWARN)
set_name(0x80073FF4, "CheckCentre__FP12PlayerStructi", SN_NOWARN)
set_name(0x800740E8, "CheckDirs__7GamePadi", SN_NOWARN)
set_name(0x80074200, "CheckSide__7GamePadi", SN_NOWARN)
set_name(0x80074254, "CheckBodge__7GamePadi", SN_NOWARN)
set_name(0x80074660, "walk__7GamePadc", SN_NOWARN)
set_name(0x80074974, "check_around_player__7GamePad", SN_NOWARN)
set_name(0x80074D54, "show_combos__7GamePad", SN_NOWARN)
set_name(0x80074F20, "Handle__7GamePad", SN_NOWARN)
set_name(0x800755A0, "GamePadTask__FP4TASK", SN_NOWARN)
set_name(0x8007566C, "PostGamePad__Fiiii", SN_NOWARN)
set_name(0x8007577C, "Init_GamePad__Fv", SN_NOWARN)
set_name(0x800757AC, "InitGamePadVars__Fv", SN_NOWARN)
set_name(0x8007583C, "SetWalkStyle__Fii", SN_NOWARN)
set_name(0x800758AC, "GetPadStyle__Fi", SN_NOWARN)
set_name(0x800758D0, "_GLOBAL__I_flyflag", SN_NOWARN)
set_name(0x80075908, "MoveToScrollTarget__7CBlocks_addr_80075908", SN_NOWARN)
set_name(0x8007591C, "GetDown__C4CPad_addr_8007591C", SN_NOWARN)
set_name(0x80075944, "GetUp__C4CPad_addr_80075944", SN_NOWARN)
set_name(0x8007596C, "GetCur__C4CPad_addr_8007596C", SN_NOWARN)
set_name(0x80075994, "DoGameTestStuff__Fv", SN_NOWARN)
set_name(0x800759C0, "DoInitGameStuff__Fv", SN_NOWARN)
set_name(0x800759F4, "SMemAlloc", SN_NOWARN)
set_name(0x80075A14, "SMemFree", SN_NOWARN)
set_name(0x80075A34, "GRL_InitGwin__Fv", SN_NOWARN)
set_name(0x80075A40, "GRL_SetWindowProc__FPFUlUilUl_Ul", SN_NOWARN)
set_name(0x80075A50, "GRL_CallWindowProc__FUlUilUl", SN_NOWARN)
set_name(0x80075A78, "GRL_PostMessage__FUlUilUl", SN_NOWARN)
set_name(0x80075B24, "Msg2Txt__Fi", SN_NOWARN)
set_name(0x80075B6C, "LANG_GetLang__Fv", SN_NOWARN)
set_name(0x80075B78, "LANG_SetDb__F10LANG_DB_NO", SN_NOWARN)
set_name(0x80075CE4, "GetStr__Fi", SN_NOWARN)
set_name(0x80075D4C, "LANG_ReloadMainTXT__Fv", SN_NOWARN)
set_name(0x80075D80, "LANG_SetLang__F9LANG_TYPE", SN_NOWARN)
set_name(0x80075EE4, "DumpCurrentText__Fv", SN_NOWARN)
set_name(0x80075F3C, "CalcNumOfStrings__FPPc", SN_NOWARN)
set_name(0x80075F48, "GetLangFileName__F9LANG_TYPEPc", SN_NOWARN)
set_name(0x80076010, "GetLangFileNameExt__F9LANG_TYPE", SN_NOWARN)
set_name(0x80076090, "TempPrintMissile__FiiiiiiiiccUcUcUcc", SN_NOWARN)
set_name(0x80076468, "FuncTOWN__FP13MissileStructiii", SN_NOWARN)
set_name(0x80076500, "FuncRPORTAL__FP13MissileStructiii", SN_NOWARN)
set_name(0x8007657C, "FuncFIREBOLT__FP13MissileStructiii", SN_NOWARN)
set_name(0x80076614, "FuncHBOLT__FP13MissileStructiii", SN_NOWARN)
set_name(0x800766C4, "FuncLIGHTNING__FP13MissileStructiii", SN_NOWARN)
set_name(0x80076728, "FuncGUARDIAN__FP13MissileStructiii", SN_NOWARN)
set_name(0x80076840, "FuncFIREWALL__FP13MissileStructiii", SN_NOWARN)
set_name(0x800768D8, "FuncFIREMOVE__FP13MissileStructiii", SN_NOWARN)
set_name(0x80076970, "FuncFLAME__FP13MissileStructiii", SN_NOWARN)
set_name(0x800769D8, "FuncARROW__FP13MissileStructiii", SN_NOWARN)
set_name(0x80076A78, "FuncFARROW__FP13MissileStructiii", SN_NOWARN)
set_name(0x80076B58, "FuncLARROW__FP13MissileStructiii", SN_NOWARN)
set_name(0x80076C30, "FuncMAGMABALL__FP13MissileStructiii", SN_NOWARN)
set_name(0x80076CC0, "FuncBONESPIRIT__FP13MissileStructiii", SN_NOWARN)
set_name(0x80076DDC, "FuncACID__FP13MissileStructiii", SN_NOWARN)
set_name(0x80076E78, "FuncACIDSPLAT__FP13MissileStructiii", SN_NOWARN)
set_name(0x80076EE0, "FuncACIDPUD__FP13MissileStructiii", SN_NOWARN)
set_name(0x80076F48, "FuncFLARE__FP13MissileStructiii", SN_NOWARN)
set_name(0x800770AC, "FuncFLAREXP__FP13MissileStructiii", SN_NOWARN)
set_name(0x800771F0, "FuncCBOLT__FP13MissileStructiii", SN_NOWARN)
set_name(0x80077258, "FuncBOOM__FP13MissileStructiii", SN_NOWARN)
set_name(0x800772B0, "FuncELEMENT__FP13MissileStructiii", SN_NOWARN)
set_name(0x8007737C, "FuncMISEXP__FP13MissileStructiii", SN_NOWARN)
set_name(0x800773E0, "FuncRHINO__FP13MissileStructiii", SN_NOWARN)
set_name(0x80077444, "FuncFLASH__FP13MissileStructiii", SN_NOWARN)
set_name(0x8007796C, "FuncMANASHIELD__FP13MissileStructiii", SN_NOWARN)
set_name(0x80077A14, "FuncFLASH2__FP13MissileStructiii", SN_NOWARN)
set_name(0x80077A1C, "FuncRESURRECTBEAM__FP13MissileStructiii", SN_NOWARN)
set_name(0x80077A50, "FuncWEAPEXP__FP13MissileStructiii", SN_NOWARN)
set_name(0x80077A74, "PRIM_GetPrim__FPP8POLY_FT4_addr_80077A74", SN_NOWARN)
set_name(0x80077AF0, "GetPlayer__7CPlayeri_addr_80077AF0", SN_NOWARN)
set_name(0x80077B40, "GetLastOtPos__C7CPlayer_addr_80077B40", SN_NOWARN)
set_name(0x80077B4C, "GetLastScrY__C7CPlayer_addr_80077B4C", SN_NOWARN)
set_name(0x80077B58, "GetLastScrX__C7CPlayer_addr_80077B58", SN_NOWARN)
set_name(0x80077B64, "GetNumOfFrames__7TextDat_addr_80077B64", SN_NOWARN)
set_name(0x80077B78, "GetFr__7TextDati_addr_80077B78", SN_NOWARN)
set_name(0x80077B94, "ML_Init__Fv", SN_NOWARN)
set_name(0x80077BCC, "ML_GetList__Fi", SN_NOWARN)
set_name(0x80077C4C, "ML_SetRandomList__Fi", SN_NOWARN)
set_name(0x80077CE4, "ML_SetList__Fii", SN_NOWARN)
set_name(0x80077D94, "ML_GetPresetMonsters__FiPiUl", SN_NOWARN)
set_name(0x80077F50, "DefaultObjPrint__FP12ObjectStructiiP7TextDatiii", SN_NOWARN)
set_name(0x800780E4, "LightObjPrint__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x8007819C, "DoorObjPrint__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80078428, "DrawLightSpark__Fiii", SN_NOWARN)
set_name(0x80078500, "PrintOBJ_L1LIGHT__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80078588, "PrintOBJ_SKFIRE__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x800785B4, "PrintOBJ_LEVER__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x800785E0, "PrintOBJ_CHEST1__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x8007860C, "PrintOBJ_CHEST2__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80078638, "PrintOBJ_CHEST3__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80078664, "PrintOBJ_CANDLE1__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80078688, "PrintOBJ_CANDLE2__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x800786AC, "PrintOBJ_CANDLEO__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x800786D8, "PrintOBJ_BANNERL__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80078704, "PrintOBJ_BANNERM__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80078730, "PrintOBJ_BANNERR__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x8007875C, "PrintOBJ_SKPILE__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80078788, "PrintOBJ_SKSTICK1__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x800787B4, "PrintOBJ_SKSTICK2__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x800787E0, "PrintOBJ_SKSTICK3__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x8007880C, "PrintOBJ_SKSTICK4__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80078838, "PrintOBJ_SKSTICK5__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80078864, "PrintOBJ_CRUX1__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80078890, "PrintOBJ_CRUX2__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x800788BC, "PrintOBJ_CRUX3__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x800788E8, "PrintOBJ_STAND__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80078914, "PrintOBJ_ANGEL__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80078940, "PrintOBJ_BOOK2L__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x8007896C, "PrintOBJ_BCROSS__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80078998, "PrintOBJ_NUDEW2R__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x800789C4, "PrintOBJ_SWITCHSKL__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x800789F0, "PrintOBJ_TNUDEM1__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80078A1C, "PrintOBJ_TNUDEM2__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80078A48, "PrintOBJ_TNUDEM3__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80078A74, "PrintOBJ_TNUDEM4__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80078AA0, "PrintOBJ_TNUDEW1__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80078ACC, "PrintOBJ_TNUDEW2__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80078AF8, "PrintOBJ_TNUDEW3__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80078B24, "PrintOBJ_TORTURE1__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80078B50, "PrintOBJ_TORTURE2__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80078B7C, "PrintOBJ_TORTURE3__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80078BA8, "PrintOBJ_TORTURE4__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80078BD4, "PrintOBJ_TORTURE5__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80078C00, "PrintOBJ_BOOK2R__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80078C2C, "PrintTorchStick__Fiiii", SN_NOWARN)
set_name(0x80078CC0, "PrintOBJ_TORCHL__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80078D50, "PrintOBJ_TORCHR__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80078DE0, "PrintOBJ_TORCHL2__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80078E70, "PrintOBJ_TORCHR2__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80078F00, "PrintOBJ_SARC__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80078F2C, "PrintOBJ_FLAMEHOLE__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80078F58, "PrintOBJ_FLAMELVR__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80078F84, "PrintOBJ_WATER__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80078FB0, "PrintOBJ_BOOKLVR__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80078FDC, "PrintOBJ_TRAPL__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80079008, "PrintOBJ_TRAPR__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80079034, "PrintOBJ_BOOKSHELF__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80079060, "PrintOBJ_WEAPRACK__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x8007908C, "PrintOBJ_BARREL__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x800790B8, "PrintOBJ_BARRELEX__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80079210, "PrintOBJ_SHRINEL__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x800792DC, "PrintOBJ_SHRINER__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x800793A8, "PrintOBJ_SKELBOOK__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x800793D4, "PrintOBJ_BOOKCASEL__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80079400, "PrintOBJ_BOOKCASER__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x8007942C, "PrintOBJ_BOOKSTAND__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80079458, "PrintOBJ_BOOKCANDLE__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x8007947C, "PrintOBJ_BLOODFTN__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x800794A8, "PrintOBJ_DECAP__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x800794D4, "PrintOBJ_TCHEST1__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80079500, "PrintOBJ_TCHEST2__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x8007952C, "PrintOBJ_TCHEST3__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80079558, "PrintOBJ_BLINDBOOK__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80079584, "PrintOBJ_BLOODBOOK__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x800795B0, "PrintOBJ_PEDISTAL__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x800795DC, "PrintOBJ_PURIFYINGFTN__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80079608, "PrintOBJ_ARMORSTAND__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80079634, "PrintOBJ_ARMORSTANDN__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80079660, "PrintOBJ_GOATSHRINE__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x8007968C, "PrintOBJ_CAULDRON__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x800796B8, "PrintOBJ_MURKYFTN__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x800796E4, "PrintOBJ_TEARFTN__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80079710, "PrintOBJ_ALTBOY__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x8007973C, "PrintOBJ_MCIRCLE1__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x800798D0, "PrintOBJ_STORYBOOK__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80079A58, "PrintOBJ_STORYCANDLE__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80079A7C, "PrintOBJ_STEELTOME__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80079AA8, "PrintOBJ_WARARMOR__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80079AD4, "PrintOBJ_WARWEAP__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80079B00, "PrintOBJ_TBCROSS__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80079B2C, "PrintOBJ_WEAPONRACK__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80079B58, "PrintOBJ_WEAPONRACKN__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80079B84, "PrintOBJ_MUSHPATCH__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80079BB0, "PrintOBJ_LAZSTAND__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80079BDC, "PrintOBJ_SLAINHERO__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80079C08, "PrintOBJ_SIGNCHEST__FP12ObjectStructiiP7TextDati", SN_NOWARN)
set_name(0x80079C34, "PRIM_GetCopy__FP8POLY_FT4_addr_80079C34", SN_NOWARN)
set_name(0x80079C70, "PRIM_CopyPrim__FP8POLY_FT4T0_addr_80079C70", SN_NOWARN)
set_name(0x80079C98, "PRIM_GetPrim__FPP8POLY_FT4_addr_80079C98", SN_NOWARN)
set_name(0x80079D14, "GetBlockTexDat__7CBlocks_addr_80079D14", SN_NOWARN)
set_name(0x80079D20, "GetNumOfFrames__7TextDatii_addr_80079D20", SN_NOWARN)
set_name(0x80079D58, "GetCreature__7TextDati_addr_80079D58", SN_NOWARN)
set_name(0x80079DD0, "GetNumOfCreatures__7TextDat_addr_80079DD0", SN_NOWARN)
set_name(0x80079DE4, "GetFr__7TextDati_addr_80079DE4", SN_NOWARN)
set_name(0x80079E00, "gamemenu_on__Fv", SN_NOWARN)
set_name(0x80079E08, "gamemenu_off__Fv", SN_NOWARN)
set_name(0x80079E10, "LoadPalette__FPCc", SN_NOWARN)
set_name(0x80079E18, "LoadRndLvlPal__Fi", SN_NOWARN)
set_name(0x80079E20, "ResetPal__Fv", SN_NOWARN)
set_name(0x80079E28, "SetFadeLevel__Fi", SN_NOWARN)
set_name(0x80079E58, "GetFadeState__Fv", SN_NOWARN)
set_name(0x80079E64, "SetPolyXY__FP8POLY_GT4PUc", SN_NOWARN)
set_name(0x80079F80, "SmearScreen__Fv", SN_NOWARN)
set_name(0x80079F88, "DrawFadedScreen__Fv", SN_NOWARN)
set_name(0x80079FDC, "BlackPalette__Fv", SN_NOWARN)
set_name(0x8007A098, "PaletteFadeInTask__FP4TASK", SN_NOWARN)
set_name(0x8007A128, "PaletteFadeIn__Fi", SN_NOWARN)
set_name(0x8007A180, "PaletteFadeOutTask__FP4TASK", SN_NOWARN)
set_name(0x8007A230, "PaletteFadeOut__Fi", SN_NOWARN)
set_name(0x8007A284, "M_CheckEFlag__Fi", SN_NOWARN)
set_name(0x8007A2A4, "M_ClearSquares__Fi", SN_NOWARN)
set_name(0x8007A410, "IsSkel__Fi", SN_NOWARN)
set_name(0x8007A44C, "NewMonsterAnim__FiR10AnimStructii", SN_NOWARN)
set_name(0x8007A498, "M_Ranged__Fi", SN_NOWARN)
set_name(0x8007A4E0, "M_Talker__Fi", SN_NOWARN)
set_name(0x8007A540, "M_Enemy__Fi", SN_NOWARN)
set_name(0x8007AAB4, "ClearMVars__Fi", SN_NOWARN)
set_name(0x8007AB28, "InitMonster__Fiiiii", SN_NOWARN)
set_name(0x8007AF74, "AddMonster__FiiiiUc", SN_NOWARN)
set_name(0x8007B024, "M_StartStand__Fii", SN_NOWARN)
set_name(0x8007B168, "M_UpdateLeader__Fi", SN_NOWARN)
set_name(0x8007B260, "ActivateSpawn__Fiiii", SN_NOWARN)
set_name(0x8007B308, "SpawnSkeleton__Fiii", SN_NOWARN)
set_name(0x8007B4F8, "M_StartSpStand__Fii", SN_NOWARN)
set_name(0x8007B5D8, "PosOkMonst__Fiii", SN_NOWARN)
set_name(0x8007B854, "CanPut__Fii", SN_NOWARN)
set_name(0x8007BB5C, "GetAutomapType__FiiUc", SN_NOWARN)
set_name(0x8007BE58, "SetAutomapView__Fii", SN_NOWARN)
set_name(0x8007C2A8, "lAddMissile__Fiiici", SN_NOWARN)
set_name(0x8007C47C, "AddWarpMissile__Fiii", SN_NOWARN)
set_name(0x8007C5D8, "SyncPortals__Fv", SN_NOWARN)
set_name(0x8007C6E0, "AddInTownPortal__Fi", SN_NOWARN)
set_name(0x8007C718, "ActivatePortal__FiiiiiUc", SN_NOWARN)
set_name(0x8007C788, "DeactivatePortal__Fi", SN_NOWARN)
set_name(0x8007C7A8, "PortalOnLevel__Fi", SN_NOWARN)
set_name(0x8007C7E0, "DelMis__Fii", SN_NOWARN)
set_name(0x8007C840, "RemovePortalMissile__Fi", SN_NOWARN)
set_name(0x8007C9BC, "SetCurrentPortal__Fi", SN_NOWARN)
set_name(0x8007C9C8, "GetPortalLevel__Fv", SN_NOWARN)
set_name(0x8007CB6C, "GetPortalLvlPos__Fv", SN_NOWARN)
set_name(0x8007CC1C, "__13CompLevelMapsRC9CompClass", SN_NOWARN)
set_name(0x8007CC88, "___13CompLevelMaps", SN_NOWARN)
set_name(0x8007CD18, "Init__13CompLevelMaps", SN_NOWARN)
set_name(0x8007CD48, "InitAllMaps__13CompLevelMaps", SN_NOWARN)
set_name(0x8007CD9C, "GetMap__13CompLevelMapsi", SN_NOWARN)
set_name(0x8007CE18, "ReleaseMap__13CompLevelMapsP6DLevel", SN_NOWARN)
set_name(0x8007CEB8, "CompressAll__13CompLevelMaps", SN_NOWARN)
set_name(0x8007CF2C, "ImportData__13CompLevelMapsP14CompressedLevs", SN_NOWARN)
set_name(0x8007CFD8, "ExportData__13CompLevelMapsPUc", SN_NOWARN)
set_name(0x8007D084, "MakeSureMapXDecomped__13CompLevelMapsi", SN_NOWARN)
set_name(0x8007D130, "Init__4AMap", SN_NOWARN)
set_name(0x8007D19C, "WriteCompressed__4AMapPUcRC9CompClass", SN_NOWARN)
set_name(0x8007D210, "SetCompData__4AMapPCUci", SN_NOWARN)
set_name(0x8007D300, "GetMap__4AMap", SN_NOWARN)
set_name(0x8007D420, "ReleaseMap__4AMapP6DLevel", SN_NOWARN)
set_name(0x8007D4B0, "CompressMap__4AMapRC9CompClass", SN_NOWARN)
set_name(0x8007D648, "DecompressMap__4AMapRC9CompClass", SN_NOWARN)
set_name(0x8007D77C, "CheckMapNum__13CompLevelMapsi", SN_NOWARN)
set_name(0x8007D7B0, "IsCompressed__4AMap", SN_NOWARN)
set_name(0x8007D7BC, "___4AMap", SN_NOWARN)
set_name(0x8007D804, "__4AMap", SN_NOWARN)
set_name(0x8007D838, "GO_DoGameOver__Fv", SN_NOWARN)
set_name(0x8007D87C, "GameOverTask__FP4TASK", SN_NOWARN)
set_name(0x8007D938, "PrintGameOver__Fv", SN_NOWARN)
set_name(0x8007DA18, "SetRGB__6DialogUcUcUc_addr_8007DA18", SN_NOWARN)
set_name(0x8007DA38, "SetBack__6Dialogi_addr_8007DA38", SN_NOWARN)
set_name(0x8007DA40, "SetBorder__6Dialogi_addr_8007DA40", SN_NOWARN)
set_name(0x8007DA48, "___6Dialog_addr_8007DA48", SN_NOWARN)
set_name(0x8007DA70, "__6Dialog_addr_8007DA70", SN_NOWARN)
set_name(0x8007DACC, "VER_InitVersion__Fv", SN_NOWARN)
set_name(0x8007DB10, "VER_GetVerString__Fv", SN_NOWARN)
set_name(0x8007DB20, "CharPair2Num__FPc", SN_NOWARN)
set_name(0x8007DB48, "FindGetItem__FiUsi", SN_NOWARN)
set_name(0x8001FCA4, "TICK_InitModule", SN_NOWARN)
set_name(0x8001FCC4, "TICK_Set", SN_NOWARN)
set_name(0x8001FCD4, "TICK_Get", SN_NOWARN)
set_name(0x8001FCE4, "TICK_Update", SN_NOWARN)
set_name(0x8001FD04, "TICK_GetAge", SN_NOWARN)
set_name(0x8001FD30, "TICK_GetDateString", SN_NOWARN)
set_name(0x8001FD40, "TICK_GetTimeString", SN_NOWARN)
set_name(0x8001FD50, "GU_InitModule", SN_NOWARN)
set_name(0x8001FD7C, "GU_SetRndSeed", SN_NOWARN)
set_name(0x8001FDAC, "GU_GetRnd", SN_NOWARN)
set_name(0x8001FE3C, "GU_GetSRnd", SN_NOWARN)
set_name(0x8001FE5C, "GU_GetRndRange", SN_NOWARN)
set_name(0x8001FE98, "GU_AlignVal", SN_NOWARN)
set_name(0x8001FEBC, "main", SN_NOWARN)
set_name(0x8001FF0C, "GTIMSYS_GetTimer", SN_NOWARN)
set_name(0x8001FF30, "GTIMSYS_ResetTimer", SN_NOWARN)
set_name(0x8001FF54, "GTIMSYS_InitTimer", SN_NOWARN)
set_name(0x80020188, "DoEpi", SN_NOWARN)
set_name(0x800201D8, "DoPro", SN_NOWARN)
set_name(0x80020228, "TSK_OpenModule", SN_NOWARN)
set_name(0x8002029C, "TSK_AddTask", SN_NOWARN)
set_name(0x80020484, "TSK_DoTasks", SN_NOWARN)
set_name(0x80020644, "TSK_Sleep", SN_NOWARN)
set_name(0x80020720, "ReturnToSchedulerIfCurrentTask", SN_NOWARN)
set_name(0x800207A8, "TSK_Die", SN_NOWARN)
set_name(0x800207D4, "TSK_Kill", SN_NOWARN)
set_name(0x80020824, "TSK_GetFirstActive", SN_NOWARN)
set_name(0x80020834, "TSK_IsStackCorrupted", SN_NOWARN)
set_name(0x800208B0, "TSK_JumpAndResetStack", SN_NOWARN)
set_name(0x800208F8, "TSK_RepointProc", SN_NOWARN)
set_name(0x8002093C, "TSK_GetCurrentTask", SN_NOWARN)
set_name(0x8002094C, "TSK_IsCurrentTask", SN_NOWARN)
set_name(0x80020964, "TSK_Exist", SN_NOWARN)
set_name(0x800209BC, "TSK_SetExecFilter", SN_NOWARN)
set_name(0x800209D4, "TSK_ClearExecFilter", SN_NOWARN)
set_name(0x800209F8, "TSK_KillTasks", SN_NOWARN)
set_name(0x80020AF8, "TSK_IterateTasks", SN_NOWARN)
set_name(0x80020B70, "TSK_MakeTaskInactive", SN_NOWARN)
set_name(0x80020B84, "TSK_MakeTaskActive", SN_NOWARN)
set_name(0x80020B98, "TSK_MakeTaskImmortal", SN_NOWARN)
set_name(0x80020BAC, "TSK_MakeTaskMortal", SN_NOWARN)
set_name(0x80020BC0, "TSK_IsTaskActive", SN_NOWARN)
set_name(0x80020BD4, "TSK_IsTaskMortal", SN_NOWARN)
set_name(0x80020BE8, "DetachFromList", SN_NOWARN)
set_name(0x80020C34, "AddToList", SN_NOWARN)
set_name(0x80020C54, "LoTskKill", SN_NOWARN)
set_name(0x80020CC4, "ExecuteTask", SN_NOWARN)
set_name(0x80020D14, "TSK_SetDoTasksPrologue", SN_NOWARN)
set_name(0x80020D2C, "TSK_SetDoTasksEpilogue", SN_NOWARN)
set_name(0x80020D44, "TSK_SetTaskPrologue", SN_NOWARN)
set_name(0x80020D5C, "TSK_SetTaskEpilogue", SN_NOWARN)
set_name(0x80020D74, "TSK_SetEpiProFilter", SN_NOWARN)
set_name(0x80020D8C, "TSK_ClearEpiProFilter", SN_NOWARN)
set_name(0x80020DC0, "TSK_SetExtraStackProtection", SN_NOWARN)
set_name(0x80020DD0, "TSK_SetStackFloodCallback", SN_NOWARN)
set_name(0x80020DE8, "TSK_SetExtraStackSize", SN_NOWARN)
set_name(0x80020E10, "ExtraMarkStack", SN_NOWARN)
set_name(0x80020E3C, "CheckExtraStack", SN_NOWARN)
set_name(0x80020E78, "GSYS_GetWorkMemInfo", SN_NOWARN)
set_name(0x80020E88, "GSYS_SetStackAndJump", SN_NOWARN)
set_name(0x80020EC4, "GSYS_MarkStack", SN_NOWARN)
set_name(0x80020ED4, "GSYS_IsStackCorrupted", SN_NOWARN)
set_name(0x80020EEC, "GSYS_InitMachine", SN_NOWARN)
set_name(0x80020F40, "GSYS_CheckPtr", SN_NOWARN)
set_name(0x80020F74, "GSYS_IsStackOutOfBounds", SN_NOWARN)
set_name(0x80020FF0, "GAL_SetErrorChecking", SN_NOWARN)
set_name(0x80021000, "GAL_SplitBlock", SN_NOWARN)
set_name(0x8002113C, "GAL_InitModule", SN_NOWARN)
set_name(0x800211F4, "GAL_AddMemType", SN_NOWARN)
set_name(0x80021314, "GAL_Alloc", SN_NOWARN)
set_name(0x800214AC, "GAL_Lock", SN_NOWARN)
set_name(0x80021514, "GAL_Unlock", SN_NOWARN)
set_name(0x80021598, "GAL_Free", SN_NOWARN)
set_name(0x80021640, "GAL_GetFreeMem", SN_NOWARN)
set_name(0x800216B4, "GAL_GetUsedMem", SN_NOWARN)
set_name(0x80021728, "GAL_LargestFreeBlock", SN_NOWARN)
set_name(0x800217A4, "AttachHdrToList", SN_NOWARN)
set_name(0x800217C4, "DetachHdrFromList", SN_NOWARN)
set_name(0x80021810, "IsActiveValidHandle", SN_NOWARN)
set_name(0x80021848, "AlignPtr", SN_NOWARN)
set_name(0x80021878, "AlignSize", SN_NOWARN)
set_name(0x800218A8, "FindClosestSizedBlock", SN_NOWARN)
set_name(0x80021900, "FindHighestMemBlock", SN_NOWARN)
set_name(0x80021968, "FindLowestMemBlock", SN_NOWARN)
set_name(0x800219D0, "GetMemInitInfoBlockFromType", SN_NOWARN)
set_name(0x80021A0C, "MergeToEmptyList", SN_NOWARN)
set_name(0x80021AE0, "GAL_AllocAt", SN_NOWARN)
set_name(0x80021BBC, "LoAlloc", SN_NOWARN)
set_name(0x80021D54, "FindBlockInTheseBounds", SN_NOWARN)
set_name(0x80021DC0, "GetFreeMemHdrBlock", SN_NOWARN)
set_name(0x80021E48, "ReleaseMemHdrBlock", SN_NOWARN)
set_name(0x80021E88, "GAL_IterateEmptyMem", SN_NOWARN)
set_name(0x80021F0C, "GAL_IterateUsedMem", SN_NOWARN)
set_name(0x80021FA8, "GAL_SetMemName", SN_NOWARN)
set_name(0x80022018, "GAL_TotalMem", SN_NOWARN)
set_name(0x8002206C, "GAL_MemBase", SN_NOWARN)
set_name(0x800220C0, "GAL_DefragMem", SN_NOWARN)
set_name(0x80022144, "GSetError", SN_NOWARN)
set_name(0x800221A0, "GAL_CheckMem", SN_NOWARN)
set_name(0x8002229C, "CheckCollisions", SN_NOWARN)
set_name(0x80022348, "AreBlocksColliding", SN_NOWARN)
set_name(0x800223A0, "GAL_GetErrorText", SN_NOWARN)
set_name(0x800223D0, "GAL_GetLastErrorCode", SN_NOWARN)
set_name(0x800223E0, "GAL_GetLastErrorText", SN_NOWARN)
set_name(0x80022408, "GAL_HowManyEmptyRegions", SN_NOWARN)
set_name(0x80022470, "GAL_HowManyUsedRegions", SN_NOWARN)
set_name(0x800224D8, "GAL_SetTimeStamp", SN_NOWARN)
set_name(0x800224E8, "GAL_IncTimeStamp", SN_NOWARN)
set_name(0x80022508, "GAL_GetTimeStamp", SN_NOWARN)
set_name(0x80022518, "GAL_AlignSizeToType", SN_NOWARN)
set_name(0x80022568, "GAL_AllocMultiStruct", SN_NOWARN)
set_name(0x800225B8, "GAL_ProcessMultiStruct", SN_NOWARN)
set_name(0x80022664, "GAL_GetSize", SN_NOWARN)
set_name(0x800226C0, "GazDefragMem", SN_NOWARN)
set_name(0x80022828, "PutBlocksInRegionIntoList", SN_NOWARN)
set_name(0x800228CC, "CollideRegions", SN_NOWARN)
set_name(0x80022900, "DeleteEmptyBlocks", SN_NOWARN)
set_name(0x8002296C, "GetRegion", SN_NOWARN)
set_name(0x80022A64, "FindNextBlock", SN_NOWARN)
set_name(0x80022AA0, "ShuffleBlocks", SN_NOWARN)
set_name(0x80022B30, "PutAllLockedBlocksOntoList", SN_NOWARN)
set_name(0x80022BAC, "SortMemHdrListByAddr", SN_NOWARN)
set_name(0x80022C60, "GraftMemHdrList", SN_NOWARN)
set_name(0x80022CBC, "GAL_MemDump", SN_NOWARN)
set_name(0x80022D30, "GAL_SetVerbosity", SN_NOWARN)
set_name(0x80022D40, "CountFreeBlocks", SN_NOWARN)
set_name(0x80022D6C, "SetBlockName", SN_NOWARN)
set_name(0x80022DB4, "GAL_GetNumFreeHeaders", SN_NOWARN)
set_name(0x80022DC4, "GAL_GetLastTypeAlloced", SN_NOWARN)
set_name(0x80022DD4, "GAL_SetAllocFilter", SN_NOWARN)
set_name(0x80022DEC, "GAL_SortUsedRegionsBySize", SN_NOWARN)
set_name(0x80022E40, "SortSize", SN_NOWARN)
set_name(0x80022E50, "SortMemHdrList", SN_NOWARN)
set_name(0x80022F40, "DBG_OpenModule", SN_NOWARN)
set_name(0x80022F48, "DBG_PollHost", SN_NOWARN)
set_name(0x80022F50, "DBG_Halt", SN_NOWARN)
set_name(0x80022F58, "DBG_SendMessage", SN_NOWARN)
set_name(0x80022F70, "DBG_SetMessageHandler", SN_NOWARN)
set_name(0x80022F80, "DBG_Error", SN_NOWARN)
set_name(0x80022FB4, "DBG_SetErrorFunc", SN_NOWARN)
set_name(0x80022FC4, "SendPsyqString", SN_NOWARN)
set_name(0x80022FCC, "DBG_SetPollRoutine", SN_NOWARN)
set_name(0x800252A8, "vsprintf", SN_NOWARN)
set_name(0x800252F4, "_doprnt", SN_NOWARN)
set_name(0x8011C058, "NumOfMonsterListLevels", SN_NOWARN)
set_name(0x800ABBF4, "AllLevels", SN_NOWARN)
set_name(0x8011BD3C, "NumsLEV1M1A", SN_NOWARN)
set_name(0x8011BD40, "NumsLEV1M1B", SN_NOWARN)
set_name(0x8011BD44, "NumsLEV1M1C", SN_NOWARN)
set_name(0x8011BD4C, "NumsLEV2M2A", SN_NOWARN)
set_name(0x8011BD50, "NumsLEV2M2B", SN_NOWARN)
set_name(0x8011BD54, "NumsLEV2M2C", SN_NOWARN)
set_name(0x8011BD58, "NumsLEV2M2D", SN_NOWARN)
set_name(0x8011BD5C, "NumsLEV2M2QA", SN_NOWARN)
set_name(0x8011BD60, "NumsLEV2M2QB", SN_NOWARN)
set_name(0x8011BD64, "NumsLEV3M3A", SN_NOWARN)
set_name(0x8011BD68, "NumsLEV3M3B", SN_NOWARN)
set_name(0x8011BD6C, "NumsLEV3M3C", SN_NOWARN)
set_name(0x8011BD70, "NumsLEV3M3QA", SN_NOWARN)
set_name(0x8011BD74, "NumsLEV4M4A", SN_NOWARN)
set_name(0x8011BD78, "NumsLEV4M4B", SN_NOWARN)
set_name(0x8011BD7C, "NumsLEV4M4C", SN_NOWARN)
set_name(0x8011BD80, "NumsLEV4M4D", SN_NOWARN)
set_name(0x8011BD84, "NumsLEV4M4QA", SN_NOWARN)
set_name(0x8011BD88, "NumsLEV4M4QB", SN_NOWARN)
set_name(0x8011BD90, "NumsLEV4M4QC", SN_NOWARN)
set_name(0x8011BD98, "NumsLEV5M5A", SN_NOWARN)
set_name(0x8011BD9C, "NumsLEV5M5B", SN_NOWARN)
set_name(0x8011BDA0, "NumsLEV5M5C", SN_NOWARN)
set_name(0x8011BDA4, "NumsLEV5M5D", SN_NOWARN)
set_name(0x8011BDA8, "NumsLEV5M5E", SN_NOWARN)
set_name(0x8011BDAC, "NumsLEV5M5F", SN_NOWARN)
set_name(0x8011BDB0, "NumsLEV5M5QA", SN_NOWARN)
set_name(0x8011BDB4, "NumsLEV6M6A", SN_NOWARN)
set_name(0x8011BDBC, "NumsLEV6M6B", SN_NOWARN)
set_name(0x8011BDC0, "NumsLEV6M6C", SN_NOWARN)
set_name(0x8011BDC4, "NumsLEV6M6D", SN_NOWARN)
set_name(0x8011BDC8, "NumsLEV6M6E", SN_NOWARN)
set_name(0x8011BDCC, "NumsLEV6M6QA", SN_NOWARN)
set_name(0x8011BDD0, "NumsLEV7M7A", SN_NOWARN)
set_name(0x8011BDD4, "NumsLEV7M7B", SN_NOWARN)
set_name(0x8011BDD8, "NumsLEV7M7C", SN_NOWARN)
set_name(0x8011BDDC, "NumsLEV7M7D", SN_NOWARN)
set_name(0x8011BDE0, "NumsLEV7M7E", SN_NOWARN)
set_name(0x8011BDE4, "NumsLEV8M8A", SN_NOWARN)
set_name(0x8011BDE8, "NumsLEV8M8B", SN_NOWARN)
set_name(0x8011BDEC, "NumsLEV8M8C", SN_NOWARN)
set_name(0x8011BDF0, "NumsLEV8M8D", SN_NOWARN)
set_name(0x8011BDF4, "NumsLEV8M8E", SN_NOWARN)
set_name(0x8011BDF8, "NumsLEV8M8QA", SN_NOWARN)
set_name(0x8011BDFC, "NumsLEV9M9A", SN_NOWARN)
set_name(0x8011BE00, "NumsLEV9M9B", SN_NOWARN)
set_name(0x8011BE04, "NumsLEV9M9C", SN_NOWARN)
set_name(0x8011BE08, "NumsLEV9M9D", SN_NOWARN)
set_name(0x8011BE0C, "NumsLEV10M10A", SN_NOWARN)
set_name(0x8011BE10, "NumsLEV10M10B", SN_NOWARN)
set_name(0x8011BE14, "NumsLEV10M10C", SN_NOWARN)
set_name(0x8011BE18, "NumsLEV10M10D", SN_NOWARN)
set_name(0x8011BE1C, "NumsLEV10M10QA", SN_NOWARN)
set_name(0x8011BE20, "NumsLEV11M11A", SN_NOWARN)
set_name(0x8011BE24, "NumsLEV11M11B", SN_NOWARN)
set_name(0x8011BE28, "NumsLEV11M11C", SN_NOWARN)
set_name(0x8011BE2C, "NumsLEV11M11D", SN_NOWARN)
set_name(0x8011BE30, "NumsLEV11M11E", SN_NOWARN)
set_name(0x8011BE34, "NumsLEV12M12A", SN_NOWARN)
set_name(0x8011BE38, "NumsLEV12M12B", SN_NOWARN)
set_name(0x8011BE3C, "NumsLEV12M12C", SN_NOWARN)
set_name(0x8011BE40, "NumsLEV12M12D", SN_NOWARN)
set_name(0x8011BE44, "NumsLEV13M13A", SN_NOWARN)
set_name(0x8011BE48, "NumsLEV13M13B", SN_NOWARN)
set_name(0x8011BE4C, "NumsLEV13M13C", SN_NOWARN)
set_name(0x8011BE50, "NumsLEV13M13D", SN_NOWARN)
set_name(0x8011BE54, "NumsLEV13M13QB", SN_NOWARN)
set_name(0x8011BE58, "NumsLEV14M14A", SN_NOWARN)
set_name(0x8011BE5C, "NumsLEV14M14B", SN_NOWARN)
set_name(0x8011BE60, "NumsLEV14M14C", SN_NOWARN)
set_name(0x8011BE64, "NumsLEV14M14D", SN_NOWARN)
set_name(0x8011BE68, "NumsLEV14M14E", SN_NOWARN)
set_name(0x8011BE6C, "NumsLEV14M14QB", SN_NOWARN)
set_name(0x8011BE70, "NumsLEV15M15A", SN_NOWARN)
set_name(0x8011BE74, "NumsLEV15M15B", SN_NOWARN)
set_name(0x8011BE78, "NumsLEV15M15C", SN_NOWARN)
set_name(0x8011BE7C, "NumsLEV15M15QA", SN_NOWARN)
set_name(0x8011BE80, "NumsLEV16M16D", SN_NOWARN)
set_name(0x800AB714, "ChoiceListLEV1", SN_NOWARN)
set_name(0x800AB744, "ChoiceListLEV2", SN_NOWARN)
set_name(0x800AB7A4, "ChoiceListLEV3", SN_NOWARN)
set_name(0x800AB7E4, "ChoiceListLEV4", SN_NOWARN)
set_name(0x800AB854, "ChoiceListLEV5", SN_NOWARN)
set_name(0x800AB8C4, "ChoiceListLEV6", SN_NOWARN)
set_name(0x800AB924, "ChoiceListLEV7", SN_NOWARN)
set_name(0x800AB974, "ChoiceListLEV8", SN_NOWARN)
set_name(0x800AB9D4, "ChoiceListLEV9", SN_NOWARN)
set_name(0x800ABA14, "ChoiceListLEV10", SN_NOWARN)
set_name(0x800ABA64, "ChoiceListLEV11", SN_NOWARN)
set_name(0x800ABAB4, "ChoiceListLEV12", SN_NOWARN)
set_name(0x800ABAF4, "ChoiceListLEV13", SN_NOWARN)
set_name(0x800ABB44, "ChoiceListLEV14", SN_NOWARN)
set_name(0x800ABBA4, "ChoiceListLEV15", SN_NOWARN)
set_name(0x800ABBE4, "ChoiceListLEV16", SN_NOWARN)
set_name(0x8011D910, "GameTaskPtr", SN_NOWARN)
set_name(0x800ABC74, "AllArgs", SN_NOWARN)
set_name(0x8011C068, "ArgsSoFar", SN_NOWARN)
set_name(0x8011C078, "ThisOt", SN_NOWARN)
set_name(0x8011C07C, "ThisPrimAddr", SN_NOWARN)
set_name(0x8011D914, "hndPrimBuffers", SN_NOWARN)
set_name(0x8011D918, "PrimBuffers", SN_NOWARN)
set_name(0x8011D91C, "BufferDepth", SN_NOWARN)
set_name(0x8011D91D, "WorkRamId", SN_NOWARN)
set_name(0x8011D91E, "ScrNum", SN_NOWARN)
set_name(0x8011D920, "Screens", SN_NOWARN)
set_name(0x8011D924, "PbToClear", SN_NOWARN)
set_name(0x8011D928, "BufferNum", SN_NOWARN)
set_name(0x8011C080, "AddrToAvoid", SN_NOWARN)
set_name(0x8011D929, "LastBuffer", SN_NOWARN)
set_name(0x8011D92C, "DispEnvToPut", SN_NOWARN)
set_name(0x8011D930, "ThisOtSize", SN_NOWARN)
set_name(0x8011C084, "ScrRect", SN_NOWARN)
set_name(0x8011D934, "VidWait", SN_NOWARN)
set_name(0x8011DDB0, "screen", SN_NOWARN)
set_name(0x8011D938, "VbFunc", SN_NOWARN)
set_name(0x8011D93C, "VidTick", SN_NOWARN)
set_name(0x8011D940, "VXOff", SN_NOWARN)
set_name(0x8011D944, "VYOff", SN_NOWARN)
set_name(0x8011C098, "Gaz", SN_NOWARN)
set_name(0x8011C09C, "LastFmem", SN_NOWARN)
set_name(0x8011C08C, "GSYS_MemStart", SN_NOWARN)
set_name(0x8011C090, "GSYS_MemEnd", SN_NOWARN)
set_name(0x800ABFBC, "PsxMem", SN_NOWARN)
set_name(0x800ABFE4, "PsxFastMem", SN_NOWARN)
set_name(0x8011C094, "LowestFmem", SN_NOWARN)
set_name(0x8011C0AC, "FileSYS", SN_NOWARN)
set_name(0x8011D948, "FileSystem", SN_NOWARN)
set_name(0x8011D94C, "OverlayFileSystem", SN_NOWARN)
set_name(0x8011C0C6, "DavesPad", SN_NOWARN)
set_name(0x8011C0C8, "DavesPadDeb", SN_NOWARN)
set_name(0x800AC00C, "_6FileIO_FileToLoad", SN_NOWARN)
set_name(0x8011DE90, "MyFT4", SN_NOWARN)
set_name(0x800AC8B0, "AllDats", SN_NOWARN)
set_name(0x8011C118, "TpW", SN_NOWARN)
set_name(0x8011C11C, "TpH", SN_NOWARN)
set_name(0x8011C120, "TpXDest", SN_NOWARN)
set_name(0x8011C124, "TpYDest", SN_NOWARN)
set_name(0x800ACE6C, "MyGT4", SN_NOWARN)
set_name(0x800ACEA0, "MyGT3", SN_NOWARN)
set_name(0x800AC040, "DatPool", SN_NOWARN)
set_name(0x8011C134, "ChunkGot", SN_NOWARN)
set_name(0x800ACEC8, "STREAM_DIR", SN_NOWARN)
set_name(0x800ACED8, "STREAM_BIN", SN_NOWARN)
set_name(0x800ACEE8, "EAC_DirectoryCache", SN_NOWARN)
set_name(0x8011C148, "BL_NoLumpFiles", SN_NOWARN)
set_name(0x8011C14C, "BL_NoStreamFiles", SN_NOWARN)
set_name(0x8011C150, "LFileTab", SN_NOWARN)
set_name(0x8011C154, "SFileTab", SN_NOWARN)
set_name(0x8011C158, "FileLoaded", SN_NOWARN)
set_name(0x8011C17C, "NoTAllocs", SN_NOWARN)
set_name(0x800AD078, "MemBlock", SN_NOWARN)
set_name(0x8011D958, "CanPause", SN_NOWARN)
set_name(0x8011D95C, "Paused", SN_NOWARN)
set_name(0x8011D960, "InActivePad", SN_NOWARN)
set_name(0x8011DEB8, "PBack", SN_NOWARN)
set_name(0x800AD2E0, "RawPadData0", SN_NOWARN)
set_name(0x800AD304, "RawPadData1", SN_NOWARN)
set_name(0x800AD328, "demo_buffer", SN_NOWARN)
set_name(0x8011C198, "demo_pad_time", SN_NOWARN)
set_name(0x8011C19C, "demo_pad_count", SN_NOWARN)
set_name(0x800AD208, "Pad0", SN_NOWARN)
set_name(0x800AD274, "Pad1", SN_NOWARN)
set_name(0x8011C1A0, "demo_finish", SN_NOWARN)
set_name(0x8011C1A4, "cac_pad", SN_NOWARN)
set_name(0x8011C1C4, "CharFt4", SN_NOWARN)
set_name(0x8011C1C8, "CharFrm", SN_NOWARN)
set_name(0x8011C1B1, "WHITER", SN_NOWARN)
set_name(0x8011C1B2, "WHITEG", SN_NOWARN)
set_name(0x8011C1B3, "WHITEB", SN_NOWARN)
set_name(0x8011C1B4, "BLUER", SN_NOWARN)
set_name(0x8011C1B5, "BLUEG", SN_NOWARN)
set_name(0x8011C1B6, "BLUEB", SN_NOWARN)
set_name(0x8011C1B7, "REDR", SN_NOWARN)
set_name(0x8011C1B8, "REDG", SN_NOWARN)
set_name(0x8011C1B9, "REDB", SN_NOWARN)
set_name(0x8011C1BA, "GOLDR", SN_NOWARN)
set_name(0x8011C1BB, "GOLDG", SN_NOWARN)
set_name(0x8011C1BC, "GOLDB", SN_NOWARN)
set_name(0x800AD6AC, "MediumFont", SN_NOWARN)
set_name(0x800AD8C8, "LargeFont", SN_NOWARN)
set_name(0x8011C1C0, "buttoncol", SN_NOWARN)
set_name(0x800ADAE4, "LFontTab", SN_NOWARN)
set_name(0x800ADC4C, "LFont", SN_NOWARN)
set_name(0x800ADC5C, "MFontTab", SN_NOWARN)
set_name(0x800ADEC8, "MFont", SN_NOWARN)
set_name(0x8011C1DD, "DialogRed", SN_NOWARN)
set_name(0x8011C1DE, "DialogGreen", SN_NOWARN)
set_name(0x8011C1DF, "DialogBlue", SN_NOWARN)
set_name(0x8011C1E0, "DialogTRed", SN_NOWARN)
set_name(0x8011C1E1, "DialogTGreen", SN_NOWARN)
set_name(0x8011C1E2, "DialogTBlue", SN_NOWARN)
set_name(0x8011C1E4, "DialogTData", SN_NOWARN)
set_name(0x8011C1E8, "DialogBackGfx", SN_NOWARN)
set_name(0x8011C1EC, "DialogBackW", SN_NOWARN)
set_name(0x8011C1F0, "DialogBackH", SN_NOWARN)
set_name(0x8011C1F4, "DialogBorderGfx", SN_NOWARN)
set_name(0x8011C1F8, "DialogBorderTLW", SN_NOWARN)
set_name(0x8011C1FC, "DialogBorderTLH", SN_NOWARN)
set_name(0x8011C200, "DialogBorderTRW", SN_NOWARN)
set_name(0x8011C204, "DialogBorderTRH", SN_NOWARN)
set_name(0x8011C208, "DialogBorderBLW", SN_NOWARN)
set_name(0x8011C20C, "DialogBorderBLH", SN_NOWARN)
set_name(0x8011C210, "DialogBorderBRW", SN_NOWARN)
set_name(0x8011C214, "DialogBorderBRH", SN_NOWARN)
set_name(0x8011C218, "DialogBorderTW", SN_NOWARN)
set_name(0x8011C21C, "DialogBorderTH", SN_NOWARN)
set_name(0x8011C220, "DialogBorderBW", SN_NOWARN)
set_name(0x8011C224, "DialogBorderBH", SN_NOWARN)
set_name(0x8011C228, "DialogBorderLW", SN_NOWARN)
set_name(0x8011C22C, "DialogBorderLH", SN_NOWARN)
set_name(0x8011C230, "DialogBorderRW", SN_NOWARN)
set_name(0x8011C234, "DialogBorderRH", SN_NOWARN)
set_name(0x8011C238, "DialogBevelGfx", SN_NOWARN)
set_name(0x8011C23C, "DialogBevelCW", SN_NOWARN)
set_name(0x8011C240, "DialogBevelCH", SN_NOWARN)
set_name(0x8011C244, "DialogBevelLRW", SN_NOWARN)
set_name(0x8011C248, "DialogBevelLRH", SN_NOWARN)
set_name(0x8011C24C, "DialogBevelUDW", SN_NOWARN)
set_name(0x8011C250, "DialogBevelUDH", SN_NOWARN)
set_name(0x8011C254, "MY_DialogOTpos", SN_NOWARN)
set_name(0x8011D964, "DialogGBack", SN_NOWARN)
set_name(0x8011D965, "GShadeX", SN_NOWARN)
set_name(0x8011D966, "GShadeY", SN_NOWARN)
set_name(0x8011D96C, "RandBTab", SN_NOWARN)
set_name(0x800ADF18, "Cxy", SN_NOWARN)
set_name(0x8011C1D7, "BORDERR", SN_NOWARN)
set_name(0x8011C1D8, "BORDERG", SN_NOWARN)
set_name(0x8011C1D9, "BORDERB", SN_NOWARN)
set_name(0x8011C1DA, "BACKR", SN_NOWARN)
set_name(0x8011C1DB, "BACKG", SN_NOWARN)
set_name(0x8011C1DC, "BACKB", SN_NOWARN)
set_name(0x800ADED8, "GShadeTab", SN_NOWARN)
set_name(0x8011C1D5, "GShadePX", SN_NOWARN)
set_name(0x8011C1D6, "GShadePY", SN_NOWARN)
set_name(0x8011C261, "PlayDemoFlag", SN_NOWARN)
set_name(0x8011DEC8, "rgbb", SN_NOWARN)
set_name(0x8011DEF8, "rgbt", SN_NOWARN)
set_name(0x8011D974, "blockr", SN_NOWARN)
set_name(0x8011D978, "blockg", SN_NOWARN)
set_name(0x8011D97C, "blockb", SN_NOWARN)
set_name(0x8011D980, "InfraFlag", SN_NOWARN)
set_name(0x8011D984, "blank_bit", SN_NOWARN)
set_name(0x8011C275, "P1ObjSelCount", SN_NOWARN)
set_name(0x8011C276, "P2ObjSelCount", SN_NOWARN)
set_name(0x8011C277, "P12ObjSelCount", SN_NOWARN)
set_name(0x8011C278, "P1ItemSelCount", SN_NOWARN)
set_name(0x8011C279, "P2ItemSelCount", SN_NOWARN)
set_name(0x8011C27A, "P12ItemSelCount", SN_NOWARN)
set_name(0x8011C27B, "P1MonstSelCount", SN_NOWARN)
set_name(0x8011C27C, "P2MonstSelCount", SN_NOWARN)
set_name(0x8011C27D, "P12MonstSelCount", SN_NOWARN)
set_name(0x8011C27E, "P1ObjSelCol", SN_NOWARN)
set_name(0x8011C280, "P2ObjSelCol", SN_NOWARN)
set_name(0x8011C282, "P12ObjSelCol", SN_NOWARN)
set_name(0x8011C284, "P1ItemSelCol", SN_NOWARN)
set_name(0x8011C286, "P2ItemSelCol", SN_NOWARN)
set_name(0x8011C288, "P12ItemSelCol", SN_NOWARN)
set_name(0x8011C28A, "P1MonstSelCol", SN_NOWARN)
set_name(0x8011C28C, "P2MonstSelCol", SN_NOWARN)
set_name(0x8011C28E, "P12MonstSelCol", SN_NOWARN)
set_name(0x8011C290, "CurrentBlocks", SN_NOWARN)
set_name(0x800ADF88, "TownConv", SN_NOWARN)
set_name(0x8011C2AC, "CurrentOverlay", SN_NOWARN)
set_name(0x8011197C, "HaltTab", SN_NOWARN)
set_name(0x8011DF28, "FrontEndOver", SN_NOWARN)
set_name(0x8011DF38, "PregameOver", SN_NOWARN)
set_name(0x8011DF48, "GameOver", SN_NOWARN)
set_name(0x8011DF58, "FmvOver", SN_NOWARN)
set_name(0x8011D988, "OWorldX", SN_NOWARN)
set_name(0x8011D98C, "OWorldY", SN_NOWARN)
set_name(0x8011D990, "WWorldX", SN_NOWARN)
set_name(0x8011D994, "WWorldY", SN_NOWARN)
set_name(0x801119F8, "TxyAdd", SN_NOWARN)
set_name(0x8011C2D0, "GXAdj2", SN_NOWARN)
set_name(0x8011D998, "TimePerFrame", SN_NOWARN)
set_name(0x8011D99C, "CpuStart", SN_NOWARN)
set_name(0x8011D9A0, "CpuTime", SN_NOWARN)
set_name(0x8011D9A4, "DrawTime", SN_NOWARN)
set_name(0x8011D9A8, "DrawStart", SN_NOWARN)
set_name(0x8011D9AC, "LastCpuTime", SN_NOWARN)
set_name(0x8011D9B0, "LastDrawTime", SN_NOWARN)
set_name(0x8011D9B4, "DrawArea", SN_NOWARN)
set_name(0x8011C2D8, "ProfOn", SN_NOWARN)
set_name(0x800ADF9C, "LevPals", SN_NOWARN)
set_name(0x80111B54, "Level2Bgdata", SN_NOWARN)
set_name(0x8011C2F4, "CutScreen", SN_NOWARN)
set_name(0x800ADFB0, "DefP1PanelXY", SN_NOWARN)
set_name(0x800AE004, "DefP1PanelXY2", SN_NOWARN)
set_name(0x800AE058, "DefP2PanelXY", SN_NOWARN)
set_name(0x800AE0AC, "DefP2PanelXY2", SN_NOWARN)
set_name(0x800AE100, "SpeedBarGfxTable", SN_NOWARN)
set_name(0x8011C304, "hof", SN_NOWARN)
set_name(0x8011C308, "mof", SN_NOWARN)
set_name(0x800AE1C8, "SFXTab", SN_NOWARN)
set_name(0x800AE2C8, "STR_Buffer", SN_NOWARN)
set_name(0x8011C33C, "Time", SN_NOWARN)
set_name(0x8011C340, "CDWAIT", SN_NOWARN)
set_name(0x800C02C8, "voice_attr", SN_NOWARN)
set_name(0x800C0308, "STRSave", SN_NOWARN)
set_name(0x8011D9B8, "SavePause", SN_NOWARN)
set_name(0x8011C315, "NoActiveStreams", SN_NOWARN)
set_name(0x8011C318, "STRInit", SN_NOWARN)
set_name(0x8011C31C, "frame_rate", SN_NOWARN)
set_name(0x8011C320, "CDAngle", SN_NOWARN)
set_name(0x8011C360, "SFXNotPlayed", SN_NOWARN)
set_name(0x8011C361, "SFXNotInBank", SN_NOWARN)
set_name(0x8011DF68, "spu_management", SN_NOWARN)
set_name(0x8011E078, "rev_attr", SN_NOWARN)
set_name(0x8011D9C0, "NoSfx", SN_NOWARN)
set_name(0x8011E098, "CHStatus", SN_NOWARN)
set_name(0x8011C350, "BankOffsets", SN_NOWARN)
set_name(0x8011C354, "OffsetHandle", SN_NOWARN)
set_name(0x8011C358, "BankBase", SN_NOWARN)
set_name(0x80111F54, "SFXRemapTab", SN_NOWARN)
set_name(0x8011C35C, "NoSNDRemaps", SN_NOWARN)
set_name(0x800C0388, "ThePals", SN_NOWARN)
set_name(0x80112000, "InitialPositions", SN_NOWARN)
set_name(0x8011C3A4, "demo_level", SN_NOWARN)
set_name(0x8011E0C8, "buff", SN_NOWARN)
set_name(0x8011C3A8, "old_val", SN_NOWARN)
set_name(0x8011C3AC, "DemoTask", SN_NOWARN)
set_name(0x8011C3B0, "DemoGameTask", SN_NOWARN)
set_name(0x8011C3B4, "tonys", SN_NOWARN)
set_name(0x8011C38C, "demo_load", SN_NOWARN)
set_name(0x8011C390, "demo_record_load", SN_NOWARN)
set_name(0x8011C394, "level_record", SN_NOWARN)
set_name(0x8011C398, "demo_fade_finished", SN_NOWARN)
set_name(0x8011C39B, "demo_which", SN_NOWARN)
set_name(0x800C05B4, "demolevel", SN_NOWARN)
set_name(0x8011C399, "quest_cheat_num", SN_NOWARN)
set_name(0x8011C39A, "cheat_quest_flag", SN_NOWARN)
set_name(0x8011C388, "moo_moo", SN_NOWARN)
set_name(0x800C0574, "quest_seed", SN_NOWARN)
set_name(0x8011C39C, "demo_flash", SN_NOWARN)
set_name(0x8011C3A0, "tonys_Task", SN_NOWARN)
set_name(0x8011C510, "DoShowPanel", SN_NOWARN)
set_name(0x8011C514, "DoDrawBg", SN_NOWARN)
set_name(0x8011D9C4, "GlueFinished", SN_NOWARN)
set_name(0x8011D9C8, "DoHomingScroll", SN_NOWARN)
set_name(0x8011D9CC, "TownerGfx", SN_NOWARN)
set_name(0x8011D9D0, "CurrentMonsterList", SN_NOWARN)
set_name(0x800C05C8, "PlayerInfo", SN_NOWARN)
set_name(0x8011C518, "ArmourChar", SN_NOWARN)
set_name(0x801120F4, "WepChar", SN_NOWARN)
set_name(0x8011C51C, "CharChar", SN_NOWARN)
set_name(0x8011D9D4, "ctrl_select_line", SN_NOWARN)
set_name(0x8011D9D5, "ctrl_select_side", SN_NOWARN)
set_name(0x8011D9D6, "ckeyheld", SN_NOWARN)
set_name(0x8011D9DC, "CtrlRect", SN_NOWARN)
set_name(0x8011C530, "ctrlflag", SN_NOWARN)
set_name(0x800C0A3C, "txt_actions", SN_NOWARN)
set_name(0x800C0994, "pad_txt", SN_NOWARN)
set_name(0x8011C52C, "toppos", SN_NOWARN)
set_name(0x8011E0E8, "CtrlBack", SN_NOWARN)
set_name(0x800C0B6C, "controller_defaults", SN_NOWARN)
set_name(0x8011C59C, "gr_scrxoff", SN_NOWARN)
set_name(0x8011C5A0, "gr_scryoff", SN_NOWARN)
set_name(0x8011C5A8, "water_clut", SN_NOWARN)
set_name(0x8011C5AB, "visible_level", SN_NOWARN)
set_name(0x8011C599, "last_type", SN_NOWARN)
set_name(0x8011C5AD, "daylight", SN_NOWARN)
set_name(0x8011C5AA, "cow_in_sight", SN_NOWARN)
set_name(0x8011C5A4, "water_count", SN_NOWARN)
set_name(0x8011C5AC, "lastrnd", SN_NOWARN)
set_name(0x8011C5B0, "call_clock", SN_NOWARN)
set_name(0x8011C5C0, "TitleAnimCount", SN_NOWARN)
set_name(0x8011C5C4, "flametick", SN_NOWARN)
set_name(0x800C0C04, "ypos", SN_NOWARN)
set_name(0x800C0C1C, "frmlist", SN_NOWARN)
set_name(0x800C0C34, "xoff", SN_NOWARN)
set_name(0x8011C5C8, "startx", SN_NOWARN)
set_name(0x8011C5CC, "jaffflag", SN_NOWARN)
set_name(0x800C0C6C, "SpellFXDat", SN_NOWARN)
set_name(0x8011E0F8, "PartArray", SN_NOWARN)
set_name(0x8011D9E4, "partOtPos", SN_NOWARN)
set_name(0x8011C5EC, "SetParticle", SN_NOWARN)
set_name(0x8011C5F0, "p1partexecnum", SN_NOWARN)
set_name(0x8011C5F4, "p2partexecnum", SN_NOWARN)
set_name(0x800C0C4C, "JumpArray", SN_NOWARN)
set_name(0x8011C5F8, "partjumpflag", SN_NOWARN)
set_name(0x8011C5FC, "partglowflag", SN_NOWARN)
set_name(0x8011C600, "partcolour", SN_NOWARN)
set_name(0x8011C604, "anyfuckingmenus", SN_NOWARN)
set_name(0x800C0CFC, "SplTarget", SN_NOWARN)
set_name(0x8011C625, "select_flag", SN_NOWARN)
set_name(0x8011D9E8, "SelectRect", SN_NOWARN)
set_name(0x8011D9F0, "item_select", SN_NOWARN)
set_name(0x8011C628, "QSpell", SN_NOWARN)
set_name(0x8011C62C, "_spltotype", SN_NOWARN)
set_name(0x8011C630, "force_attack", SN_NOWARN)
set_name(0x8011C618, "gplayer", SN_NOWARN)
set_name(0x8011E338, "SelectBack", SN_NOWARN)
set_name(0x8011C61C, "mana_order", SN_NOWARN)
set_name(0x8011C620, "health_order", SN_NOWARN)
set_name(0x8011C624, "birdcheck", SN_NOWARN)
set_name(0x8011E348, "DecRequestors", SN_NOWARN)
set_name(0x8011D9F4, "progress", SN_NOWARN)
set_name(0x80112284, "Level2CutScreen", SN_NOWARN)
set_name(0x8011E370, "Scr", SN_NOWARN)
set_name(0x8011C650, "CutScreenTSK", SN_NOWARN)
set_name(0x8011C654, "GameLoading", SN_NOWARN)
set_name(0x8011E3F0, "LBack", SN_NOWARN)
set_name(0x800C0D2C, "block_buf", SN_NOWARN)
set_name(0x8011C670, "card_ev0", SN_NOWARN)
set_name(0x8011C674, "card_ev1", SN_NOWARN)
set_name(0x8011C678, "card_ev2", SN_NOWARN)
set_name(0x8011C67C, "card_ev3", SN_NOWARN)
set_name(0x8011C680, "card_ev10", SN_NOWARN)
set_name(0x8011C684, "card_ev11", SN_NOWARN)
set_name(0x8011C688, "card_ev12", SN_NOWARN)
set_name(0x8011C68C, "card_ev13", SN_NOWARN)
set_name(0x8011C690, "card_dirty", SN_NOWARN)
set_name(0x8011C698, "MemcardTask", SN_NOWARN)
set_name(0x8011D9F8, "card_event", SN_NOWARN)
set_name(0x8011C66C, "mem_card_event_handler", SN_NOWARN)
set_name(0x8011C664, "MemCardActive", SN_NOWARN)
set_name(0x8011C668, "never_hooked_events", SN_NOWARN)
set_name(0x8011C6F4, "MasterVol", SN_NOWARN)
set_name(0x8011C6F8, "MusicVol", SN_NOWARN)
set_name(0x8011C6FC, "SoundVol", SN_NOWARN)
set_name(0x8011C700, "VideoVol", SN_NOWARN)
set_name(0x8011C704, "SpeechVol", SN_NOWARN)
set_name(0x8011D9FC, "Slider", SN_NOWARN)
set_name(0x8011DA00, "sw", SN_NOWARN)
set_name(0x8011DA04, "sx", SN_NOWARN)
set_name(0x8011DA08, "sy", SN_NOWARN)
set_name(0x8011DA0C, "Adjust", SN_NOWARN)
set_name(0x8011DA0D, "qspin", SN_NOWARN)
set_name(0x8011DA0E, "lqspin", SN_NOWARN)
set_name(0x8011DA10, "OrigLang", SN_NOWARN)
set_name(0x8011DA14, "OldLang", SN_NOWARN)
set_name(0x8011DA18, "NewLang", SN_NOWARN)
set_name(0x8011C708, "save_blocks", SN_NOWARN)
set_name(0x8011C70C, "Savefilename", SN_NOWARN)
set_name(0x8011C710, "ReturnMenu", SN_NOWARN)
set_name(0x8011DA1C, "ORect", SN_NOWARN)
set_name(0x8011DA24, "McState", SN_NOWARN)
set_name(0x8011C714, "they_pressed", SN_NOWARN)
set_name(0x8011DA2C, "Seed", SN_NOWARN)
set_name(0x8011C6C8, "optionsflag", SN_NOWARN)
set_name(0x8011C6BC, "cmenu", SN_NOWARN)
set_name(0x8011C6D4, "options_pad", SN_NOWARN)
set_name(0x8011C6C4, "allspellsflag", SN_NOWARN)
set_name(0x800C189C, "Circle", SN_NOWARN)
set_name(0x8011C6A8, "goldcheat", SN_NOWARN)
set_name(0x8011C6D8, "OptionsSeed", SN_NOWARN)
set_name(0x8011C6DC, "OptionsSetSeed", SN_NOWARN)
set_name(0x8011C6AC, "Qfromoptions", SN_NOWARN)
set_name(0x8011C6B0, "Spacing", SN_NOWARN)
set_name(0x8011C6B4, "cs", SN_NOWARN)
set_name(0x8011C6B8, "lastcs", SN_NOWARN)
set_name(0x8011C6C0, "MemcardOverlay", SN_NOWARN)
set_name(0x8011C6CC, "saveflag", SN_NOWARN)
set_name(0x8011C6D0, "loadflag", SN_NOWARN)
set_name(0x8011C6E0, "PadFrig", SN_NOWARN)
set_name(0x800C0DAC, "MainMenu", SN_NOWARN)
set_name(0x800C0E84, "GameMenu", SN_NOWARN)
set_name(0x800C0F8C, "SoundMenu", SN_NOWARN)
set_name(0x800C101C, "CentreMenu", SN_NOWARN)
set_name(0x800C10C4, "LangMenu", SN_NOWARN)
set_name(0x800C1154, "QuitMenu", SN_NOWARN)
set_name(0x800C11B4, "AreYouSureMenu", SN_NOWARN)
set_name(0x800C1214, "MemcardMenu", SN_NOWARN)
set_name(0x800C12BC, "MemcardLoadGameMenu", SN_NOWARN)
set_name(0x800C131C, "MemcardSaveGameMenu", SN_NOWARN)
set_name(0x800C137C, "MemcardSaveOptionsMenu", SN_NOWARN)
set_name(0x800C13DC, "MemcardLoadOptionsMenu", SN_NOWARN)
set_name(0x800C143C, "MemcardCharacterMenu", SN_NOWARN)
set_name(0x800C149C, "MemcardSelectCard1", SN_NOWARN)
set_name(0x800C1544, "MemcardSelectCard2", SN_NOWARN)
set_name(0x800C15EC, "MemcardFormatMenu", SN_NOWARN)
set_name(0x800C164C, "CheatMenu", SN_NOWARN)
set_name(0x800C173C, "InfoMenu", SN_NOWARN)
set_name(0x800C176C, "MonstViewMenu", SN_NOWARN)
set_name(0x800C17B4, "SeedMenu", SN_NOWARN)
set_name(0x800C17FC, "MenuList", SN_NOWARN)
set_name(0x8011C6E4, "debounce", SN_NOWARN)
set_name(0x8011C6E8, "KeyPos", SN_NOWARN)
set_name(0x800C191C, "KeyTab", SN_NOWARN)
set_name(0x8011C6EC, "SeedPos", SN_NOWARN)
set_name(0x800C1930, "BirdList", SN_NOWARN)
set_name(0x8011DA34, "last_seenx", SN_NOWARN)
set_name(0x8011DA3C, "last_seeny", SN_NOWARN)
set_name(0x8011C721, "hop_height", SN_NOWARN)
set_name(0x8011C724, "perches", SN_NOWARN)
set_name(0x800C1AB0, "FmvTab", SN_NOWARN)
set_name(0x8011C764, "indsize", SN_NOWARN)
set_name(0x8011C744, "kanjbuff", SN_NOWARN)
set_name(0x8011C748, "kindex", SN_NOWARN)
set_name(0x8011C74C, "hndKanjBuff", SN_NOWARN)
set_name(0x8011C750, "hndKanjIndex", SN_NOWARN)
set_name(0x8011DA44, "HelpRect", SN_NOWARN)
set_name(0x8011DA4C, "HelpTop", SN_NOWARN)
set_name(0x8011E400, "HelpBack", SN_NOWARN)
set_name(0x8011C774, "helpflag", SN_NOWARN)
set_name(0x800C1AF0, "HelpList", SN_NOWARN)
set_name(0x8011C7C8, "FeBackX", SN_NOWARN)
set_name(0x8011C7CC, "FeBackY", SN_NOWARN)
set_name(0x8011C7D0, "FeBackW", SN_NOWARN)
set_name(0x8011C7D4, "FeBackH", SN_NOWARN)
set_name(0x8011C7D8, "FeFlag", SN_NOWARN)
set_name(0x800C20F8, "FeBuffer", SN_NOWARN)
set_name(0x8011C7DC, "FePlayerNo", SN_NOWARN)
set_name(0x8011DA50, "CStruct", SN_NOWARN)
set_name(0x8011C7E0, "FeBufferCount", SN_NOWARN)
set_name(0x8011C7E4, "FeNoOfPlayers", SN_NOWARN)
set_name(0x8011C7E8, "FeNoOfPads", SN_NOWARN)
set_name(0x8011C7EC, "FeChrClass", SN_NOWARN)
set_name(0x800C2878, "FePlayerName", SN_NOWARN)
set_name(0x8011C7F4, "FeCurMenu", SN_NOWARN)
set_name(0x8011C7F8, "FePlayerNameFlag", SN_NOWARN)
set_name(0x8011C7FC, "FeCount", SN_NOWARN)
set_name(0x8011C800, "fileselect", SN_NOWARN)
set_name(0x8011C804, "BookMenu", SN_NOWARN)
set_name(0x8011C808, "FeAttractMode", SN_NOWARN)
set_name(0x8011C80C, "FMVPress", SN_NOWARN)
set_name(0x8011C790, "FeTData", SN_NOWARN)
set_name(0x8011C7A8, "JustQuitQText", SN_NOWARN)
set_name(0x8011C798, "LoadedChar", SN_NOWARN)
set_name(0x8011C794, "FlameTData", SN_NOWARN)
set_name(0x8011C7A0, "FeIsAVirgin", SN_NOWARN)
set_name(0x8011C7A4, "FeMenuDelay", SN_NOWARN)
set_name(0x800C1BF8, "DummyMenu", SN_NOWARN)
set_name(0x800C1C14, "FeMainMenu", SN_NOWARN)
set_name(0x800C1C30, "FeNewGameMenu", SN_NOWARN)
set_name(0x800C1C4C, "FeNewP1ClassMenu", SN_NOWARN)
set_name(0x800C1C68, "FeNewP1NameMenu", SN_NOWARN)
set_name(0x800C1C84, "FeNewP2ClassMenu", SN_NOWARN)
set_name(0x800C1CA0, "FeNewP2NameMenu", SN_NOWARN)
set_name(0x800C1CBC, "FeDifficultyMenu", SN_NOWARN)
set_name(0x800C1CD8, "FeBackgroundMenu", SN_NOWARN)
set_name(0x800C1CF4, "FeBook1Menu", SN_NOWARN)
set_name(0x800C1D10, "FeBook2Menu", SN_NOWARN)
set_name(0x800C1D2C, "FeLoadCharMenu", SN_NOWARN)
set_name(0x800C1D48, "FeLoadChar1Menu", SN_NOWARN)
set_name(0x800C1D64, "FeLoadChar2Menu", SN_NOWARN)
set_name(0x8011C7AC, "fadeval", SN_NOWARN)
set_name(0x800C1D80, "FeMainMenuTable", SN_NOWARN)
set_name(0x800C1DF8, "FeNewGameMenuTable", SN_NOWARN)
set_name(0x800C1E40, "FePlayerClassMenuTable", SN_NOWARN)
set_name(0x800C1EB8, "FeNameEngMenuTable", SN_NOWARN)
set_name(0x800C1F00, "FeMemcardMenuTable", SN_NOWARN)
set_name(0x800C1F48, "FeDifficultyMenuTable", SN_NOWARN)
set_name(0x800C1FA8, "FeBackgroundMenuTable", SN_NOWARN)
set_name(0x800C2008, "FeBook1MenuTable", SN_NOWARN)
set_name(0x800C2080, "FeBook2MenuTable", SN_NOWARN)
set_name(0x8011C7B8, "DrawBackOn", SN_NOWARN)
set_name(0x8011C7BC, "AttractTitleDelay", SN_NOWARN)
set_name(0x8011C7C0, "AttractMainDelay", SN_NOWARN)
set_name(0x8011C7C4, "FMVEndPad", SN_NOWARN)
set_name(0x8011C840, "InCredits", SN_NOWARN)
set_name(0x8011C844, "CreditTitleNo", SN_NOWARN)
set_name(0x8011C848, "CreditSubTitleNo", SN_NOWARN)
set_name(0x8011C85C, "card_status", SN_NOWARN)
set_name(0x8011C864, "card_usable", SN_NOWARN)
set_name(0x8011C86C, "card_files", SN_NOWARN)
set_name(0x8011C874, "card_changed", SN_NOWARN)
set_name(0x8011C8B8, "AlertTxt", SN_NOWARN)
set_name(0x8011C8BC, "current_card", SN_NOWARN)
set_name(0x8011C8C0, "DoLoadedGame", SN_NOWARN)
set_name(0x8011C8C4, "LoadType", SN_NOWARN)
set_name(0x8011C8C8, "McMenuPos", SN_NOWARN)
set_name(0x8011C8CC, "McCurMenu", SN_NOWARN)
set_name(0x8011C8B4, "fileinfoflag", SN_NOWARN)
set_name(0x8011C888, "DiabloGameFile", SN_NOWARN)
set_name(0x8011C88C, "DiabloOptionFile", SN_NOWARN)
set_name(0x8011C8AC, "McState_addr_8011C8AC", SN_NOWARN)
set_name(0x8011C9A4, "mdec_audio_buffer", SN_NOWARN)
set_name(0x8011C9AC, "mdec_audio_sec", SN_NOWARN)
set_name(0x8011C9B0, "mdec_audio_offs", SN_NOWARN)
set_name(0x8011C9B4, "mdec_audio_playing", SN_NOWARN)
set_name(0x8011C9B8, "mdec_audio_rate_shift", SN_NOWARN)
set_name(0x8011C9BC, "vlcbuf", SN_NOWARN)
set_name(0x8011C9C4, "slice_size", SN_NOWARN)
set_name(0x8011C9C8, "slice", SN_NOWARN)
set_name(0x8011C9D0, "slice_inc", SN_NOWARN)
set_name(0x8011C9D4, "area_pw", SN_NOWARN)
set_name(0x8011C9D8, "area_ph", SN_NOWARN)
set_name(0x8011C9DC, "tmdc_pol_dirty", SN_NOWARN)
set_name(0x8011C9E0, "num_pol", SN_NOWARN)
set_name(0x8011C9E8, "mdec_cx", SN_NOWARN)
set_name(0x8011C9EC, "mdec_cy", SN_NOWARN)
set_name(0x8011C9F0, "mdec_w", SN_NOWARN)
set_name(0x8011C9F4, "mdec_h", SN_NOWARN)
set_name(0x8011C9F8, "mdec_pw", SN_NOWARN)
set_name(0x8011CA00, "mdec_ph", SN_NOWARN)
set_name(0x8011CA08, "move_x", SN_NOWARN)
set_name(0x8011CA0C, "move_y", SN_NOWARN)
set_name(0x8011CA10, "move_scale", SN_NOWARN)
set_name(0x8011CA14, "stream_frames", SN_NOWARN)
set_name(0x8011CA18, "last_stream_frame", SN_NOWARN)
set_name(0x8011CA1C, "mdec_framecount", SN_NOWARN)
set_name(0x8011CA20, "mdec_speed", SN_NOWARN)
set_name(0x8011CA24, "mdec_stream_starting", SN_NOWARN)
set_name(0x8011CA28, "mdec_last_frame", SN_NOWARN)
set_name(0x8011CA2C, "mdec_sectors_per_frame", SN_NOWARN)
set_name(0x8011CA30, "vlctab", SN_NOWARN)
set_name(0x8011CA34, "mdc_buftop", SN_NOWARN)
set_name(0x8011CA38, "mdc_bufstart", SN_NOWARN)
set_name(0x8011CA3C, "mdc_bufleft", SN_NOWARN)
set_name(0x8011CA40, "mdc_buftotal", SN_NOWARN)
set_name(0x8011CA44, "ordertab_length", SN_NOWARN)
set_name(0x8011CA48, "time_in_frames", SN_NOWARN)
set_name(0x8011CA4C, "stream_chunksize", SN_NOWARN)
set_name(0x8011CA50, "stream_bufsize", SN_NOWARN)
set_name(0x8011CA54, "stream_subsec", SN_NOWARN)
set_name(0x8011CA58, "stream_secnum", SN_NOWARN)
set_name(0x8011CA5C, "stream_last_sector", SN_NOWARN)
set_name(0x8011CA60, "stream_startsec", SN_NOWARN)
set_name(0x8011CA64, "stream_opened", SN_NOWARN)
set_name(0x8011CA68, "stream_last_chunk", SN_NOWARN)
set_name(0x8011CA6C, "stream_got_chunks", SN_NOWARN)
set_name(0x8011CA70, "last_sector", SN_NOWARN)
set_name(0x8011CA74, "cdstream_resetsec", SN_NOWARN)
set_name(0x8011CA78, "last_handler_event", SN_NOWARN)
set_name(0x8011C944, "user_start", SN_NOWARN)
set_name(0x8011C8DC, "vlc_tab", SN_NOWARN)
set_name(0x8011C8E0, "vlc_buf", SN_NOWARN)
set_name(0x8011C8E4, "img_buf", SN_NOWARN)
set_name(0x8011C8E8, "vbuf", SN_NOWARN)
set_name(0x8011C8EC, "last_fn", SN_NOWARN)
set_name(0x8011C8F0, "last_mdc", SN_NOWARN)
set_name(0x8011C8F4, "slnum", SN_NOWARN)
set_name(0x8011C8F8, "slices_to_do", SN_NOWARN)
set_name(0x8011C8FC, "mbuf", SN_NOWARN)
set_name(0x8011C900, "mfn", SN_NOWARN)
set_name(0x8011C904, "last_move_mbuf", SN_NOWARN)
set_name(0x8011C908, "move_request", SN_NOWARN)
set_name(0x8011C90C, "mdec_scale", SN_NOWARN)
set_name(0x8011C910, "do_brightness", SN_NOWARN)
set_name(0x8011C914, "frame_decoded", SN_NOWARN)
set_name(0x8011C918, "mdec_streaming", SN_NOWARN)
set_name(0x8011C91C, "mdec_stream_size", SN_NOWARN)
set_name(0x8011C920, "first_stream_frame", SN_NOWARN)
set_name(0x8011C924, "stream_frames_played", SN_NOWARN)
set_name(0x8011C928, "num_mdcs", SN_NOWARN)
set_name(0x8011C92C, "mdec_head", SN_NOWARN)
set_name(0x8011C930, "mdec_tail", SN_NOWARN)
set_name(0x8011C934, "mdec_waiting_tail", SN_NOWARN)
set_name(0x8011C938, "mdecs_queued", SN_NOWARN)
set_name(0x8011C93C, "mdecs_waiting", SN_NOWARN)
set_name(0x8011C940, "sfx_volume", SN_NOWARN)
set_name(0x8011C948, "DiabEnd", SN_NOWARN)
set_name(0x8011C94C, "FMVName", SN_NOWARN)
set_name(0x8011C950, "stream_buf", SN_NOWARN)
set_name(0x8011C954, "stream_bufh", SN_NOWARN)
set_name(0x8011C958, "stream_chunks_in", SN_NOWARN)
set_name(0x8011C95C, "stream_chunks_total", SN_NOWARN)
set_name(0x8011C960, "stream_in", SN_NOWARN)
set_name(0x8011C964, "stream_out", SN_NOWARN)
set_name(0x8011C968, "stream_stalled", SN_NOWARN)
set_name(0x8011C96C, "stream_ending", SN_NOWARN)
set_name(0x8011C970, "stream_open", SN_NOWARN)
set_name(0x8011C974, "stream_handler_installed", SN_NOWARN)
set_name(0x8011C978, "stream_chunks_borrowed", SN_NOWARN)
set_name(0x8011C97C, "_get_count", SN_NOWARN)
set_name(0x8011C980, "_discard_count", SN_NOWARN)
set_name(0x8011C984, "old_cdready_handler", SN_NOWARN)
set_name(0x8011C988, "cdready_calls", SN_NOWARN)
set_name(0x8011C98C, "cdready_errors", SN_NOWARN)
set_name(0x8011C990, "cdready_out_of_sync", SN_NOWARN)
set_name(0x8011C994, "cdstream_resetting", SN_NOWARN)
set_name(0x8011C998, "sector_dma", SN_NOWARN)
set_name(0x8011C99C, "sector_dma_in", SN_NOWARN)
set_name(0x8011C9A0, "first_handler_event", SN_NOWARN)
set_name(0x8011CAF8, "pStatusPanel", SN_NOWARN)
set_name(0x8011CAFC, "pGBoxBuff", SN_NOWARN)
set_name(0x8011CB00, "dropGoldFlag", SN_NOWARN)
set_name(0x8011CB04, "_pinfoflag", SN_NOWARN)
set_name(0x800C2D90, "_infostr", SN_NOWARN)
set_name(0x8011CB08, "_infoclr", SN_NOWARN)
set_name(0x800C2F90, "tempstr", SN_NOWARN)
set_name(0x8011CB0A, "drawhpflag", SN_NOWARN)
set_name(0x8011CB0B, "drawmanaflag", SN_NOWARN)
set_name(0x8011CB0C, "chrflag", SN_NOWARN)
set_name(0x8011CB0D, "drawbtnflag", SN_NOWARN)
set_name(0x8011CB0E, "panbtndown", SN_NOWARN)
set_name(0x8011CB0F, "panelflag", SN_NOWARN)
set_name(0x8011CB10, "chrbtndown", SN_NOWARN)
set_name(0x8011CB11, "lvlbtndown", SN_NOWARN)
set_name(0x8011CB12, "sbookflag", SN_NOWARN)
set_name(0x8011CB13, "talkflag", SN_NOWARN)
set_name(0x8011CB14, "dropGoldValue", SN_NOWARN)
set_name(0x8011CB18, "initialDropGoldValue", SN_NOWARN)
set_name(0x8011CB1C, "initialDropGoldIndex", SN_NOWARN)
set_name(0x8011CB20, "pPanelButtons", SN_NOWARN)
set_name(0x8011CB24, "pPanelText", SN_NOWARN)
set_name(0x8011CB28, "pManaBuff", SN_NOWARN)
set_name(0x8011CB2C, "pLifeBuff", SN_NOWARN)
set_name(0x8011CB30, "pChrPanel", SN_NOWARN)
set_name(0x8011CB34, "pChrButtons", SN_NOWARN)
set_name(0x8011CB38, "pSpellCels", SN_NOWARN)
set_name(0x8011E460, "_panelstr", SN_NOWARN)
set_name(0x8011E860, "_pstrjust", SN_NOWARN)
set_name(0x8011DA60, "_pnumlines", SN_NOWARN)
set_name(0x8011CB3C, "InfoBoxRect", SN_NOWARN)
set_name(0x8011CB40, "CSRect", SN_NOWARN)
set_name(0x8011DA70, "_pSpell", SN_NOWARN)
set_name(0x8011DA78, "_pSplType", SN_NOWARN)
set_name(0x8011CB48, "numpanbtns", SN_NOWARN)
set_name(0x8011CB4C, "pDurIcons", SN_NOWARN)
set_name(0x8011CB50, "drawdurflag", SN_NOWARN)
set_name(0x8011DA80, "chrbtn", SN_NOWARN)
set_name(0x8011CB51, "chrbtnactive", SN_NOWARN)
set_name(0x8011CB54, "pSpellBkCel", SN_NOWARN)
set_name(0x8011CB58, "pSBkBtnCel", SN_NOWARN)
set_name(0x8011CB5C, "pSBkIconCels", SN_NOWARN)
set_name(0x8011CB60, "sbooktab", SN_NOWARN)
set_name(0x8011CB64, "cur_spel", SN_NOWARN)
set_name(0x8011DA88, "talkofs", SN_NOWARN)
set_name(0x8011E8B0, "sgszTalkMsg", SN_NOWARN)
set_name(0x8011DA8C, "sgbTalkSavePos", SN_NOWARN)
set_name(0x8011DA8D, "sgbNextTalkSave", SN_NOWARN)
set_name(0x8011DA8E, "sgbPlrTalkTbl", SN_NOWARN)
set_name(0x8011DA90, "pTalkPanel", SN_NOWARN)
set_name(0x8011DA94, "pMultiBtns", SN_NOWARN)
set_name(0x8011DA98, "pTalkBtns", SN_NOWARN)
set_name(0x8011DA9C, "talkbtndown", SN_NOWARN)
set_name(0x800C28A4, "SpellITbl", SN_NOWARN)
set_name(0x8011CA85, "DrawLevelUpFlag", SN_NOWARN)
set_name(0x8011CAAC, "_spselflag", SN_NOWARN)
set_name(0x8011CAA8, "spspelstate", SN_NOWARN)
set_name(0x8011CAC8, "initchr", SN_NOWARN)
set_name(0x8011CA88, "SPLICONNO", SN_NOWARN)
set_name(0x8011CA8C, "SPLICONY", SN_NOWARN)
set_name(0x8011DA68, "SPLICONRIGHT", SN_NOWARN)
set_name(0x8011CA90, "scx", SN_NOWARN)
set_name(0x8011CA94, "scy", SN_NOWARN)
set_name(0x8011CA98, "scx1", SN_NOWARN)
set_name(0x8011CA9C, "scy1", SN_NOWARN)
set_name(0x8011CAA0, "scx2", SN_NOWARN)
set_name(0x8011CAA4, "scy2", SN_NOWARN)
set_name(0x8011CAB4, "SpellCol", SN_NOWARN)
set_name(0x800C2890, "SpellColors", SN_NOWARN)
set_name(0x800C28CC, "SpellPages", SN_NOWARN)
set_name(0x8011CAB8, "lus", SN_NOWARN)
set_name(0x8011CABC, "CsNo", SN_NOWARN)
set_name(0x8011CAC0, "plusanim", SN_NOWARN)
set_name(0x8011E8A0, "CSBack", SN_NOWARN)
set_name(0x8011CAC4, "CS_XOFF", SN_NOWARN)
set_name(0x800C2930, "CS_Tab", SN_NOWARN)
set_name(0x8011CACC, "NoCSEntries", SN_NOWARN)
set_name(0x8011CAD0, "SPALOFF", SN_NOWARN)
set_name(0x8011CAD4, "paloffset1", SN_NOWARN)
set_name(0x8011CAD8, "paloffset2", SN_NOWARN)
set_name(0x8011CADC, "paloffset3", SN_NOWARN)
set_name(0x8011CAE0, "paloffset4", SN_NOWARN)
set_name(0x8011CAE4, "pinc1", SN_NOWARN)
set_name(0x8011CAE8, "pinc2", SN_NOWARN)
set_name(0x8011CAEC, "pinc3", SN_NOWARN)
set_name(0x8011CAF0, "pinc4", SN_NOWARN)
set_name(0x8011CB78, "_pcurs", SN_NOWARN)
set_name(0x8011CB80, "cursW", SN_NOWARN)
set_name(0x8011CB84, "cursH", SN_NOWARN)
set_name(0x8011CB88, "icursW", SN_NOWARN)
set_name(0x8011CB8C, "icursH", SN_NOWARN)
set_name(0x8011CB90, "icursW28", SN_NOWARN)
set_name(0x8011CB94, "icursH28", SN_NOWARN)
set_name(0x8011CB98, "cursmx", SN_NOWARN)
set_name(0x8011CB9C, "cursmy", SN_NOWARN)
set_name(0x8011CBA0, "_pcursmonst", SN_NOWARN)
set_name(0x8011CBA8, "_pcursobj", SN_NOWARN)
set_name(0x8011CBAC, "_pcursitem", SN_NOWARN)
set_name(0x8011CBB0, "_pcursinvitem", SN_NOWARN)
set_name(0x8011CBB4, "_pcursplr", SN_NOWARN)
set_name(0x8011CB74, "sel_data", SN_NOWARN)
set_name(0x800C3090, "dead", SN_NOWARN)
set_name(0x8011CBB8, "spurtndx", SN_NOWARN)
set_name(0x8011CBBC, "stonendx", SN_NOWARN)
set_name(0x8011CBC0, "pSquareCel", SN_NOWARN)
set_name(0x8011CC20, "ghInst", SN_NOWARN)
set_name(0x8011CC24, "svgamode", SN_NOWARN)
set_name(0x8011CC28, "MouseX", SN_NOWARN)
set_name(0x8011CC2C, "MouseY", SN_NOWARN)
set_name(0x8011CC30, "gv1", SN_NOWARN)
set_name(0x8011CC34, "gv2", SN_NOWARN)
set_name(0x8011CC38, "gv3", SN_NOWARN)
set_name(0x8011CC3C, "gv4", SN_NOWARN)
set_name(0x8011CC40, "gv5", SN_NOWARN)
set_name(0x8011CC44, "gbProcessPlayers", SN_NOWARN)
set_name(0x800C3204, "DebugMonsters", SN_NOWARN)
set_name(0x800C322C, "pMegaTiles", SN_NOWARN)
set_name(0x800C3CDC, "glSeedTbl", SN_NOWARN)
set_name(0x800C3D20, "gnLevelTypeTbl", SN_NOWARN)
set_name(0x8011CC45, "gbDoEnding", SN_NOWARN)
set_name(0x8011CC46, "gbRunGame", SN_NOWARN)
set_name(0x8011CC47, "gbRunGameResult", SN_NOWARN)
set_name(0x8011CC48, "gbGameLoopStartup", SN_NOWARN)
set_name(0x8011E900, "glEndSeed", SN_NOWARN)
set_name(0x8011E950, "glMid1Seed", SN_NOWARN)
set_name(0x8011E9A0, "glMid2Seed", SN_NOWARN)
set_name(0x8011E9F0, "glMid3Seed", SN_NOWARN)
set_name(0x8011DAA0, "sg_previousFilter", SN_NOWARN)
set_name(0x800C3D64, "CreateEnv", SN_NOWARN)
set_name(0x8011CC4C, "Passedlvldir", SN_NOWARN)
set_name(0x8011CC50, "TempStack", SN_NOWARN)
set_name(0x8011CBD0, "ghMainWnd", SN_NOWARN)
set_name(0x8011CBD4, "fullscreen", SN_NOWARN)
set_name(0x8011CBD8, "force_redraw", SN_NOWARN)
set_name(0x8011CBEC, "PauseMode", SN_NOWARN)
set_name(0x8011CBED, "FriendlyMode", SN_NOWARN)
set_name(0x8011CBDD, "visiondebug", SN_NOWARN)
set_name(0x8011CBDF, "light4flag", SN_NOWARN)
set_name(0x8011CBE0, "leveldebug", SN_NOWARN)
set_name(0x8011CBE1, "monstdebug", SN_NOWARN)
set_name(0x8011CBE8, "debugmonsttypes", SN_NOWARN)
set_name(0x8011CBDC, "cineflag", SN_NOWARN)
set_name(0x8011CBDE, "scrollflag", SN_NOWARN)
set_name(0x8011CBE2, "trigdebug", SN_NOWARN)
set_name(0x8011CBE4, "setseed", SN_NOWARN)
set_name(0x8011CBF0, "sgnTimeoutCurs", SN_NOWARN)
set_name(0x8011CBF4, "sgbMouseDown", SN_NOWARN)
set_name(0x800C4430, "towner", SN_NOWARN)
set_name(0x8011CC68, "numtowners", SN_NOWARN)
set_name(0x8011CC6C, "storeflag", SN_NOWARN)
set_name(0x8011CC6D, "boyloadflag", SN_NOWARN)
set_name(0x8011CC6E, "bannerflag", SN_NOWARN)
set_name(0x8011CC70, "pCowCels", SN_NOWARN)
set_name(0x8011DAA4, "sgdwCowClicks", SN_NOWARN)
set_name(0x8011DAA8, "sgnCowMsg", SN_NOWARN)
set_name(0x800C4170, "Qtalklist", SN_NOWARN)
set_name(0x8011CC60, "CowPlaying", SN_NOWARN)
set_name(0x800C3D94, "AnimOrder", SN_NOWARN)
set_name(0x800C410C, "TownCowX", SN_NOWARN)
set_name(0x800C4118, "TownCowY", SN_NOWARN)
set_name(0x800C4124, "TownCowDir", SN_NOWARN)
set_name(0x800C4130, "cowoffx", SN_NOWARN)
set_name(0x800C4150, "cowoffy", SN_NOWARN)
set_name(0x8011CC88, "sfxdelay", SN_NOWARN)
set_name(0x8011CC8C, "sfxdnum", SN_NOWARN)
set_name(0x8011CC80, "sghStream", SN_NOWARN)
set_name(0x800C5230, "sgSFX", SN_NOWARN)
set_name(0x8011CC84, "sgpStreamSFX", SN_NOWARN)
set_name(0x8011CC90, "orgseed", SN_NOWARN)
set_name(0x8011DAAC, "sglGameSeed", SN_NOWARN)
set_name(0x8011CC94, "SeedCount", SN_NOWARN)
set_name(0x8011DAB0, "sgMemCrit", SN_NOWARN)
set_name(0x8011DAB4, "sgnWidth", SN_NOWARN)
set_name(0x8011CCA2, "msgflag", SN_NOWARN)
set_name(0x8011CCA3, "msgdelay", SN_NOWARN)
set_name(0x800C6258, "msgtable", SN_NOWARN)
set_name(0x800C61A8, "MsgStrings", SN_NOWARN)
set_name(0x8011CCA1, "msgcnt", SN_NOWARN)
set_name(0x8011DAB8, "sgdwProgress", SN_NOWARN)
set_name(0x8011DABC, "sgdwXY", SN_NOWARN)
set_name(0x800C62A8, "AllItemsUseable", SN_NOWARN)
set_name(0x80112964, "AllItemsList", SN_NOWARN)
set_name(0x80113D04, "PL_Prefix", SN_NOWARN)
set_name(0x80114A24, "PL_Suffix", SN_NOWARN)
set_name(0x80115924, "UniqueItemList", SN_NOWARN)
set_name(0x800C64BC, "item", SN_NOWARN)
set_name(0x800CB0BC, "itemactive", SN_NOWARN)
set_name(0x800CB13C, "itemavail", SN_NOWARN)
set_name(0x800CB1BC, "UniqueItemFlag", SN_NOWARN)
set_name(0x8011CCDC, "uitemflag", SN_NOWARN)
set_name(0x8011DAC0, "tem", SN_NOWARN)
set_name(0x8011EA38, "curruitem", SN_NOWARN)
set_name(0x8011EAD8, "itemhold", SN_NOWARN)
set_name(0x8011CCE0, "ScrollType", SN_NOWARN)
set_name(0x800CB23C, "ItemStr", SN_NOWARN)
set_name(0x800CB27C, "SufStr", SN_NOWARN)
set_name(0x8011CCBC, "numitems", SN_NOWARN)
set_name(0x8011CCC0, "gnNumGetRecords", SN_NOWARN)
set_name(0x800C6418, "ItemInvSnds", SN_NOWARN)
set_name(0x800C6348, "ItemCAnimTbl", SN_NOWARN)
set_name(0x8011774C, "SinTab", SN_NOWARN)
set_name(0x8011778C, "Item2Frm", SN_NOWARN)
set_name(0x800C63F4, "ItemAnimLs", SN_NOWARN)
set_name(0x8011CCC4, "ItemAnimSnds", SN_NOWARN)
set_name(0x8011CCC8, "idoppely", SN_NOWARN)
set_name(0x8011CCCC, "ScrollFlag", SN_NOWARN)
set_name(0x800C64A4, "premiumlvladd", SN_NOWARN)
set_name(0x800CC068, "LightList", SN_NOWARN)
set_name(0x800CC1A8, "lightactive", SN_NOWARN)
set_name(0x8011CD00, "numlights", SN_NOWARN)
set_name(0x8011CD04, "lightmax", SN_NOWARN)
set_name(0x800CC1D0, "VisionList", SN_NOWARN)
set_name(0x8011CD08, "numvision", SN_NOWARN)
set_name(0x8011CD0C, "dovision", SN_NOWARN)
set_name(0x8011CD10, "visionid", SN_NOWARN)
set_name(0x8011DAC4, "disp_mask", SN_NOWARN)
set_name(0x8011DAC8, "weird", SN_NOWARN)
set_name(0x8011DACC, "disp_tab_r", SN_NOWARN)
set_name(0x8011DAD0, "dispy_r", SN_NOWARN)
set_name(0x8011DAD4, "disp_tab_g", SN_NOWARN)
set_name(0x8011DAD8, "dispy_g", SN_NOWARN)
set_name(0x8011DADC, "disp_tab_b", SN_NOWARN)
set_name(0x8011DAE0, "dispy_b", SN_NOWARN)
set_name(0x8011DAE4, "radius", SN_NOWARN)
set_name(0x8011DAE8, "bright", SN_NOWARN)
set_name(0x8011EAE8, "mult_tab", SN_NOWARN)
set_name(0x8011CCF0, "lightflag", SN_NOWARN)
set_name(0x800CBD7C, "vCrawlTable", SN_NOWARN)
set_name(0x800CC030, "RadiusAdj", SN_NOWARN)
set_name(0x800CB2BC, "CrawlTable", SN_NOWARN)
set_name(0x8011CCF4, "restore_r", SN_NOWARN)
set_name(0x8011CCF8, "restore_g", SN_NOWARN)
set_name(0x8011CCFC, "restore_b", SN_NOWARN)
set_name(0x800CC048, "radius_tab", SN_NOWARN)
set_name(0x800CC058, "bright_tab", SN_NOWARN)
set_name(0x8011CD31, "qtextflag", SN_NOWARN)
set_name(0x8011CD34, "qtextSpd", SN_NOWARN)
set_name(0x8011DAEC, "pMedTextCels", SN_NOWARN)
set_name(0x8011DAF0, "pTextBoxCels", SN_NOWARN)
set_name(0x8011DAF4, "qtextptr", SN_NOWARN)
set_name(0x8011DAF8, "qtexty", SN_NOWARN)
set_name(0x8011DAFC, "qtextDelay", SN_NOWARN)
set_name(0x8011DB00, "sgLastScroll", SN_NOWARN)
set_name(0x8011DB04, "scrolltexty", SN_NOWARN)
set_name(0x8011DB08, "sglMusicVolumeSave", SN_NOWARN)
set_name(0x800CC3A0, "BookName", SN_NOWARN)
set_name(0x800CC3F0, "MtPrevText", SN_NOWARN)
set_name(0x8011CD20, "qtbodge", SN_NOWARN)
set_name(0x800CC390, "QBack", SN_NOWARN)
set_name(0x800CC440, "missiledata", SN_NOWARN)
set_name(0x800CCBB0, "misfiledata", SN_NOWARN)
set_name(0x800CCAA0, "MissPrintRoutines", SN_NOWARN)
set_name(0x800CCDFC, "sgLocals", SN_NOWARN)
set_name(0x8011EB68, "sgJunk", SN_NOWARN)
set_name(0x8011DB0D, "sgbRecvCmd", SN_NOWARN)
set_name(0x8011DB10, "sgdwRecvOffset", SN_NOWARN)
set_name(0x8011DB14, "sgbDeltaChunks", SN_NOWARN)
set_name(0x8011DB15, "sgbDeltaChanged", SN_NOWARN)
set_name(0x8011DB18, "sgdwOwnerWait", SN_NOWARN)
set_name(0x8011DB1C, "sgpMegaPkt", SN_NOWARN)
set_name(0x8011DB20, "sgpCurrPkt", SN_NOWARN)
set_name(0x8011DB24, "sgnCurrMegaPlayer", SN_NOWARN)
set_name(0x8011CD4D, "deltaload", SN_NOWARN)
set_name(0x8011CD4E, "gbBufferMsgs", SN_NOWARN)
set_name(0x8011CD50, "CompNoComp", SN_NOWARN)
set_name(0x8011CD54, "CompPakComp", SN_NOWARN)
set_name(0x8011CD58, "CompCrunchComp", SN_NOWARN)
set_name(0x800CCC9C, "GameMaps", SN_NOWARN)
set_name(0x8011CD5C, "dwRecCount", SN_NOWARN)
set_name(0x8011CD72, "gbMaxPlayers", SN_NOWARN)
set_name(0x8011CD73, "gbActivePlayers", SN_NOWARN)
set_name(0x8011CD74, "gbGameDestroyed", SN_NOWARN)
set_name(0x8011CD75, "gbDeltaSender", SN_NOWARN)
set_name(0x8011CD76, "gbSelectProvider", SN_NOWARN)
set_name(0x8011CD77, "gbSomebodyWonGameKludge", SN_NOWARN)
set_name(0x8011DB28, "sgbSentThisCycle", SN_NOWARN)
set_name(0x8011DB2C, "sgdwGameLoops", SN_NOWARN)
set_name(0x8011DB30, "sgwPackPlrOffsetTbl", SN_NOWARN)
set_name(0x8011DB34, "sgbPlayerLeftGameTbl", SN_NOWARN)
set_name(0x8011DB38, "sgdwPlayerLeftReasonTbl", SN_NOWARN)
set_name(0x8011DB40, "sgbSendDeltaTbl", SN_NOWARN)
set_name(0x8011DB48, "sgGameInitInfo", SN_NOWARN)
set_name(0x8011DB50, "sgbTimeout", SN_NOWARN)
set_name(0x8011DB54, "sglTimeoutStart", SN_NOWARN)
set_name(0x8011CD6C, "gszVersionNumber", SN_NOWARN)
set_name(0x8011CD71, "sgbNetInited", SN_NOWARN)
set_name(0x800CDE64, "ObjTypeConv", SN_NOWARN)
set_name(0x800CE028, "AllObjects", SN_NOWARN)
set_name(0x80117F4C, "ObjMasterLoadList", SN_NOWARN)
set_name(0x800CE808, "object", SN_NOWARN)
set_name(0x8011CD98, "numobjects", SN_NOWARN)
set_name(0x800CFDDC, "objectactive", SN_NOWARN)
set_name(0x800CFE5C, "objectavail", SN_NOWARN)
set_name(0x8011CD9C, "InitObjFlag", SN_NOWARN)
set_name(0x8011CDA0, "trapid", SN_NOWARN)
set_name(0x800CFEDC, "ObjFileList", SN_NOWARN)
set_name(0x8011CDA4, "trapdir", SN_NOWARN)
set_name(0x8011CDA8, "leverid", SN_NOWARN)
set_name(0x8011CD90, "numobjfiles", SN_NOWARN)
set_name(0x800CE720, "bxadd", SN_NOWARN)
set_name(0x800CE740, "byadd", SN_NOWARN)
set_name(0x800CE7C8, "shrineavail", SN_NOWARN)
set_name(0x800CE760, "shrinestrs", SN_NOWARN)
set_name(0x800CE7E4, "StoryBookName", SN_NOWARN)
set_name(0x8011CD94, "myscale", SN_NOWARN)
set_name(0x8011CDBC, "gbValidSaveFile", SN_NOWARN)
set_name(0x8011CDB8, "DoLoadedChar", SN_NOWARN)
set_name(0x800D00FC, "plr", SN_NOWARN)
set_name(0x8011CDDC, "myplr", SN_NOWARN)
set_name(0x8011CDE0, "deathdelay", SN_NOWARN)
set_name(0x8011CDE4, "deathflag", SN_NOWARN)
set_name(0x8011CDE5, "light_rad", SN_NOWARN)
set_name(0x8011CDD4, "light_level", SN_NOWARN)
set_name(0x800CFFF4, "MaxStats", SN_NOWARN)
set_name(0x8011CDCC, "PlrStructSize", SN_NOWARN)
set_name(0x8011CDD0, "ItemStructSize", SN_NOWARN)
set_name(0x800CFF04, "plrxoff", SN_NOWARN)
set_name(0x800CFF28, "plryoff", SN_NOWARN)
set_name(0x800CFF4C, "plrxoff2", SN_NOWARN)
set_name(0x800CFF70, "plryoff2", SN_NOWARN)
set_name(0x800CFF94, "PlrGFXAnimLens", SN_NOWARN)
set_name(0x800CFFB8, "StrengthTbl", SN_NOWARN)
set_name(0x800CFFC4, "MagicTbl", SN_NOWARN)
set_name(0x800CFFD0, "DexterityTbl", SN_NOWARN)
set_name(0x800CFFDC, "VitalityTbl", SN_NOWARN)
set_name(0x800CFFE8, "ToBlkTbl", SN_NOWARN)
set_name(0x800D0024, "ExpLvlsTbl", SN_NOWARN)
set_name(0x800D4984, "quests", SN_NOWARN)
set_name(0x8011CE14, "pQLogCel", SN_NOWARN)
set_name(0x8011CE18, "ReturnLvlX", SN_NOWARN)
set_name(0x8011CE1C, "ReturnLvlY", SN_NOWARN)
set_name(0x8011CE20, "ReturnLvl", SN_NOWARN)
set_name(0x8011CE24, "ReturnLvlT", SN_NOWARN)
set_name(0x8011CE28, "rporttest", SN_NOWARN)
set_name(0x8011CE2C, "qline", SN_NOWARN)
set_name(0x8011CE30, "numqlines", SN_NOWARN)
set_name(0x8011CE34, "qtopline", SN_NOWARN)
set_name(0x8011EB80, "qlist", SN_NOWARN)
set_name(0x8011DB58, "QSRect", SN_NOWARN)
set_name(0x8011CDF1, "questlog", SN_NOWARN)
set_name(0x800D484C, "questlist", SN_NOWARN)
set_name(0x8011CDF4, "ALLQUESTS", SN_NOWARN)
set_name(0x800D4960, "QuestGroup1", SN_NOWARN)
set_name(0x800D496C, "QuestGroup2", SN_NOWARN)
set_name(0x800D4978, "QuestGroup3", SN_NOWARN)
set_name(0x8011CDF8, "QuestGroup4", SN_NOWARN)
set_name(0x8011CE10, "WaterDone", SN_NOWARN)
set_name(0x800D494C, "questtrigstr", SN_NOWARN)
set_name(0x8011CE00, "QS_PX", SN_NOWARN)
set_name(0x8011CE04, "QS_PY", SN_NOWARN)
set_name(0x8011CE08, "QS_PW", SN_NOWARN)
set_name(0x8011CE0C, "QS_PH", SN_NOWARN)
set_name(0x8011EBC0, "QSBack", SN_NOWARN)
set_name(0x800D4AC4, "spelldata", SN_NOWARN)
set_name(0x8011CE6F, "stextflag", SN_NOWARN)
set_name(0x800D536C, "smithitem", SN_NOWARN)
set_name(0x800D5F4C, "premiumitem", SN_NOWARN)
set_name(0x8011CE70, "numpremium", SN_NOWARN)
set_name(0x8011CE74, "premiumlevel", SN_NOWARN)
set_name(0x800D62DC, "witchitem", SN_NOWARN)
set_name(0x800D6EBC, "boyitem", SN_NOWARN)
set_name(0x8011CE78, "boylevel", SN_NOWARN)
set_name(0x800D6F54, "golditem", SN_NOWARN)
set_name(0x800D6FEC, "healitem", SN_NOWARN)
set_name(0x8011CE7C, "stextsize", SN_NOWARN)
set_name(0x8011CE7D, "stextscrl", SN_NOWARN)
set_name(0x8011DB60, "stextsel", SN_NOWARN)
set_name(0x8011DB64, "stextlhold", SN_NOWARN)
set_name(0x8011DB68, "stextshold", SN_NOWARN)
set_name(0x8011DB6C, "stextvhold", SN_NOWARN)
set_name(0x8011DB70, "stextsval", SN_NOWARN)
set_name(0x8011DB74, "stextsmax", SN_NOWARN)
set_name(0x8011DB78, "stextup", SN_NOWARN)
set_name(0x8011DB7C, "stextdown", SN_NOWARN)
set_name(0x8011DB80, "stextscrlubtn", SN_NOWARN)
set_name(0x8011DB81, "stextscrldbtn", SN_NOWARN)
set_name(0x8011DB82, "SItemListFlag", SN_NOWARN)
set_name(0x8011EBD0, "stext", SN_NOWARN)
set_name(0x800D7BCC, "storehold", SN_NOWARN)
set_name(0x800D984C, "storehidx", SN_NOWARN)
set_name(0x8011DB84, "storenumh", SN_NOWARN)
set_name(0x8011DB88, "gossipstart", SN_NOWARN)
set_name(0x8011DB8C, "gossipend", SN_NOWARN)
set_name(0x8011DB90, "StoreBackRect", SN_NOWARN)
set_name(0x8011DB98, "talker", SN_NOWARN)
set_name(0x8011CE5C, "pSTextBoxCels", SN_NOWARN)
set_name(0x8011CE60, "pSTextSlidCels", SN_NOWARN)
set_name(0x8011CE64, "SStringY", SN_NOWARN)
set_name(0x800D5248, "SBack", SN_NOWARN)
set_name(0x800D5258, "SStringYNorm", SN_NOWARN)
set_name(0x800D52A8, "SStringYBuy0", SN_NOWARN)
set_name(0x800D52F8, "SStringYBuy1", SN_NOWARN)
set_name(0x800D5348, "talkname", SN_NOWARN)
set_name(0x8011CE6E, "InStoreFlag", SN_NOWARN)
set_name(0x80119298, "alltext", SN_NOWARN)
set_name(0x8011CE8C, "gdwAllTextEntries", SN_NOWARN)
set_name(0x8011DB9C, "P3Tiles", SN_NOWARN)
set_name(0x8011CE9C, "tile", SN_NOWARN)
set_name(0x8011CEAC, "_trigflag", SN_NOWARN)
set_name(0x800D9AB4, "trigs", SN_NOWARN)
set_name(0x8011CEB0, "numtrigs", SN_NOWARN)
set_name(0x8011CEB4, "townwarps", SN_NOWARN)
set_name(0x8011CEB8, "TWarpFrom", SN_NOWARN)
set_name(0x800D987C, "TownDownList", SN_NOWARN)
set_name(0x800D98A8, "TownWarp1List", SN_NOWARN)
set_name(0x800D98DC, "L1UpList", SN_NOWARN)
set_name(0x800D990C, "L1DownList", SN_NOWARN)
set_name(0x800D9934, "L2UpList", SN_NOWARN)
set_name(0x800D9940, "L2DownList", SN_NOWARN)
set_name(0x800D9954, "L2TWarpUpList", SN_NOWARN)
set_name(0x800D9960, "L3UpList", SN_NOWARN)
set_name(0x800D999C, "L3DownList", SN_NOWARN)
set_name(0x800D99C0, "L3TWarpUpList", SN_NOWARN)
set_name(0x800D99F8, "L4UpList", SN_NOWARN)
set_name(0x800D9A08, "L4DownList", SN_NOWARN)
set_name(0x800D9A20, "L4TWarpUpList", SN_NOWARN)
set_name(0x800D9A30, "L4PentaList", SN_NOWARN)
set_name(0x8011CED1, "gbSndInited", SN_NOWARN)
set_name(0x8011CED4, "sglMasterVolume", SN_NOWARN)
set_name(0x8011CED8, "sglMusicVolume", SN_NOWARN)
set_name(0x8011CEDC, "sglSoundVolume", SN_NOWARN)
set_name(0x8011CEE0, "sglSpeechVolume", SN_NOWARN)
set_name(0x8011CEE4, "sgnMusicTrack", SN_NOWARN)
set_name(0x8011CED2, "gbDupSounds", SN_NOWARN)
set_name(0x8011CEE8, "sghMusic", SN_NOWARN)
set_name(0x8011A07C, "sgszMusicTracks", SN_NOWARN)
set_name(0x8011CEF8, "_pcurr_inv", SN_NOWARN)
set_name(0x800D9B04, "_pfind_list", SN_NOWARN)
set_name(0x8011CF00, "_pfind_index", SN_NOWARN)
set_name(0x8011CF04, "_pfindx", SN_NOWARN)
set_name(0x8011CF08, "_pfindy", SN_NOWARN)
set_name(0x8011CF0A, "automapmoved", SN_NOWARN)
set_name(0x8011CEF5, "flyflag", SN_NOWARN)
set_name(0x8011CEF6, "seen_combo", SN_NOWARN)
set_name(0x8011F8F0, "GPad1", SN_NOWARN)
set_name(0x8011F990, "GPad2", SN_NOWARN)
set_name(0x8011DBA0, "CurrentProc", SN_NOWARN)
set_name(0x8011A218, "AllMsgs", SN_NOWARN)
set_name(0x8011CF44, "NumOfStrings", SN_NOWARN)
set_name(0x8011CF18, "LanguageType", SN_NOWARN)
set_name(0x8011CF1C, "hndText", SN_NOWARN)
set_name(0x8011CF20, "TextPtr", SN_NOWARN)
set_name(0x8011CF24, "LangDbNo", SN_NOWARN)
set_name(0x8011CF54, "MissDat", SN_NOWARN)
set_name(0x8011CF58, "CharFade", SN_NOWARN)
set_name(0x8011CF5C, "rotateness", SN_NOWARN)
set_name(0x8011CF60, "spiralling_shape", SN_NOWARN)
set_name(0x8011CF64, "down", SN_NOWARN)
set_name(0x800D9B54, "MlTab", SN_NOWARN)
set_name(0x800D9B64, "QlTab", SN_NOWARN)
set_name(0x800D9B74, "ObjPrintFuncs", SN_NOWARN)
set_name(0x8011CF80, "MyXoff1", SN_NOWARN)
set_name(0x8011CF84, "MyYoff1", SN_NOWARN)
set_name(0x8011CF88, "MyXoff2", SN_NOWARN)
set_name(0x8011CF8C, "MyYoff2", SN_NOWARN)
set_name(0x8011CF9C, "iscflag", SN_NOWARN)
set_name(0x8011CFA9, "sgbFadedIn", SN_NOWARN)
set_name(0x8011CFAA, "screenbright", SN_NOWARN)
set_name(0x8011CFAC, "faderate", SN_NOWARN)
set_name(0x8011CFB0, "fading", SN_NOWARN)
set_name(0x8011CFBC, "FadeCoords", SN_NOWARN)
set_name(0x8011CFB4, "st", SN_NOWARN)
set_name(0x8011CFB8, "mode", SN_NOWARN)
set_name(0x800D9CFC, "portal", SN_NOWARN)
set_name(0x8011CFEE, "portalindex", SN_NOWARN)
set_name(0x8011CFE8, "WarpDropX", SN_NOWARN)
set_name(0x8011CFEC, "WarpDropY", SN_NOWARN)
set_name(0x800D9D14, "MyVerString", SN_NOWARN)
set_name(0x8011D154, "Year", SN_NOWARN)
set_name(0x8011D158, "Day", SN_NOWARN)
set_name(0x8011DBA4, "tbuff", SN_NOWARN)
set_name(0x800D9D8C, "IconBuffer", SN_NOWARN)
set_name(0x8011DBA8, "HR1", SN_NOWARN)
set_name(0x8011DBA9, "HR2", SN_NOWARN)
set_name(0x8011DBAA, "HR3", SN_NOWARN)
set_name(0x8011DBAB, "VR1", SN_NOWARN)
set_name(0x8011DBAC, "VR2", SN_NOWARN)
set_name(0x8011DBAD, "VR3", SN_NOWARN)
set_name(0x8011D1D4, "pHallList", SN_NOWARN)
set_name(0x8011D1D8, "nRoomCnt", SN_NOWARN)
set_name(0x8011D1DC, "nSx1", SN_NOWARN)
set_name(0x8011D1E0, "nSy1", SN_NOWARN)
set_name(0x8011D1E4, "nSx2", SN_NOWARN)
set_name(0x8011D1E8, "nSy2", SN_NOWARN)
set_name(0x8011D18C, "Area_Min", SN_NOWARN)
set_name(0x8011D190, "Room_Max", SN_NOWARN)
set_name(0x8011D194, "Room_Min", SN_NOWARN)
set_name(0x8011D198, "BIG3", SN_NOWARN)
set_name(0x8011D1A0, "BIG4", SN_NOWARN)
set_name(0x8011D1A8, "BIG6", SN_NOWARN)
set_name(0x8011D1B0, "BIG7", SN_NOWARN)
set_name(0x8011D1B8, "RUINS1", SN_NOWARN)
set_name(0x8011D1BC, "RUINS2", SN_NOWARN)
set_name(0x8011D1C0, "RUINS3", SN_NOWARN)
set_name(0x8011D1C4, "RUINS4", SN_NOWARN)
set_name(0x8011D1C8, "RUINS5", SN_NOWARN)
set_name(0x8011D1CC, "RUINS6", SN_NOWARN)
set_name(0x8011D1D0, "RUINS7", SN_NOWARN)
set_name(0x8011DBB0, "abyssx", SN_NOWARN)
set_name(0x8011DBB4, "lavapool", SN_NOWARN)
set_name(0x8011D274, "lockoutcnt", SN_NOWARN)
set_name(0x8011D1F8, "L3TITE12", SN_NOWARN)
set_name(0x8011D200, "L3TITE13", SN_NOWARN)
set_name(0x8011D208, "L3CREV1", SN_NOWARN)
set_name(0x8011D210, "L3CREV2", SN_NOWARN)
set_name(0x8011D218, "L3CREV3", SN_NOWARN)
set_name(0x8011D220, "L3CREV4", SN_NOWARN)
set_name(0x8011D228, "L3CREV5", SN_NOWARN)
set_name(0x8011D230, "L3CREV6", SN_NOWARN)
set_name(0x8011D238, "L3CREV7", SN_NOWARN)
set_name(0x8011D240, "L3CREV8", SN_NOWARN)
set_name(0x8011D248, "L3CREV9", SN_NOWARN)
set_name(0x8011D250, "L3CREV10", SN_NOWARN)
set_name(0x8011D258, "L3CREV11", SN_NOWARN)
set_name(0x8011D260, "L3XTRA1", SN_NOWARN)
set_name(0x8011D264, "L3XTRA2", SN_NOWARN)
set_name(0x8011D268, "L3XTRA3", SN_NOWARN)
set_name(0x8011D26C, "L3XTRA4", SN_NOWARN)
set_name(0x8011D270, "L3XTRA5", SN_NOWARN)
set_name(0x8011D278, "diabquad1x", SN_NOWARN)
set_name(0x8011D27C, "diabquad2x", SN_NOWARN)
set_name(0x8011D280, "diabquad3x", SN_NOWARN)
set_name(0x8011D284, "diabquad4x", SN_NOWARN)
set_name(0x8011D288, "diabquad1y", SN_NOWARN)
set_name(0x8011D28C, "diabquad2y", SN_NOWARN)
set_name(0x8011D290, "diabquad3y", SN_NOWARN)
set_name(0x8011D294, "diabquad4y", SN_NOWARN)
set_name(0x8011D298, "SP4x1", SN_NOWARN)
set_name(0x8011D29C, "SP4y1", SN_NOWARN)
set_name(0x8011D2A0, "SP4x2", SN_NOWARN)
set_name(0x8011D2A4, "SP4y2", SN_NOWARN)
set_name(0x8011D2A8, "l4holdx", SN_NOWARN)
set_name(0x8011D2AC, "l4holdy", SN_NOWARN)
set_name(0x8011DBB8, "lpSetPiece1", SN_NOWARN)
set_name(0x8011DBBC, "lpSetPiece2", SN_NOWARN)
set_name(0x8011DBC0, "lpSetPiece3", SN_NOWARN)
set_name(0x8011DBC4, "lpSetPiece4", SN_NOWARN)
set_name(0x8011DBC8, "lppSetPiece2", SN_NOWARN)
set_name(0x8011DBCC, "lppSetPiece3", SN_NOWARN)
set_name(0x8011DBD0, "lppSetPiece4", SN_NOWARN)
set_name(0x8011D2BC, "SkelKingTrans1", SN_NOWARN)
set_name(0x8011D2C4, "SkelKingTrans2", SN_NOWARN)
set_name(0x800DA08C, "SkelKingTrans3", SN_NOWARN)
set_name(0x800DA0A0, "SkelKingTrans4", SN_NOWARN)
set_name(0x800DA0BC, "SkelChamTrans1", SN_NOWARN)
set_name(0x8011D2CC, "SkelChamTrans2", SN_NOWARN)
set_name(0x800DA0D0, "SkelChamTrans3", SN_NOWARN)
set_name(0x8011D3C0, "DoUiForChooseMonster", SN_NOWARN)
set_name(0x800DA0F4, "MgToText", SN_NOWARN)
set_name(0x800DA17C, "StoryText", SN_NOWARN)
set_name(0x800DA1A0, "dungeon", SN_NOWARN)
set_name(0x800DB3A0, "pdungeon", SN_NOWARN)
set_name(0x800DB9E0, "dflags", SN_NOWARN)
set_name(0x8011D3E4, "setpc_x", SN_NOWARN)
set_name(0x8011D3E8, "setpc_y", SN_NOWARN)
set_name(0x8011D3EC, "setpc_w", SN_NOWARN)
set_name(0x8011D3F0, "setpc_h", SN_NOWARN)
set_name(0x8011D3F4, "setloadflag", SN_NOWARN)
set_name(0x800DC020, "nBlockTable", SN_NOWARN)
set_name(0x800DC824, "nSolidTable", SN_NOWARN)
set_name(0x800DD028, "nTransTable", SN_NOWARN)
set_name(0x800DD82C, "nMissileTable", SN_NOWARN)
set_name(0x800DE030, "nTrapTable", SN_NOWARN)
set_name(0x8011D3F8, "dminx", SN_NOWARN)
set_name(0x8011D3FC, "dminy", SN_NOWARN)
set_name(0x8011D400, "dmaxx", SN_NOWARN)
set_name(0x8011D404, "dmaxy", SN_NOWARN)
set_name(0x8011D408, "gnDifficulty", SN_NOWARN)
set_name(0x8011D40C, "currlevel", SN_NOWARN)
set_name(0x8011D40D, "leveltype", SN_NOWARN)
set_name(0x8011D40E, "setlevel", SN_NOWARN)
set_name(0x8011D40F, "setlvlnum", SN_NOWARN)
set_name(0x8011D410, "setlvltype", SN_NOWARN)
set_name(0x8011D414, "ViewX", SN_NOWARN)
set_name(0x8011D418, "ViewY", SN_NOWARN)
set_name(0x8011D41C, "ViewDX", SN_NOWARN)
set_name(0x8011D420, "ViewDY", SN_NOWARN)
set_name(0x8011D424, "ViewBX", SN_NOWARN)
set_name(0x8011D428, "ViewBY", SN_NOWARN)
set_name(0x800DE834, "ScrollInfo", SN_NOWARN)
set_name(0x8011D42C, "LvlViewX", SN_NOWARN)
set_name(0x8011D430, "LvlViewY", SN_NOWARN)
set_name(0x8011D434, "btmbx", SN_NOWARN)
set_name(0x8011D438, "btmby", SN_NOWARN)
set_name(0x8011D43C, "btmdx", SN_NOWARN)
set_name(0x8011D440, "btmdy", SN_NOWARN)
set_name(0x8011D444, "MicroTileLen", SN_NOWARN)
set_name(0x8011D448, "TransVal", SN_NOWARN)
set_name(0x800DE848, "TransList", SN_NOWARN)
set_name(0x8011D44C, "themeCount", SN_NOWARN)
set_name(0x800DE868, "dung_map", SN_NOWARN)
set_name(0x80100B28, "dung_map_r", SN_NOWARN)
set_name(0x8010168C, "dung_map_g", SN_NOWARN)
set_name(0x801021F0, "dung_map_b", SN_NOWARN)
set_name(0x80102D54, "MinisetXY", SN_NOWARN)
set_name(0x8011D3DC, "pSetPiece", SN_NOWARN)
set_name(0x8011D3E0, "DungSize", SN_NOWARN)
set_name(0x80102F20, "theme", SN_NOWARN)
set_name(0x8011D48C, "numthemes", SN_NOWARN)
set_name(0x8011D490, "zharlib", SN_NOWARN)
set_name(0x8011D494, "armorFlag", SN_NOWARN)
set_name(0x8011D495, "bCrossFlag", SN_NOWARN)
set_name(0x8011D496, "weaponFlag", SN_NOWARN)
set_name(0x8011D498, "themex", SN_NOWARN)
set_name(0x8011D49C, "themey", SN_NOWARN)
set_name(0x8011D4A0, "themeVar1", SN_NOWARN)
set_name(0x8011D4A4, "bFountainFlag", SN_NOWARN)
set_name(0x8011D4A5, "cauldronFlag", SN_NOWARN)
set_name(0x8011D4A6, "mFountainFlag", SN_NOWARN)
set_name(0x8011D4A7, "pFountainFlag", SN_NOWARN)
set_name(0x8011D4A8, "tFountainFlag", SN_NOWARN)
set_name(0x8011D4A9, "treasureFlag", SN_NOWARN)
set_name(0x8011D4AC, "ThemeGoodIn", SN_NOWARN)
set_name(0x80102E00, "ThemeGood", SN_NOWARN)
set_name(0x80102E10, "trm5x", SN_NOWARN)
set_name(0x80102E74, "trm5y", SN_NOWARN)
set_name(0x80102ED8, "trm3x", SN_NOWARN)
set_name(0x80102EFC, "trm3y", SN_NOWARN)
set_name(0x8011D584, "nummissiles", SN_NOWARN)
set_name(0x80103138, "missileactive", SN_NOWARN)
set_name(0x8010332C, "missileavail", SN_NOWARN)
set_name(0x8011D588, "MissilePreFlag", SN_NOWARN)
set_name(0x80103520, "missile", SN_NOWARN)
set_name(0x8011D589, "ManashieldFlag", SN_NOWARN)
set_name(0x8011D58A, "ManashieldFlag2", SN_NOWARN)
set_name(0x801030B0, "XDirAdd", SN_NOWARN)
set_name(0x801030D0, "YDirAdd", SN_NOWARN)
set_name(0x8011D551, "fadetor", SN_NOWARN)
set_name(0x8011D552, "fadetog", SN_NOWARN)
set_name(0x8011D553, "fadetob", SN_NOWARN)
set_name(0x801030F0, "ValueTable", SN_NOWARN)
set_name(0x80103100, "StringTable", SN_NOWARN)
set_name(0x80105DD0, "monster", SN_NOWARN)
set_name(0x8011D5EC, "nummonsters", SN_NOWARN)
set_name(0x8010B550, "monstactive", SN_NOWARN)
set_name(0x8010B6E0, "monstkills", SN_NOWARN)
set_name(0x8010B870, "Monsters", SN_NOWARN)
set_name(0x8011D5F0, "monstimgtot", SN_NOWARN)
set_name(0x8011D5F4, "totalmonsters", SN_NOWARN)
set_name(0x8011D5F8, "uniquetrans", SN_NOWARN)
set_name(0x8011DBD4, "sgbSaveSoundOn", SN_NOWARN)
set_name(0x8011D5BC, "offset_x", SN_NOWARN)
set_name(0x8011D5C4, "offset_y", SN_NOWARN)
set_name(0x8011D5A4, "left", SN_NOWARN)
set_name(0x8011D5AC, "right", SN_NOWARN)
set_name(0x8011D5B4, "opposite", SN_NOWARN)
set_name(0x8011D598, "nummtypes", SN_NOWARN)
set_name(0x8011D59C, "animletter", SN_NOWARN)
set_name(0x80105C30, "MWVel", SN_NOWARN)
set_name(0x8011D5CC, "rnd5", SN_NOWARN)
set_name(0x8011D5D0, "rnd10", SN_NOWARN)
set_name(0x8011D5D4, "rnd20", SN_NOWARN)
set_name(0x8011D5D8, "rnd60", SN_NOWARN)
set_name(0x80105D50, "AiProc", SN_NOWARN)
set_name(0x8010BD48, "monsterdata", SN_NOWARN)
set_name(0x8010D788, "MonstConvTbl", SN_NOWARN)
set_name(0x8010D808, "MonstAvailTbl", SN_NOWARN)
set_name(0x8010D878, "UniqMonst", SN_NOWARN)
set_name(0x8010BA30, "TransPals", SN_NOWARN)
set_name(0x8010BC48, "StonePals", SN_NOWARN)
set_name(0x8011D630, "invflag", SN_NOWARN)
set_name(0x8011D631, "drawsbarflag", SN_NOWARN)
set_name(0x8011D634, "InvBackY", SN_NOWARN)
set_name(0x8011D638, "InvCursPos", SN_NOWARN)
set_name(0x8010E820, "InvSlotTable", SN_NOWARN)
set_name(0x8011D63C, "InvBackAY", SN_NOWARN)
set_name(0x8011D640, "InvSel", SN_NOWARN)
set_name(0x8011D644, "ItemW", SN_NOWARN)
set_name(0x8011D648, "ItemH", SN_NOWARN)
set_name(0x8011D64C, "ItemNo", SN_NOWARN)
set_name(0x8011D650, "BRect", SN_NOWARN)
set_name(0x8011D618, "InvPanelTData", SN_NOWARN)
set_name(0x8011D61C, "InvGfxTData", SN_NOWARN)
set_name(0x8011D614, "InvPageNo", SN_NOWARN)
set_name(0x8010E1A8, "AP2x2Tbl", SN_NOWARN)
set_name(0x8010E1D0, "InvRect", SN_NOWARN)
set_name(0x8010E418, "InvGfxTable", SN_NOWARN)
set_name(0x8010E6B8, "InvItemWidth", SN_NOWARN)
set_name(0x8010E76C, "InvItemHeight", SN_NOWARN)
set_name(0x8011D628, "InvOn", SN_NOWARN)
set_name(0x8011D62C, "sgdwLastTime", SN_NOWARN)
set_name(0x8011D687, "automapflag", SN_NOWARN)
set_name(0x8010E884, "automapview", SN_NOWARN)
set_name(0x8010E94C, "automaptype", SN_NOWARN)
set_name(0x8011D688, "AMLWallFlag", SN_NOWARN)
set_name(0x8011D689, "AMRWallFlag", SN_NOWARN)
set_name(0x8011D68A, "AMLLWallFlag", SN_NOWARN)
set_name(0x8011D68B, "AMLRWallFlag", SN_NOWARN)
set_name(0x8011D68C, "AMDirtFlag", SN_NOWARN)
set_name(0x8011D68D, "AMColumnFlag", SN_NOWARN)
set_name(0x8011D68E, "AMStairFlag", SN_NOWARN)
set_name(0x8011D68F, "AMLDoorFlag", SN_NOWARN)
set_name(0x8011D690, "AMLGrateFlag", SN_NOWARN)
set_name(0x8011D691, "AMLArchFlag", SN_NOWARN)
set_name(0x8011D692, "AMRDoorFlag", SN_NOWARN)
set_name(0x8011D693, "AMRGrateFlag", SN_NOWARN)
set_name(0x8011D694, "AMRArchFlag", SN_NOWARN)
set_name(0x8011D698, "AutoMapX", SN_NOWARN)
set_name(0x8011D69C, "AutoMapY", SN_NOWARN)
set_name(0x8011D6A0, "AutoMapXOfs", SN_NOWARN)
set_name(0x8011D6A4, "AutoMapYOfs", SN_NOWARN)
set_name(0x8011D6A8, "AMPlayerX", SN_NOWARN)
set_name(0x8011D6AC, "AMPlayerY", SN_NOWARN)
set_name(0x8011D664, "AutoMapScale", SN_NOWARN)
set_name(0x8011D668, "AutoMapPlayerR", SN_NOWARN)
set_name(0x8011D669, "AutoMapPlayerG", SN_NOWARN)
set_name(0x8011D66A, "AutoMapPlayerB", SN_NOWARN)
set_name(0x8011D66B, "AutoMapWallR", SN_NOWARN)
set_name(0x8011D66C, "AutoMapWallG", SN_NOWARN)
set_name(0x8011D66D, "AutoMapWallB", SN_NOWARN)
set_name(0x8011D66E, "AutoMapDoorR", SN_NOWARN)
set_name(0x8011D66F, "AutoMapDoorG", SN_NOWARN)
set_name(0x8011D670, "AutoMapDoorB", SN_NOWARN)
set_name(0x8011D671, "AutoMapColumnR", SN_NOWARN)
set_name(0x8011D672, "AutoMapColumnG", SN_NOWARN)
set_name(0x8011D673, "AutoMapColumnB", SN_NOWARN)
set_name(0x8011D674, "AutoMapArchR", SN_NOWARN)
set_name(0x8011D675, "AutoMapArchG", SN_NOWARN)
set_name(0x8011D676, "AutoMapArchB", SN_NOWARN)
set_name(0x8011D677, "AutoMapStairR", SN_NOWARN)
set_name(0x8011D678, "AutoMapStairG", SN_NOWARN)
set_name(0x8011D679, "AutoMapStairB", SN_NOWARN)
set_name(0x8010E86C, "SetLevelName", SN_NOWARN)
set_name(0x8011DD30, "GazTick", SN_NOWARN)
set_name(0x80124600, "RndTabs", SN_NOWARN)
set_name(0x800AA9E8, "DefaultRnd", SN_NOWARN)
set_name(0x8011DC5C, "ActiveTasks", SN_NOWARN)
set_name(0x8011DC60, "CurrentTask", SN_NOWARN)
set_name(0x8011DC64, "T", SN_NOWARN)
set_name(0x8011DC68, "MemTypeForTasker", SN_NOWARN)
set_name(0x80122048, "SchEnv", SN_NOWARN)
set_name(0x8011DC6C, "ExecId", SN_NOWARN)
set_name(0x8011DC70, "ExecMask", SN_NOWARN)
set_name(0x8011DC74, "TasksActive", SN_NOWARN)
set_name(0x8011DC78, "EpiFunc", SN_NOWARN)
set_name(0x8011DC7C, "ProFunc", SN_NOWARN)
set_name(0x8011DC80, "EpiProId", SN_NOWARN)
set_name(0x8011DC84, "EpiProMask", SN_NOWARN)
set_name(0x8011DC88, "DoTasksPrologue", SN_NOWARN)
set_name(0x8011DC8C, "DoTasksEpilogue", SN_NOWARN)
set_name(0x8011DC90, "StackFloodCallback", SN_NOWARN)
set_name(0x8011DC94, "ExtraStackProtection", SN_NOWARN)
set_name(0x8011DC98, "ExtraStackSizeLongs", SN_NOWARN)
set_name(0x8011DD44, "LastPtr", SN_NOWARN)
set_name(0x800AAA20, "WorkMemInfo", SN_NOWARN)
set_name(0x8011DC9C, "MemInitBlocks", SN_NOWARN)
set_name(0x80122078, "MemHdrBlocks", SN_NOWARN)
set_name(0x8011DCA0, "FreeBlocks", SN_NOWARN)
set_name(0x8011DCA4, "LastError", SN_NOWARN)
set_name(0x8011DCA8, "TimeStamp", SN_NOWARN)
set_name(0x8011DCAC, "FullErrorChecking", SN_NOWARN)
set_name(0x8011DCB0, "LastAttemptedAlloc", SN_NOWARN)
set_name(0x8011DCB4, "LastDeallocedBlock", SN_NOWARN)
set_name(0x8011DCB8, "VerbLev", SN_NOWARN)
set_name(0x8011DCBC, "NumOfFreeHdrs", SN_NOWARN)
set_name(0x8011DCC0, "LastTypeAlloced", SN_NOWARN)
set_name(0x8011DCC4, "AllocFilter", SN_NOWARN)
set_name(0x800AAA28, "GalErrors", SN_NOWARN)
set_name(0x800AAA50, "PhantomMem", SN_NOWARN)
set_name(0x8011DD58, "PollFunc", SN_NOWARN)
set_name(0x8011DD3C, "MsgFunc", SN_NOWARN)
set_name(0x8011DD88, "ErrorFunc", SN_NOWARN)
set_name(0x80122FC8, "buf", SN_NOWARN)
set_name(0x800AAA78, "NULL_REP", SN_NOWARN)
| 53.355267 | 94 | 0.831369 |
7a50ba5f2cb887022932cc033126ca44666734ea | 426 | py | Python | mundos/mundo-1/exercicio-014.py | pedrosantanaabreu/curso-em-video-python | fd68f9753a1c71ca94dfa40f23eb8c398cea6086 | [
"MIT"
] | 1 | 2022-03-23T22:48:23.000Z | 2022-03-23T22:48:23.000Z | mundos/mundo-1/exercicio-014.py | pedrosantanaabreu/curso-em-video-python | fd68f9753a1c71ca94dfa40f23eb8c398cea6086 | [
"MIT"
] | null | null | null | mundos/mundo-1/exercicio-014.py | pedrosantanaabreu/curso-em-video-python | fd68f9753a1c71ca94dfa40f23eb8c398cea6086 | [
"MIT"
] | null | null | null | """
@Pedro Santana Abreu (https://linktr.ee/pedrosantanaabreu)
@Curso em Vídeo (https://cursoemvideo.com)
PT-BR (translated):
Write a program that converts a temperature entered in Celsius (C) to Fahrenheit (F).
"""
# Receiving the input value
celsius = float(input('Digite a temperatura em celsius: '))
# Converting
fahrenheit = (celsius * 9/5) + 32
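# Example: 25.0 °C -> (25.0 * 9/5) + 32 = 77.0 °F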
# Result
print('{:.1f} graus celsius equivalem a {:.1f} graus fahrenheit'.format(celsius, fahrenheit))
| 25.058824 | 93 | 0.730047 |
c2ace02c1896059a4f3b02fa12b604e39eedb84f | 5,107 | py | Python | tests/contrib/flask/test_static.py | ocelotl/opentelemetry-auto-instr-python-1 | f5c47bd1ee492ffde298794f283031c22891f60b | [
"BSD-3-Clause"
] | 2 | 2020-03-04T17:33:22.000Z | 2021-01-20T14:20:10.000Z | tests/contrib/flask/test_static.py | ocelotl/opentelemetry-auto-instr-python-1 | f5c47bd1ee492ffde298794f283031c22891f60b | [
"BSD-3-Clause"
] | 4 | 2019-11-25T00:11:16.000Z | 2021-05-13T20:43:50.000Z | tests/contrib/flask/test_static.py | ocelotl/opentelemetry-auto-instr-python-1 | f5c47bd1ee492ffde298794f283031c22891f60b | [
"BSD-3-Clause"
] | 3 | 2020-02-05T14:54:25.000Z | 2020-03-23T02:51:27.000Z | # Copyright 2019, OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oteltrace.ext import http
from . import BaseFlaskTestCase
class FlaskStaticFileTestCase(BaseFlaskTestCase):
def test_serve_static_file(self):
"""
When fetching a static file
We create the expected spans
"""
# DEV: By default a static handler for `./static/` is configured for us
res = self.client.get('/static/test.txt')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.data, b'Hello Flask\n')
spans = self.get_spans()
self.assertEqual(len(spans), 9)
req_span = self.find_span_by_name(spans, 'flask.request')
handler_span = self.find_span_by_name(spans, 'static')
send_file_span = self.find_span_by_name(spans, 'flask.send_static_file')
# flask.request span
self.assertEqual(req_span.error, 0)
self.assertEqual(req_span.service, 'flask')
self.assertEqual(req_span.name, 'flask.request')
self.assertEqual(req_span.resource, 'GET /static/<path:filename>')
self.assertEqual(req_span.get_tag('flask.endpoint'), 'static')
self.assertEqual(req_span.get_tag('flask.url_rule'), '/static/<path:filename>')
self.assertEqual(req_span.get_tag('flask.view_args.filename'), 'test.txt')
self.assertEqual(req_span.get_tag('http.status_code'), '200')
self.assertEqual(req_span.get_tag(http.URL), 'http://localhost/static/test.txt')
self.assertEqual(req_span.get_tag('http.method'), 'GET')
# static span
self.assertEqual(handler_span.error, 0)
self.assertEqual(handler_span.service, 'flask')
self.assertEqual(handler_span.name, 'static')
self.assertEqual(handler_span.resource, '/static/<path:filename>')
# flask.send_static_file span
self.assertEqual(send_file_span.error, 0)
self.assertEqual(send_file_span.service, 'flask')
self.assertEqual(send_file_span.name, 'flask.send_static_file')
self.assertEqual(send_file_span.resource, 'flask.send_static_file')
def test_serve_static_file_404(self):
"""
When fetching a static file
When the file does not exist
We create the expected spans
"""
# DEV: By default a static handler for `./static/` is configured for us
res = self.client.get('/static/unknown-file')
self.assertEqual(res.status_code, 404)
spans = self.get_spans()
self.assertEqual(len(spans), 11)
req_span = self.find_span_by_name(spans, 'flask.request')
handler_span = self.find_span_by_name(spans, 'static')
send_file_span = self.find_span_by_name(spans, 'flask.send_static_file')
# flask.request span
self.assertEqual(req_span.error, 0)
self.assertEqual(req_span.service, 'flask')
self.assertEqual(req_span.name, 'flask.request')
self.assertEqual(req_span.resource, 'GET /static/<path:filename>')
self.assertEqual(req_span.get_tag('flask.endpoint'), 'static')
self.assertEqual(req_span.get_tag('flask.url_rule'), '/static/<path:filename>')
self.assertEqual(req_span.get_tag('flask.view_args.filename'), 'unknown-file')
self.assertEqual(req_span.get_tag('http.status_code'), '404')
self.assertEqual(req_span.get_tag(http.URL), 'http://localhost/static/unknown-file')
self.assertEqual(req_span.get_tag('http.method'), 'GET')
# static span
self.assertEqual(handler_span.error, 1)
self.assertEqual(handler_span.service, 'flask')
self.assertEqual(handler_span.name, 'static')
self.assertEqual(handler_span.resource, '/static/<path:filename>')
self.assertTrue(handler_span.get_tag('error.msg').startswith('404 Not Found'))
self.assertTrue(handler_span.get_tag('error.stack').startswith('Traceback'))
self.assertEqual(handler_span.get_tag('error.type'), 'werkzeug.exceptions.NotFound')
# flask.send_static_file span
self.assertEqual(send_file_span.error, 1)
self.assertEqual(send_file_span.service, 'flask')
self.assertEqual(send_file_span.name, 'flask.send_static_file')
self.assertEqual(send_file_span.resource, 'flask.send_static_file')
self.assertTrue(send_file_span.get_tag('error.msg').startswith('404 Not Found'))
self.assertTrue(send_file_span.get_tag('error.stack').startswith('Traceback'))
self.assertEqual(send_file_span.get_tag('error.type'), 'werkzeug.exceptions.NotFound')
| 47.287037 | 94 | 0.692187 |
b4fe2971f14c9e4b5cb0f8134907818b9100d7a1 | 749 | py | Python | setup.py | dwkim78/UPSILoN-T | 839ebb31360195f4cd668e255ede4ed14a46ba61 | [
"MIT"
] | 3 | 2021-09-09T06:16:15.000Z | 2021-12-17T04:40:57.000Z | setup.py | dwkim78/UPSILoN-T | 839ebb31360195f4cd668e255ede4ed14a46ba61 | [
"MIT"
] | null | null | null | setup.py | dwkim78/UPSILoN-T | 839ebb31360195f4cd668e255ede4ed14a46ba61 | [
"MIT"
] | null | null | null | from setuptools import find_packages, setup
setup(
name='upsilont',
version='0.1.0',
description='UPSILoN-T',
long_description='',
platforms=['any'],
packages=find_packages(),
include_package_data=True,
url='https://github.com/dwkim78/UPSILoN-T',
license='Apache v2.0',
author='Dae-Won Kim',
author_email='dwk@etri.re.kr',
install_requires=['scikit-learn==0.22', 'numpy>=1.17',
'scipy>=1.3', 'pandas>=0.25'],
keywords=['Machine Learning', 'Periodic Variable Stars', 'Time Series'],
classifiers=[
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.5',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
]
)
| 29.96 | 76 | 0.620828 |
7817379573a385d3433d0e1a63d2429621cf3e4b | 675 | py | Python | build/__main__.py | dantegates/dantegates.github.io | ef81968a060cdd6c107c1ce865866a87947ad547 | [
"MIT"
] | null | null | null | build/__main__.py | dantegates/dantegates.github.io | ef81968a060cdd6c107c1ce865866a87947ad547 | [
"MIT"
] | null | null | null | build/__main__.py | dantegates/dantegates.github.io | ef81968a060cdd6c107c1ce865866a87947ad547 | [
"MIT"
] | 1 | 2018-07-25T20:16:56.000Z | 2018-07-25T20:16:56.000Z | import os
from .config import POSTS
def build(posts=POSTS):
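# Build loop: skip posts whose target already exists (unless post.rebuild is set),
# write each post's static assets first (creating their directories as needed),
# then write the rendered content to post.target.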
for post in posts:
if os.path.exists(post.target) and not post.rebuild:
continue
print(f'building: {post.title}')
# must remap static files first before writing content
for static_filename, data in post.static_files.items():
try:
os.makedirs(os.path.dirname(static_filename))
except FileExistsError:
pass
with open(static_filename, 'wb') as f:
f.write(data)
with open(post.target, 'w') as f:
f.write(post.post.content)
if __name__ == '__main__':
build()
| 27 | 63 | 0.582222 |
a8c54b3e813d4060d38bd915b7f02a164e6cc94a | 25,800 | py | Python | allennlp/models/reading_comprehension/docqa++BERT.py | alontalmor/allennlp | 3beb3ffff3ef45311c148301e91562b2000dff3b | [
"Apache-2.0"
] | null | null | null | allennlp/models/reading_comprehension/docqa++BERT.py | alontalmor/allennlp | 3beb3ffff3ef45311c148301e91562b2000dff3b | [
"Apache-2.0"
] | null | null | null | allennlp/models/reading_comprehension/docqa++BERT.py | alontalmor/allennlp | 3beb3ffff3ef45311c148301e91562b2000dff3b | [
"Apache-2.0"
] | null | null | null | import logging
from allennlp.common.elastic_logger import ElasticLogger
from typing import Any, Dict, List
import numpy as np
from overrides import overrides
import torch
import torch.nn.functional as F
from torch.nn.functional import nll_loss
import os
import random
import traceback
import json
from allennlp.common.checks import check_dimensions_match
from allennlp.data import Vocabulary
from allennlp.models.model import Model
from allennlp.modules import Seq2SeqEncoder, TextFieldEmbedder
from allennlp.modules.input_variational_dropout import InputVariationalDropout
from allennlp.nn import InitializerApplicator, util
from allennlp.tools import squad_eval
from allennlp.training.metrics import Average, BooleanAccuracy, CategoricalAccuracy
logger = logging.getLogger(__name__) # pylint: disable=invalid-name
@Model.register("docqa++BERT")
class DocQAPlusBERT(Model):
"""
This class implements a modified version of the BiDAF model
(with self-attention and a residual layer, from the Clark and Gardner ACL 2017 paper), as used in the
Question Answering in Context (EMNLP 2018) paper [https://arxiv.org/pdf/1808.07036.pdf].
In this set-up, a single instance is a dialog: a list of question-answer pairs.
Parameters
----------
vocab : ``Vocabulary``
text_field_embedder : ``TextFieldEmbedder``
Used to embed the ``question`` and ``passage`` ``TextFields`` we get as input to the model.
phrase_layer : ``Seq2SeqEncoder``
The encoder (with its own internal stacking) that we will use in between embedding tokens
and doing the bidirectional attention.
span_start_encoder : ``Seq2SeqEncoder``
The encoder that we will use to incorporate span start predictions into the passage state
before predicting span end.
span_end_encoder : ``Seq2SeqEncoder``
The encoder that we will use to incorporate span end predictions into the passage state.
dropout : ``float``, optional (default=0.2)
If greater than 0, we will apply dropout with this probability after all encoders (pytorch
LSTMs do not apply dropout to their last layer).
multi_choice_answers: ``bool``, optional (default=False)
If True, the dataset has multiple-choice answers, and accuracy will be computed accordingly.
Note that "multichoice_incorrect_answers" must be provided in the dataset.
num_context_answers : ``int``, optional (default=0)
If greater than 0, the model will consider previous question answering context.
max_span_length: ``int``, optional (default=0)
Maximum token length of the output span.
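Example
-------
A hedged usage sketch: since the class is registered under ``"docqa++BERT"``, an AllenNLP
experiment config would typically select it with
``"model": {"type": "docqa++BERT", "text_field_embedder": {...}}``, where the embedder is
expected to supply BERT token representations (``forward`` reads ``passage['bert']``).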
"""
def __init__(self, vocab: Vocabulary,
text_field_embedder: TextFieldEmbedder,
initializer: InitializerApplicator,
dropout: float = 0.2,
multi_choice_answers: int = 0,
frac_of_validation_used: float = 1.0,
frac_of_training_used: float = 1.0,
shared_norm: bool = True,
support_yesno: bool = False,
support_followup: bool = False,
num_context_answers: int = 0,
max_qad_triplets: int = 0,
max_span_length: int = 30,
predictions_file = None,
use_multi_label_loss: bool = False,
stats_report_freq:float = None,
debug_experiment_name:str = None) -> None:
super().__init__(vocab)
self._num_context_answers = num_context_answers
self._multi_choice_answers = multi_choice_answers
self._support_yesno = support_yesno
self._support_followup = support_followup
self._max_span_length = max_span_length
self._text_field_embedder = text_field_embedder
self._shared_norm = shared_norm
self._stats_report_freq = stats_report_freq
self._debug_experiment_name = debug_experiment_name
self._use_multi_label_loss = use_multi_label_loss
self._predictions_file = predictions_file
if predictions_file is not None and os.path.isfile(predictions_file):
os.remove(predictions_file)
# see usage below for explanation
self._all_qa_count = 0
self._qas_used_fraction = 1.0
self._max_qad_triplets = max_qad_triplets
self._frac_of_validation_used = frac_of_validation_used
self._frac_of_training_used = frac_of_training_used
self.qa_outputs = torch.nn.Linear(self._text_field_embedder.get_output_dim(), 2)
initializer(self)
self._span_start_accuracy = CategoricalAccuracy()
self._span_end_accuracy = CategoricalAccuracy()
self._span_accuracy = BooleanAccuracy()
if self._multi_choice_answers:
self._multichoice_accuracy = BooleanAccuracy()
self._official_f1 = Average()
self._official_EM = Average()
self._variational_dropout = InputVariationalDropout(dropout)
def multi_label_cross_entropy_loss(self, span_logits, answers, passage_length):
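# For every instance that has at least one gold answer position, build a multi-hot
# target over passage tokens and minimize the negative log of the total softmax
# probability mass assigned to the gold positions, i.e. a marginal likelihood over
# multiple gold answer spans.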
instances_with_answer = np.argwhere(answers.squeeze().cpu() >= 0)[0].unique()
target = torch.cuda.FloatTensor(len(instances_with_answer), passage_length, device=span_logits.device) \
if torch.cuda.is_available() else torch.FloatTensor(len(instances_with_answer), passage_length)
target.zero_()
answers = answers[instances_with_answer].squeeze().cpu() if len(instances_with_answer)>1 \
else answers[instances_with_answer].cpu()
for ind, q_target in enumerate(answers):
target[ind, q_target[(q_target >= 0) & (q_target < passage_length)]] = 1.0
return -(torch.log((F.softmax(span_logits[instances_with_answer], dim=-1) * \
target.float()).sum(dim=1))).mean()
def shared_norm_cross_entropy_loss(self, span_logits, answers, passage_length):
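# "Shared normalization" variant: the span logits of all paragraphs belonging to the
# same question are concatenated and softmax-normalized jointly, and the multi-hot
# target marks every gold position in any of those paragraphs.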
target = torch.cuda.FloatTensor(1, passage_length * answers.size(0), device=span_logits.device) \
if torch.cuda.is_available() else torch.FloatTensor(1, passage_length * answers.size(0))
target.zero_()
answers = answers.squeeze().cpu() if len(answers) > 1 else answers.cpu()
for ind, q_target in enumerate(answers):
if len(np.argwhere(q_target.squeeze() >= 0))>0 :
target[0, q_target[(q_target >= 0) & (q_target < passage_length)] + passage_length * ind] = 1.0
return -(torch.log((F.softmax(torch.cat(tuple(span_logits)), dim=-1) * target.float()).sum(dim=1))).mean()
def forward(self, # type: ignore
question: Dict[str, torch.LongTensor],
passage: Dict[str, torch.LongTensor],
span_start: torch.IntTensor = None,
span_end: torch.IntTensor = None,
metadata: List[Dict[str, Any]] = None) -> Dict[str, torch.Tensor]:
# pylint: disable=arguments-differ
"""
Parameters
----------
question : Dict[str, torch.LongTensor]
From a ``TextField``.
passage : Dict[str, torch.LongTensor]
From a ``TextField``. The model assumes that this passage contains the answer to the
question, and predicts the beginning and ending positions of the answer within the
passage.
span_start : ``torch.IntTensor``, optional
From an ``IndexField``. This is one of the things we are trying to predict - the
beginning position of the answer with the passage. This is an `inclusive` token index.
If this is given, we will compute a loss that gets included in the output dictionary.
span_end : ``torch.IntTensor``, optional
From an ``IndexField``. This is one of the things we are trying to predict - the
ending position of the answer with the passage. This is an `inclusive` token index.
If this is given, we will compute a loss that gets included in the output dictionary.
metadata : ``List[Dict[str, Any]]``, optional
If present, this should contain the question ID, original passage text, and token
offsets into the passage for each instance in the batch. We use this for computing
official metrics using the official SQuAD evaluation script. The length of this list
should be the batch size, and each dictionary should have the keys ``id``,
``original_passage``, and ``token_offsets``. If you only want the best span string and
don't care about official metrics, you can omit the ``id`` key.
Returns
-------
        An output dictionary consisting of the following entries.
        Each entry is a nested list: the outer list iterates over dialogs, the
        inner one over the questions within each dialog.
        qid : List[List[str]]
            A list of lists containing the question ids.
best_span_str : List[List[str]]
If sufficient metadata was provided for the instances in the batch, we also return the
string from the original passage that the model thinks is the best answer to the
question.
loss : torch.FloatTensor, optional
A scalar loss to be optimised.
"""
batch_size, num_of_passage_tokens = passage['bert'].size()
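        # Log metadata for a random ~20% of the batches (randint(1, 5) equals 5 one time in five).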
if random.randint(1, 5) % 5 == 0:
for meta in metadata:
logger.info("%s %s", meta['dataset'], meta['question_id'])
embedded_passage = self._text_field_embedder(passage)
passage_length = embedded_passage.size(1)
logits = self.qa_outputs(embedded_passage)
start_logits, end_logits = logits.split(1, dim=-1)
span_start_logits = start_logits.squeeze(-1)
span_end_logits = end_logits.squeeze(-1)
passage_mask = util.get_text_field_mask(passage).float()
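        # NOTE: with a single document per row, the unsqueeze/repeat/view below is effectively an
        # identity reshape, since repeat(1, 1, 1) repeats once along every dimension.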
repeated_passage_mask = passage_mask.unsqueeze(1).repeat(1, 1, 1)
repeated_passage_mask = repeated_passage_mask.view(batch_size, passage_length)
span_start_logits = util.replace_masked_values(span_start_logits, repeated_passage_mask, -1e7)
span_end_logits = util.replace_masked_values(span_end_logits, repeated_passage_mask, -1e7)
best_span = self._get_example_predications(span_start_logits, span_end_logits,self._max_span_length)
output_dict: Dict[str, Any] = {}
        # Fraction of examples used (needed for true accuracy calculations).
        # NOTE (TODO): this is a workaround; we cannot yet attach global information to the model
        # (see https://github.com/allenai/allennlp/issues/1809), so we save it every time it changes,
        # ensuring that a full pass over the validation set (taking the max over all_qa_count) yields
        # the correct number (unless the last examples are skipped, which should be a small difference).
        instances_question_id = [insta_meta['question_id'] for insta_meta in metadata]
        question_instances_split_inds = np.cumsum(np.unique(instances_question_id, return_counts=True)[1])[:-1]
per_question_inds = np.split(range(batch_size), question_instances_split_inds)
metadata = np.split(metadata, question_instances_split_inds)
self._qas_used_fraction = metadata[0][0]['qas_used_fraction']
# Compute the loss.
if span_start is not None and len(np.argwhere(span_start.squeeze().cpu() >= 0)) > 0:
if self._shared_norm:
loss = 0
loss_steps = 0
# For every context/question
                for question_inds, metadata_list in zip(per_question_inds, metadata):
                    # Could have written this more compactly, but it's clearer like this ...
                    if len(question_inds) == 0:
                        continue
                    inds_with_gold_answer = np.argwhere(span_start.view(-1)[question_inds].cpu().numpy() >= 0)
                    inds_with_gold_answer = inds_with_gold_answer.squeeze() if len(
                        inds_with_gold_answer) > 1 else inds_with_gold_answer
                    if len(inds_with_gold_answer) == 0:
                        continue
if self._use_multi_label_loss:
try:
loss += self.shared_norm_cross_entropy_loss(span_start_logits[question_inds], \
span_start[question_inds], passage_length)
loss += self.shared_norm_cross_entropy_loss(span_end_logits[question_inds], \
span_end[question_inds], passage_length)
                        except Exception:
ElasticLogger().write_log('INFO', 'Loss Error', \
context_dict={'span_start_logits': span_start_logits[question_inds].cpu().size(),
'span_end_logits_size': span_end_logits[question_inds].cpu().size(),
'span_start': span_start[question_inds].squeeze().cpu().numpy().tolist(),
'span_end': span_end[question_inds].squeeze().cpu().numpy().tolist(),
'error_message': traceback.format_exc(),
'batch_size': batch_size,
'passage_length': passage_length}, print_log=True)
                            # Fall back to a dummy differentiable loss so the training step can proceed.
                            a = torch.tensor([[1., 2.], [3., 4.]], requires_grad=True)
                            loss = torch.sum(a ** 2)
output_dict["loss"] = loss
else:
span_start_logits_softmaxed = util.masked_log_softmax(\
torch.cat(tuple(span_start_logits[question_inds])).unsqueeze(0), \
torch.cat(tuple(repeated_passage_mask[question_inds])).unsqueeze(0))
span_end_logits_softmaxed = util.masked_log_softmax(
torch.cat(tuple(span_end_logits[question_inds])).unsqueeze(0), \
torch.cat(tuple(repeated_passage_mask[question_inds])).unsqueeze(0))
                        span_start_logits_softmaxed = span_start_logits_softmaxed.reshape(len(question_inds), span_start_logits.size(1))
                        span_end_logits_softmaxed = span_end_logits_softmaxed.reshape(len(question_inds), span_start_logits.size(1))
# computing loss only for indexes with answers
loss += nll_loss(span_start_logits_softmaxed[inds_with_gold_answer], \
span_start.view(-1)[question_inds[inds_with_gold_answer]], ignore_index=-1)
loss += nll_loss(span_end_logits_softmaxed[inds_with_gold_answer], \
span_end.view(-1)[question_inds[inds_with_gold_answer]], ignore_index=-1)
loss_steps += 1
                if loss_steps > 0:
                    loss /= loss_steps
                output_dict["loss"] = loss
else:
# Per instance loss
if self._use_multi_label_loss:
try:
loss = self.multi_label_cross_entropy_loss(span_start_logits, span_start, passage_length)
loss += self.multi_label_cross_entropy_loss(span_end_logits, span_end, passage_length)
output_dict["loss"] = loss
                    except Exception:
                        ElasticLogger().write_log('INFO', 'Loss Error', context_dict={'span_start_logits': span_start_logits.cpu().size(),
                            'span_end_logits_size': span_end_logits.cpu().size(), 'span_start': span_start.squeeze().cpu().numpy().tolist(),
                            'span_end': span_end.squeeze().cpu().numpy().tolist(), 'error_message': traceback.format_exc(),
                            'batch_size': batch_size, 'passage_length': passage_length}, print_log=True)
                        # Fall back to a dummy differentiable loss so the training step can proceed.
                        a = torch.tensor([[1., 2.], [3., 4.]], requires_grad=True)
                        loss = torch.sum(a ** 2)
                        output_dict["loss"] = loss
                else:
                    inds_with_gold_answer = np.argwhere(span_start.view(-1).cpu().numpy() >= 0)
                    inds_with_gold_answer = inds_with_gold_answer.squeeze() if len(inds_with_gold_answer) > 1 else inds_with_gold_answer
                    if len(inds_with_gold_answer) > 0:
                        loss = nll_loss(util.masked_log_softmax(span_start_logits[inds_with_gold_answer], \
                            repeated_passage_mask[inds_with_gold_answer]), \
                            span_start.view(-1)[inds_with_gold_answer], ignore_index=-1)
                        loss += nll_loss(util.masked_log_softmax(span_end_logits[inds_with_gold_answer], \
                            repeated_passage_mask[inds_with_gold_answer]), \
                            span_end.view(-1)[inds_with_gold_answer], ignore_index=-1)
                        output_dict["loss"] = loss
        # TODO: these metrics are not updated
#self._span_start_accuracy(span_start_logits, span_start.view(-1))
#self._span_end_accuracy(span_end_logits, span_end.view(-1))
#self._span_accuracy(best_span[:, 0:2],torch.stack([span_start, span_end], -1).view(total_qa_count, 2))
        # TODO: This is a patch for dev questions where no answer token was found,
        # but we would still like to compute an F1 score for them...
        # As a result our evaluation loss is not accurate! (However, the number of questions with
        # no answer tokens stays the same, so the metric remains comparable across runs.)
if 'loss' not in output_dict:
if not self.training:
output_dict["loss"] = torch.cuda.FloatTensor([0], device=span_end_logits.device) \
if torch.cuda.is_available() else torch.FloatTensor([0])
else:
output_dict["loss"] = torch.tensor([[1.]], requires_grad=True,device=span_end_logits.device) \
if torch.cuda.is_available() else torch.tensor([[1.]], requires_grad=True)
# support for multi choice answers:
        # TODO: this does not handle prediction mode at all...
        # In the validation and test setups we also iterate over documents that do not contain the gold answer.
span_start_logits_numpy = span_start_logits.data.cpu().numpy()
span_end_logits_numpy = span_end_logits.data.cpu().numpy()
# Compute F1 and preparing the output dictionary.
output_dict['best_span_str'] = []
output_dict['qid'] = []
## TODO UGLY PATCH FOR TESTING
#new_metadata = []
#for question_meta in metadata:
# new_metadata += [question_meta for i in range(num_of_docs)]
#metadata = new_metadata
# best_span is a vector of more than one span
best_span_cpu = best_span.detach().cpu().numpy()
# Iterating over every question (which may contain multiple instances, one per document)
for question_inds, question_instances_metadata in zip(per_question_inds, metadata):
if len(question_inds) == 0:
continue
# We need to perform softmax here !!
best_span_ind = np.argmax(span_start_logits_numpy[question_inds, best_span_cpu[question_inds][:, 0]] +
span_end_logits_numpy[question_inds, best_span_cpu[question_inds][:, 1]])
best_span_logit = np.max(span_start_logits_numpy[question_inds, best_span_cpu[question_inds][:, 0]] +
span_end_logits_numpy[question_inds, best_span_cpu[question_inds][:, 1]])
            # TODO: this shouldn't happen - we should also consider spans from passages that were not taken...
#if span_start.view(-1)[question_inds[best_span_ind]] == -1:
# self._official_f1(100 * 0.0)
# self._official_EM(100 * 0.0)
# continue
passage_str = question_instances_metadata[best_span_ind]['original_passage']
offsets = question_instances_metadata[best_span_ind]['token_offsets']
predicted_span = best_span_cpu[question_inds[best_span_ind]]
start_offset = offsets[predicted_span[0]][0]
end_offset = offsets[predicted_span[1]][1]
best_span_string = passage_str[start_offset:end_offset]
f1_score = 0.0
EM_score = 0.0
gold_answer_texts = question_instances_metadata[best_span_ind]['answer_texts_list']
if gold_answer_texts:
                if len(gold_answer_texts) > 1:
                    # NOTE: every iteration below scores against the full reference list, so the
                    # averaged scores equal a single max-over-ground-truths computation; the loop
                    # is kept as-is rather than switching to a leave-one-out protocol.
                    t_f1 = []
                    t_EM = []
                    for answer_index in range(len(gold_answer_texts)):
                        idxes = list(range(len(gold_answer_texts)))
                        refs = [gold_answer_texts[z] for z in idxes]
                        t_f1.append(squad_eval.metric_max_over_ground_truths(squad_eval.f1_score, best_span_string, refs))
                        t_EM.append(squad_eval.metric_max_over_ground_truths(squad_eval.exact_match_score, best_span_string, refs))
                    f1_score = 1.0 * sum(t_f1) / len(t_f1)
                    EM_score = 1.0 * sum(t_EM) / len(t_EM)
else:
f1_score = squad_eval.metric_max_over_ground_truths(squad_eval.f1_score,best_span_string,gold_answer_texts)
EM_score = squad_eval.metric_max_over_ground_truths(squad_eval.exact_match_score, best_span_string,gold_answer_texts)
self._official_f1(100 * f1_score)
self._official_EM(100 * EM_score)
if self._predictions_file is not None:
with open(self._predictions_file,'a') as f:
f.write(json.dumps({'question_id':question_instances_metadata[best_span_ind]['question_id'], \
'best_span_logit':float(best_span_logit), \
'f1':100 * f1_score,
'EM':100 * EM_score,
'best_span_string':best_span_string,\
'gold_answer_texts':gold_answer_texts, \
'qas_used_fraction':self._qas_used_fraction}) + '\n')
#output_dict['qid'].append(per_dialog_query_id_list)
#output_dict['best_span_str'].append(per_dialog_best_span_list)
return output_dict
@overrides
def decode(self, output_dict: Dict[str, torch.Tensor]) -> Dict[str, Any]:
yesno_tags = [[self.vocab.get_token_from_index(x, namespace="yesno_labels") for x in yn_list] \
for yn_list in output_dict.pop("yesno")]
followup_tags = [[self.vocab.get_token_from_index(x, namespace="followup_labels") for x in followup_list] \
for followup_list in output_dict.pop("followup")]
output_dict['yesno'] = yesno_tags
output_dict['followup'] = followup_tags
return output_dict
def get_metrics(self, reset: bool = False) -> Dict[str, float]:
        # Calculate the final accuracy, taking into account the fraction of examples used during
        # dataset creation and the number kept after the data-reader filter (self._qas_used_fraction).
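        # Example: an F1 of 80.0 measured while only half of the questions were used is reported
        # as 40.0 under this convention.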
frac_used = self._qas_used_fraction
return {'EM': self._official_EM.get_metric(reset) * frac_used,
'f1': self._official_f1.get_metric(reset) * frac_used,
'qas_used_fraction': frac_used}
@staticmethod
def _get_example_predications(span_start_logits: torch.Tensor,
span_end_logits: torch.Tensor,
max_span_length: int) -> torch.Tensor:
        # Returns the index of the highest-scoring span that is no longer than ``max_span_length``
        # tokens. The two extra columns of the returned tensor are reserved for the yesno and
        # followup prediction bits but are left at zero here.
if span_start_logits.dim() != 2 or span_end_logits.dim() != 2:
raise ValueError("Input shapes must be (batch_size, passage_length)")
batch_size, passage_length = span_start_logits.size()
max_span_log_prob = [-1e20] * batch_size
span_start_argmax = [0] * batch_size
best_word_span = span_start_logits.new_zeros((batch_size, 4), dtype=torch.long)
span_start_logits = span_start_logits.data.cpu().numpy()
span_end_logits = span_end_logits.data.cpu().numpy()
for b_i in range(batch_size): # pylint: disable=invalid-name
for j in range(passage_length):
val1 = span_start_logits[b_i, span_start_argmax[b_i]]
if val1 < span_start_logits[b_i, j]:
span_start_argmax[b_i] = j
val1 = span_start_logits[b_i, j]
val2 = span_end_logits[b_i, j]
if val1 + val2 > max_span_log_prob[b_i]:
if j - span_start_argmax[b_i] > max_span_length:
continue
best_word_span[b_i, 0] = span_start_argmax[b_i]
best_word_span[b_i, 1] = j
max_span_log_prob[b_i] = val1 + val2
return best_word_span
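# --- Illustration (added; not part of the original file) ---------------------
# A self-contained sketch of the span search implemented in
# ``_get_example_predications`` above: keep a running argmax over the start
# logits and pair it with every end position, discarding spans longer than
# ``max_span_length``. The logit values below are made up; ``torch`` is
# assumed to be imported at the top of this file.
if __name__ == "__main__":
    starts = torch.tensor([[0.1, 2.0, 0.3, 0.0]])
    ends = torch.tensor([[0.0, 0.2, 1.5, 0.4]])
    max_span_length = 2
    start_argmax, best_start, best_end = 0, 0, 0
    best_score = float("-inf")
    for j in range(starts.size(1)):
        # Track the best start seen so far, then score the (start, end) pair.
        if starts[0, j] > starts[0, start_argmax]:
            start_argmax = j
        score = float(starts[0, start_argmax] + ends[0, j])
        if score > best_score and j - start_argmax <= max_span_length:
            best_start, best_end, best_score = start_argmax, j, score
    print((best_start, best_end))  # -> (1, 2)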
| 55.603448 | 143 | 0.61655 |
cf78d1e4e162b0eba2c68cdacfd3c0943b1855dd | 520 | py | Python | leet/strings/reverseVowels.py | monishshah18/python-cp-cheatsheet | a5514b08816959de1198156f7764c54a7a585f20 | [
"Apache-2.0"
] | 140 | 2020-10-21T13:23:52.000Z | 2022-03-31T15:09:45.000Z | leet/strings/reverseVowels.py | stacykutyepov/python-cp-cheatsheet | a00a57e1b36433648d1cace331e15ff276cef189 | [
"Apache-2.0"
] | 1 | 2021-07-22T14:01:25.000Z | 2021-07-22T14:01:25.000Z | leet/strings/reverseVowels.py | stacykutyepov/python-cp-cheatsheet | a00a57e1b36433648d1cace331e15ff276cef189 | [
"Apache-2.0"
] | 33 | 2020-10-21T14:17:02.000Z | 2022-03-25T11:25:03.000Z | class Solution:
    def reverseVowels(self, s: str) -> str:
        # Two pointers scan inward from both ends of the string.
        l, r = 0, len(s) - 1
        vowels = set('aeiouAEIOU')
        ans = list(s)
        while l < r:
            # Advance each pointer past non-vowel characters.
            if s[l] not in vowels:
                l += 1
            if s[r] not in vowels:
                r -= 1
            # Both pointers sit on vowels: swap them and move inward.
            if s[l] in vowels and s[r] in vowels:
                ans[l], ans[r] = ans[r], ans[l]
                l += 1
                r -= 1
        return "".join(ans) | 26 | 62 | 0.334615 |
e3226a7925ab92fa294cf625aad28992b0b51330 | 7,535 | py | Python | fairseq/data/style_transfer_dataset.py | jwcmu/bgt | 39a9ce72df3d89e410ed3791f9f856b05b37ed94 | [
"MIT"
] | 9 | 2020-11-23T23:17:45.000Z | 2022-03-09T07:14:27.000Z | fairseq/data/style_transfer_dataset.py | jwcmu/bgt | 39a9ce72df3d89e410ed3791f9f856b05b37ed94 | [
"MIT"
] | 2 | 2020-12-03T10:41:12.000Z | 2021-04-14T05:35:24.000Z | fairseq/data/style_transfer_dataset.py | jwcmu/bgt | 39a9ce72df3d89e410ed3791f9f856b05b37ed94 | [
"MIT"
] | 1 | 2021-09-10T13:46:51.000Z | 2021-09-10T13:46:51.000Z | # Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import numpy as np
import torch
from . import data_utils, FairseqDataset
def collate(
samples, pad_idx, eos_idx, left_pad_source=True,
):
if len(samples) == 0:
return {}
def merge(key, left_pad, move_eos_to_beginning=False):
return data_utils.collate_tokens(
[s[key] for s in samples],
pad_idx, eos_idx, left_pad, move_eos_to_beginning,
)
id = torch.LongTensor([s['id'] for s in samples])
src_tokens = merge('source', left_pad=left_pad_source)
# sort by descending source length
src_lengths = torch.LongTensor([s['source'].numel() for s in samples])
src_lengths, sort_order = src_lengths.sort(descending=True)
id = id.index_select(0, sort_order)
src_tokens = src_tokens.index_select(0, sort_order)
target = merge('target', left_pad=left_pad_source)
target = target.index_select(0, sort_order)
ntokens = sum(len(s['target']) for s in samples)
target_lengths = torch.LongTensor([s['target'].numel() for s in samples])
target_lengths = target_lengths.index_select(0, sort_order)
batch = {
'id': id,
'nsentences': len(samples),
'ntokens': ntokens,
'net_input': {
'src_tokens': src_tokens,
'src_lengths': src_lengths,
},
'target': target,
'target_lengths': target_lengths,
}
return batch
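# --- Illustration (added; not part of the original file) ---------------------
# A minimal sketch of what ``collate`` above produces. The token values are
# made up; pad index 1 and eos index 2 follow the usual fairseq dictionary
# defaults, and the helper name ``_demo_collate`` is hypothetical. Uses the
# torch import at the top of this file.
def _demo_collate():
    samples = [
        {'id': 0, 'source': torch.LongTensor([4, 5, 2]), 'target': torch.LongTensor([6, 2])},
        {'id': 1, 'source': torch.LongTensor([7, 2]), 'target': torch.LongTensor([8, 9, 2])},
    ]
    batch = collate(samples, pad_idx=1, eos_idx=2, left_pad_source=True)
    # Sources are sorted by descending length and left-padded:
    # batch['net_input']['src_tokens'] -> [[4, 5, 2], [1, 7, 2]]
    return batch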
class StyleTransferDataset(FairseqDataset):
"""
A pair of torch.utils.data.Datasets.
Args:
src (torch.utils.data.Dataset): source dataset to wrap
src_sizes (List[int]): source sentence lengths
src_dict (~fairseq.data.Dictionary): source vocabulary
tgt (torch.utils.data.Dataset, optional): target dataset to wrap
tgt_sizes (List[int], optional): target sentence lengths
tgt_dict (~fairseq.data.Dictionary, optional): target vocabulary
left_pad_source (bool, optional): pad source tensors on the left side
(default: True).
        max_source_positions (int, optional): max number of tokens in the
            source sentence (default: 1024).
        max_target_positions (int, optional): max number of tokens in the
            target sentence (default: 1024).
        shuffle (bool, optional): shuffle dataset elements before batching
            (default: True).
        remove_eos_from_source (bool, optional): if set, removes eos from end
            of source if it's present (default: False).
    """
def __init__(
self, src, src_sizes, src_dict,
tgt=None, tgt_sizes=None, tgt_dict=None,
left_pad_source=True,
max_source_positions=1024, max_target_positions=1024,
shuffle=True, remove_eos_from_source=False,
):
if tgt_dict is not None:
assert src_dict.pad() == tgt_dict.pad()
assert src_dict.eos() == tgt_dict.eos()
assert src_dict.unk() == tgt_dict.unk()
self.src = src
self.tgt = tgt
self.src_sizes = np.array(src_sizes)
self.tgt_sizes = np.array(tgt_sizes) if tgt_sizes is not None else None
self.src_dict = src_dict
self.tgt_dict = tgt_dict
self.left_pad_source = left_pad_source
self.max_source_positions = max_source_positions
self.max_target_positions = max_target_positions
self.shuffle = shuffle
self.remove_eos_from_source = remove_eos_from_source
def __getitem__(self, index):
tgt_item = self.tgt[index] if self.tgt is not None else None
src_item = self.src[index]
if self.remove_eos_from_source:
eos = self.src_dict.eos()
if self.src[index][-1] == eos:
src_item = self.src[index][:-1]
if self.tgt[index][-1] == eos:
tgt_item = self.tgt[index][:-1]
return {
'id': index,
'source': src_item,
'target': tgt_item,
}
def __len__(self):
return len(self.src)
def collater(self, samples):
"""Merge a list of samples to form a mini-batch.
Args:
samples (List[dict]): samples to collate
Returns:
dict: a mini-batch with the following keys:
                - `id` (LongTensor): example IDs in the original input order
                - `nsentences` (int): number of sentences in the batch
                - `ntokens` (int): total number of target tokens in the batch
                - `net_input` (dict): the input to the Model, containing keys:
                  - `src_tokens` (LongTensor): a padded 2D Tensor of tokens in
                    the source sentence of shape `(bsz, src_len)`. Padding will
                    appear on the left if *left_pad_source* is ``True``.
                  - `src_lengths` (LongTensor): 1D Tensor of the unpadded
                    lengths of each source sentence of shape `(bsz)`
                - `target` (LongTensor): a padded 2D Tensor of tokens in the
                  target sentence of shape `(bsz, tgt_len)`, padded on the same
                  side as the source.
                - `target_lengths` (LongTensor): 1D Tensor of the unpadded
                  lengths of each target sentence of shape `(bsz)`
"""
return collate(
samples, pad_idx=self.src_dict.pad(), eos_idx=self.src_dict.eos(),
left_pad_source=self.left_pad_source,
)
def num_tokens(self, index):
"""Return the number of tokens in a sample. This value is used to
enforce ``--max-tokens`` during batching."""
return max(self.src_sizes[index], self.tgt_sizes[index] if self.tgt_sizes is not None else 0)
def size(self, index):
"""Return an example's size as a float or tuple. This value is used when
filtering a dataset with ``--max-positions``."""
return (self.src_sizes[index], self.tgt_sizes[index] if self.tgt_sizes is not None else 0)
def ordered_indices(self):
"""Return an ordered list of indices. Batches will be constructed based
on this order."""
if self.shuffle:
indices = np.random.permutation(len(self))
else:
indices = np.arange(len(self))
if self.tgt_sizes is not None:
indices = indices[np.argsort(self.tgt_sizes[indices], kind='mergesort')]
return indices[np.argsort(self.src_sizes[indices], kind='mergesort')]
@property
def supports_prefetch(self):
return (
getattr(self.src, 'supports_prefetch', False)
and (getattr(self.tgt, 'supports_prefetch', False) or self.tgt is None)
)
def prefetch(self, indices):
self.src.prefetch(indices)
self.tgt.prefetch(indices)
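# --- Illustration (added; not part of the original file) ---------------------
# ``ordered_indices`` above sorts by target length and then, stably, by source
# length, bucketing examples by source length with ties broken by target
# length. The sizes below are made up and the helper name is hypothetical;
# uses the numpy import at the top of this file.
def _demo_ordered_indices():
    src_sizes = np.array([5, 3, 5, 3])
    tgt_sizes = np.array([2, 9, 1, 4])
    indices = np.arange(4)
    # First sort key: target length; second (stable) sort key: source length.
    indices = indices[np.argsort(tgt_sizes[indices], kind='mergesort')]
    indices = indices[np.argsort(src_sizes[indices], kind='mergesort')]
    return indices  # -> [3, 1, 2, 0]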
| 39.867725 | 101 | 0.61858 |
686966fd386320d47bd1d4502191b10a68a6c121 | 88,995 | py | Python | Lib/test/test_posix.py | rng-dynamics/RustPython | 6165aadcc4e80e0f48f3e784e17b3c7f80d21a8a | [
"CC-BY-4.0",
"MIT"
] | null | null | null | Lib/test/test_posix.py | rng-dynamics/RustPython | 6165aadcc4e80e0f48f3e784e17b3c7f80d21a8a | [
"CC-BY-4.0",
"MIT"
] | null | null | null | Lib/test/test_posix.py | rng-dynamics/RustPython | 6165aadcc4e80e0f48f3e784e17b3c7f80d21a8a | [
"CC-BY-4.0",
"MIT"
] | null | null | null | "Test posix functions"
from test import support
from test.support import os_helper, import_helper, warnings_helper
from test.support.script_helper import assert_python_ok
# Skip these tests if there is no posix module.
posix = import_helper.import_module('posix')
import errno
import sys
import signal
import time
import os
import platform
import pwd
import stat
import tempfile
import unittest
import warnings
import textwrap
_DUMMY_SYMLINK = os.path.join(tempfile.gettempdir(),
os_helper.TESTFN + '-dummy-symlink')
requires_32b = unittest.skipUnless(sys.maxsize < 2**32,
'test is only meaningful on 32-bit builds')
def _supports_sched():
if not hasattr(posix, 'sched_getscheduler'):
return False
try:
posix.sched_getscheduler(0)
except OSError as e:
if e.errno == errno.ENOSYS:
return False
return True
requires_sched = unittest.skipUnless(_supports_sched(), 'requires POSIX scheduler API')
class PosixTester(unittest.TestCase):
def setUp(self):
# create empty file
fp = open(os_helper.TESTFN, 'w+')
fp.close()
self.teardown_files = [ os_helper.TESTFN ]
self._warnings_manager = warnings_helper.check_warnings()
self._warnings_manager.__enter__()
warnings.filterwarnings('ignore', '.* potential security risk .*',
RuntimeWarning)
def tearDown(self):
for teardown_file in self.teardown_files:
os_helper.unlink(teardown_file)
self._warnings_manager.__exit__(None, None, None)
def testNoArgFunctions(self):
# test posix functions which take no arguments and have
# no side-effects which we need to cleanup (e.g., fork, wait, abort)
NO_ARG_FUNCTIONS = [ "ctermid", "getcwd", "getcwdb", "uname",
"times", "getloadavg",
"getegid", "geteuid", "getgid", "getgroups",
"getpid", "getpgrp", "getppid", "getuid", "sync",
]
for name in NO_ARG_FUNCTIONS:
posix_func = getattr(posix, name, None)
if posix_func is not None:
posix_func()
self.assertRaises(TypeError, posix_func, 1)
@unittest.skipUnless(hasattr(posix, 'getresuid'),
'test needs posix.getresuid()')
def test_getresuid(self):
user_ids = posix.getresuid()
self.assertEqual(len(user_ids), 3)
for val in user_ids:
self.assertGreaterEqual(val, 0)
@unittest.skipUnless(hasattr(posix, 'getresgid'),
'test needs posix.getresgid()')
def test_getresgid(self):
group_ids = posix.getresgid()
self.assertEqual(len(group_ids), 3)
for val in group_ids:
self.assertGreaterEqual(val, 0)
@unittest.skipUnless(hasattr(posix, 'setresuid'),
'test needs posix.setresuid()')
def test_setresuid(self):
current_user_ids = posix.getresuid()
self.assertIsNone(posix.setresuid(*current_user_ids))
# -1 means don't change that value.
self.assertIsNone(posix.setresuid(-1, -1, -1))
@unittest.skipUnless(hasattr(posix, 'setresuid'),
'test needs posix.setresuid()')
def test_setresuid_exception(self):
# Don't do this test if someone is silly enough to run us as root.
current_user_ids = posix.getresuid()
if 0 not in current_user_ids:
new_user_ids = (current_user_ids[0]+1, -1, -1)
self.assertRaises(OSError, posix.setresuid, *new_user_ids)
@unittest.skipUnless(hasattr(posix, 'setresgid'),
'test needs posix.setresgid()')
def test_setresgid(self):
current_group_ids = posix.getresgid()
self.assertIsNone(posix.setresgid(*current_group_ids))
# -1 means don't change that value.
self.assertIsNone(posix.setresgid(-1, -1, -1))
@unittest.skipUnless(hasattr(posix, 'setresgid'),
'test needs posix.setresgid()')
def test_setresgid_exception(self):
# Don't do this test if someone is silly enough to run us as root.
current_group_ids = posix.getresgid()
if 0 not in current_group_ids:
new_group_ids = (current_group_ids[0]+1, -1, -1)
self.assertRaises(OSError, posix.setresgid, *new_group_ids)
@unittest.skipUnless(hasattr(posix, 'initgroups'),
"test needs os.initgroups()")
def test_initgroups(self):
# It takes a string and an integer; check that it raises a TypeError
# for other argument lists.
self.assertRaises(TypeError, posix.initgroups)
self.assertRaises(TypeError, posix.initgroups, None)
self.assertRaises(TypeError, posix.initgroups, 3, "foo")
self.assertRaises(TypeError, posix.initgroups, "foo", 3, object())
# If a non-privileged user invokes it, it should fail with OSError
# EPERM.
if os.getuid() != 0:
try:
name = pwd.getpwuid(posix.getuid()).pw_name
except KeyError:
# the current UID may not have a pwd entry
raise unittest.SkipTest("need a pwd entry")
try:
posix.initgroups(name, 13)
except OSError as e:
self.assertEqual(e.errno, errno.EPERM)
else:
self.fail("Expected OSError to be raised by initgroups")
@unittest.skipUnless(hasattr(posix, 'statvfs'),
'test needs posix.statvfs()')
def test_statvfs(self):
self.assertTrue(posix.statvfs(os.curdir))
@unittest.skipUnless(hasattr(posix, 'fstatvfs'),
'test needs posix.fstatvfs()')
def test_fstatvfs(self):
fp = open(os_helper.TESTFN)
try:
self.assertTrue(posix.fstatvfs(fp.fileno()))
self.assertTrue(posix.statvfs(fp.fileno()))
finally:
fp.close()
@unittest.skipUnless(hasattr(posix, 'ftruncate'),
'test needs posix.ftruncate()')
def test_ftruncate(self):
fp = open(os_helper.TESTFN, 'w+')
try:
# we need to have some data to truncate
fp.write('test')
fp.flush()
posix.ftruncate(fp.fileno(), 0)
finally:
fp.close()
@unittest.skipUnless(hasattr(posix, 'truncate'), "test needs posix.truncate()")
def test_truncate(self):
with open(os_helper.TESTFN, 'w') as fp:
fp.write('test')
fp.flush()
posix.truncate(os_helper.TESTFN, 0)
@unittest.skipUnless(getattr(os, 'execve', None) in os.supports_fd, "test needs execve() to support the fd parameter")
@unittest.skipUnless(hasattr(os, 'fork'), "test needs os.fork()")
def test_fexecve(self):
fp = os.open(sys.executable, os.O_RDONLY)
try:
pid = os.fork()
if pid == 0:
os.chdir(os.path.split(sys.executable)[0])
posix.execve(fp, [sys.executable, '-c', 'pass'], os.environ)
else:
support.wait_process(pid, exitcode=0)
finally:
os.close(fp)
@unittest.skipUnless(hasattr(posix, 'waitid'), "test needs posix.waitid()")
@unittest.skipUnless(hasattr(os, 'fork'), "test needs os.fork()")
def test_waitid(self):
pid = os.fork()
if pid == 0:
os.chdir(os.path.split(sys.executable)[0])
posix.execve(sys.executable, [sys.executable, '-c', 'pass'], os.environ)
else:
res = posix.waitid(posix.P_PID, pid, posix.WEXITED)
self.assertEqual(pid, res.si_pid)
@unittest.skipUnless(hasattr(os, 'fork'), "test needs os.fork()")
def test_register_at_fork(self):
with self.assertRaises(TypeError, msg="Positional args not allowed"):
os.register_at_fork(lambda: None)
with self.assertRaises(TypeError, msg="Args must be callable"):
os.register_at_fork(before=2)
with self.assertRaises(TypeError, msg="Args must be callable"):
os.register_at_fork(after_in_child="three")
with self.assertRaises(TypeError, msg="Args must be callable"):
os.register_at_fork(after_in_parent=b"Five")
with self.assertRaises(TypeError, msg="Args must not be None"):
os.register_at_fork(before=None)
with self.assertRaises(TypeError, msg="Args must not be None"):
os.register_at_fork(after_in_child=None)
with self.assertRaises(TypeError, msg="Args must not be None"):
os.register_at_fork(after_in_parent=None)
with self.assertRaises(TypeError, msg="Invalid arg was allowed"):
# Ensure a combination of valid and invalid is an error.
os.register_at_fork(before=None, after_in_parent=lambda: 3)
with self.assertRaises(TypeError, msg="Invalid arg was allowed"):
# Ensure a combination of valid and invalid is an error.
os.register_at_fork(before=lambda: None, after_in_child='')
# We test actual registrations in their own process so as not to
# pollute this one. There is no way to unregister for cleanup.
code = """if 1:
import os
r, w = os.pipe()
fin_r, fin_w = os.pipe()
os.register_at_fork(before=lambda: os.write(w, b'A'))
os.register_at_fork(after_in_parent=lambda: os.write(w, b'C'))
os.register_at_fork(after_in_child=lambda: os.write(w, b'E'))
os.register_at_fork(before=lambda: os.write(w, b'B'),
after_in_parent=lambda: os.write(w, b'D'),
after_in_child=lambda: os.write(w, b'F'))
pid = os.fork()
if pid == 0:
# At this point, after-forkers have already been executed
os.close(w)
# Wait for parent to tell us to exit
os.read(fin_r, 1)
os._exit(0)
else:
try:
os.close(w)
with open(r, "rb") as f:
data = f.read()
assert len(data) == 6, data
# Check before-fork callbacks
assert data[:2] == b'BA', data
# Check after-fork callbacks
assert sorted(data[2:]) == list(b'CDEF'), data
assert data.index(b'C') < data.index(b'D'), data
assert data.index(b'E') < data.index(b'F'), data
finally:
os.write(fin_w, b'!')
"""
assert_python_ok('-c', code)
@unittest.skipUnless(hasattr(posix, 'lockf'), "test needs posix.lockf()")
def test_lockf(self):
fd = os.open(os_helper.TESTFN, os.O_WRONLY | os.O_CREAT)
try:
os.write(fd, b'test')
os.lseek(fd, 0, os.SEEK_SET)
posix.lockf(fd, posix.F_LOCK, 4)
# section is locked
posix.lockf(fd, posix.F_ULOCK, 4)
finally:
os.close(fd)
@unittest.skipUnless(hasattr(posix, 'pread'), "test needs posix.pread()")
def test_pread(self):
fd = os.open(os_helper.TESTFN, os.O_RDWR | os.O_CREAT)
try:
os.write(fd, b'test')
os.lseek(fd, 0, os.SEEK_SET)
self.assertEqual(b'es', posix.pread(fd, 2, 1))
# the first pread() shouldn't disturb the file offset
self.assertEqual(b'te', posix.read(fd, 2))
finally:
os.close(fd)
@unittest.skipUnless(hasattr(posix, 'preadv'), "test needs posix.preadv()")
def test_preadv(self):
fd = os.open(os_helper.TESTFN, os.O_RDWR | os.O_CREAT)
try:
os.write(fd, b'test1tt2t3t5t6t6t8')
buf = [bytearray(i) for i in [5, 3, 2]]
self.assertEqual(posix.preadv(fd, buf, 3), 10)
self.assertEqual([b't1tt2', b't3t', b'5t'], list(buf))
finally:
os.close(fd)
@unittest.skipUnless(hasattr(posix, 'preadv'), "test needs posix.preadv()")
@unittest.skipUnless(hasattr(posix, 'RWF_HIPRI'), "test needs posix.RWF_HIPRI")
def test_preadv_flags(self):
fd = os.open(os_helper.TESTFN, os.O_RDWR | os.O_CREAT)
try:
os.write(fd, b'test1tt2t3t5t6t6t8')
buf = [bytearray(i) for i in [5, 3, 2]]
self.assertEqual(posix.preadv(fd, buf, 3, os.RWF_HIPRI), 10)
self.assertEqual([b't1tt2', b't3t', b'5t'], list(buf))
except NotImplementedError:
self.skipTest("preadv2 not available")
except OSError as inst:
# Is possible that the macro RWF_HIPRI was defined at compilation time
# but the option is not supported by the kernel or the runtime libc shared
# library.
if inst.errno in {errno.EINVAL, errno.ENOTSUP}:
raise unittest.SkipTest("RWF_HIPRI is not supported by the current system")
else:
raise
finally:
os.close(fd)
@unittest.skipUnless(hasattr(posix, 'preadv'), "test needs posix.preadv()")
@requires_32b
def test_preadv_overflow_32bits(self):
fd = os.open(os_helper.TESTFN, os.O_RDWR | os.O_CREAT)
try:
buf = [bytearray(2**16)] * 2**15
with self.assertRaises(OSError) as cm:
os.preadv(fd, buf, 0)
self.assertEqual(cm.exception.errno, errno.EINVAL)
self.assertEqual(bytes(buf[0]), b'\0'* 2**16)
finally:
os.close(fd)
@unittest.skipUnless(hasattr(posix, 'pwrite'), "test needs posix.pwrite()")
def test_pwrite(self):
fd = os.open(os_helper.TESTFN, os.O_RDWR | os.O_CREAT)
try:
os.write(fd, b'test')
os.lseek(fd, 0, os.SEEK_SET)
posix.pwrite(fd, b'xx', 1)
self.assertEqual(b'txxt', posix.read(fd, 4))
finally:
os.close(fd)
@unittest.skipUnless(hasattr(posix, 'pwritev'), "test needs posix.pwritev()")
def test_pwritev(self):
fd = os.open(os_helper.TESTFN, os.O_RDWR | os.O_CREAT)
try:
os.write(fd, b"xx")
os.lseek(fd, 0, os.SEEK_SET)
n = os.pwritev(fd, [b'test1', b'tt2', b't3'], 2)
self.assertEqual(n, 10)
os.lseek(fd, 0, os.SEEK_SET)
self.assertEqual(b'xxtest1tt2t3', posix.read(fd, 100))
finally:
os.close(fd)
@unittest.skipUnless(hasattr(posix, 'pwritev'), "test needs posix.pwritev()")
@unittest.skipUnless(hasattr(posix, 'os.RWF_SYNC'), "test needs os.RWF_SYNC")
def test_pwritev_flags(self):
fd = os.open(os_helper.TESTFN, os.O_RDWR | os.O_CREAT)
try:
os.write(fd,b"xx")
os.lseek(fd, 0, os.SEEK_SET)
n = os.pwritev(fd, [b'test1', b'tt2', b't3'], 2, os.RWF_SYNC)
self.assertEqual(n, 10)
os.lseek(fd, 0, os.SEEK_SET)
self.assertEqual(b'xxtest1tt2', posix.read(fd, 100))
finally:
os.close(fd)
@unittest.skipUnless(hasattr(posix, 'pwritev'), "test needs posix.pwritev()")
@requires_32b
def test_pwritev_overflow_32bits(self):
fd = os.open(os_helper.TESTFN, os.O_RDWR | os.O_CREAT)
try:
with self.assertRaises(OSError) as cm:
os.pwritev(fd, [b"x" * 2**16] * 2**15, 0)
self.assertEqual(cm.exception.errno, errno.EINVAL)
finally:
os.close(fd)
@unittest.skipUnless(hasattr(posix, 'posix_fallocate'),
"test needs posix.posix_fallocate()")
def test_posix_fallocate(self):
fd = os.open(os_helper.TESTFN, os.O_WRONLY | os.O_CREAT)
try:
posix.posix_fallocate(fd, 0, 10)
except OSError as inst:
# issue10812, ZFS doesn't appear to support posix_fallocate,
# so skip Solaris-based since they are likely to have ZFS.
# issue33655: Also ignore EINVAL on *BSD since ZFS is also
# often used there.
if inst.errno == errno.EINVAL and sys.platform.startswith(
('sunos', 'freebsd', 'netbsd', 'openbsd', 'gnukfreebsd')):
raise unittest.SkipTest("test may fail on ZFS filesystems")
else:
raise
finally:
os.close(fd)
# issue31106 - posix_fallocate() does not set error in errno.
@unittest.skipUnless(hasattr(posix, 'posix_fallocate'),
"test needs posix.posix_fallocate()")
def test_posix_fallocate_errno(self):
try:
posix.posix_fallocate(-42, 0, 10)
except OSError as inst:
if inst.errno != errno.EBADF:
raise
@unittest.skipUnless(hasattr(posix, 'posix_fadvise'),
"test needs posix.posix_fadvise()")
def test_posix_fadvise(self):
fd = os.open(os_helper.TESTFN, os.O_RDONLY)
try:
posix.posix_fadvise(fd, 0, 0, posix.POSIX_FADV_WILLNEED)
finally:
os.close(fd)
@unittest.skipUnless(hasattr(posix, 'posix_fadvise'),
"test needs posix.posix_fadvise()")
def test_posix_fadvise_errno(self):
try:
posix.posix_fadvise(-42, 0, 0, posix.POSIX_FADV_WILLNEED)
except OSError as inst:
if inst.errno != errno.EBADF:
raise
@unittest.skipUnless(os.utime in os.supports_fd, "test needs fd support in os.utime")
def test_utime_with_fd(self):
now = time.time()
fd = os.open(os_helper.TESTFN, os.O_RDONLY)
try:
posix.utime(fd)
posix.utime(fd, None)
self.assertRaises(TypeError, posix.utime, fd, (None, None))
self.assertRaises(TypeError, posix.utime, fd, (now, None))
self.assertRaises(TypeError, posix.utime, fd, (None, now))
posix.utime(fd, (int(now), int(now)))
posix.utime(fd, (now, now))
self.assertRaises(ValueError, posix.utime, fd, (now, now), ns=(now, now))
self.assertRaises(ValueError, posix.utime, fd, (now, 0), ns=(None, None))
self.assertRaises(ValueError, posix.utime, fd, (None, None), ns=(now, 0))
posix.utime(fd, (int(now), int((now - int(now)) * 1e9)))
posix.utime(fd, ns=(int(now), int((now - int(now)) * 1e9)))
finally:
os.close(fd)
@unittest.skipUnless(os.utime in os.supports_follow_symlinks, "test needs follow_symlinks support in os.utime")
def test_utime_nofollow_symlinks(self):
now = time.time()
posix.utime(os_helper.TESTFN, None, follow_symlinks=False)
self.assertRaises(TypeError, posix.utime, os_helper.TESTFN, (None, None), follow_symlinks=False)
self.assertRaises(TypeError, posix.utime, os_helper.TESTFN, (now, None), follow_symlinks=False)
self.assertRaises(TypeError, posix.utime, os_helper.TESTFN, (None, now), follow_symlinks=False)
posix.utime(os_helper.TESTFN, (int(now), int(now)), follow_symlinks=False)
posix.utime(os_helper.TESTFN, (now, now), follow_symlinks=False)
posix.utime(os_helper.TESTFN, follow_symlinks=False)
@unittest.skipUnless(hasattr(posix, 'writev'), "test needs posix.writev()")
def test_writev(self):
fd = os.open(os_helper.TESTFN, os.O_RDWR | os.O_CREAT)
try:
n = os.writev(fd, (b'test1', b'tt2', b't3'))
self.assertEqual(n, 10)
os.lseek(fd, 0, os.SEEK_SET)
self.assertEqual(b'test1tt2t3', posix.read(fd, 10))
# Issue #20113: empty list of buffers should not crash
try:
size = posix.writev(fd, [])
except OSError:
# writev(fd, []) raises OSError(22, "Invalid argument")
# on OpenIndiana
pass
else:
self.assertEqual(size, 0)
finally:
os.close(fd)
@unittest.skipUnless(hasattr(posix, 'writev'), "test needs posix.writev()")
@requires_32b
def test_writev_overflow_32bits(self):
fd = os.open(os_helper.TESTFN, os.O_RDWR | os.O_CREAT)
try:
with self.assertRaises(OSError) as cm:
os.writev(fd, [b"x" * 2**16] * 2**15)
self.assertEqual(cm.exception.errno, errno.EINVAL)
finally:
os.close(fd)
@unittest.skipUnless(hasattr(posix, 'readv'), "test needs posix.readv()")
def test_readv(self):
fd = os.open(os_helper.TESTFN, os.O_RDWR | os.O_CREAT)
try:
os.write(fd, b'test1tt2t3')
os.lseek(fd, 0, os.SEEK_SET)
buf = [bytearray(i) for i in [5, 3, 2]]
self.assertEqual(posix.readv(fd, buf), 10)
self.assertEqual([b'test1', b'tt2', b't3'], [bytes(i) for i in buf])
# Issue #20113: empty list of buffers should not crash
try:
size = posix.readv(fd, [])
except OSError:
# readv(fd, []) raises OSError(22, "Invalid argument")
# on OpenIndiana
pass
else:
self.assertEqual(size, 0)
finally:
os.close(fd)
@unittest.skipUnless(hasattr(posix, 'readv'), "test needs posix.readv()")
@requires_32b
def test_readv_overflow_32bits(self):
fd = os.open(os_helper.TESTFN, os.O_RDWR | os.O_CREAT)
try:
buf = [bytearray(2**16)] * 2**15
with self.assertRaises(OSError) as cm:
os.readv(fd, buf)
self.assertEqual(cm.exception.errno, errno.EINVAL)
self.assertEqual(bytes(buf[0]), b'\0'* 2**16)
finally:
os.close(fd)
@unittest.skipUnless(hasattr(posix, 'dup'),
'test needs posix.dup()')
def test_dup(self):
fp = open(os_helper.TESTFN)
try:
fd = posix.dup(fp.fileno())
self.assertIsInstance(fd, int)
os.close(fd)
finally:
fp.close()
@unittest.skipUnless(hasattr(posix, 'confstr'),
'test needs posix.confstr()')
def test_confstr(self):
self.assertRaises(ValueError, posix.confstr, "CS_garbage")
self.assertEqual(len(posix.confstr("CS_PATH")) > 0, True)
@unittest.skipUnless(hasattr(posix, 'dup2'),
'test needs posix.dup2()')
def test_dup2(self):
fp1 = open(os_helper.TESTFN)
fp2 = open(os_helper.TESTFN)
try:
posix.dup2(fp1.fileno(), fp2.fileno())
finally:
fp1.close()
fp2.close()
@unittest.skipUnless(hasattr(os, 'O_CLOEXEC'), "needs os.O_CLOEXEC")
@support.requires_linux_version(2, 6, 23)
def test_oscloexec(self):
fd = os.open(os_helper.TESTFN, os.O_RDONLY|os.O_CLOEXEC)
self.addCleanup(os.close, fd)
self.assertFalse(os.get_inheritable(fd))
@unittest.skipUnless(hasattr(posix, 'O_EXLOCK'),
'test needs posix.O_EXLOCK')
def test_osexlock(self):
fd = os.open(os_helper.TESTFN,
os.O_WRONLY|os.O_EXLOCK|os.O_CREAT)
self.assertRaises(OSError, os.open, os_helper.TESTFN,
os.O_WRONLY|os.O_EXLOCK|os.O_NONBLOCK)
os.close(fd)
if hasattr(posix, "O_SHLOCK"):
fd = os.open(os_helper.TESTFN,
os.O_WRONLY|os.O_SHLOCK|os.O_CREAT)
self.assertRaises(OSError, os.open, os_helper.TESTFN,
os.O_WRONLY|os.O_EXLOCK|os.O_NONBLOCK)
os.close(fd)
@unittest.skipUnless(hasattr(posix, 'O_SHLOCK'),
'test needs posix.O_SHLOCK')
def test_osshlock(self):
fd1 = os.open(os_helper.TESTFN,
os.O_WRONLY|os.O_SHLOCK|os.O_CREAT)
fd2 = os.open(os_helper.TESTFN,
os.O_WRONLY|os.O_SHLOCK|os.O_CREAT)
os.close(fd2)
os.close(fd1)
if hasattr(posix, "O_EXLOCK"):
fd = os.open(os_helper.TESTFN,
os.O_WRONLY|os.O_SHLOCK|os.O_CREAT)
self.assertRaises(OSError, os.open, os_helper.TESTFN,
os.O_RDONLY|os.O_EXLOCK|os.O_NONBLOCK)
os.close(fd)
@unittest.skipUnless(hasattr(posix, 'fstat'),
'test needs posix.fstat()')
def test_fstat(self):
fp = open(os_helper.TESTFN)
try:
self.assertTrue(posix.fstat(fp.fileno()))
self.assertTrue(posix.stat(fp.fileno()))
self.assertRaisesRegex(TypeError,
'should be string, bytes, os.PathLike or integer, not',
posix.stat, float(fp.fileno()))
finally:
fp.close()
# TODO: RUSTPYTHON: AssertionError: DeprecationWarning not triggered by stat
@unittest.expectedFailure
def test_stat(self):
self.assertTrue(posix.stat(os_helper.TESTFN))
self.assertTrue(posix.stat(os.fsencode(os_helper.TESTFN)))
self.assertWarnsRegex(DeprecationWarning,
'should be string, bytes, os.PathLike or integer, not',
posix.stat, bytearray(os.fsencode(os_helper.TESTFN)))
self.assertRaisesRegex(TypeError,
'should be string, bytes, os.PathLike or integer, not',
posix.stat, None)
self.assertRaisesRegex(TypeError,
'should be string, bytes, os.PathLike or integer, not',
posix.stat, list(os_helper.TESTFN))
self.assertRaisesRegex(TypeError,
'should be string, bytes, os.PathLike or integer, not',
posix.stat, list(os.fsencode(os_helper.TESTFN)))
@unittest.skipUnless(hasattr(posix, 'mkfifo'), "don't have mkfifo()")
def test_mkfifo(self):
os_helper.unlink(os_helper.TESTFN)
try:
posix.mkfifo(os_helper.TESTFN, stat.S_IRUSR | stat.S_IWUSR)
except PermissionError as e:
self.skipTest('posix.mkfifo(): %s' % e)
self.assertTrue(stat.S_ISFIFO(posix.stat(os_helper.TESTFN).st_mode))
@unittest.skipUnless(hasattr(posix, 'mknod') and hasattr(stat, 'S_IFIFO'),
"don't have mknod()/S_IFIFO")
def test_mknod(self):
# Test using mknod() to create a FIFO (the only use specified
# by POSIX).
os_helper.unlink(os_helper.TESTFN)
mode = stat.S_IFIFO | stat.S_IRUSR | stat.S_IWUSR
try:
posix.mknod(os_helper.TESTFN, mode, 0)
except OSError as e:
# Some old systems don't allow unprivileged users to use
# mknod(), or only support creating device nodes.
self.assertIn(e.errno, (errno.EPERM, errno.EINVAL, errno.EACCES))
else:
self.assertTrue(stat.S_ISFIFO(posix.stat(os_helper.TESTFN).st_mode))
# Keyword arguments are also supported
os_helper.unlink(os_helper.TESTFN)
try:
posix.mknod(path=os_helper.TESTFN, mode=mode, device=0,
dir_fd=None)
except OSError as e:
self.assertIn(e.errno, (errno.EPERM, errno.EINVAL, errno.EACCES))
@unittest.skipUnless(hasattr(posix, 'makedev'), 'test needs posix.makedev()')
def test_makedev(self):
st = posix.stat(os_helper.TESTFN)
dev = st.st_dev
self.assertIsInstance(dev, int)
self.assertGreaterEqual(dev, 0)
major = posix.major(dev)
self.assertIsInstance(major, int)
self.assertGreaterEqual(major, 0)
self.assertEqual(posix.major(dev), major)
self.assertRaises(TypeError, posix.major, float(dev))
self.assertRaises(TypeError, posix.major)
self.assertRaises((ValueError, OverflowError), posix.major, -1)
minor = posix.minor(dev)
self.assertIsInstance(minor, int)
self.assertGreaterEqual(minor, 0)
self.assertEqual(posix.minor(dev), minor)
self.assertRaises(TypeError, posix.minor, float(dev))
self.assertRaises(TypeError, posix.minor)
self.assertRaises((ValueError, OverflowError), posix.minor, -1)
self.assertEqual(posix.makedev(major, minor), dev)
self.assertRaises(TypeError, posix.makedev, float(major), minor)
self.assertRaises(TypeError, posix.makedev, major, float(minor))
self.assertRaises(TypeError, posix.makedev, major)
self.assertRaises(TypeError, posix.makedev)
def _test_all_chown_common(self, chown_func, first_param, stat_func):
"""Common code for chown, fchown and lchown tests."""
def check_stat(uid, gid):
if stat_func is not None:
stat = stat_func(first_param)
self.assertEqual(stat.st_uid, uid)
self.assertEqual(stat.st_gid, gid)
uid = os.getuid()
gid = os.getgid()
# test a successful chown call
chown_func(first_param, uid, gid)
check_stat(uid, gid)
chown_func(first_param, -1, gid)
check_stat(uid, gid)
chown_func(first_param, uid, -1)
check_stat(uid, gid)
if uid == 0:
# Try an amusingly large uid/gid to make sure we handle
# large unsigned values. (chown lets you use any
# uid/gid you like, even if they aren't defined.)
#
# This problem keeps coming up:
# http://bugs.python.org/issue1747858
# http://bugs.python.org/issue4591
# http://bugs.python.org/issue15301
# Hopefully the fix in 4591 fixes it for good!
#
# This part of the test only runs when run as root.
# Only scary people run their tests as root.
big_value = 2**31
chown_func(first_param, big_value, big_value)
check_stat(big_value, big_value)
chown_func(first_param, -1, -1)
check_stat(big_value, big_value)
chown_func(first_param, uid, gid)
check_stat(uid, gid)
elif platform.system() in ('HP-UX', 'SunOS'):
# HP-UX and Solaris can allow a non-root user to chown() to root
# (issue #5113)
raise unittest.SkipTest("Skipping because of non-standard chown() "
"behavior")
else:
# non-root cannot chown to root, raises OSError
self.assertRaises(OSError, chown_func, first_param, 0, 0)
check_stat(uid, gid)
self.assertRaises(OSError, chown_func, first_param, 0, -1)
check_stat(uid, gid)
if 0 not in os.getgroups():
self.assertRaises(OSError, chown_func, first_param, -1, 0)
check_stat(uid, gid)
# test illegal types
for t in str, float:
self.assertRaises(TypeError, chown_func, first_param, t(uid), gid)
check_stat(uid, gid)
self.assertRaises(TypeError, chown_func, first_param, uid, t(gid))
check_stat(uid, gid)
@unittest.skipUnless(hasattr(posix, 'chown'), "test needs os.chown()")
def test_chown(self):
# raise an OSError if the file does not exist
os.unlink(os_helper.TESTFN)
self.assertRaises(OSError, posix.chown, os_helper.TESTFN, -1, -1)
# re-create the file
os_helper.create_empty_file(os_helper.TESTFN)
self._test_all_chown_common(posix.chown, os_helper.TESTFN, posix.stat)
@unittest.skipUnless(hasattr(posix, 'fchown'), "test needs os.fchown()")
def test_fchown(self):
os.unlink(os_helper.TESTFN)
# re-create the file
test_file = open(os_helper.TESTFN, 'w')
try:
fd = test_file.fileno()
self._test_all_chown_common(posix.fchown, fd,
getattr(posix, 'fstat', None))
finally:
test_file.close()
@unittest.skipUnless(hasattr(posix, 'lchown'), "test needs os.lchown()")
def test_lchown(self):
os.unlink(os_helper.TESTFN)
# create a symlink
os.symlink(_DUMMY_SYMLINK, os_helper.TESTFN)
self._test_all_chown_common(posix.lchown, os_helper.TESTFN,
getattr(posix, 'lstat', None))
@unittest.skipUnless(hasattr(posix, 'chdir'), 'test needs posix.chdir()')
def test_chdir(self):
posix.chdir(os.curdir)
self.assertRaises(OSError, posix.chdir, os_helper.TESTFN)
def test_listdir(self):
self.assertIn(os_helper.TESTFN, posix.listdir(os.curdir))
def test_listdir_default(self):
# When listdir is called without argument,
# it's the same as listdir(os.curdir).
self.assertIn(os_helper.TESTFN, posix.listdir())
def test_listdir_bytes(self):
# When listdir is called with a bytes object,
# the returned strings are of type bytes.
self.assertIn(os.fsencode(os_helper.TESTFN), posix.listdir(b'.'))
# TODO: RUSTPYTHON: AssertionError: DeprecationWarning not triggered
@unittest.expectedFailure
def test_listdir_bytes_like(self):
for cls in bytearray, memoryview:
with self.assertWarns(DeprecationWarning):
names = posix.listdir(cls(b'.'))
self.assertIn(os.fsencode(os_helper.TESTFN), names)
for name in names:
self.assertIs(type(name), bytes)
@unittest.skipUnless(posix.listdir in os.supports_fd,
"test needs fd support for posix.listdir()")
def test_listdir_fd(self):
f = posix.open(posix.getcwd(), posix.O_RDONLY)
self.addCleanup(posix.close, f)
self.assertEqual(
sorted(posix.listdir('.')),
sorted(posix.listdir(f))
)
# Check that the fd offset was reset (issue #13739)
self.assertEqual(
sorted(posix.listdir('.')),
sorted(posix.listdir(f))
)
@unittest.skipUnless(hasattr(posix, 'access'), 'test needs posix.access()')
def test_access(self):
self.assertTrue(posix.access(os_helper.TESTFN, os.R_OK))
@unittest.skipUnless(hasattr(posix, 'umask'), 'test needs posix.umask()')
def test_umask(self):
old_mask = posix.umask(0)
self.assertIsInstance(old_mask, int)
posix.umask(old_mask)
@unittest.skipUnless(hasattr(posix, 'strerror'),
'test needs posix.strerror()')
def test_strerror(self):
self.assertTrue(posix.strerror(0))
@unittest.skipUnless(hasattr(posix, 'pipe'), 'test needs posix.pipe()')
def test_pipe(self):
reader, writer = posix.pipe()
os.close(reader)
os.close(writer)
@unittest.skipUnless(hasattr(os, 'pipe2'), "test needs os.pipe2()")
@support.requires_linux_version(2, 6, 27)
def test_pipe2(self):
self.assertRaises(TypeError, os.pipe2, 'DEADBEEF')
self.assertRaises(TypeError, os.pipe2, 0, 0)
# try calling with flags = 0, like os.pipe()
r, w = os.pipe2(0)
os.close(r)
os.close(w)
# test flags
r, w = os.pipe2(os.O_CLOEXEC|os.O_NONBLOCK)
self.addCleanup(os.close, r)
self.addCleanup(os.close, w)
self.assertFalse(os.get_inheritable(r))
self.assertFalse(os.get_inheritable(w))
self.assertFalse(os.get_blocking(r))
self.assertFalse(os.get_blocking(w))
# try reading from an empty pipe: this should fail, not block
self.assertRaises(OSError, os.read, r, 1)
# try a write big enough to fill-up the pipe: this should either
# fail or perform a partial write, not block
try:
os.write(w, b'x' * support.PIPE_MAX_SIZE)
except OSError:
pass
@support.cpython_only
@unittest.skipUnless(hasattr(os, 'pipe2'), "test needs os.pipe2()")
@support.requires_linux_version(2, 6, 27)
def test_pipe2_c_limits(self):
# Issue 15989
import _testcapi
self.assertRaises(OverflowError, os.pipe2, _testcapi.INT_MAX + 1)
self.assertRaises(OverflowError, os.pipe2, _testcapi.UINT_MAX + 1)
@unittest.skipUnless(hasattr(posix, 'utime'), 'test needs posix.utime()')
def test_utime(self):
now = time.time()
posix.utime(os_helper.TESTFN, None)
self.assertRaises(TypeError, posix.utime, os_helper.TESTFN, (None, None))
self.assertRaises(TypeError, posix.utime, os_helper.TESTFN, (now, None))
self.assertRaises(TypeError, posix.utime, os_helper.TESTFN, (None, now))
posix.utime(os_helper.TESTFN, (int(now), int(now)))
posix.utime(os_helper.TESTFN, (now, now))
def _test_chflags_regular_file(self, chflags_func, target_file, **kwargs):
st = os.stat(target_file)
self.assertTrue(hasattr(st, 'st_flags'))
# ZFS returns EOPNOTSUPP when attempting to set flag UF_IMMUTABLE.
flags = st.st_flags | stat.UF_IMMUTABLE
try:
chflags_func(target_file, flags, **kwargs)
except OSError as err:
if err.errno != errno.EOPNOTSUPP:
raise
msg = 'chflag UF_IMMUTABLE not supported by underlying fs'
self.skipTest(msg)
try:
new_st = os.stat(target_file)
self.assertEqual(st.st_flags | stat.UF_IMMUTABLE, new_st.st_flags)
try:
fd = open(target_file, 'w+')
except OSError as e:
self.assertEqual(e.errno, errno.EPERM)
finally:
posix.chflags(target_file, st.st_flags)
@unittest.skipUnless(hasattr(posix, 'chflags'), 'test needs os.chflags()')
def test_chflags(self):
self._test_chflags_regular_file(posix.chflags, os_helper.TESTFN)
@unittest.skipUnless(hasattr(posix, 'lchflags'), 'test needs os.lchflags()')
def test_lchflags_regular_file(self):
self._test_chflags_regular_file(posix.lchflags, os_helper.TESTFN)
self._test_chflags_regular_file(posix.chflags, os_helper.TESTFN, follow_symlinks=False)
@unittest.skipUnless(hasattr(posix, 'lchflags'), 'test needs os.lchflags()')
def test_lchflags_symlink(self):
testfn_st = os.stat(os_helper.TESTFN)
self.assertTrue(hasattr(testfn_st, 'st_flags'))
os.symlink(os_helper.TESTFN, _DUMMY_SYMLINK)
self.teardown_files.append(_DUMMY_SYMLINK)
dummy_symlink_st = os.lstat(_DUMMY_SYMLINK)
def chflags_nofollow(path, flags):
return posix.chflags(path, flags, follow_symlinks=False)
for fn in (posix.lchflags, chflags_nofollow):
# ZFS returns EOPNOTSUPP when attempting to set flag UF_IMMUTABLE.
flags = dummy_symlink_st.st_flags | stat.UF_IMMUTABLE
try:
fn(_DUMMY_SYMLINK, flags)
except OSError as err:
if err.errno != errno.EOPNOTSUPP:
raise
msg = 'chflag UF_IMMUTABLE not supported by underlying fs'
self.skipTest(msg)
try:
new_testfn_st = os.stat(os_helper.TESTFN)
new_dummy_symlink_st = os.lstat(_DUMMY_SYMLINK)
self.assertEqual(testfn_st.st_flags, new_testfn_st.st_flags)
self.assertEqual(dummy_symlink_st.st_flags | stat.UF_IMMUTABLE,
new_dummy_symlink_st.st_flags)
finally:
fn(_DUMMY_SYMLINK, dummy_symlink_st.st_flags)
def test_environ(self):
if os.name == "nt":
item_type = str
else:
item_type = bytes
for k, v in posix.environ.items():
self.assertEqual(type(k), item_type)
self.assertEqual(type(v), item_type)
def test_putenv(self):
with self.assertRaises(ValueError):
os.putenv('FRUIT\0VEGETABLE', 'cabbage')
with self.assertRaises(ValueError):
os.putenv(b'FRUIT\0VEGETABLE', b'cabbage')
with self.assertRaises(ValueError):
os.putenv('FRUIT', 'orange\0VEGETABLE=cabbage')
with self.assertRaises(ValueError):
os.putenv(b'FRUIT', b'orange\0VEGETABLE=cabbage')
with self.assertRaises(ValueError):
os.putenv('FRUIT=ORANGE', 'lemon')
with self.assertRaises(ValueError):
os.putenv(b'FRUIT=ORANGE', b'lemon')
@unittest.skipUnless(hasattr(posix, 'getcwd'), 'test needs posix.getcwd()')
def test_getcwd_long_pathnames(self):
dirname = 'getcwd-test-directory-0123456789abcdef-01234567890abcdef'
curdir = os.getcwd()
base_path = os.path.abspath(os_helper.TESTFN) + '.getcwd'
try:
os.mkdir(base_path)
os.chdir(base_path)
except:
# Just returning nothing instead of the SkipTest exception, because
# the test results in Error in that case. Is that ok?
# raise unittest.SkipTest("cannot create directory for testing")
return
def _create_and_do_getcwd(dirname, current_path_length = 0):
try:
os.mkdir(dirname)
except:
raise unittest.SkipTest("mkdir cannot create directory sufficiently deep for getcwd test")
os.chdir(dirname)
try:
os.getcwd()
if current_path_length < 1027:
_create_and_do_getcwd(dirname, current_path_length + len(dirname) + 1)
finally:
os.chdir('..')
os.rmdir(dirname)
_create_and_do_getcwd(dirname)
finally:
os.chdir(curdir)
os_helper.rmtree(base_path)
@unittest.skipUnless(hasattr(posix, 'getgrouplist'), "test needs posix.getgrouplist()")
@unittest.skipUnless(hasattr(pwd, 'getpwuid'), "test needs pwd.getpwuid()")
@unittest.skipUnless(hasattr(os, 'getuid'), "test needs os.getuid()")
def test_getgrouplist(self):
user = pwd.getpwuid(os.getuid())[0]
group = pwd.getpwuid(os.getuid())[3]
self.assertIn(group, posix.getgrouplist(user, group))
@unittest.skipUnless(hasattr(os, 'getegid'), "test needs os.getegid()")
def test_getgroups(self):
with os.popen('id -G 2>/dev/null') as idg:
groups = idg.read().strip()
ret = idg.close()
try:
idg_groups = set(int(g) for g in groups.split())
except ValueError:
idg_groups = set()
if ret is not None or not idg_groups:
raise unittest.SkipTest("need working 'id -G'")
# Issues 16698: OS X ABIs prior to 10.6 have limits on getgroups()
if sys.platform == 'darwin':
import sysconfig
dt = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET') or '10.0'
if tuple(int(n) for n in str(dt).split('.')[0:2]) < (10, 6):
raise unittest.SkipTest("getgroups(2) is broken prior to 10.6")
# 'id -G' and 'os.getgroups()' should return the same
# groups, ignoring order, duplicates, and the effective gid.
# #10822/#26944 - It is implementation defined whether
# posix.getgroups() includes the effective gid.
symdiff = idg_groups.symmetric_difference(posix.getgroups())
self.assertTrue(not symdiff or symdiff == {posix.getegid()})
# tests for the posix *at functions follow
@unittest.skipUnless(os.access in os.supports_dir_fd, "test needs dir_fd support for os.access()")
def test_access_dir_fd(self):
f = posix.open(posix.getcwd(), posix.O_RDONLY)
try:
self.assertTrue(posix.access(os_helper.TESTFN, os.R_OK, dir_fd=f))
finally:
posix.close(f)
@unittest.skipUnless(os.chmod in os.supports_dir_fd, "test needs dir_fd support in os.chmod()")
def test_chmod_dir_fd(self):
os.chmod(os_helper.TESTFN, stat.S_IRUSR)
f = posix.open(posix.getcwd(), posix.O_RDONLY)
try:
posix.chmod(os_helper.TESTFN, stat.S_IRUSR | stat.S_IWUSR, dir_fd=f)
s = posix.stat(os_helper.TESTFN)
self.assertEqual(s[0] & stat.S_IRWXU, stat.S_IRUSR | stat.S_IWUSR)
finally:
posix.close(f)
@unittest.skipUnless(os.chown in os.supports_dir_fd, "test needs dir_fd support in os.chown()")
def test_chown_dir_fd(self):
os_helper.unlink(os_helper.TESTFN)
os_helper.create_empty_file(os_helper.TESTFN)
f = posix.open(posix.getcwd(), posix.O_RDONLY)
try:
posix.chown(os_helper.TESTFN, os.getuid(), os.getgid(), dir_fd=f)
finally:
posix.close(f)
@unittest.skipUnless(os.stat in os.supports_dir_fd, "test needs dir_fd support in os.stat()")
def test_stat_dir_fd(self):
os_helper.unlink(os_helper.TESTFN)
with open(os_helper.TESTFN, 'w') as outfile:
outfile.write("testline\n")
f = posix.open(posix.getcwd(), posix.O_RDONLY)
try:
s1 = posix.stat(os_helper.TESTFN)
s2 = posix.stat(os_helper.TESTFN, dir_fd=f)
self.assertEqual(s1, s2)
s2 = posix.stat(os_helper.TESTFN, dir_fd=None)
self.assertEqual(s1, s2)
self.assertRaisesRegex(TypeError, 'should be integer or None, not',
posix.stat, os_helper.TESTFN, dir_fd=posix.getcwd())
self.assertRaisesRegex(TypeError, 'should be integer or None, not',
posix.stat, os_helper.TESTFN, dir_fd=float(f))
self.assertRaises(OverflowError,
posix.stat, os_helper.TESTFN, dir_fd=10**20)
finally:
posix.close(f)
@unittest.skipUnless(os.utime in os.supports_dir_fd, "test needs dir_fd support in os.utime()")
def test_utime_dir_fd(self):
f = posix.open(posix.getcwd(), posix.O_RDONLY)
try:
now = time.time()
posix.utime(os_helper.TESTFN, None, dir_fd=f)
posix.utime(os_helper.TESTFN, dir_fd=f)
self.assertRaises(TypeError, posix.utime, os_helper.TESTFN, now, dir_fd=f)
self.assertRaises(TypeError, posix.utime, os_helper.TESTFN, (None, None), dir_fd=f)
self.assertRaises(TypeError, posix.utime, os_helper.TESTFN, (now, None), dir_fd=f)
self.assertRaises(TypeError, posix.utime, os_helper.TESTFN, (None, now), dir_fd=f)
self.assertRaises(TypeError, posix.utime, os_helper.TESTFN, (now, "x"), dir_fd=f)
posix.utime(os_helper.TESTFN, (int(now), int(now)), dir_fd=f)
posix.utime(os_helper.TESTFN, (now, now), dir_fd=f)
posix.utime(os_helper.TESTFN,
(int(now), int((now - int(now)) * 1e9)), dir_fd=f)
posix.utime(os_helper.TESTFN, dir_fd=f,
times=(int(now), int((now - int(now)) * 1e9)))
# try dir_fd and follow_symlinks together
if os.utime in os.supports_follow_symlinks:
try:
posix.utime(os_helper.TESTFN, follow_symlinks=False, dir_fd=f)
except ValueError:
# whoops! using both together not supported on this platform.
pass
finally:
posix.close(f)
@unittest.skipUnless(os.link in os.supports_dir_fd, "test needs dir_fd support in os.link()")
def test_link_dir_fd(self):
f = posix.open(posix.getcwd(), posix.O_RDONLY)
try:
posix.link(os_helper.TESTFN, os_helper.TESTFN + 'link', src_dir_fd=f, dst_dir_fd=f)
except PermissionError as e:
self.skipTest('posix.link(): %s' % e)
else:
# should have same inodes
self.assertEqual(posix.stat(os_helper.TESTFN)[1],
posix.stat(os_helper.TESTFN + 'link')[1])
finally:
posix.close(f)
os_helper.unlink(os_helper.TESTFN + 'link')
@unittest.skipUnless(os.mkdir in os.supports_dir_fd, "test needs dir_fd support in os.mkdir()")
def test_mkdir_dir_fd(self):
f = posix.open(posix.getcwd(), posix.O_RDONLY)
try:
posix.mkdir(os_helper.TESTFN + 'dir', dir_fd=f)
posix.stat(os_helper.TESTFN + 'dir') # should not raise exception
finally:
posix.close(f)
os_helper.rmtree(os_helper.TESTFN + 'dir')
@unittest.skipUnless((os.mknod in os.supports_dir_fd) and hasattr(stat, 'S_IFIFO'),
"test requires both stat.S_IFIFO and dir_fd support for os.mknod()")
def test_mknod_dir_fd(self):
# Test using mknodat() to create a FIFO (the only use specified
# by POSIX).
os_helper.unlink(os_helper.TESTFN)
mode = stat.S_IFIFO | stat.S_IRUSR | stat.S_IWUSR
f = posix.open(posix.getcwd(), posix.O_RDONLY)
try:
posix.mknod(os_helper.TESTFN, mode, 0, dir_fd=f)
except OSError as e:
# Some old systems don't allow unprivileged users to use
# mknod(), or only support creating device nodes.
self.assertIn(e.errno, (errno.EPERM, errno.EINVAL, errno.EACCES))
else:
self.assertTrue(stat.S_ISFIFO(posix.stat(os_helper.TESTFN).st_mode))
finally:
posix.close(f)
@unittest.skipUnless(os.open in os.supports_dir_fd, "test needs dir_fd support in os.open()")
def test_open_dir_fd(self):
os_helper.unlink(os_helper.TESTFN)
with open(os_helper.TESTFN, 'w') as outfile:
outfile.write("testline\n")
a = posix.open(posix.getcwd(), posix.O_RDONLY)
b = posix.open(os_helper.TESTFN, posix.O_RDONLY, dir_fd=a)
try:
res = posix.read(b, 9).decode(encoding="utf-8")
self.assertEqual("testline\n", res)
finally:
posix.close(a)
posix.close(b)
@unittest.skipUnless(os.readlink in os.supports_dir_fd, "test needs dir_fd support in os.readlink()")
def test_readlink_dir_fd(self):
os.symlink(os_helper.TESTFN, os_helper.TESTFN + 'link')
f = posix.open(posix.getcwd(), posix.O_RDONLY)
try:
self.assertEqual(posix.readlink(os_helper.TESTFN + 'link'),
posix.readlink(os_helper.TESTFN + 'link', dir_fd=f))
finally:
os_helper.unlink(os_helper.TESTFN + 'link')
posix.close(f)
@unittest.skipUnless(os.rename in os.supports_dir_fd, "test needs dir_fd support in os.rename()")
def test_rename_dir_fd(self):
os_helper.unlink(os_helper.TESTFN)
os_helper.create_empty_file(os_helper.TESTFN + 'ren')
f = posix.open(posix.getcwd(), posix.O_RDONLY)
try:
posix.rename(os_helper.TESTFN + 'ren', os_helper.TESTFN, src_dir_fd=f, dst_dir_fd=f)
except:
posix.rename(os_helper.TESTFN + 'ren', os_helper.TESTFN)
raise
else:
posix.stat(os_helper.TESTFN) # should not raise exception
finally:
posix.close(f)
@unittest.skipUnless(hasattr(signal, 'SIGCHLD'), 'CLD_XXXX constants are placed in si_code for a SIGCHLD signal')
@unittest.skipUnless(hasattr(os, 'waitid_result'), "test needs os.waitid_result")
def test_cld_xxxx_constants(self):
os.CLD_EXITED
os.CLD_KILLED
os.CLD_DUMPED
os.CLD_TRAPPED
os.CLD_STOPPED
os.CLD_CONTINUED
@unittest.skipUnless(os.symlink in os.supports_dir_fd, "test needs dir_fd support in os.symlink()")
def test_symlink_dir_fd(self):
f = posix.open(posix.getcwd(), posix.O_RDONLY)
try:
posix.symlink(os_helper.TESTFN, os_helper.TESTFN + 'link', dir_fd=f)
self.assertEqual(posix.readlink(os_helper.TESTFN + 'link'), os_helper.TESTFN)
finally:
posix.close(f)
os_helper.unlink(os_helper.TESTFN + 'link')
@unittest.skipUnless(os.unlink in os.supports_dir_fd, "test needs dir_fd support in os.unlink()")
def test_unlink_dir_fd(self):
f = posix.open(posix.getcwd(), posix.O_RDONLY)
os_helper.create_empty_file(os_helper.TESTFN + 'del')
posix.stat(os_helper.TESTFN + 'del') # should not raise exception
try:
posix.unlink(os_helper.TESTFN + 'del', dir_fd=f)
except:
os_helper.unlink(os_helper.TESTFN + 'del')
raise
else:
self.assertRaises(OSError, posix.stat, os_helper.TESTFN + 'del')
finally:
posix.close(f)
# TODO: RUSTPYTHON: AttributeError: module 'os' has no attribute 'mkfifo'
#
# @unittest.skipUnless(os.mkfifo in os.supports_dir_fd, "test needs dir_fd support in os.mkfifo()")
@unittest.expectedFailure
def test_mkfifo_dir_fd(self):
os_helper.unlink(os_helper.TESTFN)
f = posix.open(posix.getcwd(), posix.O_RDONLY)
try:
try:
posix.mkfifo(os_helper.TESTFN,
stat.S_IRUSR | stat.S_IWUSR, dir_fd=f)
except PermissionError as e:
self.skipTest('posix.mkfifo(): %s' % e)
self.assertTrue(stat.S_ISFIFO(posix.stat(os_helper.TESTFN).st_mode))
finally:
posix.close(f)
requires_sched_h = unittest.skipUnless(hasattr(posix, 'sched_yield'),
"don't have scheduling support")
requires_sched_affinity = unittest.skipUnless(hasattr(posix, 'sched_setaffinity'),
"don't have sched affinity support")
@requires_sched_h
def test_sched_yield(self):
# This has no error conditions (at least on Linux).
posix.sched_yield()
@requires_sched_h
@unittest.skipUnless(hasattr(posix, 'sched_get_priority_max'),
"requires sched_get_priority_max()")
def test_sched_priority(self):
# Round-robin usually has interesting priorities.
pol = posix.SCHED_RR
lo = posix.sched_get_priority_min(pol)
hi = posix.sched_get_priority_max(pol)
self.assertIsInstance(lo, int)
self.assertIsInstance(hi, int)
self.assertGreaterEqual(hi, lo)
# OSX evidently just returns 15 without checking the argument.
if sys.platform != "darwin":
self.assertRaises(OSError, posix.sched_get_priority_min, -23)
self.assertRaises(OSError, posix.sched_get_priority_max, -23)
@requires_sched
def test_get_and_set_scheduler_and_param(self):
possible_schedulers = [sched for name, sched in posix.__dict__.items()
if name.startswith("SCHED_")]
mine = posix.sched_getscheduler(0)
self.assertIn(mine, possible_schedulers)
try:
parent = posix.sched_getscheduler(os.getppid())
except OSError as e:
if e.errno != errno.EPERM:
raise
else:
self.assertIn(parent, possible_schedulers)
self.assertRaises(OSError, posix.sched_getscheduler, -1)
self.assertRaises(OSError, posix.sched_getparam, -1)
param = posix.sched_getparam(0)
self.assertIsInstance(param.sched_priority, int)
# POSIX states that calling sched_setparam() or sched_setscheduler() on
# a process with a scheduling policy other than SCHED_FIFO or SCHED_RR
# is implementation-defined: NetBSD and FreeBSD can return EINVAL.
if not sys.platform.startswith(('freebsd', 'netbsd')):
try:
posix.sched_setscheduler(0, mine, param)
posix.sched_setparam(0, param)
except OSError as e:
if e.errno != errno.EPERM:
raise
self.assertRaises(OSError, posix.sched_setparam, -1, param)
self.assertRaises(OSError, posix.sched_setscheduler, -1, mine, param)
self.assertRaises(TypeError, posix.sched_setscheduler, 0, mine, None)
self.assertRaises(TypeError, posix.sched_setparam, 0, 43)
param = posix.sched_param(None)
self.assertRaises(TypeError, posix.sched_setparam, 0, param)
large = 214748364700
param = posix.sched_param(large)
self.assertRaises(OverflowError, posix.sched_setparam, 0, param)
param = posix.sched_param(sched_priority=-large)
self.assertRaises(OverflowError, posix.sched_setparam, 0, param)
@unittest.skipUnless(hasattr(posix, "sched_rr_get_interval"), "no function")
def test_sched_rr_get_interval(self):
try:
interval = posix.sched_rr_get_interval(0)
except OSError as e:
# This likely means that sched_rr_get_interval is only valid for
# processes with the SCHED_RR scheduler in effect.
if e.errno != errno.EINVAL:
raise
self.skipTest("only works on SCHED_RR processes")
self.assertIsInstance(interval, float)
# Reasonable constraints, I think.
self.assertGreaterEqual(interval, 0.)
self.assertLess(interval, 1.)
@requires_sched_affinity
def test_sched_getaffinity(self):
mask = posix.sched_getaffinity(0)
self.assertIsInstance(mask, set)
self.assertGreaterEqual(len(mask), 1)
self.assertRaises(OSError, posix.sched_getaffinity, -1)
for cpu in mask:
self.assertIsInstance(cpu, int)
self.assertGreaterEqual(cpu, 0)
self.assertLess(cpu, 1 << 32)
@requires_sched_affinity
def test_sched_setaffinity(self):
mask = posix.sched_getaffinity(0)
if len(mask) > 1:
# Empty masks are forbidden
mask.pop()
posix.sched_setaffinity(0, mask)
self.assertEqual(posix.sched_getaffinity(0), mask)
self.assertRaises(OSError, posix.sched_setaffinity, 0, [])
self.assertRaises(ValueError, posix.sched_setaffinity, 0, [-10])
self.assertRaises(ValueError, posix.sched_setaffinity, 0, map(int, "0X"))
self.assertRaises(OverflowError, posix.sched_setaffinity, 0, [1<<128])
self.assertRaises(OSError, posix.sched_setaffinity, -1, mask)
def test_rtld_constants(self):
# check presence of major RTLD_* constants
posix.RTLD_LAZY
posix.RTLD_NOW
posix.RTLD_GLOBAL
posix.RTLD_LOCAL
@unittest.skipUnless(hasattr(os, 'SEEK_HOLE'),
"test needs an OS that reports file holes")
def test_fs_holes(self):
# Even if the filesystem doesn't report holes,
# if the OS supports it the SEEK_* constants
# will be defined and will have a consistent
# behaviour:
# os.SEEK_DATA = current position
# os.SEEK_HOLE = end of file position
with open(os_helper.TESTFN, 'r+b') as fp:
fp.write(b"hello")
fp.flush()
size = fp.tell()
fno = fp.fileno()
try :
for i in range(size):
self.assertEqual(i, os.lseek(fno, i, os.SEEK_DATA))
self.assertLessEqual(size, os.lseek(fno, i, os.SEEK_HOLE))
self.assertRaises(OSError, os.lseek, fno, size, os.SEEK_DATA)
self.assertRaises(OSError, os.lseek, fno, size, os.SEEK_HOLE)
except OSError :
# Some OSs claim to support SEEK_HOLE/SEEK_DATA
# but it is not true.
# For instance:
# http://lists.freebsd.org/pipermail/freebsd-amd64/2012-January/014332.html
raise unittest.SkipTest("OSError raised!")
def test_path_error2(self):
"""
Test functions that call path_error2(), providing two filenames in their exceptions.
"""
for name in ("rename", "replace", "link"):
function = getattr(os, name, None)
if function is None:
continue
for dst in ("noodly2", os_helper.TESTFN):
try:
function('doesnotexistfilename', dst)
except OSError as e:
self.assertIn("'doesnotexistfilename' -> '{}'".format(dst), str(e))
break
else:
self.fail("No valid path_error2() test for os." + name)
def test_path_with_null_character(self):
fn = os_helper.TESTFN
fn_with_NUL = fn + '\0'
self.addCleanup(os_helper.unlink, fn)
os_helper.unlink(fn)
fd = None
try:
with self.assertRaises(ValueError):
fd = os.open(fn_with_NUL, os.O_WRONLY | os.O_CREAT) # raises
finally:
if fd is not None:
os.close(fd)
self.assertFalse(os.path.exists(fn))
self.assertRaises(ValueError, os.mkdir, fn_with_NUL)
self.assertFalse(os.path.exists(fn))
open(fn, 'wb').close()
self.assertRaises(ValueError, os.stat, fn_with_NUL)
def test_path_with_null_byte(self):
fn = os.fsencode(os_helper.TESTFN)
fn_with_NUL = fn + b'\0'
self.addCleanup(os_helper.unlink, fn)
os_helper.unlink(fn)
fd = None
try:
with self.assertRaises(ValueError):
fd = os.open(fn_with_NUL, os.O_WRONLY | os.O_CREAT) # raises
finally:
if fd is not None:
os.close(fd)
self.assertFalse(os.path.exists(fn))
self.assertRaises(ValueError, os.mkdir, fn_with_NUL)
self.assertFalse(os.path.exists(fn))
open(fn, 'wb').close()
self.assertRaises(ValueError, os.stat, fn_with_NUL)
@unittest.skipUnless(hasattr(os, "pidfd_open"), "pidfd_open unavailable")
def test_pidfd_open(self):
with self.assertRaises(OSError) as cm:
os.pidfd_open(-1)
if cm.exception.errno == errno.ENOSYS:
self.skipTest("system does not support pidfd_open")
if isinstance(cm.exception, PermissionError):
self.skipTest(f"pidfd_open syscall blocked: {cm.exception!r}")
self.assertEqual(cm.exception.errno, errno.EINVAL)
os.close(os.pidfd_open(os.getpid(), 0))
class PosixGroupsTester(unittest.TestCase):
def setUp(self):
if posix.getuid() != 0:
raise unittest.SkipTest("not enough privileges")
if not hasattr(posix, 'getgroups'):
raise unittest.SkipTest("need posix.getgroups")
if sys.platform == 'darwin':
raise unittest.SkipTest("getgroups(2) is broken on OSX")
self.saved_groups = posix.getgroups()
def tearDown(self):
if hasattr(posix, 'setgroups'):
posix.setgroups(self.saved_groups)
elif hasattr(posix, 'initgroups'):
name = pwd.getpwuid(posix.getuid()).pw_name
posix.initgroups(name, self.saved_groups[0])
@unittest.skipUnless(hasattr(posix, 'initgroups'),
"test needs posix.initgroups()")
def test_initgroups(self):
# find missing group
g = max(self.saved_groups or [0]) + 1
name = pwd.getpwuid(posix.getuid()).pw_name
posix.initgroups(name, g)
self.assertIn(g, posix.getgroups())
@unittest.skipUnless(hasattr(posix, 'setgroups'),
"test needs posix.setgroups()")
def test_setgroups(self):
for groups in [[0], list(range(16))]:
posix.setgroups(groups)
self.assertListEqual(groups, posix.getgroups())
class _PosixSpawnMixin:
# Program which does nothing and exits with status 0 (success)
NOOP_PROGRAM = (sys.executable, '-I', '-S', '-c', 'pass')
spawn_func = None
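# spawn_func is intentionally left as None here; the concrete subclasses below
# (TestPosixSpawn / TestPosixSpawnP) plug in posix.posix_spawn or
# posix.posix_spawnp respectively.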
def python_args(self, *args):
# Disable site module to avoid side effects. For example,
# on Fedora 28, if the HOME environment variable is not set,
# site._getuserbase() calls pwd.getpwuid() which opens
# /var/lib/sss/mc/passwd but then leaves the file open which makes
# test_close_file() to fail.
return (sys.executable, '-I', '-S', *args)
def test_returns_pid(self):
pidfile = os_helper.TESTFN
self.addCleanup(os_helper.unlink, pidfile)
script = f"""if 1:
import os
with open({pidfile!r}, "w") as pidfile:
pidfile.write(str(os.getpid()))
"""
args = self.python_args('-c', script)
pid = self.spawn_func(args[0], args, os.environ)
support.wait_process(pid, exitcode=0)
with open(pidfile) as f:
self.assertEqual(f.read(), str(pid))
def test_no_such_executable(self):
no_such_executable = 'no_such_executable'
try:
pid = self.spawn_func(no_such_executable,
[no_such_executable],
os.environ)
# bpo-35794: PermissionError can be raised if there are
# directories in the $PATH that are not accessible.
except (FileNotFoundError, PermissionError) as exc:
self.assertEqual(exc.filename, no_such_executable)
else:
pid2, status = os.waitpid(pid, 0)
self.assertEqual(pid2, pid)
self.assertNotEqual(status, 0)
# TODO: RUSTPYTHON: TypeError: '_Environ' object is not a mapping
@unittest.expectedFailure
def test_specify_environment(self):
envfile = os_helper.TESTFN
self.addCleanup(os_helper.unlink, envfile)
script = f"""if 1:
import os
with open({envfile!r}, "w") as envfile:
envfile.write(os.environ['foo'])
"""
args = self.python_args('-c', script)
pid = self.spawn_func(args[0], args,
{**os.environ, 'foo': 'bar'})
support.wait_process(pid, exitcode=0)
with open(envfile) as f:
self.assertEqual(f.read(), 'bar')
def test_none_file_actions(self):
pid = self.spawn_func(
self.NOOP_PROGRAM[0],
self.NOOP_PROGRAM,
os.environ,
file_actions=None
)
support.wait_process(pid, exitcode=0)
def test_empty_file_actions(self):
pid = self.spawn_func(
self.NOOP_PROGRAM[0],
self.NOOP_PROGRAM,
os.environ,
file_actions=[]
)
support.wait_process(pid, exitcode=0)
# TODO: RUSTPYTHON: TypeError: Unexpected keyword argument resetids
@unittest.expectedFailure
def test_resetids_explicit_default(self):
pid = self.spawn_func(
sys.executable,
[sys.executable, '-c', 'pass'],
os.environ,
resetids=False
)
support.wait_process(pid, exitcode=0)
# TODO: RUSTPYTHON: TypeError: Unexpected keyword argument resetids
@unittest.expectedFailure
def test_resetids(self):
pid = self.spawn_func(
sys.executable,
[sys.executable, '-c', 'pass'],
os.environ,
resetids=True
)
support.wait_process(pid, exitcode=0)
def test_resetids_wrong_type(self):
with self.assertRaises(TypeError):
self.spawn_func(sys.executable,
[sys.executable, "-c", "pass"],
os.environ, resetids=None)
# TODO: RUSTPYTHON: TypeError: Unexpected keyword argument setpgroup
@unittest.expectedFailure
def test_setpgroup(self):
pid = self.spawn_func(
sys.executable,
[sys.executable, '-c', 'pass'],
os.environ,
setpgroup=os.getpgrp()
)
support.wait_process(pid, exitcode=0)
def test_setpgroup_wrong_type(self):
with self.assertRaises(TypeError):
self.spawn_func(sys.executable,
[sys.executable, "-c", "pass"],
os.environ, setpgroup="023")
@unittest.skipUnless(hasattr(signal, 'pthread_sigmask'),
'need signal.pthread_sigmask()')
def test_setsigmask(self):
code = textwrap.dedent("""\
import signal
signal.raise_signal(signal.SIGUSR1)""")
pid = self.spawn_func(
sys.executable,
[sys.executable, '-c', code],
os.environ,
setsigmask=[signal.SIGUSR1]
)
support.wait_process(pid, exitcode=0)
# TODO: RUSTPYTHON: TypeError: Unexpected keyword argument setsigmask
@unittest.expectedFailure
def test_setsigmask_wrong_type(self):
with self.assertRaises(TypeError):
self.spawn_func(sys.executable,
[sys.executable, "-c", "pass"],
os.environ, setsigmask=34)
with self.assertRaises(TypeError):
self.spawn_func(sys.executable,
[sys.executable, "-c", "pass"],
os.environ, setsigmask=["j"])
with self.assertRaises(ValueError):
self.spawn_func(sys.executable,
[sys.executable, "-c", "pass"],
os.environ, setsigmask=[signal.NSIG,
signal.NSIG+1])
# TODO: RUSTPYTHON: TypeError: Unexpected keyword argument setsid
@unittest.expectedFailure
def test_setsid(self):
rfd, wfd = os.pipe()
self.addCleanup(os.close, rfd)
try:
os.set_inheritable(wfd, True)
code = textwrap.dedent(f"""
import os
fd = {wfd}
sid = os.getsid(0)
os.write(fd, str(sid).encode())
""")
try:
pid = self.spawn_func(sys.executable,
[sys.executable, "-c", code],
os.environ, setsid=True)
except NotImplementedError as exc:
self.skipTest(f"setsid is not supported: {exc!r}")
except PermissionError as exc:
self.skipTest(f"setsid failed with: {exc!r}")
finally:
os.close(wfd)
support.wait_process(pid, exitcode=0)
output = os.read(rfd, 100)
child_sid = int(output)
parent_sid = os.getsid(os.getpid())
self.assertNotEqual(parent_sid, child_sid)
@unittest.skipUnless(hasattr(signal, 'pthread_sigmask'),
'need signal.pthread_sigmask()')
def test_setsigdef(self):
original_handler = signal.signal(signal.SIGUSR1, signal.SIG_IGN)
code = textwrap.dedent("""\
import signal
signal.raise_signal(signal.SIGUSR1)""")
try:
pid = self.spawn_func(
sys.executable,
[sys.executable, '-c', code],
os.environ,
setsigdef=[signal.SIGUSR1]
)
finally:
signal.signal(signal.SIGUSR1, original_handler)
support.wait_process(pid, exitcode=-signal.SIGUSR1)
def test_setsigdef_wrong_type(self):
with self.assertRaises(TypeError):
self.spawn_func(sys.executable,
[sys.executable, "-c", "pass"],
os.environ, setsigdef=34)
with self.assertRaises(TypeError):
self.spawn_func(sys.executable,
[sys.executable, "-c", "pass"],
os.environ, setsigdef=["j"])
with self.assertRaises(ValueError):
self.spawn_func(sys.executable,
[sys.executable, "-c", "pass"],
os.environ, setsigdef=[signal.NSIG, signal.NSIG+1])
# TODO: RUSTPYTHON: TypeError: Unexpected keyword argument scheduler
@unittest.expectedFailure
@requires_sched
@unittest.skipIf(sys.platform.startswith(('freebsd', 'netbsd')),
"bpo-34685: test can fail on BSD")
def test_setscheduler_only_param(self):
policy = os.sched_getscheduler(0)
priority = os.sched_get_priority_min(policy)
code = textwrap.dedent(f"""\
import os, sys
if os.sched_getscheduler(0) != {policy}:
sys.exit(101)
if os.sched_getparam(0).sched_priority != {priority}:
sys.exit(102)""")
pid = self.spawn_func(
sys.executable,
[sys.executable, '-c', code],
os.environ,
scheduler=(None, os.sched_param(priority))
)
support.wait_process(pid, exitcode=0)
# TODO: RUSTPYTHON: TypeError: Unexpected keyword argument scheduler
@unittest.expectedFailure
@requires_sched
@unittest.skipIf(sys.platform.startswith(('freebsd', 'netbsd')),
"bpo-34685: test can fail on BSD")
def test_setscheduler_with_policy(self):
policy = os.sched_getscheduler(0)
priority = os.sched_get_priority_min(policy)
code = textwrap.dedent(f"""\
import os, sys
if os.sched_getscheduler(0) != {policy}:
sys.exit(101)
if os.sched_getparam(0).sched_priority != {priority}:
sys.exit(102)""")
pid = self.spawn_func(
sys.executable,
[sys.executable, '-c', code],
os.environ,
scheduler=(policy, os.sched_param(priority))
)
support.wait_process(pid, exitcode=0)
def test_multiple_file_actions(self):
file_actions = [
(os.POSIX_SPAWN_OPEN, 3, os.path.realpath(__file__), os.O_RDONLY, 0),
(os.POSIX_SPAWN_CLOSE, 0),
(os.POSIX_SPAWN_DUP2, 1, 4),
]
pid = self.spawn_func(self.NOOP_PROGRAM[0],
self.NOOP_PROGRAM,
os.environ,
file_actions=file_actions)
support.wait_process(pid, exitcode=0)
def test_bad_file_actions(self):
args = self.NOOP_PROGRAM
with self.assertRaises(TypeError):
self.spawn_func(args[0], args, os.environ,
file_actions=[None])
with self.assertRaises(TypeError):
self.spawn_func(args[0], args, os.environ,
file_actions=[()])
with self.assertRaises(TypeError):
self.spawn_func(args[0], args, os.environ,
file_actions=[(None,)])
with self.assertRaises(TypeError):
self.spawn_func(args[0], args, os.environ,
file_actions=[(12345,)])
with self.assertRaises(TypeError):
self.spawn_func(args[0], args, os.environ,
file_actions=[(os.POSIX_SPAWN_CLOSE,)])
with self.assertRaises(TypeError):
self.spawn_func(args[0], args, os.environ,
file_actions=[(os.POSIX_SPAWN_CLOSE, 1, 2)])
with self.assertRaises(TypeError):
self.spawn_func(args[0], args, os.environ,
file_actions=[(os.POSIX_SPAWN_CLOSE, None)])
with self.assertRaises(ValueError):
self.spawn_func(args[0], args, os.environ,
file_actions=[(os.POSIX_SPAWN_OPEN,
3, __file__ + '\0',
os.O_RDONLY, 0)])
def test_open_file(self):
outfile = os_helper.TESTFN
self.addCleanup(os_helper.unlink, outfile)
script = """if 1:
import sys
sys.stdout.write("hello")
"""
file_actions = [
(os.POSIX_SPAWN_OPEN, 1, outfile,
os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
stat.S_IRUSR | stat.S_IWUSR),
]
args = self.python_args('-c', script)
pid = self.spawn_func(args[0], args, os.environ,
file_actions=file_actions)
support.wait_process(pid, exitcode=0)
with open(outfile) as f:
self.assertEqual(f.read(), 'hello')
# TODO: RUSTPYTHON: FileNotFoundError: [Errno 2] No such file or directory (os error 2): '@test_55144_tmp' -> 'None'
@unittest.expectedFailure
def test_close_file(self):
closefile = os_helper.TESTFN
self.addCleanup(os_helper.unlink, closefile)
script = f"""if 1:
import os
try:
os.fstat(0)
except OSError as e:
with open({closefile!r}, 'w') as closefile:
closefile.write('is closed %d' % e.errno)
"""
args = self.python_args('-c', script)
pid = self.spawn_func(args[0], args, os.environ,
file_actions=[(os.POSIX_SPAWN_CLOSE, 0)])
support.wait_process(pid, exitcode=0)
with open(closefile) as f:
self.assertEqual(f.read(), 'is closed %d' % errno.EBADF)
def test_dup2(self):
dupfile = os_helper.TESTFN
self.addCleanup(os_helper.unlink, dupfile)
script = """if 1:
import sys
sys.stdout.write("hello")
"""
with open(dupfile, "wb") as childfile:
file_actions = [
(os.POSIX_SPAWN_DUP2, childfile.fileno(), 1),
]
args = self.python_args('-c', script)
pid = self.spawn_func(args[0], args, os.environ,
file_actions=file_actions)
support.wait_process(pid, exitcode=0)
with open(dupfile) as f:
self.assertEqual(f.read(), 'hello')
@unittest.skipUnless(hasattr(os, 'posix_spawn'), "test needs os.posix_spawn")
class TestPosixSpawn(unittest.TestCase, _PosixSpawnMixin):
spawn_func = getattr(posix, 'posix_spawn', None)
@unittest.skipUnless(hasattr(os, 'posix_spawnp'), "test needs os.posix_spawnp")
class TestPosixSpawnP(unittest.TestCase, _PosixSpawnMixin):
spawn_func = getattr(posix, 'posix_spawnp', None)
@os_helper.skip_unless_symlink
def test_posix_spawnp(self):
# Use a symlink to create a program in its own temporary directory
temp_dir = tempfile.mkdtemp()
self.addCleanup(os_helper.rmtree, temp_dir)
program = 'posix_spawnp_test_program.exe'
program_fullpath = os.path.join(temp_dir, program)
os.symlink(sys.executable, program_fullpath)
try:
path = os.pathsep.join((temp_dir, os.environ['PATH']))
except KeyError:
path = temp_dir # PATH is not set
spawn_args = (program, '-I', '-S', '-c', 'pass')
code = textwrap.dedent("""
import os
from test import support
args = %a
pid = os.posix_spawnp(args[0], args, os.environ)
support.wait_process(pid, exitcode=0)
""" % (spawn_args,))
# Use a subprocess to test os.posix_spawnp() with a modified PATH
# environment variable: posix_spawnp() uses the current environment
# to locate the program, not its environment argument.
args = ('-c', code)
assert_python_ok(*args, PATH=path)
@unittest.skip("TODO: RUSTPYTHON, NameError: name 'ParserCreate' is not defined")
@unittest.skipUnless(sys.platform == "darwin", "test weak linking on macOS")
class TestPosixWeaklinking(unittest.TestCase):
# These test cases verify that weak linking support on macOS works
# as expected. These cases only test new behaviour introduced by weak linking,
# regular behaviour is tested by the normal test cases.
#
# See the section on Weak Linking in Mac/README.txt for more information.
def setUp(self):
import sysconfig
import platform
config_vars = sysconfig.get_config_vars()
self.available = { nm for nm in config_vars if nm.startswith("HAVE_") and config_vars[nm] }
self.mac_ver = tuple(int(part) for part in platform.mac_ver()[0].split("."))
def _verify_available(self, name):
if name not in self.available:
raise unittest.SkipTest(f"{name} not weak-linked")
def test_pwritev(self):
self._verify_available("HAVE_PWRITEV")
if self.mac_ver >= (10, 16):
self.assertTrue(hasattr(os, "pwritev"), "os.pwritev is not available")
self.assertTrue(hasattr(os, "preadv"), "os.readv is not available")
else:
self.assertFalse(hasattr(os, "pwritev"), "os.pwritev is available")
self.assertFalse(hasattr(os, "preadv"), "os.readv is available")
def test_stat(self):
self._verify_available("HAVE_FSTATAT")
if self.mac_ver >= (10, 10):
self.assertIn("HAVE_FSTATAT", posix._have_functions)
else:
self.assertNotIn("HAVE_FSTATAT", posix._have_functions)
with self.assertRaisesRegex(NotImplementedError, "dir_fd unavailable"):
os.stat("file", dir_fd=0)
def test_access(self):
self._verify_available("HAVE_FACCESSAT")
if self.mac_ver >= (10, 10):
self.assertIn("HAVE_FACCESSAT", posix._have_functions)
else:
self.assertNotIn("HAVE_FACCESSAT", posix._have_functions)
with self.assertRaisesRegex(NotImplementedError, "dir_fd unavailable"):
os.access("file", os.R_OK, dir_fd=0)
with self.assertRaisesRegex(NotImplementedError, "follow_symlinks unavailable"):
os.access("file", os.R_OK, follow_symlinks=False)
with self.assertRaisesRegex(NotImplementedError, "effective_ids unavailable"):
os.access("file", os.R_OK, effective_ids=True)
def test_chmod(self):
self._verify_available("HAVE_FCHMODAT")
if self.mac_ver >= (10, 10):
self.assertIn("HAVE_FCHMODAT", posix._have_functions)
else:
self.assertNotIn("HAVE_FCHMODAT", posix._have_functions)
self.assertIn("HAVE_LCHMOD", posix._have_functions)
with self.assertRaisesRegex(NotImplementedError, "dir_fd unavailable"):
os.chmod("file", 0o644, dir_fd=0)
def test_chown(self):
self._verify_available("HAVE_FCHOWNAT")
if self.mac_ver >= (10, 10):
self.assertIn("HAVE_FCHOWNAT", posix._have_functions)
else:
self.assertNotIn("HAVE_FCHOWNAT", posix._have_functions)
self.assertIn("HAVE_LCHOWN", posix._have_functions)
with self.assertRaisesRegex(NotImplementedError, "dir_fd unavailable"):
os.chown("file", 0, 0, dir_fd=0)
def test_link(self):
self._verify_available("HAVE_LINKAT")
if self.mac_ver >= (10, 10):
self.assertIn("HAVE_LINKAT", posix._have_functions)
else:
self.assertNotIn("HAVE_LINKAT", posix._have_functions)
with self.assertRaisesRegex(NotImplementedError, "src_dir_fd unavailable"):
os.link("source", "target", src_dir_fd=0)
with self.assertRaisesRegex(NotImplementedError, "dst_dir_fd unavailable"):
os.link("source", "target", dst_dir_fd=0)
with self.assertRaisesRegex(NotImplementedError, "src_dir_fd unavailable"):
os.link("source", "target", src_dir_fd=0, dst_dir_fd=0)
# issue 41355: !HAVE_LINKAT code path ignores the follow_symlinks flag
with os_helper.temp_dir() as base_path:
link_path = os.path.join(base_path, "link")
target_path = os.path.join(base_path, "target")
source_path = os.path.join(base_path, "source")
with open(source_path, "w") as fp:
fp.write("data")
os.symlink("target", link_path)
# Calling os.link should fail in the link(2) call, and
# should not reject *follow_symlinks* (to match the
# behaviour you'd get when building on a platform without
# linkat)
with self.assertRaises(FileExistsError):
os.link(source_path, link_path, follow_symlinks=True)
with self.assertRaises(FileExistsError):
os.link(source_path, link_path, follow_symlinks=False)
def test_listdir_scandir(self):
self._verify_available("HAVE_FDOPENDIR")
if self.mac_ver >= (10, 10):
self.assertIn("HAVE_FDOPENDIR", posix._have_functions)
else:
self.assertNotIn("HAVE_FDOPENDIR", posix._have_functions)
with self.assertRaisesRegex(TypeError, "listdir: path should be string, bytes, os.PathLike or None, not int"):
os.listdir(0)
with self.assertRaisesRegex(TypeError, "scandir: path should be string, bytes, os.PathLike or None, not int"):
os.scandir(0)
def test_mkdir(self):
self._verify_available("HAVE_MKDIRAT")
if self.mac_ver >= (10, 10):
self.assertIn("HAVE_MKDIRAT", posix._have_functions)
else:
self.assertNotIn("HAVE_MKDIRAT", posix._have_functions)
with self.assertRaisesRegex(NotImplementedError, "dir_fd unavailable"):
os.mkdir("dir", dir_fd=0)
def test_rename_replace(self):
self._verify_available("HAVE_RENAMEAT")
if self.mac_ver >= (10, 10):
self.assertIn("HAVE_RENAMEAT", posix._have_functions)
else:
self.assertNotIn("HAVE_RENAMEAT", posix._have_functions)
with self.assertRaisesRegex(NotImplementedError, "src_dir_fd and dst_dir_fd unavailable"):
os.rename("a", "b", src_dir_fd=0)
with self.assertRaisesRegex(NotImplementedError, "src_dir_fd and dst_dir_fd unavailable"):
os.rename("a", "b", dst_dir_fd=0)
with self.assertRaisesRegex(NotImplementedError, "src_dir_fd and dst_dir_fd unavailable"):
os.replace("a", "b", src_dir_fd=0)
with self.assertRaisesRegex(NotImplementedError, "src_dir_fd and dst_dir_fd unavailable"):
os.replace("a", "b", dst_dir_fd=0)
def test_unlink_rmdir(self):
self._verify_available("HAVE_UNLINKAT")
if self.mac_ver >= (10, 10):
self.assertIn("HAVE_UNLINKAT", posix._have_functions)
else:
self.assertNotIn("HAVE_UNLINKAT", posix._have_functions)
with self.assertRaisesRegex(NotImplementedError, "dir_fd unavailable"):
os.unlink("path", dir_fd=0)
with self.assertRaisesRegex(NotImplementedError, "dir_fd unavailable"):
os.rmdir("path", dir_fd=0)
def test_open(self):
self._verify_available("HAVE_OPENAT")
if self.mac_ver >= (10, 10):
self.assertIn("HAVE_OPENAT", posix._have_functions)
else:
self.assertNotIn("HAVE_OPENAT", posix._have_functions)
with self.assertRaisesRegex(NotImplementedError, "dir_fd unavailable"):
os.open("path", os.O_RDONLY, dir_fd=0)
def test_readlink(self):
self._verify_available("HAVE_READLINKAT")
if self.mac_ver >= (10, 10):
self.assertIn("HAVE_READLINKAT", posix._have_functions)
else:
self.assertNotIn("HAVE_READLINKAT", posix._have_functions)
with self.assertRaisesRegex(NotImplementedError, "dir_fd unavailable"):
os.readlink("path", dir_fd=0)
def test_symlink(self):
self._verify_available("HAVE_SYMLINKAT")
if self.mac_ver >= (10, 10):
self.assertIn("HAVE_SYMLINKAT", posix._have_functions)
else:
self.assertNotIn("HAVE_SYMLINKAT", posix._have_functions)
with self.assertRaisesRegex(NotImplementedError, "dir_fd unavailable"):
os.symlink("a", "b", dir_fd=0)
def test_utime(self):
self._verify_available("HAVE_FUTIMENS")
self._verify_available("HAVE_UTIMENSAT")
if self.mac_ver >= (10, 13):
self.assertIn("HAVE_FUTIMENS", posix._have_functions)
self.assertIn("HAVE_UTIMENSAT", posix._have_functions)
else:
self.assertNotIn("HAVE_FUTIMENS", posix._have_functions)
self.assertNotIn("HAVE_UTIMENSAT", posix._have_functions)
with self.assertRaisesRegex(NotImplementedError, "dir_fd unavailable"):
os.utime("path", dir_fd=0)
def test_main():
try:
support.run_unittest(
PosixTester,
PosixGroupsTester,
TestPosixSpawn,
TestPosixSpawnP,
TestPosixWeaklinking
)
finally:
support.reap_children()
if __name__ == '__main__':
test_main()
| 40.898438 | 122 | 0.595011 |
3870e1a84ea0cc341ab60c99ee208341ed524d4a | 4,508 | py | Python | my_env/bin/gdal_auth.py | wilsonfilhodev/gis | 65926fd36460a7a3590ef7511ccae1d64e3d9988 | [
"MIT"
] | null | null | null | my_env/bin/gdal_auth.py | wilsonfilhodev/gis | 65926fd36460a7a3590ef7511ccae1d64e3d9988 | [
"MIT"
] | null | null | null | my_env/bin/gdal_auth.py | wilsonfilhodev/gis | 65926fd36460a7a3590ef7511ccae1d64e3d9988 | [
"MIT"
] | null | null | null | #!/home/wilsonsf/environments/my_env/bin/python3
# -*- coding: utf-8 -*-
# ******************************************************************************
# $Id$
#
# Project: GDAL
# Purpose: Application for Google web service authentication.
# Author: Frank Warmerdam, warmerdam@pobox.com
#
# ******************************************************************************
# Copyright (c) 2013, Frank Warmerdam <warmerdam@pobox.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
# ******************************************************************************
import sys
import time
import webbrowser
from osgeo import gdal
SCOPES = {
'ft': 'https://www.googleapis.com/auth/fusiontables',
'storage': 'https://www.googleapis.com/auth/devstorage.read_only',
'storage-rw': 'https://www.googleapis.com/auth/devstorage.read_write'
}
# =============================================================================
# Usage()
# =============================================================================
def Usage():
print('')
print('Usage: gdal_auth.py [-s scope]')
print(' - interactive use.')
print('')
print('or:')
print('Usage: gdal_auth.py login [-s scope] ')
print('Usage: gdal_auth.py auth2refresh [-s scope] auth_token')
print('Usage: gdal_auth.py refresh2access [-s scope] refresh_token')
print('')
print('scopes: ft/storage/storage-rw/full_url')
print('')
sys.exit(1)
# =============================================================================
# Mainline
# =============================================================================
scope = SCOPES['ft']
token_in = None
command = None
argv = gdal.GeneralCmdLineProcessor(sys.argv)
if argv is None:
sys.exit(0)
# Parse command line arguments.
i = 1
while i < len(argv):
arg = argv[i]
if arg == '-s' and i < len(argv) - 1:
if argv[i + 1] in SCOPES:
scope = SCOPES[argv[i + 1]]
elif argv[i + 1].startswith('http'):
scope = argv[i + 1]
else:
print('Scope %s not recognised.' % argv[i + 1])
Usage()
sys.exit(1)
i = i + 1
elif arg[0] == '-':
Usage()
elif command is None:
command = arg
elif token_in is None:
token_in = arg
else:
Usage()
i = i + 1
if command is None:
command = 'interactive'
if command == 'login':
print(gdal.GOA2GetAuthorizationURL(scope))
elif command == 'auth2refresh':
print(gdal.GOA2GetRefreshToken(token_in, scope))
elif command == 'refresh2access':
print(gdal.GOA2GetAccessToken(token_in, scope))
elif command != 'interactive':
Usage()
else:
# Interactive case
print('Authorization requested for scope:')
print(scope)
print('')
print('Please login and authorize access in web browser...')
webbrowser.open(gdal.GOA2GetAuthorizationURL(scope))
time.sleep(2.0)
print('')
print('Enter authorization token:')
auth_token = sys.stdin.readline()
refresh_token = gdal.GOA2GetRefreshToken(auth_token, scope)
print('Refresh Token:' + refresh_token)
print('')
if scope == SCOPES['ft']:
print('Consider setting a configuration option like:')
print('GFT_REFRESH_TOKEN=' + refresh_token)
elif scope in (SCOPES['storage'], SCOPES['storage-rw']):
print('Consider setting a configuration option like:')
print('GS_OAUTH2_REFRESH_TOKEN=' + refresh_token)
| 31.524476 | 80 | 0.584516 |
451fd258bd9cb27c3bbc0374153cccc023066b80 | 2,464 | py | Python | year_3/databases_sem1/lab1/maxdb/utils.py | honchardev/KPI | f8425681857c02a67127ffb05c0af0563a8473e1 | [
"MIT"
] | null | null | null | year_3/databases_sem1/lab1/maxdb/utils.py | honchardev/KPI | f8425681857c02a67127ffb05c0af0563a8473e1 | [
"MIT"
] | 21 | 2020-03-24T16:26:04.000Z | 2022-02-18T15:56:16.000Z | year_3/databases_sem1/lab1/maxdb/utils.py | honchardev/KPI | f8425681857c02a67127ffb05c0af0563a8473e1 | [
"MIT"
] | null | null | null | """
Utilities for maxdb classes.
"""
import os
class LRUCache(dict):
"""
LRU: least recently used cache - default storage for queries.
Available functionality:
get item: .get or __getitem__
put item: __setitem__
del item: .clear or __delitem__
"""
def __init__(self, *args, **kw):
self.capacity = kw.pop('capacity', None) or float('nan')
self.lru = []
super(LRUCache, self).__init__(*args, **kw)
def _refresh(self, key):
if key in self.lru:
self.lru.remove(key)
self.lru.append(key)
def get(self, key, default=None):
item = super(LRUCache, self).get(key, default)
self._refresh(key)
return item
def clear(self):
super(LRUCache, self).clear()
del self.lru[:]
def __getitem__(self, key):
item = super(LRUCache, self).__getitem__(key)
self._refresh(key)
return item
def __setitem__(self, key, value):
super(LRUCache, self).__setitem__(key, value)
self._refresh(key)
if len(self) >= self.capacity:
self.pop(self.lru.pop(0))
def __delitem__(self, key):
super(LRUCache, self).__delitem__(key)
self.lru.remove(key)
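# Illustrative usage of LRUCache (a sketch, not part of the library API):
#
#     cache = LRUCache(capacity=3)
#     cache['a'] = 1
#     cache['b'] = 2
#     _ = cache['a']        # 'a' becomes the most recently used key
#     cache['c'] = 3        # len() reached capacity, so the LRU key 'b' is evicted
#     assert 'b' not in cache and 'a' in cache and 'c' in cache
#
# Note that eviction triggers once len(self) >= capacity, so at most capacity-1
# entries are kept; without a capacity argument the cache grows unbounded.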
class FrozenDict(dict):
"""
An immutable dict to save into db.
Usage in maxdb: through 'freeze' method.
"""
def __hash__(self):
return hash(tuple(sorted(self.items())))
def _immutable(self, *args, **kw):
raise TypeError('FrozenDict is immutable')
__setitem__ = _immutable
__delitem__ = _immutable
clear = _immutable
update = _immutable
setdefault = _immutable
pop = _immutable
popitem = _immutable
def freeze(obj):
"""
Make an object immutable to store it as an element in DB.
The whole idea of freezing elements:
1) sort of encapsulation
2) queries are commutative:
(a|b == b|a) or (a&b == b&a)
"""
if isinstance(obj, dict):
return FrozenDict((k, freeze(v)) for k, v in obj.items())
elif isinstance(obj, list):
return tuple(freeze(el) for el in obj)
elif isinstance(obj, set):
return frozenset(obj)
else:
return obj
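# Illustrative behaviour of freeze() (a sketch):
#
#     frozen = freeze({'name': 'max', 'tags': ['db', 'demo'], 'ids': {1, 2}})
#     hash(frozen)                  # works: the nested list/set became tuple/frozenset
#     # frozen['name'] = 'other'    # would raise TypeError: FrozenDict is immutable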
def touch(fname, create_dirs):
if create_dirs:
base_dir = os.path.dirname(fname)
if not os.path.exists(base_dir):
os.makedirs(base_dir)
with open(fname, 'a'):
os.utime(fname, None)
| 24.39604 | 65 | 0.599432 |
f75f9ff65e5153de2e0101361f4fb59830bab0ee | 3,235 | py | Python | src/newsfeed/handlers/subscriptions.py | ets-labs/newsfeed | 9f59f94e1cd5f24d4b4121929050fc8b304173af | [
"BSD-3-Clause"
] | 10 | 2019-11-07T15:04:02.000Z | 2022-02-19T11:47:40.000Z | src/newsfeed/handlers/subscriptions.py | ets-labs/newsfeed | 9f59f94e1cd5f24d4b4121929050fc8b304173af | [
"BSD-3-Clause"
] | 27 | 2019-10-31T16:31:27.000Z | 2020-01-14T15:21:29.000Z | src/newsfeed/handlers/subscriptions.py | ets-labs/newsfeed | 9f59f94e1cd5f24d4b4121929050fc8b304173af | [
"BSD-3-Clause"
] | 10 | 2019-11-07T15:08:43.000Z | 2021-12-03T22:31:49.000Z | """Subscription API handlers."""
from typing import Dict, Union
from aiohttp import web
from dependency_injector.wiring import Provide
from newsfeed.domain.subscription import (
Subscription,
SubscriptionService,
)
from newsfeed.domain.error import DomainError
from newsfeed.containers import Container
SerializedSubscription = Dict[
str,
Union[
str,
int,
],
]
async def get_subscriptions_handler(
request: web.Request, *,
subscription_service: SubscriptionService = Provide[
Container.subscription_service
],
) -> web.Response:
"""Handle subscriptions getting requests."""
newsfeed_subscriptions = await subscription_service.get_subscriptions(
newsfeed_id=request.match_info['newsfeed_id'],
)
return web.json_response(
data={
'results': [
_serialize_subscription(subscription)
for subscription in newsfeed_subscriptions
],
},
)
async def post_subscription_handler(
request: web.Request, *,
subscription_service: SubscriptionService = Provide[
Container.subscription_service
],
) -> web.Response:
"""Handle subscriptions posting requests."""
data = await request.json()
try:
subscription = await subscription_service.create_subscription(
newsfeed_id=request.match_info['newsfeed_id'],
to_newsfeed_id=data['to_newsfeed_id'],
)
except DomainError as exception:
return web.json_response(
status=400,
data={
'message': exception.message,
}
)
return web.json_response(
status=200,
data=_serialize_subscription(subscription),
)
async def delete_subscription_handler(
request: web.Request, *,
subscription_service: SubscriptionService = Provide[
Container.subscription_service
],
) -> web.Response:
"""Handle subscriptions deleting requests."""
await subscription_service.delete_subscription(
newsfeed_id=request.match_info['newsfeed_id'],
subscription_id=request.match_info['subscription_id'],
)
return web.json_response(status=204)
async def get_subscriber_subscriptions_handler(
request: web.Request, *,
subscription_service: SubscriptionService = Provide[
Container.subscription_service
],
) -> web.Response: # noqa
"""Handle subscriber subscriptions getting requests."""
newsfeed_subscriptions = await subscription_service.get_subscriber_subscriptions(
newsfeed_id=request.match_info['newsfeed_id'],
)
return web.json_response(
data={
'results': [
_serialize_subscription(subscription)
for subscription in newsfeed_subscriptions
],
},
)
def _serialize_subscription(subscription: Subscription) -> SerializedSubscription:
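# Example of the serialized shape this helper produces (values are illustrative):
#   {"id": "6be96c2e-...", "newsfeed_id": "123", "to_newsfeed_id": "124",
#    "subscribed_at": 1577836800}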
return {
'id': str(subscription.id),
'newsfeed_id': str(subscription.newsfeed_id),
'to_newsfeed_id': str(subscription.to_newsfeed_id),
'subscribed_at': int(subscription.subscribed_at.timestamp()),
}
| 28.377193 | 85 | 0.657496 |
4bfb88255066dbb178763ef4681430c240af8a8b | 627 | py | Python | BackEnd/backEnd/manage.py | hosseindehghanipour1998/onlineClass-Backend | f7e8c410ffe6ece5d010886dea54c85dd3ec99b7 | [
"MIT"
] | 3 | 2020-03-06T09:53:39.000Z | 2020-03-31T22:43:09.000Z | BackEnd/backEnd/manage.py | hosseindehghanipour1998/onlineClass-Backend | f7e8c410ffe6ece5d010886dea54c85dd3ec99b7 | [
"MIT"
] | 10 | 2020-03-01T08:58:36.000Z | 2022-02-10T08:18:50.000Z | manage.py | BeOrNot2Be/MEDILOV-back | 549bea50513a3b9f9bd5daafc77250cf523029cc | [
"MIT"
] | null | null | null | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backEnd.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| 28.5 | 73 | 0.682616 |
537da7f7f7bb74160eade38b40d76d584efe286a | 1,297 | py | Python | prepro.py | linVdcd/AIAYN_MultiTurnChatbot | c33deec9f4be5f669b51a801076a936119c6568a | [
"Apache-2.0"
] | null | null | null | prepro.py | linVdcd/AIAYN_MultiTurnChatbot | c33deec9f4be5f669b51a801076a936119c6568a | [
"Apache-2.0"
] | null | null | null | prepro.py | linVdcd/AIAYN_MultiTurnChatbot | c33deec9f4be5f669b51a801076a936119c6568a | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
#/usr/bin/python2
'''
June 2017 by kyubyong park.
kbpark.linguist@gmail.com.
https://www.github.com/kyubyong/transformer
'''
from __future__ import print_function
from hyperparams import Hyperparams as hp
import tensorflow as tf
import numpy as np
import codecs
import os
import regex
from collections import Counter
def make_vocab(fpath, fname):
'''Constructs vocabulary.
Args:
fpath: A string. Input file path.
fname: A string. Output file name.
Writes vocabulary line by line to `preprocessed/fname`
'''
text = codecs.open(fpath, 'r', 'utf-8').read()
#text = regex.sub("[^\s\p{Latin}']", "", text)
words = text.split()
word2cnt = Counter(words)
if not os.path.exists('preprocessed'): os.mkdir('preprocessed')
with codecs.open('preprocessed/{}'.format(fname), 'w', 'utf-8') as fout:
fout.write("{}\t1000000000\n{}\t1000000000\n{}\t1000000000\n{}\t1000000000\n".format("<PAD>", "<UNK>", "<S>", "</S>"))
for word, cnt in word2cnt.most_common(len(word2cnt)):
fout.write(u"{}\t{}\n".format(word, cnt))
if __name__ == '__main__':
make_vocab(hp.source_train, "de.vocab.tsv")
make_vocab(hp.target_train, "en.vocab.tsv")
print("Done") | 33.25641 | 127 | 0.635312 |
0c18d94e4047b9aecfd5107868694b4e3c77d482 | 10,510 | py | Python | lav/models/bev_planner.py | dotchen/LAV | dc9b4cfca39abd50c7438e8749d49f6ac0fe5e4e | [
"Apache-2.0"
] | 122 | 2022-03-22T18:15:25.000Z | 2022-03-31T13:28:57.000Z | lav/models/bev_planner.py | chisyliu/LAV | 0a5068c0fad3ecc2f2616801c6d3b00bc0ff03f3 | [
"Apache-2.0"
] | 5 | 2022-03-23T06:20:25.000Z | 2022-03-30T09:23:09.000Z | lav/models/bev_planner.py | chisyliu/LAV | 0a5068c0fad3ecc2f2616801c6d3b00bc0ff03f3 | [
"Apache-2.0"
] | 11 | 2022-03-22T19:23:09.000Z | 2022-03-30T08:13:33.000Z | import numpy as np
import torch
from torch import nn
from torch.nn import functional as F
from .resnet import resnet18
from copy import deepcopy
class BEVPlanner(nn.Module):
def __init__(self,
pixels_per_meter=2, crop_size=64, x_offset=0, y_offset=0.75,
feature_x_jitter=1, feature_angle_jitter=10,
num_plan=10, k=16, num_out_feature=64, num_cmds=6, max_num_cars=5,
num_plan_iter=1,
):
super().__init__()
self.num_cmds = num_cmds
self.num_plan = num_plan
self.num_plan_iter = num_plan_iter
self.max_num_cars = max_num_cars
self.num_out_feature = num_out_feature
self.pixels_per_meter = pixels_per_meter
self.crop_size = crop_size
self.feature_x_jitter = feature_x_jitter
self.feature_angle_jitter = np.deg2rad(feature_angle_jitter)
self.offset_x = nn.Parameter(torch.tensor(x_offset).float(), requires_grad=False)
self.offset_y = nn.Parameter(torch.tensor(y_offset).float(), requires_grad=False)
self.bev_conv_emb = nn.Sequential(
resnet18(num_channels=5),
nn.AdaptiveAvgPool2d((1,1)),
nn.Flatten(),
)
self.plan_gru = nn.GRU(4,512,batch_first=True)
self.plan_mlp = nn.Linear(512,2)
self.cast_grus = nn.ModuleList([nn.GRU(512, 64, batch_first=True) for _ in range(self.num_cmds)])
self.cast_mlps = nn.ModuleList([nn.Linear(64, 2) for _ in range(self.num_cmds)])
self.cast_cmd_pred = nn.Sequential(
nn.Linear(512,self.num_cmds),
nn.Sigmoid(),
)
def infer(self, bev, nxps):
cropped_ego_bev = self.crop_feature(
bev,
torch.zeros((1,2), dtype=bev.dtype,device=bev.device),
torch.zeros((1,),dtype=bev.dtype,device=bev.device),
pixels_per_meter=self.pixels_per_meter,
crop_size=self.crop_size*2
)
# cropped_ego_bev = self.crop_feature(bev, locs_jitter, oris_jitter, pixels_per_meter=self.pixels_per_meter, crop_size=self.crop_size*2)
ego_bev_embd = self.bev_conv_emb(cropped_ego_bev)
ego_cast_locs = self.cast(ego_bev_embd)
ego_plan_locs = self.plan(
ego_bev_embd, nxps,
cast_locs=ego_cast_locs,
pixels_per_meter=self.pixels_per_meter,
crop_size=self.crop_size*2
)
ego_cast_cmds = self.cast_cmd_pred(ego_bev_embd)
return ego_plan_locs, ego_cast_locs, ego_cast_cmds
def forward(self, bev, ego_locs, locs, oris, nxps, typs):
ego_oris = oris[:,:1]
locs = locs[:,1:]
oris = oris[:,1:]
typs = (typs[:,1:]==1) # 1 is for vehicles
N = locs.size(1)
# Only pick the good ones.
typs = filter_cars(ego_locs, locs, typs)
# Other vehicles
if int(typs.float().sum()) > 0:
# Guard against OOM: randomly sample cars to train on
typs = random_sample(typs, size=self.max_num_cars)
# Flatten the locs
flat_bev = bev.expand(N,*bev.size()).permute(1,0,2,3,4).contiguous()[typs]
flat_locs = (locs[:,:,1:]-locs[:,:,:1])[typs]
flat_rel_loc0 = (locs[:,:,0]-ego_locs[:,None,0])[typs]
flat_rel_ori0 = (oris-ego_oris)[typs]
K = flat_locs.size(0)
locs_jitter = (torch.rand((K,2))*2-1).float().to(locs.device) * self.feature_x_jitter
locs_jitter[:,1] = 0
oris_jitter = (torch.rand((K,))*2-1).float().to(oris.device) * self.feature_angle_jitter
cropped_other_bev = self.crop_feature(flat_bev, flat_rel_loc0+locs_jitter, flat_rel_ori0+oris_jitter, pixels_per_meter=self.pixels_per_meter, crop_size=self.crop_size*2)
other_locs = transform_points(flat_locs-locs_jitter[:,None], -flat_rel_ori0-oris_jitter)
# import matplotlib.pyplot as plt
# from matplotlib.pyplot import Circle
# f, [ax1, ax2] = plt.subplots(1,2,figsize=(8,4))
# ax1.imshow(bev[0].mean(0).detach().cpu().numpy())
# ax2.imshow(cropped_other_bev[0].mean(0).detach().cpu().numpy())
# for loc in other_locs[0]:
# ax2.add_patch(Circle(loc.detach().cpu().numpy()*4+[96,168],radius=2))
# plt.show()
other_bev_embd = self.bev_conv_emb(cropped_other_bev)
other_cast_locs = self.cast(other_bev_embd)
other_cast_cmds = self.cast_cmd_pred(other_bev_embd)
else:
dtype = bev.dtype
device = bev.device
other_locs = torch.zeros((N,self.num_plan,2), dtype=dtype, device=device)
other_embd = torch.zeros((N,self.num_out_feature), dtype=dtype, device=device)
other_bev_embd = torch.zeros((N,self.num_out_feature), dtype=dtype, device=device)
other_cast_locs = torch.zeros((N,self.num_cmds,self.num_plan,2), dtype=dtype, device=device)
other_cast_cmds = torch.zeros((N,self.num_cmds), dtype=dtype, device=device)
B = bev.size(0)
# locs_jitter = (torch.rand((B,2))*2-1).float().to(locs.device) * (0 if is_eval else self.feature_x_jitter)
# locs_jitter[:,1] = 0
# oris_jitter = (torch.rand((B,))*2-1).float().to(oris.device) * (0 if is_eval else self.feature_angle_jitter)
# ego_locs = transform_points(ego_locs[:,1:]-locs_jitter[:,None], -oris_jitter)
# nxps = transform_points(nxps[:,None]-locs_jitter[:,None], -oris_jitter)[:,0]
# cropped_ego_bev = self.crop_feature(bev, locs_jitter, oris_jitter, pixels_per_meter=self.pixels_per_meter, crop_size=self.crop_size*2)
cropped_ego_bev = self.crop_feature(
bev,
torch.zeros((B,2), dtype=bev.dtype,device=bev.device),
torch.zeros((B,),dtype=bev.dtype,device=bev.device),
pixels_per_meter=self.pixels_per_meter,
crop_size=self.crop_size*2
)
# f, [ax1, ax2] = plt.subplots(1,2,figsize=(8,4))
# ax1.imshow(bev[0].mean(0).detach().cpu().numpy())
# ax2.imshow(cropped_ego_bev[0].mean(0).detach().cpu().numpy())
# plt.show()
ego_bev_embd = self.bev_conv_emb(cropped_ego_bev)
ego_cast_locs = self.cast(ego_bev_embd)
ego_plan_locs = self.plan(
ego_bev_embd, nxps,
cast_locs=ego_cast_locs,
pixels_per_meter=self.pixels_per_meter,
crop_size=self.crop_size*2
)
ego_cast_cmds = self.cast_cmd_pred(ego_bev_embd)
return (
other_locs, other_cast_locs, other_cast_cmds,
ego_plan_locs, ego_cast_locs, ego_cast_cmds
)
def _plan(self, embd, nxp, cast_locs, pixels_per_meter=4, crop_size=96):
B = embd.size(0)
h0, u0 = embd, nxp*pixels_per_meter/crop_size*2-1
self.plan_gru.flatten_parameters()
locs = []
for i in range(self.num_cmds):
u = torch.cat([
u0.expand(self.num_plan, B, -1).permute(1,0,2),
cast_locs[:,i]
], dim=2)
out, _ = self.plan_gru(u, h0[None])
locs.append(torch.cumsum(self.plan_mlp(out), dim=1))
return torch.stack(locs, dim=1) + cast_locs
def plan(self, embd, nxp, cast_locs=None, pixels_per_meter=4, crop_size=96):
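# Iteratively refine the command-conditioned trajectories: start from the
# detached cast() roll-outs and run _plan() num_plan_iter times, conditioned
# on the next target point nxp (normalized to grid coordinates inside _plan).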
if cast_locs is None:
plan_loc = self.cast(embd).detach()
else:
plan_loc = cast_locs.detach()
plan_locs = []
for i in range(self.num_plan_iter):
plan_loc = self._plan(embd, nxp, plan_loc, pixels_per_meter=pixels_per_meter, crop_size=crop_size)
plan_locs.append(plan_loc)
return torch.stack(plan_locs, dim=1)
def cast(self, embd):
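# Roll out one GRU head per high-level command; the cumulative sum turns the
# per-step displacement predictions into trajectories of length num_plan.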
B = embd.size(0)
u = embd.expand(self.num_plan, B, -1).permute(1,0,2)
locs = []
for gru, mlp in zip(self.cast_grus, self.cast_mlps):
gru.flatten_parameters()
out, _ = gru(u)
locs.append(torch.cumsum(mlp(out), dim=1))
return torch.stack(locs, dim=1)
def crop_feature(self, features, rel_locs, rel_oris, pixels_per_meter=4, crop_size=96):
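# Cuts a rotated, translated crop of the BEV feature map around each agent.
# The affine matrix theta below is what F.affine_grid expects: a scale of
# k = crop_size / H, a rotation by rel_oris, and a translation composed of the
# normalized rel_locs plus the configured ego offset (offset_x, offset_y).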
B, C, H, W = features.size()
# ERROR proof hack...
rel_locs = rel_locs.view(-1,2)
rel_locs = rel_locs * pixels_per_meter/torch.tensor([H/2,W/2]).type_as(rel_locs).to(rel_locs.device)
cos = torch.cos(rel_oris)
sin = torch.sin(rel_oris)
rel_x = rel_locs[...,0]
rel_y = rel_locs[...,1]
k = crop_size / H
# DEBUG
# cos = torch.ones_like(cos)
# sin = torch.zeros_like(sin)
# END DEBUG
# offset_x = self.offset_x +
rot_x_offset = -k*self.offset_x*cos+k*self.offset_y*sin+self.offset_x
rot_y_offset = -k*self.offset_x*sin-k*self.offset_y*cos+self.offset_y
# rel_x_offset = -k*self.offset_x*cos+k*self.offset_y*sin+self.offset_x
# rel_y_offset = -k*self.offset_x*sin-k*self.offset_y*cos+self.offset_y
# print (rel_x, rel_y)
theta = torch.stack([
torch.stack([k*cos, k*-sin, rot_x_offset+rel_x], dim=-1),
torch.stack([k*sin, k*cos, rot_y_offset+rel_y], dim=-1)
], dim=-2)
grids = F.affine_grid(theta, torch.Size((B,C,crop_size,crop_size)), align_corners=True)
# TODO: scale the grids??
cropped_features = F.grid_sample(features, grids, align_corners=True)
return cropped_features
def transform_points(locs, oris):
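# Rotates 2D points (row vectors) by the given angles via locs @ R; callers use
# it to express world-frame offsets in each agent's local (rotated) frame.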
cos, sin = torch.cos(oris), torch.sin(oris)
R = torch.stack([
torch.stack([ cos, sin], dim=-1),
torch.stack([-sin, cos], dim=-1),
], dim=-2)
return locs @ R
def filter_cars(ego_locs, locs, typs):
# We don't care about cars behind us ;)
rel_locs = locs[:,:,0] - ego_locs[:,0:1]
return typs & (rel_locs[...,1] < 0)
def random_sample(binaries, size):
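# Keeps at most `size` positive entries per row (chosen uniformly at random);
# rows with <= size positives pass through unchanged. Used above to bound the
# number of other cars processed per sample.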
cut_binaries = torch.zeros_like(binaries)
for i in range(binaries.size(0)):
if binaries[i].sum() <= size:
cut_binaries[i] = binaries[i]
else:
nonzero = torch.nonzero(binaries[i]).squeeze(1)
nonzero_idx = torch.multinomial(torch.ones_like(nonzero).float(), size)
nonzero = nonzero[nonzero_idx]
cut_binaries[i,nonzero] = binaries[i,nonzero]
return cut_binaries
| 35.150502 | 181 | 0.604662 |
296703d4e5382c8196e944ad3d2824ff4a3de5d4 | 6,418 | py | Python | testdata/bin/generate-test-vectors.py | AtScaleInc/Impala | 1073a9108220faffe61c3bc9dff7a953adde12df | [
"Apache-2.0"
] | 51 | 2015-01-02T04:10:26.000Z | 2020-11-21T16:33:19.000Z | testdata/bin/generate-test-vectors.py | AtScaleInc/Impala | 1073a9108220faffe61c3bc9dff7a953adde12df | [
"Apache-2.0"
] | 58 | 2015-01-29T15:52:19.000Z | 2016-04-19T08:19:02.000Z | testdata/bin/generate-test-vectors.py | AtScaleInc/Impala | 1073a9108220faffe61c3bc9dff7a953adde12df | [
"Apache-2.0"
] | 8 | 2015-03-16T11:03:41.000Z | 2019-07-11T06:39:31.000Z | #!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
#
# This script is used to generate test "vectors" based on a dimension input file.
# A vector in this context is simply a permutation of the values in the
# dimension input file. For example, in this case the script is generating test vectors
# for the Impala / Hive benchmark suite so interesting dimensions are data set,
# file format, and compression algorithm. More can be added later.
# The output of running this script is a list of vectors. Currently two different vector
# outputs are generated - an "exhaustive" vector which contains all permutations and a
# "pairwise" vector that contains a subset of the vectors by chosing all combinations of
# pairs (the pairwise strategy). More information about pairwise can be found at
# http://www.pairwise.org.
#
# The end goal is to have a reduced set of test vectors that provides coverage but doesn't take
# as long to run as the exhaustive set of vectors, along with a set of vectors that provides
# full coverage. This is especially important for benchmarks which work on very large data
# sets.
#
# The output files can then be read in by other scripts, tools, and tests.
# One major use case is the generate_schema_statements.py script, which uses the vector
# files to dynamically build schema for running benchmark and functional tests.
#
# The pairwise generation is done using the Python 'AllPairs' module. This module can be
# downloaded from http://pypi.python.org/pypi/AllPairs/2.0.1
#
import collections
import csv
import math
import os
import sys
from itertools import product
from optparse import OptionParser
import metacomm.combinatorics.all_pairs2
all_pairs = metacomm.combinatorics.all_pairs2.all_pairs2
parser = OptionParser()
parser.add_option("-w", "--workload", dest="workload",
help="The workload to generate test vectors for")
(options, args) = parser.parse_args()
if options.workload is None:
print "A workload name must be specified."
parser.print_help()
sys.exit(1)
WORKLOAD_DIR = os.environ['IMPALA_WORKLOAD_DIR']
# This array also defines the order of the dimension values. This ordering
# is important because it is used to apply constraints. Add new items to the
# end of the list.
KNOWN_DIMENSION_NAMES = ['file_format', 'dataset', 'compression_codec',
'compression_type']
FILE_FORMAT_IDX = KNOWN_DIMENSION_NAMES.index('file_format')
DATASET_IDX = KNOWN_DIMENSION_NAMES.index('dataset')
COMPRESSION_IDX = KNOWN_DIMENSION_NAMES.index('compression_codec')
COMPRESSION_TYPE_IDX = KNOWN_DIMENSION_NAMES.index('compression_type')
class VectorGenerator:
def __init__(self, input_vectors):
self.input_vectors = input_vectors
def generate_pairwise_matrix(self, filter_func = None):
if filter_func is None:
filter_func = lambda vector: True
return all_pairs(self.input_vectors, filter_func = is_valid_combination)
def generate_exhaustive_matrix(self, filter_func = None):
if filter_func is None:
filter_func = lambda vector: True
return [list(vec) for vec in product(*self.input_vectors) if filter_func(vec)]
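# Rough size illustration (hypothetical counts): with 4 file formats, 3 datasets,
# 4 codecs and 3 compression types, the exhaustive matrix has 4*3*4*3 = 144 rows
# before filtering, while a pairwise matrix typically needs only on the order of
# the largest pair product (4*4 = 16) to cover every value pair at least once.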
# Add vector value constraints to this function.
def is_valid_combination(vector):
if len(vector) == 4:
return not (
(vector[FILE_FORMAT_IDX] == 'text' and vector[COMPRESSION_IDX] in ['def']) or
(vector[FILE_FORMAT_IDX] != 'text' and vector[COMPRESSION_IDX] == 'lzo') or
(vector[COMPRESSION_IDX] == 'none' and vector[COMPRESSION_TYPE_IDX] != 'none') or
(vector[COMPRESSION_IDX] != 'none' and vector[COMPRESSION_TYPE_IDX] == 'none') or
(vector[FILE_FORMAT_IDX] != 'seq' and vector[COMPRESSION_TYPE_IDX] == 'record') or
(vector[FILE_FORMAT_IDX] == 'parquet' and vector[COMPRESSION_IDX] != 'none') or
(vector[FILE_FORMAT_IDX] == 'hbase' and vector[COMPRESSION_IDX] != 'none') or
(vector[FILE_FORMAT_IDX] == 'avro' and
vector[COMPRESSION_IDX] not in ['none', 'snap', 'def']))
# The pairwise generator may call this with different vector lengths. In that case this
# should always return true.
return True
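# Example outcomes under the constraints above (hypothetical dataset value):
#   is_valid_combination(['text', 'tpch', 'def', 'block'])  -> False (text + def)
#   is_valid_combination(['seq', 'tpch', 'snap', 'record']) -> True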
# Vector files have the format: <dimension name>: value1, value2, ... this function
# adds all specified dimensions to a map of dimension name-to-value
def read_dimension_file(file_name):
dimension_map = collections.defaultdict(list)
with open(file_name, 'rb') as input_file:
for line in input_file.readlines():
if line.strip().startswith('#'):
continue
values = line.split(':')
if len(values) != 2:
print 'Invalid dimension file format. Expected format is <dimension name>: val1,'\
' val2, ... Found: ' + line
sys.exit(1)
if not values[0] in KNOWN_DIMENSION_NAMES:
print 'Unknown dimension name: ' + values[0]
print 'Valid dimension names: ' + ', '.join(KNOWN_DIMENSION_NAMES)
sys.exit(1)
dimension_map[values[0]] = [val.strip() for val in values[1].split(',')]
return dimension_map
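# Example dimension file (hypothetical values):
#   file_format: text, seq, parquet
#   compression_codec: none, snap
# which read_dimension_file parses into
#   {'file_format': ['text', 'seq', 'parquet'], 'compression_codec': ['none', 'snap']}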
def write_vectors_to_csv(output_dir, output_file, matrix):
output_text = "# Generated File."
for row in matrix:
row = ['%s: %s' % (KNOWN_DIMENSION_NAMES[i], row[i]) for i in range(0, len(row))]
output_text += '\n' + ', '.join(row)
output_path = os.path.join(output_dir, output_file)
print 'Writing test vectors to: ' + output_path
with open(output_path, 'wb') as output_file:
output_file.write(output_text)
output_file.write('\n')
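# Each generated row pairs dimension names with values, one vector per line, e.g.
# (hypothetical): file_format: text, dataset: tpch, compression_codec: none,
# compression_type: none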
dimension_file = os.path.join(WORKLOAD_DIR, options.workload,
'%s_dimensions.csv' % options.workload)
if not os.path.isfile(dimension_file):
print 'Dimension file not found: ' + dimension_file
sys.exit(1)
print 'Reading dimension file: ' + dimension_file
vector_map = read_dimension_file(dimension_file)
vectors = []
# This ordering matters! We need to know the order to apply the proper constraints.
for dimension_name in KNOWN_DIMENSION_NAMES:
vectors.append(vector_map[dimension_name])
vg = VectorGenerator(vectors)
output_dir = os.path.join(WORKLOAD_DIR, options.workload)
write_vectors_to_csv(output_dir, '%s_pairwise.csv' % options.workload,
vg.generate_pairwise_matrix(is_valid_combination))
write_vectors_to_csv(output_dir, '%s_exhaustive.csv' % options.workload,
vg.generate_exhaustive_matrix(is_valid_combination))
| 44.569444 | 90 | 0.727797 |
05e741972f16bc0dadda991949d8be5fc742fc24 | 470 | py | Python | main.py | philippwiesner/compiler | 52c00221ace2cb56b9fba91b3949c14a6f0e4f68 | [
"MIT"
] | null | null | null | main.py | philippwiesner/compiler | 52c00221ace2cb56b9fba91b3949c14a6f0e4f68 | [
"MIT"
] | 8 | 2020-05-15T14:50:59.000Z | 2020-06-05T11:44:33.000Z | main.py | philippwiesner/compiler | 52c00221ace2cb56b9fba91b3949c14a6f0e4f68 | [
"MIT"
] | null | null | null | """Vega compiler"""
from argparse import ArgumentParser
from argparse import FileType
from vega.front_end.parser import Parser
from vega.front_end.exception import BaseError
if __name__ == "__main__":
parser = ArgumentParser(description="Compile")
parser.add_argument('code', type=FileType('r'))
args = parser.parse_args()
code = args.code
parser = Parser(code)
try:
parser.parse()
except BaseError as e:
print(e.message)
| 23.5 | 51 | 0.702128 |
4d0c6a83d91ab3ad10c2183cb544be446e21f64f | 1,070 | py | Python | tests/dags/test_legacy_job_operator.py | Fahadsaadullahkhan/KubernetesJobOperator | d96f9498667f937503d1e45142060904674f823f | [
"MIT"
] | null | null | null | tests/dags/test_legacy_job_operator.py | Fahadsaadullahkhan/KubernetesJobOperator | d96f9498667f937503d1e45142060904674f823f | [
"MIT"
] | null | null | null | tests/dags/test_legacy_job_operator.py | Fahadsaadullahkhan/KubernetesJobOperator | d96f9498667f937503d1e45142060904674f823f | [
"MIT"
] | null | null | null | from utils import default_args
from airflow import DAG
from airflow_kubernetes_job_operator.kubernetes_legacy_job_operator import KubernetesLegacyJobOperator
dag = DAG(
"kub-job-op-legacy",
default_args=default_args,
description="Test base job operator",
schedule_interval=None,
catchup=False,
)
bash_script = """
#!/usr/bin/env bash
echo "Starting"
TIC_COUNT=10
cur_count=0
while true; do
cur_count=$((cur_count + 1))
if [ "$cur_count" -ge "$TIC_COUNT" ]; then
break
fi
date
sleep 1
done
echo "Complete"
"""
# BashOperator(bash_command="date", task_id="test-bash", dag=dag)
op = KubernetesLegacyJobOperator(
task_id="legacy-test-job-success",
image="ubuntu",
cmds=["bash", "-c", bash_script],
dag=dag,
is_delete_operator_pod=True,
)
KubernetesLegacyJobOperator(
task_id="legacy-test-job-fail",
image="ubuntu",
cmds=["bash", "-c", bash_script + "\nexit 99"],
dag=dag,
is_delete_operator_pod=True,
)
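# Usage sketch (standard Airflow CLI; assumed, not part of this test file):
#   airflow dags trigger kub-job-op-legacy
# "legacy-test-job-success" runs the bash loop above to completion, while
# "legacy-test-job-fail" appends "exit 99" so the pod exits non-zero and the
# task is marked failed.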
if __name__ == "__main__":
dag.clear(reset_dag_runs=True)
dag.run()
| 20.576923 | 102 | 0.688785 |
a0aba84f36e369ba885173b99f7cf522802abca1 | 25,575 | py | Python | littlelambocoin/wallet/trade_manager.py | BTCgreen-Network/littlelambocoin-blockchain | 0e70a50dd11db08275d116929222dddbaeb0d6e6 | [
"Apache-2.0"
] | 5 | 2022-01-10T08:12:19.000Z | 2022-03-14T07:20:30.000Z | littlelambocoin/wallet/trade_manager.py | BTCgreen-Network/littlelambocoin-blockchain | 0e70a50dd11db08275d116929222dddbaeb0d6e6 | [
"Apache-2.0"
] | 5 | 2022-01-18T18:17:29.000Z | 2022-03-01T18:14:44.000Z | littlelambocoin/wallet/trade_manager.py | BTCgreen-Network/littlelambocoin-blockchain | 0e70a50dd11db08275d116929222dddbaeb0d6e6 | [
"Apache-2.0"
] | 5 | 2022-01-11T03:19:43.000Z | 2022-03-20T19:42:28.000Z | import dataclasses
import logging
import time
import traceback
from typing import Any, Dict, List, Optional, Tuple, Union, Set
from littlelambocoin.protocols.wallet_protocol import CoinState
from littlelambocoin.types.blockchain_format.coin import Coin
from littlelambocoin.types.blockchain_format.program import Program
from littlelambocoin.types.blockchain_format.sized_bytes import bytes32
from littlelambocoin.types.spend_bundle import SpendBundle
from littlelambocoin.util.db_wrapper import DBWrapper
from littlelambocoin.util.hash import std_hash
from littlelambocoin.util.ints import uint32, uint64
from littlelambocoin.wallet.cat_wallet.cat_wallet import CATWallet
from littlelambocoin.wallet.payment import Payment
from littlelambocoin.wallet.trade_record import TradeRecord
from littlelambocoin.wallet.trading.offer import Offer, NotarizedPayment
from littlelambocoin.wallet.trading.trade_status import TradeStatus
from littlelambocoin.wallet.trading.trade_store import TradeStore
from littlelambocoin.wallet.transaction_record import TransactionRecord
from littlelambocoin.wallet.util.transaction_type import TransactionType
from littlelambocoin.wallet.util.wallet_types import WalletType
from littlelambocoin.wallet.wallet import Wallet
from littlelambocoin.wallet.wallet_coin_record import WalletCoinRecord
class TradeManager:
wallet_state_manager: Any
log: logging.Logger
trade_store: TradeStore
@staticmethod
async def create(
wallet_state_manager: Any,
db_wrapper: DBWrapper,
name: str = None,
):
self = TradeManager()
if name:
self.log = logging.getLogger(name)
else:
self.log = logging.getLogger(__name__)
self.wallet_state_manager = wallet_state_manager
self.trade_store = await TradeStore.create(db_wrapper)
return self
async def get_offers_with_status(self, status: TradeStatus) -> List[TradeRecord]:
records = await self.trade_store.get_trade_record_with_status(status)
return records
async def get_coins_of_interest(
self,
) -> Dict[bytes32, Coin]:
"""
        Returns the list of coins we want to check for inclusion in the filter.
        These will include coins that belong to us and coins on the other side of the trade.
"""
all_pending = []
pending_accept = await self.get_offers_with_status(TradeStatus.PENDING_ACCEPT)
pending_confirm = await self.get_offers_with_status(TradeStatus.PENDING_CONFIRM)
pending_cancel = await self.get_offers_with_status(TradeStatus.PENDING_CANCEL)
all_pending.extend(pending_accept)
all_pending.extend(pending_confirm)
all_pending.extend(pending_cancel)
interested_dict = {}
for trade in all_pending:
for coin in trade.coins_of_interest:
interested_dict[coin.name()] = coin
return interested_dict
async def get_trade_by_coin(self, coin: Coin) -> Optional[TradeRecord]:
all_trades = await self.get_all_trades()
for trade in all_trades:
if trade.status == TradeStatus.CANCELLED.value:
continue
if coin in trade.coins_of_interest:
return trade
return None
async def coins_of_interest_farmed(self, coin_state: CoinState, fork_height: Optional[uint32]):
"""
        If both our coins and the other side's coins were removed, the trade was successfully executed.
        If coins from the other side of the trade were farmed without ours, the trade failed because either
        someone else completed it or the other side canceled the trade by doing a spend.
        If our coins were farmed but the other side's were not, we successfully canceled the trade by spending our inputs.
"""
self.log.info(f"coins_of_interest_farmed: {coin_state}")
trade = await self.get_trade_by_coin(coin_state.coin)
if trade is None:
self.log.error(f"Coin: {coin_state.coin}, not in any trade")
return
if coin_state.spent_height is None:
self.log.error(f"Coin: {coin_state.coin}, has not been spent so trade can remain valid")
# Then let's filter the offer into coins that WE offered
offer = Offer.from_bytes(trade.offer)
primary_coin_ids = [c.name() for c in offer.get_primary_coins()]
our_coin_records: List[WalletCoinRecord] = await self.wallet_state_manager.coin_store.get_multiple_coin_records(
primary_coin_ids
)
our_primary_coins: List[bytes32] = [cr.coin.name() for cr in our_coin_records]
all_settlement_payments: List[Coin] = [c for coins in offer.get_offered_coins().values() for c in coins]
our_settlement_payments: List[Coin] = list(
filter(lambda c: offer.get_root_removal(c).name() in our_primary_coins, all_settlement_payments)
)
our_settlement_ids: List[bytes32] = [c.name() for c in our_settlement_payments]
# And get all relevant coin states
coin_states = await self.wallet_state_manager.wallet_node.get_coin_state(our_settlement_ids, fork_height)
assert coin_states is not None
coin_state_names: List[bytes32] = [cs.coin.name() for cs in coin_states]
# If any of our settlement_payments were spent, this offer was a success!
if set(our_settlement_ids) & set(coin_state_names):
height = coin_states[0].spent_height
await self.trade_store.set_status(trade.trade_id, TradeStatus.CONFIRMED, True, height)
tx_records: List[TransactionRecord] = await self.calculate_tx_records_for_offer(offer, False)
for tx in tx_records:
if TradeStatus(trade.status) == TradeStatus.PENDING_ACCEPT:
await self.wallet_state_manager.add_transaction(
dataclasses.replace(tx, confirmed_at_height=height, confirmed=True), in_transaction=True
)
self.log.info(f"Trade with id: {trade.trade_id} confirmed at height: {height}")
else:
# In any other scenario this trade failed
await self.wallet_state_manager.delete_trade_transactions(trade.trade_id)
if trade.status == TradeStatus.PENDING_CANCEL.value:
await self.trade_store.set_status(trade.trade_id, TradeStatus.CANCELLED, True)
self.log.info(f"Trade with id: {trade.trade_id} canceled")
elif trade.status == TradeStatus.PENDING_CONFIRM.value:
await self.trade_store.set_status(trade.trade_id, TradeStatus.FAILED, True)
self.log.warning(f"Trade with id: {trade.trade_id} failed")
async def get_locked_coins(self, wallet_id: int = None) -> Dict[bytes32, WalletCoinRecord]:
"""Returns a dictionary of confirmed coins that are locked by a trade."""
all_pending = []
pending_accept = await self.get_offers_with_status(TradeStatus.PENDING_ACCEPT)
pending_confirm = await self.get_offers_with_status(TradeStatus.PENDING_CONFIRM)
pending_cancel = await self.get_offers_with_status(TradeStatus.PENDING_CANCEL)
all_pending.extend(pending_accept)
all_pending.extend(pending_confirm)
all_pending.extend(pending_cancel)
coins_of_interest = []
for trade_offer in all_pending:
coins_of_interest.extend([c.name() for c in Offer.from_bytes(trade_offer.offer).get_involved_coins()])
result = {}
coin_records = await self.wallet_state_manager.coin_store.get_multiple_coin_records(coins_of_interest)
for record in coin_records:
if wallet_id is None or record.wallet_id == wallet_id:
result[record.name()] = record
return result
async def get_all_trades(self):
all: List[TradeRecord] = await self.trade_store.get_all_trades()
return all
async def get_trade_by_id(self, trade_id: bytes32) -> Optional[TradeRecord]:
record = await self.trade_store.get_trade_record(trade_id)
return record
async def cancel_pending_offer(self, trade_id: bytes32):
await self.trade_store.set_status(trade_id, TradeStatus.CANCELLED, False)
self.wallet_state_manager.state_changed("offer_cancelled")
async def cancel_pending_offer_safely(
self, trade_id: bytes32, fee: uint64 = uint64(0)
) -> Optional[List[TransactionRecord]]:
"""This will create a transaction that includes coins that were offered"""
self.log.info(f"Secure-Cancel pending offer with id trade_id {trade_id.hex()}")
trade = await self.trade_store.get_trade_record(trade_id)
if trade is None:
return None
all_txs: List[TransactionRecord] = []
fee_to_pay: uint64 = fee
for coin in Offer.from_bytes(trade.offer).get_primary_coins():
wallet = await self.wallet_state_manager.get_wallet_for_coin(coin.name())
if wallet is None:
continue
new_ph = await wallet.get_new_puzzlehash()
# This should probably not switch on whether or not we're spending a CAT but it has to for now
if wallet.type() == WalletType.CAT:
txs = await wallet.generate_signed_transaction(
[coin.amount], [new_ph], fee=fee_to_pay, coins={coin}, ignore_max_send_amount=True
)
all_txs.extend(txs)
else:
if fee_to_pay > coin.amount:
selected_coins: Set[Coin] = await wallet.select_coins(
uint64(fee_to_pay - coin.amount),
exclude=[coin],
)
selected_coins.add(coin)
else:
selected_coins = {coin}
tx = await wallet.generate_signed_transaction(
uint64(sum([c.amount for c in selected_coins]) - fee_to_pay),
new_ph,
fee=fee_to_pay,
coins=selected_coins,
ignore_max_send_amount=True,
)
all_txs.append(tx)
fee_to_pay = uint64(0)
cancellation_addition = Coin(coin.name(), new_ph, coin.amount)
all_txs.append(
TransactionRecord(
confirmed_at_height=uint32(0),
created_at_time=uint64(int(time.time())),
to_puzzle_hash=new_ph,
amount=coin.amount,
fee_amount=fee,
confirmed=False,
sent=uint32(10),
spend_bundle=None,
additions=[cancellation_addition],
removals=[coin],
wallet_id=wallet.id(),
sent_to=[],
trade_id=None,
type=uint32(TransactionType.INCOMING_TX.value),
name=cancellation_addition.name(),
memos=[],
)
)
for tx in all_txs:
await self.wallet_state_manager.add_pending_transaction(tx_record=dataclasses.replace(tx, fee_amount=fee))
await self.trade_store.set_status(trade_id, TradeStatus.PENDING_CANCEL, False)
return all_txs
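    # Note: the "safe" cancellation above spends every primary coin we offered
    # back to ourselves; once any offered coin is spent on chain, the offer's
    # spend bundle becomes a double spend and can no longer be included, so no
    # counterparty can complete the trade.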
async def save_trade(self, trade: TradeRecord):
await self.trade_store.add_trade_record(trade, False)
self.wallet_state_manager.state_changed("offer_added")
async def create_offer_for_ids(
self, offer: Dict[Union[int, bytes32], int], fee: uint64 = uint64(0), validate_only: bool = False
) -> Tuple[bool, Optional[TradeRecord], Optional[str]]:
success, created_offer, error = await self._create_offer_for_ids(offer, fee=fee)
if not success or created_offer is None:
raise Exception(f"Error creating offer: {error}")
now = uint64(int(time.time()))
trade_offer: TradeRecord = TradeRecord(
confirmed_at_index=uint32(0),
accepted_at_time=None,
created_at_time=now,
is_my_offer=True,
sent=uint32(0),
offer=bytes(created_offer),
taken_offer=None,
coins_of_interest=created_offer.get_involved_coins(),
trade_id=created_offer.name(),
status=uint32(TradeStatus.PENDING_ACCEPT.value),
sent_to=[],
)
if success is True and trade_offer is not None and not validate_only:
await self.save_trade(trade_offer)
return success, trade_offer, error
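    # Example (hypothetical ids/amounts): offer 100 units from wallet 1 and
    # request 5 units of a CAT identified by its asset id -- negative amounts
    # are offered (spent), positive amounts are requested:
    #   success, trade, error = await trade_manager.create_offer_for_ids(
    #       {1: -100, cat_asset_id: 5}, fee=uint64(0)
    #   )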
async def _create_offer_for_ids(
self, offer_dict: Dict[Union[int, bytes32], int], fee: uint64 = uint64(0)
) -> Tuple[bool, Optional[Offer], Optional[str]]:
"""
        Offer is a dictionary mapping wallet ids (or asset ids) to amounts
"""
try:
coins_to_offer: Dict[uint32, List[Coin]] = {}
requested_payments: Dict[Optional[bytes32], List[Payment]] = {}
for id, amount in offer_dict.items():
if amount > 0:
if isinstance(id, int):
wallet_id = uint32(id)
wallet = self.wallet_state_manager.wallets[wallet_id]
p2_ph: bytes32 = await wallet.get_new_puzzlehash()
if wallet.type() == WalletType.STANDARD_WALLET:
key: Optional[bytes32] = None
memos: List[bytes] = []
elif wallet.type() == WalletType.CAT:
key = bytes32(bytes.fromhex(wallet.get_asset_id()))
memos = [p2_ph]
else:
raise ValueError(f"Offers are not implemented for {wallet.type()}")
else:
p2_ph = await self.wallet_state_manager.main_wallet.get_new_puzzlehash()
key = id
memos = [p2_ph]
requested_payments[key] = [Payment(p2_ph, uint64(amount), memos)]
elif amount < 0:
assert isinstance(id, int)
wallet_id = uint32(id)
wallet = self.wallet_state_manager.wallets[wallet_id]
balance = await wallet.get_confirmed_balance()
if balance < abs(amount):
raise Exception(f"insufficient funds in wallet {wallet_id}")
coins_to_offer[wallet_id] = await wallet.select_coins(uint64(abs(amount)))
elif amount == 0:
raise ValueError("You cannot offer nor request 0 amount of something")
all_coins: List[Coin] = [c for coins in coins_to_offer.values() for c in coins]
notarized_payments: Dict[Optional[bytes32], List[NotarizedPayment]] = Offer.notarize_payments(
requested_payments, all_coins
)
announcements_to_assert = Offer.calculate_announcements(notarized_payments)
all_transactions: List[TransactionRecord] = []
fee_left_to_pay: uint64 = fee
for wallet_id, selected_coins in coins_to_offer.items():
wallet = self.wallet_state_manager.wallets[wallet_id]
# This should probably not switch on whether or not we're spending a CAT but it has to for now
if wallet.type() == WalletType.CAT:
txs = await wallet.generate_signed_transaction(
[abs(offer_dict[int(wallet_id)])],
[Offer.ph()],
fee=fee_left_to_pay,
coins=set(selected_coins),
puzzle_announcements_to_consume=announcements_to_assert,
)
all_transactions.extend(txs)
else:
tx = await wallet.generate_signed_transaction(
abs(offer_dict[int(wallet_id)]),
Offer.ph(),
fee=fee_left_to_pay,
coins=set(selected_coins),
puzzle_announcements_to_consume=announcements_to_assert,
)
all_transactions.append(tx)
fee_left_to_pay = uint64(0)
transaction_bundles: List[Optional[SpendBundle]] = [tx.spend_bundle for tx in all_transactions]
total_spend_bundle = SpendBundle.aggregate(list(filter(lambda b: b is not None, transaction_bundles)))
offer = Offer(notarized_payments, total_spend_bundle)
return True, offer, None
except Exception as e:
tb = traceback.format_exc()
self.log.error(f"Error with creating trade offer: {type(e)}{tb}")
return False, None, str(e)
async def maybe_create_wallets_for_offer(self, offer: Offer):
for key in offer.arbitrage():
wsm = self.wallet_state_manager
wallet: Wallet = wsm.main_wallet
if key is None:
continue
exists: Optional[Wallet] = await wsm.get_wallet_for_asset_id(key.hex())
if exists is None:
self.log.info(f"Creating wallet for asset ID: {key}")
await CATWallet.create_wallet_for_cat(wsm, wallet, key.hex())
async def check_offer_validity(self, offer: Offer) -> bool:
all_removals: List[Coin] = offer.bundle.removals()
all_removal_names: List[bytes32] = [c.name() for c in all_removals]
non_ephemeral_removals: List[Coin] = list(
filter(lambda c: c.parent_coin_info not in all_removal_names, all_removals)
)
coin_states = await self.wallet_state_manager.wallet_node.get_coin_state(
[c.name() for c in non_ephemeral_removals]
)
return len(coin_states) == len(non_ephemeral_removals) and all([cs.spent_height is None for cs in coin_states])
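    # Note on the filter above: coins created and spent within the same bundle
    # ("ephemeral" coins) can never be looked up as unspent on-chain state, so
    # only removals whose parent coin lies outside the bundle are verified.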
async def calculate_tx_records_for_offer(self, offer: Offer, validate: bool) -> List[TransactionRecord]:
if validate:
final_spend_bundle: SpendBundle = offer.to_valid_spend()
else:
final_spend_bundle = offer.bundle
settlement_coins: List[Coin] = [c for coins in offer.get_offered_coins().values() for c in coins]
settlement_coin_ids: List[bytes32] = [c.name() for c in settlement_coins]
additions: List[Coin] = final_spend_bundle.not_ephemeral_additions()
removals: List[Coin] = final_spend_bundle.removals()
all_fees = uint64(final_spend_bundle.fees())
txs = []
addition_dict: Dict[uint32, List[Coin]] = {}
for addition in additions:
wallet_info = await self.wallet_state_manager.get_wallet_id_for_puzzle_hash(addition.puzzle_hash)
if wallet_info is not None:
wallet_id, _ = wallet_info
if addition.parent_coin_info in settlement_coin_ids:
wallet = self.wallet_state_manager.wallets[wallet_id]
to_puzzle_hash = await wallet.convert_puzzle_hash(addition.puzzle_hash)
txs.append(
TransactionRecord(
confirmed_at_height=uint32(0),
created_at_time=uint64(int(time.time())),
to_puzzle_hash=to_puzzle_hash,
amount=addition.amount,
fee_amount=uint64(0),
confirmed=False,
sent=uint32(10),
spend_bundle=None,
additions=[addition],
removals=[],
wallet_id=wallet_id,
sent_to=[],
trade_id=offer.name(),
type=uint32(TransactionType.INCOMING_TRADE.value),
name=std_hash(final_spend_bundle.name() + addition.name()),
memos=[],
)
)
else: # This is change
addition_dict.setdefault(wallet_id, [])
addition_dict[wallet_id].append(addition)
# While we want additions to show up as separate records, removals of the same wallet should show as one
removal_dict: Dict[uint32, List[Coin]] = {}
for removal in removals:
wallet_info = await self.wallet_state_manager.get_wallet_id_for_puzzle_hash(removal.puzzle_hash)
if wallet_info is not None:
wallet_id, _ = wallet_info
removal_dict.setdefault(wallet_id, [])
removal_dict[wallet_id].append(removal)
for wid, grouped_removals in removal_dict.items():
wallet = self.wallet_state_manager.wallets[wid]
            to_puzzle_hash = bytes32([1] * 32)  # We use a constant dummy hash (all 1-bytes) to be clear not to send here
removal_tree_hash = Program.to([rem.as_list() for rem in grouped_removals]).get_tree_hash()
# We also need to calculate the sent amount
removed: int = sum(c.amount for c in grouped_removals)
change_coins: List[Coin] = addition_dict[wid] if wid in addition_dict else []
change_amount: int = sum(c.amount for c in change_coins)
sent_amount: int = removed - change_amount
txs.append(
TransactionRecord(
confirmed_at_height=uint32(0),
created_at_time=uint64(int(time.time())),
to_puzzle_hash=to_puzzle_hash,
amount=uint64(sent_amount),
fee_amount=all_fees,
confirmed=False,
sent=uint32(10),
spend_bundle=None,
additions=change_coins,
removals=grouped_removals,
wallet_id=wallet.id(),
sent_to=[],
trade_id=offer.name(),
type=uint32(TransactionType.OUTGOING_TRADE.value),
name=std_hash(final_spend_bundle.name() + removal_tree_hash),
memos=[],
)
)
return txs
async def respond_to_offer(self, offer: Offer, fee=uint64(0)) -> Tuple[bool, Optional[TradeRecord], Optional[str]]:
take_offer_dict: Dict[Union[bytes32, int], int] = {}
arbitrage: Dict[Optional[bytes32], int] = offer.arbitrage()
for asset_id, amount in arbitrage.items():
if asset_id is None:
wallet = self.wallet_state_manager.main_wallet
key: Union[bytes32, int] = int(wallet.id())
else:
wallet = await self.wallet_state_manager.get_wallet_for_asset_id(asset_id.hex())
if wallet is None and amount < 0:
return False, None, f"Do not have a CAT of asset ID: {asset_id} to fulfill offer"
elif wallet is None:
key = asset_id
else:
key = int(wallet.id())
take_offer_dict[key] = amount
# First we validate that all of the coins in this offer exist
valid: bool = await self.check_offer_validity(offer)
if not valid:
return False, None, "This offer is no longer valid"
success, take_offer, error = await self._create_offer_for_ids(take_offer_dict, fee=fee)
if not success or take_offer is None:
return False, None, error
complete_offer = Offer.aggregate([offer, take_offer])
assert complete_offer.is_valid()
final_spend_bundle: SpendBundle = complete_offer.to_valid_spend()
await self.maybe_create_wallets_for_offer(complete_offer)
tx_records: List[TransactionRecord] = await self.calculate_tx_records_for_offer(complete_offer, True)
trade_record: TradeRecord = TradeRecord(
confirmed_at_index=uint32(0),
accepted_at_time=uint64(int(time.time())),
created_at_time=uint64(int(time.time())),
is_my_offer=False,
sent=uint32(0),
offer=bytes(complete_offer),
taken_offer=bytes(offer),
coins_of_interest=complete_offer.get_involved_coins(),
trade_id=complete_offer.name(),
status=uint32(TradeStatus.PENDING_CONFIRM.value),
sent_to=[],
)
await self.save_trade(trade_record)
# Dummy transaction for the sake of the wallet push
push_tx = TransactionRecord(
confirmed_at_height=uint32(0),
created_at_time=uint64(int(time.time())),
to_puzzle_hash=bytes32([1] * 32),
amount=uint64(0),
fee_amount=uint64(0),
confirmed=False,
sent=uint32(0),
spend_bundle=final_spend_bundle,
additions=[],
removals=[],
wallet_id=uint32(0),
sent_to=[],
trade_id=bytes32([1] * 32),
type=uint32(TransactionType.OUTGOING_TRADE.value),
name=final_spend_bundle.name(),
memos=[],
)
await self.wallet_state_manager.add_pending_transaction(push_tx)
for tx in tx_records:
await self.wallet_state_manager.add_transaction(tx)
return True, trade_record, None
| 47.273567 | 120 | 0.614233 |
8c6416dbd4bba5fa91f842726a68d732b3836128 | 7,737 | py | Python | tests/test_http_api.py | moeoverflow/BGmi | d3128e78976f81fd75a4d2587a8e69f5cc28dbf6 | [
"MIT"
] | 1 | 2021-05-10T17:10:36.000Z | 2021-05-10T17:10:36.000Z | tests/test_http_api.py | moeoverflow/BGmi | d3128e78976f81fd75a4d2587a8e69f5cc28dbf6 | [
"MIT"
] | null | null | null | tests/test_http_api.py | moeoverflow/BGmi | d3128e78976f81fd75a4d2587a8e69f5cc28dbf6 | [
"MIT"
] | null | null | null | import json
import logging
import os
import random
import string
from bgmi.config import ADMIN_TOKEN, SAVE_PATH
from bgmi.front.server import make_app
from tornado.testing import AsyncHTTPTestCase
logging.basicConfig(level=logging.DEBUG)
def random_word(length):
letters = string.ascii_lowercase
return "".join(random.choice(letters) for i in range(length))
logger = logging.getLogger()
logger.setLevel(logging.ERROR)
class ApiTestCase(AsyncHTTPTestCase):
headers = {"BGmi-Token": ADMIN_TOKEN, "Content-Type": "application/json"}
bangumi_1 = str(os.environ.get("BANGUMI_1"))
bangumi_2 = str(os.environ.get("BANGUMI_2"))
bangumi_3 = str(os.environ.get("BANGUMI_3"))
def get_app(self):
self.app = make_app(debug=False)
return self.app
def test_a_auth(self):
r = self.fetch(
"/api/auth", method="POST", body=json.dumps({"token": ADMIN_TOKEN})
)
self.assertEqual(r.code, 200)
res = self.parse_response(r)
self.assertEqual(res["status"], "success")
r = self.fetch("/api/auth", method="POST", body=json.dumps({"token": "3"}))
self.assertEqual(r.code, 400)
res = self.parse_response(r)
self.assertEqual(res["status"], "error")
def test_a_cal(self):
r = self.fetch("/api/cal", method="GET")
res = self.parse_response(r)
self.assertIsInstance(res["data"], dict)
def test_b_add(self):
r = self.fetch(
"/api/add",
method="POST",
headers=self.headers,
body=json.dumps({"name": self.bangumi_1,}),
)
self.assertEqual(r.code, 200)
r = self.fetch(
"/api/add",
method="POST",
headers=self.headers,
body=json.dumps({"name": self.bangumi_1,}),
)
self.assertEqual(r.code, 200)
r = self.parse_response(r)
self.assertEqual(r["status"], "warning")
r = self.fetch(
"/api/add",
method="POST",
headers=self.headers,
body=json.dumps({"name": self.bangumi_2,}),
)
self.assertEqual(r.code, 200)
def test_c_delete(self):
r = self.fetch(
"/api/add",
method="POST",
headers=self.headers,
body=json.dumps({"name": self.bangumi_2,}),
)
self.assertEqual(r.code, 200)
r = self.parse_response(r)
self.assertEqual(r["status"], "warning")
r = self.fetch(
"/api/add",
method="POST",
headers=self.headers,
body=json.dumps({"name": self.bangumi_2,}),
)
self.assertEqual(r.code, 200)
r = self.parse_response(r)
self.assertEqual(r["status"], "warning")
r = self.fetch(
"/api/add",
method="POST",
headers=self.headers,
body=json.dumps({"name": self.bangumi_2,}),
)
self.assertEqual(r.code, 200)
r = self.parse_response(r)
self.assertEqual(r["status"], "warning")
def test_e_mark(self):
episode = random.randint(0, 10)
self.fetch(
"/api/mark",
method="POST",
headers=self.headers,
body=json.dumps({"name": self.bangumi_1, "episode": episode}),
)
r = self.fetch("/api/index", method="GET")
self.assertEqual(r.code, 200)
res = self.parse_response(r)
bg_dict = {}
for item in res["data"]:
if item["bangumi_name"] == self.bangumi_1:
bg_dict = item
break
self.assertEqual(bg_dict["bangumi_name"], self.bangumi_1)
self.assertEqual(bg_dict["episode"], episode)
def test_d_filter(self):
include = random_word(5)
exclude = random_word(5)
regex = random_word(5)
r = self.fetch(
"/api/filter",
method="POST",
body=json.dumps({"name": self.bangumi_1,}),
headers=self.headers,
)
self.assertEqual(r.code, 200)
res = self.parse_response(r)
self.assertEqual(res["status"], "success")
if len(res["data"]["subtitle_group"]) >= 2:
subtitle_group = res["data"]["subtitle_group"][:1]
else:
subtitle_group = res["data"]["subtitle_group"][:0]
subtitle = ",".join(subtitle_group)
r = self.fetch(
"/api/filter",
method="POST",
body=json.dumps(
{
"name": self.bangumi_1,
"include": include,
"regex": regex,
"exclude": exclude,
"subtitle": subtitle,
}
),
headers=self.headers,
)
r = self.fetch(
"/api/filter",
method="POST",
body=json.dumps({"name": self.bangumi_1,}),
headers=self.headers,
)
res = self.parse_response(r)
self.assertEqual(r.code, 200)
self.assertEqual(res["status"], "success")
self.assertEqual(res["data"]["name"], self.bangumi_1)
self.assertEqual(res["data"]["include"], include)
self.assertEqual(res["data"]["regex"], regex)
self.assertEqual(res["data"]["exclude"], exclude)
r = self.fetch(
"/api/filter",
method="POST",
body=json.dumps({"name": self.bangumi_3, "regex": ".*", "subtitle": "",}),
headers=self.headers,
)
self.assertEqual(r.code, 400)
self.assertEqual(self.parse_response(r)["status"], "error")
print(subtitle_group)
self.assertFalse(bool(list(set(subtitle_group) - set(res["data"]["followed"]))))
# for item in subtitle_group:
# self.assertIn(item, res['data']['followed'])
# for item in res['data']['followed']:
# self.assertIn(item, subtitle_group)
def test_e_index(self):
save_dir = os.path.join(SAVE_PATH)
episode1_dir = os.path.join(save_dir, self.bangumi_1, "1", "episode1")
if not os.path.exists(episode1_dir):
os.makedirs(episode1_dir)
open(os.path.join(episode1_dir, "1.mp4"), "a").close()
episode2_dir = os.path.join(save_dir, self.bangumi_1, "2")
if not os.path.exists(episode2_dir):
os.makedirs(episode2_dir)
open(os.path.join(episode2_dir, "2.mkv"), "a").close()
response = self.fetch("/api/index", method="GET")
self.assertEqual(response.code, 200)
r = self.parse_response(response)
episode_list = [x for x in r["data"] if x["bangumi_name"] == self.bangumi_1]
bangumi_dict = next(iter(episode_list or []), {})
self.assertIn("1", bangumi_dict["player"].keys())
self.assertEqual(
bangumi_dict["player"]["1"]["path"],
"/{}/1/episode1/1.mp4".format(self.bangumi_1),
)
self.assertIn("2", bangumi_dict["player"].keys())
self.assertEqual(
bangumi_dict["player"]["2"]["path"], "/{}/2/2.mkv".format(self.bangumi_1)
)
def test_resource_ics(self):
r = self.fetch("/resource/feed.xml")
self.assertEqual(r.code, 200)
def test_resource_feed(self):
r = self.fetch("/resource/calendar.ics")
self.assertEqual(r.code, 200)
def test_no_auth(self):
r = self.fetch(
"/api/add", method="POST", body=json.dumps({"name": self.bangumi_1})
)
self.assertEqual(r.code, 401)
@staticmethod
def parse_response(response):
r = json.loads(response.body.decode("utf-8"))
return r
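# Usage sketch (assumed environment): BANGUMI_1..BANGUMI_3 must name bangumi that
# exist in the configured data source before running, e.g.
#   BANGUMI_1=... BANGUMI_2=... BANGUMI_3=... python -m pytest tests/test_http_api.py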
| 32.103734 | 88 | 0.55086 |
9b1acad16c4e8fb1b24bf39885937dfbd9d7c24e | 22,976 | py | Python | src/backend/kandbox_planner/fsm_adapter/toy_generator/london_service_generator.py | qiyangduan/kandbox_planner | 4785a76443bd4b8f25aeb36e03945daedb165c43 | [
"Apache-2.0"
] | 1 | 2020-05-03T21:26:43.000Z | 2020-05-03T21:26:43.000Z | src/backend/kandbox_planner/fsm_adapter/toy_generator/london_service_generator.py | qiyangduan/kandbox_planner | 4785a76443bd4b8f25aeb36e03945daedb165c43 | [
"Apache-2.0"
] | 11 | 2020-11-13T18:48:37.000Z | 2022-03-12T00:26:37.000Z | src/backend/kandbox_planner/fsm_adapter/toy_generator/london_service_generator.py | qiyangduan/kandbox_planner | 4785a76443bd4b8f25aeb36e03945daedb165c43 | [
"Apache-2.0"
] | null | null | null | # https://stackoverflow.com/questions/19069701/python-requests-library-how-to-pass-authorization-header-with-single-token
# https://www.quora.com/How-can-I-make-an-API-call-with-basic-HTTP-authentication-using-Python
import requests
import pandas as pd
from kandbox_planner.fsm_adapter.kplanner_db_adapter import KPlannerDBAdapter
from kandbox_planner.fsm_adapter.kplanner_api_adapter import KPlannerAPIAdapter
from kandbox_planner.planner_engine.feature_calc import KPlannerFeatureCalculator
from kandbox_planner.planner_engine.opti1day.opti1day_planner import Opti1DayPlanner
from kandbox_planner.planner_engine.rl.all_rl_planners import rl_run_all
kplanner_db = KPlannerDBAdapter()
import os
kplanner_service_url = os.getenv ('DOMAIN','http://127.0.0.1:8000')
kplanner_api = KPlannerAPIAdapter(service_url = kplanner_service_url)
import random
# workers, workers_id_dict = kplanner_db.load_workers(start_day = start_day, nbr_days = nbr_days_in_batch)
import kandbox_planner.util.planner_date_util as date_util
# from kandbox_planner.planner_engine.toy_planner_optimizer.worker_order_gps_planner import dispatch_jobs
# Sample Basic Auth Url with login values as username and password
from datetime import datetime
from datetime import timedelta
from random import seed
from random import randint
# seed random number generator
seed(1978)
from pprint import pprint
# import config.settings.local as config
import kandbox_planner.config as config
KANDBOX_DATE_FORMAT = config.KANDBOX_DATE_FORMAT # '%Y%m%d'
JOB_GPS_LIST = [
[ '51.447250,-0.189370', '11 Garratt Ln, London SW18 4AQ'],
[ '51.456250,-0.201050', 'The Pumphouse, Lebanon Rd, London SW18 1RE'],
[ '51.460870,-0.177520', 'St Johns Hill, Battersea, London SW11 2QP'],
[ '51.511130,-0.122040', 'Davidson Building, London WC2E 7HA'],
[ '51.422690,-0.202560', 'Unit 115 Centre Court, 4 Queens Road, Wimbledon, SW19 8YA'],
[ '51.487630,-0.178230', 'Waterfront Dr, Fulham, London SW10 0QD'],
[ '51.493350,-0.168370', '89 Sloane Ave, Chelsea, London SW3 3DX'],
[ '51.5115480,0.001055268', 'Leamouth Road, Poplar, London E14 0JG'],
[ '51.4059354,-0.140786051', 'Rown Road, London SW16 5JF'],
[ '51.5081003,0.079313135', 'Atlantis Avenue, London E16 2BF'],
[ '51.433456,-0.167078571', '-0.16707857157978992:51.433456197551315'],
[ '51.5079220,0.054734676', 'Royal Albert Way, London E16 2QU'],
[ '51.4853844,0.009171047', 'Vanbrugh Hill, Greenwich SE10 0DGs'],
[ '51.50744689,0.065910331', 'Docklands Campus, University Way, London E16 2RD'],
[ '51.31248841,-0.147364031', 'Cane Hill, Coulsdon, South Croydon CR5 3YL'],
]
WORKER_GPS_LIST = [
['51.44627780733451,-0.1960066034365281' ,'-0.1960066034365281:51.44627780733451']
,['51.50874451583848,-0.1232015238018501' ,'-0.12320152380185012:51.50874451583848']
,['51.43884800126409,-0.1727406536075019' ,'-0.1727406536075019:51.43884800126409']
,['51.45238536979151,-0.1455964578957485' ,'-0.14559645789574854:51.45238536979151']
,['51.53156334,0.076215309', 'Barking Creative Industries Quarter, Abbey Road, Barking IG11 7BT']
,['51.50986116,0.071279704', 'Gallions Roundabout, Royal Docks Road, London E16 7AB']
]
def get_normalized_location(loc_i, JOB_GPS_LIST):
# distance = haversine(loc_1[0] , loc_1[1] , loc_2[0], loc_2[1])
ll = loc_i % 15
x = JOB_GPS_LIST[ll][0].split(',')[1]
y = JOB_GPS_LIST[ll][0].split(',')[0]
return '{}:{}'.format(x,y)
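# Example: get_normalized_location(0, JOB_GPS_LIST) returns '-0.189370:51.447250'
# (longitude:latitude of the first London address above).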
def generate_and_save_one_day_orders(current_day, current_shifts, worker_list ):
current_shifts = _SHIFTS
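    # NOTE: the current_shifts parameter is overwritten here, so shifts are
    # always drawn from the module-level _SHIFTS template.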
for index, shift in enumerate(current_shifts):
# for shift in current_shifts:
# [0, 'FS', '7:12', 108, 60, 32]
shift[2] = get_normalized_location(index, JOB_GPS_LIST) # '{}:{}'.format(x,y) x, y = randint(1, 99), randint(1, 99)
shift[3] = randint(560, 1040)
shift[5] = randint(4*5, 30*4)
insert_all_orders(current_day = current_day, current_shifts = current_shifts, worker_list = worker_list)
def generate_and_save_orders(GENERATOR_START_DATE , GENERATOR_END_DATE, current_shifts , worker_list ):
for day_i in range(9999):
current_day = GENERATOR_START_DATE + timedelta(days=day_i)
if current_day >= GENERATOR_END_DATE:
break
generate_and_save_one_day_orders(current_day, current_shifts = current_shifts, worker_list = worker_list)
def select_all_workers():
url = 'http://localhost:5000/api/v1/worker?q=(columns:!(id,worker_code,name,active,service_area_code,geo_longitude,geo_latitude,working_time,level,birthday))'
response = requests.get(url, headers={'Content-Type':'application/json',
'Authorization': 'Token {}'.format(access_token)}
)
resp_json = response.json()
return(resp_json)
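    # NOTE: the empty-result check below is unreachable; it sits after the
    # return above.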
if resp_json['count'] < 1:
print('it is already empty!')
return []
def insert_all_workers(worker_list):
# url = '{}/kpdata/workers/'.format(kplanner_service_url)
index = 0
list_to_insert = []
for worker in worker_list:
gps = get_normalized_location(index, WORKER_GPS_LIST)
print('adding worker: ',worker, gps)
index += 1
myobj = {
'worker_code': worker[1],
'name': '{}-{}'.format(worker[1],worker[0]),
# 'birthday': '2000-10-25',
'skills': '[1]',
'geo_latitude': gps.split(':')[1],
'geo_longitude': gps.split(':')[0],
'weekly_working_minutes': '[ [0, 0], [480, 1140],[480, 1140],[480, 1140],[480, 1140],[480, 1140], [0, 0]]'
# 'level': 0,
}
list_to_insert.append(myobj)
kplanner_api.insert_all_workers(list_to_insert)
def delete_all_workers():
kplanner_api.delete_all_workers()
return
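    # The direct-HTTP implementation below is unreachable; the early return
    # above delegates deletion to the kplanner_api adapter instead.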
url = '{}/kpdata/workers/'.format(kplanner_service_url)
response = requests.get(url, headers={'Content-Type':'application/json',
'Authorization': 'Token {}'.format(access_token)}
)
resp_json = response.json()
# Convert JSON to dict and print
# print(resp_json)
if len(resp_json ) < 1:
print('it is already empty!')
return
for worker in resp_json :
print('deleting worker: ',worker)
url = '{}/kpdata/workers/'.format(kplanner_service_url) + str(worker['worker_code']) + ''
#print(url)
response = requests.delete(url, headers={
'Authorization': 'Token {}'.format(access_token)}
)
print(response.text )
def select_all_orders(current_day=None):
return
url = 'http://localhost:5000/api/v1/workorder'
if current_day is not None:
url = 'http://localhost:5000/api/v1/workorder/?q=(filters:!((col:requested_start_date,opr:eq,value:{})),order_columns:order_code,order_direction:desc)' \
.format(datetime.strftime(current_day,KANDBOX_DATE_FORMAT))
# ,columns:!(order_code,name,planning_status,requested_start_date,scheduled_start_time,geo_latitude,geo_longitude,fixed_date_time_flag,requested_start_time,requested_duration_minutes)
print(url)
response = requests.get(url, headers={'Content-Type':'application/json',
'Authorization': 'Token {}'.format(access_token)}
)
resp_json = response.json()
return(resp_json)
def select_orders_by_workers_TODO(workers=[]):
url = 'http://localhost:5000/api/v1/workorder'
response = requests.get(url, headers={'Content-Type':'application/json',
'Authorization': 'Token {}'.format(access_token)}
)
resp_json = response.json()
return(resp_json)
def insert_all_orders(current_day, current_shifts, worker_list):
list_to_insert = []
for order in current_shifts:
# print('adding order: ',order)
# [100, 'FS', '06:00', 18, 60, 32]
myobj = {
'job_code': '{}-{}-{}'.format(datetime.strftime(current_day, '%m%d'), order[0],order[1]),
'job_type':order[1],
'mandatory_minutes_minmax_flag': 1 if order[1] == 'FS' else 0,
'requested_start_min_minutes' : order[3],
'requested_start_max_minutes' : order[3],
'location_code': '{}-{}'.format( order[0],order[1]),
'geo_latitude': order[2].split(':')[1],
'geo_longitude': order[2].split(':')[0],
'requested_min_level': 0,
'planning_status': 'U',
'scheduled_duration_minutes': order[5] ,
'requested_duration_minutes': order[5] ,
#'requested_start_day': datetime.strftime(current_day, KANDBOX_DATE_FORMAT),
#'scheduled_start_day': datetime.strftime(current_day, KANDBOX_DATE_FORMAT), # '2019-10-29 ' ,
#'actual_start_date': datetime.strftime(current_day, KANDBOX_DATE_FORMAT), # '2019-10-29 ' ,
#'requested_start_minutes': order[3] , # + ':00'
#'scheduled_start_minutes': order[3] , # + ':00'
'requested_start_datetime': datetime.strftime(current_day + timedelta(minutes=order[3]), "%Y-%m-%dT%H:%M:%S"),
'scheduled_start_datetime': datetime.strftime(current_day + timedelta(minutes=order[3]), "%Y-%m-%dT%H:%M:%S"),
'scheduled_worker_code': '{}/kpdata/workers/{}/'.format(kplanner_service_url, worker_list [ random.randint(0, 5) ][1]),
'requested_worker_code': '{}/kpdata/workers/{}/'.format(kplanner_service_url, worker_list [ random.randint(0, 5) ][1]),
}
list_to_insert.append(myobj)
kplanner_api.insert_all_orders(list_to_insert)
def save_RL_dispatched_orders_one_day(solution_json):
# print(worker_day)
# {'duration': 100, 'job_id': '0-FS', 'fixed_schudule': {'fs_indicator': 'FT', 'fixed_minute_time_slot': [148.0, 148.0]}, 'job_gps': [57.0, 98.0], 'history_job_worker_count': [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'history_minute_start_time': 0, 'tolerated_day_min': 0, 'tolerated_day_max': 0, 'expected_job_day': 0, 'expected_job_day_orig': 0, 'actual_job_worker': None, 'actual_job_day': 1, 'actual_job_start_minute': 148, 'actual_job_duration': 20},
for task in solution_json :
task_id = task['job_id'] # + 1
worker_id = task['VisitOwner_worker_id']
url = 'http://localhost:5000/api/v1/workorder/' + str(task_id)
start_time_minute = task['start_time_minute']
updated_order = {} # latest_order_dict[id]
updated_order.update ({
'planning_status': 'I',
'scheduled_worker_rel': worker_id,
'actual_worker_rel': worker_id,
'scheduled_start_time': date_util.minutes_to_time_string( start_time_minute ),
'actual_start_time': date_util.minutes_to_time_string( start_time_minute ),
'scheduled_duration_minutes': task['duration'] ,
'actual_duration_minutes': task['duration'] ,
} )
print(updated_order)
response = requests.put(url, json=updated_order, headers={'Content-Type':'application/json',
'Authorization': 'Token {}'.format(access_token)} )
# Convert JSON to dict and print
print(response.text)
def delete_all_orders():
kplanner_api.delete_all_orders()
return
url = '{}/kpdata/jobs/'.format(kplanner_service_url) # http://localhost:5000/api/v1/workorder/1'
response = requests.get(url, headers={'Content-Type':'application/json',
'Authorization': 'Token {}'.format(access_token)}
)
resp_json = response.json()
# Convert JSON to dict and print
# print(resp_json)
if len(resp_json) < 1:
print('it is already empty!')
return
for worker in resp_json :
print('deleting order: ',worker)
url = '{}/kpdata/jobs/'.format(kplanner_service_url) + str(worker['job_code']) + ''
print(url)
response = requests.delete(url, headers={
'Authorization': 'Token {}'.format(access_token)}
)
print(response.text )
def dispatch_all_generated_orders(GENERATOR_START_DATE, TRAINING_DAYS, max_exe_time):
kplanner_db = KPlannerDBAdapter()
nbr_days_in_batch = 1
for day_i in range(TRAINING_DAYS) : #GENERATOR_RANGE):
current_day = GENERATOR_START_DATE + timedelta(days=day_i)
workers, workers_id_dict = kplanner_db.load_transformed_workers(start_day = current_day, nbr_days = nbr_days_in_batch)
jobs_orig = kplanner_db.load_jobs_original( workers_id_dict , start_day = current_day, nbr_days = 1)
if len(jobs_orig) < 1:
print('it is empty, nothing to dispatch!')
return
# _EMP = ['3:4', '12:5', '21:4','4:55', '12:50', '21:50' ]
_EMP = []
for ii, e in enumerate(workers):
_EMP.append('{}:{}'.format(e['geo_latitude'], e['geo_longitude']))
current_shifts=[]
# [0, 'FS', '7:12', 108, 60, 32]
latest_order_dict = {}
for ii, order in jobs_orig.iterrows():
current_shifts.append([ii, \
order['job_type'] , \
'{}:{}'.format(order['geo_latitude'], order['geo_longitude']),\
int( (order['requested_start_minutes']) ) , # minutes_to_time_string\
0, \
int(order['requested_duration_minutes'] ) ,
order['job_code']
])
print({'loaded day':current_day, 'job count': len(current_shifts)}) # , 'shift':current_shifts, 'emp': _EMP})
worker_day = dispatch_jobs(shifts= current_shifts, emps = _EMP, max_exe_time=max_exe_time)
if len(worker_day) < 1:
print('no data returned!')
return
#save_one_day_dispatched_orders(worker_day, workers, kplanner_db)
job_list = []
for w_i in range(len(worker_day)):
for task in worker_day[w_i][:-1]:
task_id = task[0] # + 1
worker_code = workers[w_i] ['worker_code']
# updated_order = {} # latest_order_dict[id]
job_list.append ({
'job_code': current_shifts[task_id][6],
'planning_status': 'V',
'scheduled_worker_code': worker_code,
'scheduled_start_day': datetime.strftime(current_day,KANDBOX_DATE_FORMAT),
'scheduled_start_minutes': task[1] ,
'scheduled_duration_minutes': task[2] ,
'scheduled_travel_minutes': task[3] ,
} )
'''
'actual_worker_code': worker_code,
'actual_start_day': datetime.strptime(current_day,KANDBOX_DATE_FORMAT),
'actual_start_minutes': task[1] ,
'actual_duration_minutes': task[2] , '''
job_df = pd.DataFrame(job_list)
job_df['planner_code'] = 'opti'
job_df['effective_from_date'] = datetime.now()
game_info = {
'planner_code': 'opti',
'game_code': 'curr_game_code',
}
kplanner_db.save_schedued_jobs(job_df)
#def save_one_day_dispatched_orders(worker_day, workers, kplanner_db ):
import xmlrpc.client
url = 'http://localhost:8069'
# db = 'local_demo_1'
db = 'odoodb1'
username = 'admin'
password = 'admin'
common = xmlrpc.client.ServerProxy('{}/xmlrpc/2/common'.format(url))
# print(common.version())
def move_workers_to_odoo():
uid = common.authenticate(db, username, password, {})
models = xmlrpc.client.ServerProxy('{}/xmlrpc/2/object'.format(url))
worker_result = select_all_workers()
odoo_worker_dict = {}
for ii, worker in enumerate(worker_result['result']):
worker_id = worker_result['ids'][ii]
id = models.execute_kw(db, uid, password, 'fsm.person', 'create', [{
'name': 'K-Planner-{}'.format(worker_id),
'fsm_person': True,
'ref':worker_id,
'mobile':'GPS:{}-{},worker_code:{} '.format(worker['geo_latitude'],worker['geo_longitude'],worker['worker_code'])
}])
odoo_worker_dict[worker_id] = id
print(odoo_worker_dict)
# print(p)
def move_orders_to_odoo():
uid = common.authenticate(db, username, password, {})
models = xmlrpc.client.ServerProxy('{}/xmlrpc/2/object'.format(url))
# GENERATOR_START_DATE = GENERATOR_START_DATE
for day_i in range(61,62) : #GENERATOR_RANGE):
current_day = GENERATOR_START_DATE + timedelta(days=day_i)
a_result = select_all_orders(current_day) # =datetime.strptime('20190103',KANDBOX_DATE_FORMAT))
odoo_worker_dict = {1: 6, 2: 7, 3: 8, 4: 9, 5: 10, 6: 11}
for ii, o in enumerate(a_result['result']):
order_id = a_result['ids'][ii]
# (order_code,name,planning_status,requested_start_date,scheduled_start_time,geo_latitude,geo_longitude,fixed_date_time_flag,requested_start_time,requested_duration_minutes
id = models.execute_kw(db, uid, password, 'fsm.order', 'create', [{
'name': '{}-({})-({}:{})-K'.format(o['name'], order_id, o['geo_latitude'], o['geo_longitude']),
'team_id': 1,
'location_id':1,
'company_id':1,
'description': 'requested_start_time:{},'.format(o['requested_start_time']),
'person_id':odoo_worker_dict[ o['scheduled_worker_rel']['id'] ],
'scheduled_date_start': datetime.strftime( datetime.strptime(o['scheduled_start_date'],KANDBOX_DATE_FORMAT) + \
timedelta(minutes=date_util.time_string_hhmm_to_minutes(o['scheduled_start_time'])) \
, '%Y-%m-%d %H:%M:%S'),
'scheduled_duration':o['requested_duration_minutes']/60, # o['scheduled_duration_minutes']/60,
}])
# print(id)
# print(p)
if __name__ == '__main__':
import sys
    if len(sys.argv) < 2:
        print('I need 1 parameter: python _this_.py token')
        sys.exit(1)
    token = sys.argv[1]
print(token)
# token = '6161871e78b90219ade283fd3971219f66e6ed01'
kplanner_api.access_token = token
_SHIFTS = [
[0, 'FS', '7:12', 108, 60, 32], [1, 'N', '8:3', 26, 68, 22], [2, 'N', '06:4', 66, 121, 25],
[3, 'N', '15:5', 20, 72, 12], [4, 'N', '11:4', 133, 189, 16], [5, 'N', '13:2', 2, 19, 17],
[6, 'N', '20:5', 91, 131, 34], [7, 'N', '21:7', 8, 30, 52], [8, 'FS', '3:45', 180, 190, 60],
[9, 'N', '5:49', 38, 80, 22], [10, 'FS', '14:54', 43, 90, 37], [11, 'N', '13:60', 169, 169, 25],
[12, 'FS', '19:55', 218, 215, 37], [13, 'N', '20:59', 196, 234, 38],[14, 'N', '20:48', 235, 248, 13]
]
_EMP = ['-0.1960066034365281:51.44627780733451'
,'-0.12320152380185012:51.50874451583848'
,'-0.1727406536075019:51.43884800126409'
,'-0.14559645789574854:51.45238536979151'
,'-0.1944764936594672:51.479491652786834'
,'-0.16707857157978992:51.433456197551315'
]
# worker_day =[[[1, 55, 22], [7, 82, 52], ['sink']], [[0, 108, 32], [2, 143, 25], [9, 174, 22], ['sink']], [[3, 20, 12], [5, 35, 17], [4, 55, 16], [6, 91, 34], ['sink']], [[8, 180, 60], ['sink']], [[11, 146, 25], [12, 218, 37], [14, 258, 13], ['sink']], [[10, 43, 37], [13, 84, 38], ['sink']]]
people = ('Tom', 'Mike', 'Harry', 'Slim', 'Jim','Duan')
worker_list = [[wi, people[wi], _EMP[wi]] for wi in range(len(people))]
ss = config.KANDBOX_TEST_START_DAY
ee = config.KANDBOX_TEST_END_DAY
GENERATOR_START_DATE = datetime.strptime(ss, KANDBOX_DATE_FORMAT )
GENERATOR_END_DATE = datetime.strptime(ee, KANDBOX_DATE_FORMAT )
# exit(0)
'''
'''
kplanner_db.purge_all_workers_jobs()
insert_all_workers(worker_list)
generate_and_save_orders(GENERATOR_START_DATE = GENERATOR_START_DATE, GENERATOR_END_DATE=GENERATOR_END_DATE, current_shifts = _SHIFTS, worker_list = worker_list) # This genearte 450 orders
opti = Opti1DayPlanner( max_exec_time = config.KANDBOX_OPTI1DAY_EXEC_SECONDS) # 0*60*24
# opti.kplanner_db.purge_planner_job_status(planner_code=opti.planner_code,start_date = ss, end_date = ee )
res = opti.dispatch_jobs( start_date = config.KANDBOX_TEST_OPTI1DAY_START_DAY, end_date = config.KANDBOX_TEST_OPTI1DAY_END_DAY )
# pprint(res)
kfc = KPlannerFeatureCalculator()
# Copy orig to status table and add travel.
# kfc.add_travel_time_to_orig_job_status(planner_code = 'orig', start_day = ss, end_day = ee)
kfc.calc_planner_travel_time_statistics(planner_code = 'orig', start_day = ss, end_day = ee)
kfc.calc_planner_travel_time_statistics(planner_code = 'opti1day', start_day = ss, end_day = ee)
print("Started location feature calc ...")
kfc.calc_job_location_history_features(planner_code = 'opti1day', start_day = ss, end_day = ee)
rl_run_all(batch_name = 'dispatch_v2_1', train_again = True, TRAINING_DAYS=config.RL_TRAINING_DAYS, PREDICTION_DAYS = 0, intial_games = 120, MAX_EPOCHS = config.RL_MAX_EPOCHS)
rl_run_all(batch_name = 'dispatch_v2_1', train_again = False, TRAINING_DAYS=config.RL_TRAINING_DAYS, PREDICTION_DAYS = 3, intial_games = 120, MAX_EPOCHS = config.RL_MAX_EPOCHS)
rl_run_all(batch_name = '5minutes_dense_1', train_again = True, TRAINING_DAYS=config.RL_TRAINING_DAYS, PREDICTION_DAYS = 0, intial_games = 120, MAX_EPOCHS = config.RL_MAX_EPOCHS)
rl_run_all(batch_name = '5minutes_dense_1', train_again = False, TRAINING_DAYS=config.RL_TRAINING_DAYS, PREDICTION_DAYS = 3, intial_games = 120, MAX_EPOCHS = config.RL_MAX_EPOCHS)
rl_run_all(batch_name = '5minutescnn_dense_1', train_again = True, TRAINING_DAYS=config.RL_TRAINING_DAYS, PREDICTION_DAYS = 0, intial_games = 120, MAX_EPOCHS = config.RL_MAX_EPOCHS)
rl_run_all(batch_name = '5minutescnn_dense_1', train_again = False, TRAINING_DAYS=config.RL_TRAINING_DAYS, PREDICTION_DAYS = 3, intial_games = 120, MAX_EPOCHS = config.RL_MAX_EPOCHS)
rl_run_all(batch_name = 'dispatch_v2_1', train_again = True, TRAINING_DAYS=config.RL_TRAINING_DAYS, PREDICTION_DAYS = 0, intial_games = 120, MAX_EPOCHS = config.RL_MAX_EPOCHS)
rl_run_all(batch_name = 'dispatch_v2_1', train_again = False, TRAINING_DAYS=config.RL_TRAINING_DAYS, PREDICTION_DAYS = 3, intial_games = 120, MAX_EPOCHS = config.RL_MAX_EPOCHS)
rl_run_all(batch_name = 'rl_heur', train_again = False, TRAINING_DAYS=config.RL_TRAINING_DAYS, PREDICTION_DAYS = 2, intial_games = 120, MAX_EPOCHS = config.RL_MAX_EPOCHS)
exit(0)
| 45.587302 | 450 | 0.63575 |
f25a5f8401b0d7aecf3bfffa77dc6b0b4981fd65 | 424 | py | Python | manage.py | nanguoyu/FileCollectionWeb | 118f8efda5cf567787095808220f6ffe510b0644 | [
"MIT"
] | 1 | 2019-10-29T00:35:37.000Z | 2019-10-29T00:35:37.000Z | manage.py | nanguoyu/FileCollectionWeb | 118f8efda5cf567787095808220f6ffe510b0644 | [
"MIT"
] | null | null | null | manage.py | nanguoyu/FileCollectionWeb | 118f8efda5cf567787095808220f6ffe510b0644 | [
"MIT"
] | null | null | null | import os
from flask_script import Shell
from app import create_app
from flask_script import Manager
from config import ProductionConfig
# app = create_app(os.getenv('FLASK_CONFIG') or 'default')
app = create_app(ProductionConfig)
manager = Manager(app)
def make_shell_context():
return dict(app=app)
manager.add_command("shell", Shell(make_context=make_shell_context))
if __name__ == '__main__':
manager.run()
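# Usage sketch (Flask-Script's default commands): `python manage.py runserver`
# starts the app and `python manage.py shell` opens a shell with `app`
# preloaded via make_shell_context.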
| 22.315789 | 68 | 0.778302 |