Dataset column summary (min/max string lengths or numeric values as reported by the dataset viewer; ⌀ marks columns that contain nulls):

| column | type | length / value range | nulls (⌀) |
|---|---|---|---|
| hexsha | string | length 40 | no |
| size | int64 | 3 to 1.03M | no |
| ext | string | 10 distinct values | no |
| lang | string | 1 distinct value | no |
| max_stars_repo_path | string | length 3 to 972 | no |
| max_stars_repo_name | string | length 6 to 130 | no |
| max_stars_repo_head_hexsha | string | length 40 to 78 | no |
| max_stars_repo_licenses | list | length 1 to 10 | no |
| max_stars_count | int64 | 1 to 191k | yes |
| max_stars_repo_stars_event_min_datetime | string | length 24 | yes |
| max_stars_repo_stars_event_max_datetime | string | length 24 | yes |
| max_issues_repo_path | string | length 3 to 972 | no |
| max_issues_repo_name | string | length 6 to 130 | no |
| max_issues_repo_head_hexsha | string | length 40 to 78 | no |
| max_issues_repo_licenses | list | length 1 to 10 | no |
| max_issues_count | int64 | 1 to 116k | yes |
| max_issues_repo_issues_event_min_datetime | string | length 24 | yes |
| max_issues_repo_issues_event_max_datetime | string | length 24 | yes |
| max_forks_repo_path | string | length 3 to 972 | no |
| max_forks_repo_name | string | length 6 to 130 | no |
| max_forks_repo_head_hexsha | string | length 40 to 78 | no |
| max_forks_repo_licenses | list | length 1 to 10 | no |
| max_forks_count | int64 | 1 to 105k | yes |
| max_forks_repo_forks_event_min_datetime | string | length 24 | yes |
| max_forks_repo_forks_event_max_datetime | string | length 24 | yes |
| content | string | length 3 to 1.03M | no |
| avg_line_length | float64 | 1.13 to 941k | no |
| max_line_length | int64 | 2 to 941k | no |
| alphanum_fraction | float64 | 0 to 1 | no |
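The records below reproduce individual rows from this table: per-file metadata, then the file content, then per-file statistics. As a minimal sketch of how such a corpus could be inspected programmatically, the snippet below streams a few rows with the Hugging Face `datasets` library; the dataset identifier `bigcode/the-stack` and the `data_dir` value are placeholders chosen for illustration, not taken from this document.

```python
from itertools import islice

from datasets import load_dataset

# Stream the split instead of downloading it, then print a few of the
# columns described in the schema above.
ds = load_dataset("bigcode/the-stack", data_dir="data/python",
                  split="train", streaming=True)
for row in islice(ds, 3):
    print(row["max_stars_repo_name"], row["max_stars_repo_path"],
          row["size"], row["alphanum_fraction"])
```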
hexsha: bb979d457152ae5439a9b0fb0f3ee26373bc50e6 | size: 1,270 | ext: py | lang: Python
max_stars / max_issues / max_forks: video_message_parser.py @ stonexu1986/carlife-protocol-analysis-tool (10c8ea30e27e3db81382cea3b8ef7663479a535f), licenses ["Apache-2.0"]; star, issue, and fork counts and event datetimes: null
content:
import struct
import binascii
from common_constant import const


def parse_header(header, message):
    """Unpack the 12-byte big-endian header: data length, timestamp, service type."""
    print("parsing video message header:")
    print(header)
    msg_data_len, timestamp, service_type = struct.unpack('>III', header)
    service_type_hex = hex(service_type)[2:].rjust(8, '0')
    service_type_hex = '0x' + service_type_hex.upper()
    message['msg_data_len'] = msg_data_len
    message['timestamp'] = timestamp
    message['service_type'] = service_type_hex
    print("video message header parsed: data len:" + str(msg_data_len) +
          ", timestamp:" + str(timestamp) + ", service type:" + service_type_hex)
    # Bit 15 of the service type encodes the message direction.
    direction = (service_type >> 15) & 0x00000001
    if direction == 1:
        message['sender'] = const.ACCESSORY_HU
        message['receiver'] = const.ACCESSORY_MD
    else:
        message['sender'] = const.ACCESSORY_MD
        message['receiver'] = const.ACCESSORY_HU


# '0x00020001' : 'MSG_VIDEO_DATA'
def parse_msg_video_data(data, message):
    print("parse_msg_video_data:")
    #print(data)
    message['name'] = const.MSG_VIDEO_DATA_SERVICE_NAME


# '0x00020002' : 'MSG_VIDEO_HEARTBEAT'
def parse_msg_video_heartbeat(data, message):
    print("parse_msg_video_heartbeat:")
    #print(data)
    message['name'] = const.MSG_VIDEO_HEARTBEAT_SERVICE_NAME
avg_line_length: 27.608696 | max_line_length: 74 | alphanum_fraction: 0.716535
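A minimal sketch of exercising parse_header from the record above: it packs a fake 12-byte big-endian header with struct and checks the fields the parser fills in. The numeric values are made up, and the sketch assumes common_constant is importable so the const.ACCESSORY_* names resolve.

```python
import struct

from video_message_parser import parse_header

# Fake header: 4096-byte payload, an arbitrary timestamp, and service type
# 0x00020001 (MSG_VIDEO_DATA). Bit 15 of the service type is 0 here, so the
# parser marks the message as mobile-device -> head-unit.
header = struct.pack('>III', 4096, 1633024800, 0x00020001)

message = {}
parse_header(header, message)
print(message['msg_data_len'], message['timestamp'], message['service_type'])
# Expected: 4096 1633024800 0x00020001
```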
hexsha: dc3e9c3730eec2fa33bf9ba97e2754c5ca45947d | size: 29,543 | ext: py | lang: Python
max_stars: cogs/stats.py @ CuteFwan/Koishi (34c1681a1308cd4e9ea02c027a2800638ad4baf9), licenses ["MIT"], 22 stars, events 2019-03-23T13:05:03.000Z to 2021-09-23T07:17:46.000Z
max_issues: cogs/stats.py @ CuteFwan/Koishi-db-beta (34c1681a1308cd4e9ea02c027a2800638ad4baf9), licenses ["MIT"], 4 issues, events 2019-04-01T23:22:45.000Z to 2020-06-22T15:09:39.000Z
max_forks: cogs/stats.py @ CuteFwan/Koishi-db-beta (34c1681a1308cd4e9ea02c027a2800638ad4baf9), licenses ["MIT"], 5 forks, events 2019-04-25T19:14:51.000Z to 2021-07-04T13:07:16.000Z
content:
import discord
from discord.ext import commands
import time
import datetime
from io import BytesIO
from .utils import pretty
import typing
from math import cos, sin, radians, ceil
from PIL import Image, ImageOps, ImageDraw, ImageFilter, ImageEnhance, ImageFont
import logging
logger = logging.getLogger(__name__)
status = {'online':(67, 181, 129),
'idle':(250, 166, 26),
'dnd':(240, 71, 71),
'offline':(116, 127, 141)}
discord_neutral = (188,188,188)
query_base = '''
with status_data as(
select
status,
first_seen_chopped as first_seen,
case when
lag(first_seen_chopped) over (order by first_seen desc) is null then
now() at time zone 'utc'
else
lag(first_seen_chopped) over (order by first_seen desc)
end as last_seen
from (
select
distinct on (first_seen_chopped)
first_seen,
case when first_seen < (now() at time zone 'utc' - interval '30 days') then
(now() at time zone 'utc' - interval '30 days')
else first_seen end as first_seen_chopped,
status,
lag(status) over (order by first_seen desc) as status_last
from (
(select status, first_seen
from statuses
where uid=0
order by first_seen desc)
union
(select event as status, time as first_seen
from cog_log
order by first_seen desc)
union
(select 'left_guild' as status, time as first_seen
from member_removes
where uid=$1
order by first_seen desc)
union
(select status, first_seen
from statuses
where uid=$1
order by first_seen desc
limit 3000)
) first3000
order by first_seen_chopped desc, first_seen desc
) subtable
where
status is distinct from status_last
order by first_seen desc
)
'''
class Stats(commands.Cog):
def __init__(self, bot):
self.bot = bot
@commands.command()
async def toggle_purge(self, ctx):
'''Toggles whether or not to purge or save presence updates beyond 30 days.'''
result = await ctx.bot.pool.fetchval('''
SELECT keep
FROM presence_whitelist
WHERE uid=$1''', ctx.author.id)
await ctx.send(f"Currently I am {'not ' if not result else ''}storing your presence updates beyond 30 days. Would you like to change that? (y/n)")
def check(m):
return m.channel.id == ctx.channel.id and m.author.id == ctx.author.id and m.content.lower() in ["y", "yes", "n", "no"]
answer = await ctx.bot.wait_for("message", check=check)
if answer.content.lower() in ["y", "yes"]:
await ctx.bot.pool.execute('''
INSERT INTO presence_whitelist
(uid, keep)
VALUES
($1, $2)
ON CONFLICT (uid)
DO UPDATE
SET keep = $2''', ctx.author.id, not result)
await ctx.send(f"Changed. Now {'not ' if result else ''}storing your presence updates beyond 30 days.")
else:
await ctx.send("Ok.")
@commands.command()
async def useruptime(self, ctx, *, target : discord.Member = None):
target = target or ctx.author
if target.id == self.bot.user.id:
return await ctx.send("I cannot see myself...")
msg = f'`{target.display_name} `has been **{target.status.name}** for as long as I can tell...'
msg2 = ''
status_info = offline_info = None
status_info = await self.bot.pool.fetchval('''
with lagged as(
select
status,
lag(status) over (order by first_seen asc) as status_lag,
first_seen
from statuses
where (uid=$1 or uid=0) and
first_seen > now() at time zone 'utc' - interval '30 days'
)
select distinct on (status)
first_seen
from lagged
where
status != status_lag and
status = $2
order by status, first_seen desc
''', target.id, target.status.name)
if target.status.name != 'offline':
offline_info = await self.bot.pool.fetchval('''
with lagged as(
select
status,
lag(status) over (order by first_seen asc) as status_lag,
first_seen
from statuses
where (uid=$1 or uid=0) and
first_seen > now() at time zone 'utc' - interval '30 days'
)
select
first_seen
from lagged
where
status != 'offline' and status_lag = 'offline'
order by first_seen desc
limit 1
''', target.id)
if status_info:
utcnow = datetime.datetime.utcnow()
time = pretty.delta_to_str(status_info, utcnow)
msg = f'`{target.display_name} `has been **{target.status.name}** for {time}.'
if target.status.name != 'offline':
if offline_info:
time = pretty.delta_to_str(offline_info, utcnow)
msg2 = f'Last **offline** {time} ago.'
else:
msg2 = 'Has not been seen offline in the last 30 days as far as I can tell...'
await ctx.send(f'{msg}\n{msg2}')
@commands.command()
async def piestatus(self, ctx, *, target : discord.Member = None):
'''Generates a pie chart displaying the ratios between the statuses the bot has seen the user use.'''
target = target or ctx.author
async with ctx.channel.typing():
rows = await self.bot.pool.fetch(query_base + '''
select
status,
sum(
extract(
epoch from(
last_seen - first_seen
))) as sum
from status_data
group by status
order by sum desc
''', target.id)
async with self.bot.session.get(str(target.avatar_url_as(format='png'))) as r:
avydata = BytesIO(await r.read())
statuses = {row['status'] : row['sum'] for row in rows if row['status'] in status.keys()}
data = await self.bot.loop.run_in_executor(None, self._piestatus, avydata, statuses)
await ctx.send(file=discord.File(data, filename=f'{target.display_name}_pie_status.png'))
def _piestatus(self, avydata, statuses):
total = sum(statuses.values())
stat_deg = {k:(v/total)*360 for k, v in statuses.items()}
angles = dict()
starting = -90
for k,v in stat_deg.items():
angles[k] = starting + v
starting += v
base = Image.new(mode='RGBA', size=(400, 300), color=(0, 0, 0, 0))
piebase = Image.new(mode='RGBA', size=(400, 300), color=(0, 0, 0, 0))
with Image.open(avydata).resize((200,200), resample=Image.BICUBIC).convert('RGBA') as avy:
with Image.open('piestatustest2.png').convert('L') as mask:
base.paste(avy, (50,50), avy)
draw = ImageDraw.Draw(piebase)
maskdraw = ImageDraw.Draw(mask)
starting = -90
for k, v in angles.items():
if starting == v:
continue
else:
draw.pieslice(((-5,-5),(305,305)),starting, v, fill=status[k])
starting = v
if not 360 in stat_deg:
mult = 1000
offset = 150
for k, v in angles.items():
x = offset + ceil(offset * mult * cos(radians(v))) / mult
y = offset + ceil(offset * mult * sin(radians(v))) / mult
draw.line(((offset, offset), (x, y)), fill=(255,255,255,255), width=1)
del maskdraw
piebase.putalpha(mask)
font = ImageFont.truetype("arial.ttf", 15)
bx = 310
by = {'online':60, 'idle':110, 'dnd':160, 'offline':210}
base.paste(piebase, None, piebase)
draw = ImageDraw.Draw(base)
logger.debug(f'total statuses: {total}')
for k, v in statuses.items():
draw.rectangle(((bx, by[k]),(bx+30, by[k]+30)), fill=status[k], outline=(255,255,255,255))
draw.text((bx+40, by[k]+8), f'{(v/total)*100:.2f}%', fill=discord_neutral, font=font)
logger.debug(f'{(v/total)*100:.2f}%')
del draw
buffer = BytesIO()
base.save(buffer, 'png')
buffer.seek(0)
return buffer
@commands.command()
async def barstatus(self, ctx, *, target : discord.Member = None):
'''Generates a bar graph of each status the bot has seen the user use.'''
target = target or ctx.author
async with ctx.channel.typing():
rows = await self.bot.pool.fetch(query_base + '''
select
status,
sum(
extract(
epoch from(
last_seen - first_seen
))) as sum
from status_data
group by status
order by sum desc
''', target.id)
statuses = {row['status'] : row['sum'] for row in rows if row['status'] in status.keys()}
data = await self.bot.loop.run_in_executor(None, self._barstatus, f'{target}\'s uptime in the past 30 days', statuses)
await ctx.send(file=discord.File(data, filename=f'{target.display_name}_bar_status.png'))
def _barstatus(self, title, statuses):
highest = max(statuses.values())
highest_unit = self.get_significant(highest)
units = {stat:self.get_significant(value) for stat, value in statuses.items()}
heights = {stat:(value/highest)*250 for stat, value in statuses.items()}
box_size = (400,300)
rect_x_start = {k:64 + (84 * v) for k, v in {'online':0,'idle':1,'dnd':2,'offline':3}.items()}
rect_width = 70
rect_y_end = 275
labels = {'online':'Online', 'idle':'Idle', 'dnd':'DnD', 'offline':'Offline'}
base = Image.new(mode='RGBA', size=box_size, color=(0, 0, 0, 0))
with Image.open('barstatus_grid1.png') as grid:
font = ImageFont.truetype("arial.ttf", 12)
draw = ImageDraw.Draw(base)
draw.text((5, 5), highest_unit[1], fill=discord_neutral, font=font)
draw.text((52,2),title, fill=discord_neutral,font=font)
divs = 11
for i in range(divs):
draw.line(((50,25+((box_size[1]-50)/(divs-1))*i),(box_size[0],25+((box_size[1]-50)/(divs-1))*i)),fill=(*discord_neutral,128), width=1)
draw.text((5, 25+((box_size[1]-50)/(divs-1))*i-6), f'{highest_unit[0]-i*highest_unit[0]/(divs-1):.2f}', fill=discord_neutral, font=font)
for k, v in statuses.items():
draw.rectangle(((rect_x_start[k], rect_y_end - heights[k]),(rect_x_start[k]+rect_width, rect_y_end)), fill=status[k])
draw.text((rect_x_start[k], rect_y_end - heights[k] - 13), f'{units[k][0]} {units[k][1]}', fill=discord_neutral, font=font)
draw.text((rect_x_start[k], box_size[1] - 25), labels[k], fill=discord_neutral, font=font)
del draw
base.paste(grid, None, grid)
buffer = BytesIO()
base.save(buffer, 'png')
buffer.seek(0)
return buffer
def get_significant(self, stat):
word = ''
if stat >= 604800:
stat /= 604800
word = 'Week'
elif stat >= 86400:
stat /= 86400
word = 'Day'
elif stat >= 3600:
stat /= 3600
word = 'Hour'
elif stat >= 60:
stat /= 60
word = 'Minute'
else:
word = 'Second'
stat = float(f'{stat:.1f}')
if stat > 1 or stat == 0.0:
word += 's'
return stat, word
@commands.command()
async def histostatus(self, ctx, target : typing.Optional[discord.Member] = None , tz : int = 0):
if tz > 12 or tz < -12:
tz = 0
target = target or ctx.author
query = query_base + '''
select
hour,
status,
extract(epoch from total) / extract(epoch from max(total) over ()) as percent
from (
select
mod((extract(hour from s.hours)+$2+24)::integer, 24) as hour,
s.status,
sum(
case
when date_trunc('hour', s.last_seen) = s.hours and
date_trunc('hour', s.first_seen) = s.hours then
s.last_seen - s.first_seen
when date_trunc('hour', s.first_seen) = s.hours then
(s.hours + interval '1 hour') - s.first_seen
when date_trunc('hour', s.last_seen) = s.hours then
s.last_seen - s.hours
else
interval '1 hour'
end
) as total
from (
select
status,
first_seen,
last_seen,
generate_series(
date_trunc('hour', first_seen),
date_trunc('hour', case when last_seen is null then now() at time zone 'utc' else last_seen end),
'1 hours'
) as hours
from status_data
where
status in ('offline', 'idle', 'online', 'dnd')
) as s
group by hour, s.status
order by hour asc
) a
order by hour asc
'''
async with ctx.channel.typing():
utcnow = datetime.datetime.utcnow()
start_time = time.perf_counter()
data = await self.bot.pool.fetch(query, target.id, tz)
query_done_time = time.perf_counter()
current_hour = (utcnow.hour + tz) % 24
            output = await self.bot.loop.run_in_executor(None, self._histostatus, f'{target.display_name}\'s restaurant hours', data, current_hour, tz)
generated_time = time.perf_counter()
await ctx.send(file=discord.File(output, filename=f'{target.id} histostatus {utcnow.replace(microsecond=0,second=0,minute=0)}.png'))
finish_time = time.perf_counter()
msg = f'query done in **{(query_done_time - start_time)*1000:.2f}ms**'
msg += f'\nimage built in **{(generated_time - query_done_time)*1000:.2f}ms**'
msg += f'\nsent image in **{(finish_time - generated_time)*1000:.2f}ms**'
msg += f'\ntotal time **{(finish_time - start_time)*1000:.2f}ms**'
await ctx.send(f'*{msg}*')
def _histostatus(self, title, data, current_hour, tz):
box_size = (400,300)
#base = Image.new(mode='RGBA', size=box_size, color=(0, 0, 0, 0))
with Image.open('histogram_template2.png') as base:
draw = ImageDraw.Draw(base)
x = 24
spacing = 16
draw_y0 = 0
draw_y1 = box_size[1]-30
trans_font = (*discord_neutral, 50)
font = ImageFont.truetype("arial.ttf", 12)
graphsize = 255
top_offset = 15
for i in range(25):
#Draw numbers
draw_x = x+spacing*i-8
draw.line(((draw_x,draw_y0),(draw_x,draw_y1)),fill=trans_font, width=1)
draw.line(((draw_x, draw_y1), (draw_x, draw_y1+top_offset)), fill=discord_neutral, width=1)
if i != 24:
if i == current_hour:
fontcolor = (0,255,0,255)
else:
fontcolor = discord_neutral
draw.text((draw_x+1,draw_y1), f'{i:02}', fill=fontcolor, font=font)
draw.text((340,draw_y1+16), f'{"+" if tz >= 0 else ""}{tz}', fill=discord_neutral, font=font)
draw.text((2,2),title, fill=discord_neutral,font=font)
first = {'online':0,'idle':0,'dnd':0,'offline':0}
curr = {'online':0,'idle':0,'dnd':0,'offline':0}
prev = {'online':0,'idle':0,'dnd':0,'offline':0}
for d in data:
if d['hour'] == 0:
first[d['status']] = d['percent']
if d['hour'] == 23:
prev[d['status']] = d['percent']
hour = 0
for d in data:
if hour == d['hour']:
curr[d['status']] = d['percent']
elif hour + 1 == d['hour']:
for stat in prev.keys():
x0 = x - spacing
y0 = (graphsize - (prev[stat]*graphsize)) + top_offset
x1 = x
y1 = (graphsize - (curr[stat]*graphsize)) + top_offset
draw.line(((x0,y0),(x1,y1)), fill=status[stat], width=1)
draw.ellipse(((x1-1,y1-1),(x1+1,y1+1)), fill=status[stat])
prev = curr
curr = {'online':0,'idle':0,'dnd':0,'offline':0}
curr[d['status']] = d['percent']
hour += 1
x += spacing
for k, v in prev.items():
x0 = x - spacing
y0 = (graphsize - v*graphsize) + top_offset
x1 = x
y1 = (graphsize - first[k]*graphsize) + top_offset
draw.line(((x0,y0),(x1,y1)), fill=status[k], width=1)
del draw
buffer = BytesIO()
base.save(buffer, 'png')
buffer.seek(0)
return buffer
@commands.command(aliases = ['hourlystatus'])
async def calendarstatus(self, ctx, target : typing.Optional[discord.Member] = None , tz : int = 0):
'''shows hourly presence data. Each row is a day. WIP'''
if tz > 12 or tz < -12:
tz = 0
tz_delta = datetime.timedelta(hours=tz)
target = target or ctx.author
query = query_base + '''
select
s.hours as timestamp,
extract(day from s.hours) as day,
extract(hour from s.hours) as hour,
s.status,
sum(
extract(EPOCH from
case
when date_trunc('hour', s.last_seen) = s.hours and
date_trunc('hour', s.first_seen) = s.hours then
s.last_seen - s.first_seen
when date_trunc('hour', s.first_seen) = s.hours then
(s.hours + interval '1 hour') - s.first_seen
when date_trunc('hour', s.last_seen) = s.hours then
s.last_seen - s.hours
else
interval '1 hour'
end
)/3600
)as percent
from (
select
status,
first_seen + $2 as first_seen,
last_seen + $2 as last_seen,
generate_series(
date_trunc('hour', first_seen + $2),
date_trunc('hour', case when last_seen is null then now() at time zone 'utc' else last_seen end + $2),
'1 hours'
) as hours
from status_data
where
status in ('offline', 'idle', 'online', 'dnd')
) as s
group by timestamp, status
order by timestamp, hour asc
'''
async with ctx.channel.typing():
data = await ctx.bot.pool.fetch(query, target.id, tz_delta)
output = await self.bot.loop.run_in_executor(None, self._calendarstatus, data, tz)
await ctx.send(file=discord.File(output, filename='test.png'))
def _calendarstatus(self, data, tz):
base = Image.new(mode='RGBA', size=(24, 31), color=(0, 0, 0, 0))
pix = base.load()
status_percent = {}
prev_timestamp = data[0]['timestamp']
prev_day = data[0]['day']
y = 0
for d in data:
if d['day'] != prev_day:
y += (d['timestamp'].date() - prev_timestamp.date()).days
prev_day = d['day']
if prev_timestamp != d['timestamp']:
x = d['hour']
pix[x,y] = self._calculate_color(status_percent, status)
prev_timestamp = d['timestamp']
status_percent = {}
status_percent[d['status']] = d['percent']
base = base.crop((0,0,24,y+1))
new_base = Image.new(mode='RGBA', size=(24, 31), color=(0, 0, 0, 0))
new_base.paste(base, box=(0,30-y),mask=base)
new_base = new_base.resize((400,new_base.size[1]),Image.NEAREST)
new_base = new_base.resize((400,300),Image.NEAREST)
buffer = BytesIO()
new_base.save(buffer, 'png')
buffer.seek(0)
return buffer
@commands.command(aliases = ['hourlystatuspie'])
async def calendarstatuspie(self, ctx, target : typing.Optional[discord.Member] = None , tz : int = 0):
'''shows hourly presence data. Spirals inward magically.'''
if tz > 12 or tz < -12:
tz = 0
tz_delta = datetime.timedelta(hours=tz)
target = target or ctx.author
query = query_base + '''
select
s.hours as timestamp,
extract(day from s.hours) as day,
extract(hour from s.hours) as hour,
s.status,
sum(
extract(EPOCH from
case
when date_trunc('hour', s.last_seen) = s.hours and
date_trunc('hour', s.first_seen) = s.hours then
s.last_seen - s.first_seen
when date_trunc('hour', s.first_seen) = s.hours then
(s.hours + interval '1 hour') - s.first_seen
when date_trunc('hour', s.last_seen) = s.hours then
s.last_seen - s.hours
else
interval '1 hour'
end
)/3600
)as percent
from (
select
status,
first_seen + $2 as first_seen,
last_seen + $2 as last_seen,
generate_series(
date_trunc('hour', first_seen + $2),
date_trunc('hour', case when last_seen is null then now() at time zone 'utc' else last_seen end + $2),
'1 hours'
) as hours
from status_data
where
status in ('offline', 'idle', 'online', 'dnd')
) as s
group by timestamp, status
order by timestamp, hour asc
'''
async with ctx.channel.typing():
data = await ctx.bot.pool.fetch(query, target.id, tz_delta)
parsed = self._parse_to_dict(data)
output = await self.bot.loop.run_in_executor(None, self._calendarstatuspie, parsed, tz)
await ctx.send(file=discord.File(output, filename='test.png'))
def _parse_to_dict(self, data):
output = {d : {h : (0,0,0,0) for h in range(24)} for d in range(31)}
status_percent = {}
prev_timestamp = data[0]['timestamp']
prev_day = data[0]['day']
y = 0
for d in data:
if d['day'] != prev_day:
y += (d['timestamp'].date() - prev_timestamp.date()).days
prev_day = d['day']
if prev_timestamp != d['timestamp']:
x = d['hour']
output[y][x] = self._calculate_color(status_percent, status)
prev_timestamp = d['timestamp']
status_percent = {}
status_percent[d['status']] = d['percent']
return output
def _calendarstatuspie(self, data, tz):
size = 1000
halfsize = size//2
offset = 30
base = Image.new(mode='RGBA', size=(size, size), color=(0, 0, 0, 0))
draw = ImageDraw.Draw(base)
i = 0
for day in range(30, -1, -1):
for hour in range((24*3)-1, -1, -1):
hour1 = hour / 3
hour2 = hour // 3
radius = int(((halfsize*(((day+offset)*24) + hour1)/((30+offset)*24))))
xy0 = halfsize - radius
xy1 = halfsize + radius
angle = (hour1/24)*360 - 90
angle2 = angle+(15/3)
draw.pieslice((xy0,xy0,xy1,xy1), angle, angle2, fill=data[day][hour2])
buffer = BytesIO()
base.save(buffer, 'png')
buffer.seek(0)
return buffer
@commands.command()
async def hourlyupdates(self, ctx, target : typing.Optional[discord.Member] = None , tz : int = 0):
if tz > 12 or tz < -12:
tz = 0
tz_delta = datetime.timedelta(hours=tz)
target = target or ctx.author
query = query_base + '''
select
s.timestamp,
extract(day from s.timestamp) as day,
extract(hour from s.timestamp) as hour,
count(s.timestamp)
from (
select
date_trunc('hour', first_seen + $2) as timestamp
from status_data
where
status in ('offline', 'idle', 'online', 'dnd')
) as s
group by timestamp
order by timestamp asc
'''
async with ctx.channel.typing():
data = await ctx.bot.pool.fetch(query, target.id, tz_delta)
output = await self.bot.loop.run_in_executor(None, self._hourlyupdates, data, tz)
await ctx.send(file=discord.File(output, filename='test.png'))
def _hourlyupdates(self, data, tz):
base = Image.new(mode='RGBA', size=(24, 31), color=(0, 0, 0, 0))
pix = base.load()
prev_timestamp = data[0]['timestamp']
prev_day = data[0]['day']
y = 0
for d in data:
if d['day'] != prev_day:
y += (d['timestamp'].date() - prev_timestamp.date()).days
prev_timestamp = d['timestamp']
prev_day = d['day']
x = d['hour']
amount = min(1, d['count']/30)
overload = min(1, max(0, (d['count'] - 30)/30))
amount -= overload
percents = {'activity' : amount, 'overload' : overload}
colors = {'activity' : (67, 181, 129), 'overload' : (255,255,255)}
pix[x,y] = self._calculate_color(percents, colors)
base = base.crop((0,0,24,y+1))
new_base = Image.new(mode='RGBA', size=(24, 31), color=(0, 0, 0, 0))
new_base.paste(base, box=(0,30-y),mask=base)
new_base = new_base.resize((400,new_base.size[1]),Image.NEAREST)
new_base = new_base.resize((400,300),Image.NEAREST)
buffer = BytesIO()
new_base.save(buffer, 'png')
buffer.seek(0)
return buffer
def _calculate_color(self, percent, colors):
mult = sum(percent.values())
new_color = [int(sum((percent[key] / mult) * colors[key][i] for key, value in percent.items())) for i in range(3)]
alpha = ceil(mult * 255)
color_with_alpha = tuple([*new_color, alpha])
return color_with_alpha
@commands.command()
@commands.cooldown(1,7200, commands.BucketType.user)
async def getstatusdata(self, ctx, limit : int = 0):
async with ctx.channel.typing():
buf = BytesIO()
query = f'''
select
status,
first_seen
from statuses
where uid=$1
order by first_seen desc
{f'limit {limit}' if limit > 0 else ''}
'''
async with self.bot.pool.acquire() as con:
await con.copy_from_query(query, ctx.author.id, output=buf, format='csv')
buf.seek(0)
await ctx.send('Sent data to your DMs.')
await ctx.author.send(file=discord.File(buf, filename=f'{ctx.author.id}_statuses.csv'))
def setup(bot):
bot.add_cog(Stats(bot))
avg_line_length: 42.204286 | max_line_length: 154 | alphanum_fraction: 0.4967
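The Stats class above is a standard discord.py cog with a synchronous setup() hook, which points at discord.py 1.x. A minimal sketch of wiring it into a bot follows; it assumes bot.pool (an asyncpg pool) and bot.session (an aiohttp session) are attached elsewhere, since the commands query PostgreSQL and fetch avatars over HTTP, and the token is a placeholder.

```python
from discord.ext import commands

bot = commands.Bot(command_prefix="!")

# The cog expects bot.pool and bot.session to exist before its commands run;
# creating and attaching them is outside the scope of this sketch.
bot.load_extension("cogs.stats")
bot.run("YOUR_DISCORD_BOT_TOKEN")  # placeholder token
```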
hexsha: 0b87d1947b9f204e32acc1d2582802774251c65d | size: 1,589 | ext: py | lang: Python
max_stars: src/iBeatles/session/load_normalization_tab.py @ ornlneutronimaging/iBeatles (0a6ca1e18780cf08ad97b6cedede5a23f52bc953), licenses ["MIT"], 3 stars, events 2017-04-27T06:58:05.000Z to 2020-01-21T07:12:30.000Z
max_issues: src/iBeatles/session/load_normalization_tab.py @ ornlneutronimaging/iBeatles (0a6ca1e18780cf08ad97b6cedede5a23f52bc953), licenses ["MIT"], 99 issues, events 2019-05-09T14:05:56.000Z to 2022-03-30T19:13:31.000Z
max_forks: src/iBeatles/session/load_normalization_tab.py @ ornlneutronimaging/iBeatles (0a6ca1e18780cf08ad97b6cedede5a23f52bc953), licenses ["MIT"]; fork count and event datetimes: null
content:
from .. import DataType
from ..step2.initialization import Initialization as Step2Initialization
from ..step2.gui_handler import Step2GuiHandler
from ..utilities.pyqrgraph import Pyqtgrah as PyqtgraphUtilities
class LoadNormalization:
def __init__(self, parent=None):
self.parent = parent
self.session_dict = parent.session_dict
self.data_type = DataType.normalization
def roi(self):
data_type = self.data_type
session_dict = self.session_dict
list_roi = session_dict[data_type]['roi']
self.parent.list_roi[data_type] = list_roi
o_step2 = Step2Initialization(parent=self.parent)
o_step2.roi()
def check_widgets(self):
o_step2 = Step2GuiHandler(parent=self.parent)
o_step2.check_run_normalization_button()
def image_settings(self):
data_type = self.data_type
session_dict = self.session_dict
self.parent.image_view_settings[data_type]['state'] = session_dict[data_type]['image view state']
self.parent.image_view_settings[data_type]['histogram'] = session_dict[data_type]['image view histogram']
o_pyqt = PyqtgraphUtilities(parent=self.parent,
image_view=self.parent.step2_ui['image_view'],
data_type=data_type)
o_pyqt.set_state(session_dict[data_type]['image view state'])
o_pyqt.reload_histogram_level()
histogram_level = session_dict[data_type]['image view histogram']
o_pyqt.set_histogram_level(histogram_level=histogram_level)
avg_line_length: 37.833333 | max_line_length: 113 | alphanum_fraction: 0.693518
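LoadNormalization only reads a few keys from the parent's session_dict, so the expected shape can be reconstructed from the accesses above. The fragment below is a hypothetical illustration: the key names mirror the code, but the placeholder values and the assumption that DataType.normalization maps to the string "normalization" are not taken from iBeatles itself.

```python
# Hypothetical session_dict fragment consumed by LoadNormalization.roi()
# and LoadNormalization.image_settings(); all values are placeholders.
session_dict = {
    "normalization": {
        "roi": [],                     # list of saved ROI definitions
        "image view state": None,      # pyqtgraph image-view state to restore
        "image view histogram": None,  # histogram levels to restore
    }
}
```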
hexsha: 7c8441e2104ae142c5ab012545f0a24e1750af57 | size: 25,223 | ext: py | lang: Python
max_stars: src/oci/announcements_service/announcement_client.py @ ezequielramos/oci-python-sdk (cc4235cf217beaf9feed75760e9ce82610222762), licenses ["Apache-2.0", "BSD-3-Clause"], 3 stars, events 2020-09-10T22:09:45.000Z to 2021-12-24T17:00:07.000Z
max_issues / max_forks: src/oci/announcements_service/announcement_client.py @ ezequielramos/oci-python-sdk (cc4235cf217beaf9feed75760e9ce82610222762), licenses ["Apache-2.0", "BSD-3-Clause"]; issue and fork counts and event datetimes: null
content:
# coding: utf-8
# Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from __future__ import absolute_import
from oci._vendor import requests # noqa: F401
from oci._vendor import six
from oci import retry, circuit_breaker # noqa: F401
from oci.base_client import BaseClient
from oci.config import get_config_value_or_default, validate_config
from oci.signer import Signer
from oci.util import Sentinel, get_signer_from_authentication_type, AUTHENTICATION_TYPE_FIELD_NAME
from .models import announcements_service_type_mapping
missing = Sentinel("Missing")
class AnnouncementClient(object):
"""
Manage Oracle Cloud Infrastructure console announcements.
"""
def __init__(self, config, **kwargs):
"""
Creates a new service client
:param dict config:
Configuration keys and values as per `SDK and Tool Configuration <https://docs.cloud.oracle.com/Content/API/Concepts/sdkconfig.htm>`__.
The :py:meth:`~oci.config.from_file` method can be used to load configuration from a file. Alternatively, a ``dict`` can be passed. You can validate_config
the dict using :py:meth:`~oci.config.validate_config`
:param str service_endpoint: (optional)
The endpoint of the service to call using this client. For example ``https://iaas.us-ashburn-1.oraclecloud.com``. If this keyword argument is
not provided then it will be derived using the region in the config parameter. You should only provide this keyword argument if you have an explicit
need to specify a service endpoint.
:param timeout: (optional)
The connection and read timeouts for the client. The default values are connection timeout 10 seconds and read timeout 60 seconds. This keyword argument can be provided
as a single float, in which case the value provided is used for both the read and connection timeouts, or as a tuple of two floats. If
a tuple is provided then the first value is used as the connection timeout and the second value as the read timeout.
:type timeout: float or tuple(float, float)
:param signer: (optional)
The signer to use when signing requests made by the service client. The default is to use a :py:class:`~oci.signer.Signer` based on the values
provided in the config parameter.
One use case for this parameter is for `Instance Principals authentication <https://docs.cloud.oracle.com/Content/Identity/Tasks/callingservicesfrominstances.htm>`__
by passing an instance of :py:class:`~oci.auth.signers.InstancePrincipalsSecurityTokenSigner` as the value for this keyword argument
:type signer: :py:class:`~oci.signer.AbstractBaseSigner`
:param obj retry_strategy: (optional)
A retry strategy to apply to all calls made by this service client (i.e. at the client level). There is no retry strategy applied by default.
Retry strategies can also be applied at the operation level by passing a ``retry_strategy`` keyword argument as part of calling the operation.
Any value provided at the operation level will override whatever is specified at the client level.
This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
is also available. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
:param obj circuit_breaker_strategy: (optional)
A circuit breaker strategy to apply to all calls made by this service client (i.e. at the client level).
This client uses :py:data:`~oci.circuit_breaker.DEFAULT_CIRCUIT_BREAKER_STRATEGY` as default if no circuit breaker strategy is provided.
The specifics of circuit breaker strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/circuit_breakers.html>`__.
:param function circuit_breaker_callback: (optional)
            Callback function to receive any exceptions triggered by the circuit breaker.
"""
validate_config(config, signer=kwargs.get('signer'))
if 'signer' in kwargs:
signer = kwargs['signer']
elif AUTHENTICATION_TYPE_FIELD_NAME in config:
signer = get_signer_from_authentication_type(config)
else:
signer = Signer(
tenancy=config["tenancy"],
user=config["user"],
fingerprint=config["fingerprint"],
private_key_file_location=config.get("key_file"),
pass_phrase=get_config_value_or_default(config, "pass_phrase"),
private_key_content=config.get("key_content")
)
base_client_init_kwargs = {
'regional_client': True,
'service_endpoint': kwargs.get('service_endpoint'),
'base_path': '/20180904',
'service_endpoint_template': 'https://announcements.{region}.{secondLevelDomain}',
'skip_deserialization': kwargs.get('skip_deserialization', False),
'circuit_breaker_strategy': kwargs.get('circuit_breaker_strategy', circuit_breaker.GLOBAL_CIRCUIT_BREAKER_STRATEGY)
}
if 'timeout' in kwargs:
base_client_init_kwargs['timeout'] = kwargs.get('timeout')
if base_client_init_kwargs.get('circuit_breaker_strategy') is None:
base_client_init_kwargs['circuit_breaker_strategy'] = circuit_breaker.DEFAULT_CIRCUIT_BREAKER_STRATEGY
self.base_client = BaseClient("announcement", config, signer, announcements_service_type_mapping, **base_client_init_kwargs)
self.retry_strategy = kwargs.get('retry_strategy')
self.circuit_breaker_callback = kwargs.get('circuit_breaker_callback')
def get_announcement(self, announcement_id, **kwargs):
"""
Gets the details of a specific announcement.
:param str announcement_id: (required)
The OCID of the announcement.
:param str opc_request_id: (optional)
The unique Oracle-assigned identifier for the request. If you need to contact Oracle about
a particular request, please provide the complete request ID.
:param obj retry_strategy: (optional)
A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
This should be one of the strategies available in the :py:mod:`~oci.retry` module. This operation will not retry by default, users can also use the convenient :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY` provided by the SDK to enable retries for it.
The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
:return: A :class:`~oci.response.Response` object with data of type :class:`~oci.announcements_service.models.Announcement`
:rtype: :class:`~oci.response.Response`
:example:
Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/announcementsservice/get_announcement.py.html>`__ to see an example of how to use get_announcement API.
"""
resource_path = "/announcements/{announcementId}"
method = "GET"
# Don't accept unknown kwargs
expected_kwargs = [
"retry_strategy",
"opc_request_id"
]
extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
if extra_kwargs:
raise ValueError(
"get_announcement got unknown kwargs: {!r}".format(extra_kwargs))
path_params = {
"announcementId": announcement_id
}
path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}
for (k, v) in six.iteritems(path_params):
if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))
header_params = {
"accept": "application/json",
"content-type": "application/json",
"opc-request-id": kwargs.get("opc_request_id", missing)
}
header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}
retry_strategy = self.base_client.get_preferred_retry_strategy(
operation_retry_strategy=kwargs.get('retry_strategy'),
client_retry_strategy=self.retry_strategy
)
if retry_strategy:
if not isinstance(retry_strategy, retry.NoneRetryStrategy):
self.base_client.add_opc_client_retries_header(header_params)
retry_strategy.add_circuit_breaker_callback(self.circuit_breaker_callback)
return retry_strategy.make_retrying_call(
self.base_client.call_api,
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
response_type="Announcement")
else:
return self.base_client.call_api(
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
response_type="Announcement")
def get_announcement_user_status(self, announcement_id, **kwargs):
"""
Gets information about whether a specific announcement was acknowledged by a user.
:param str announcement_id: (required)
The OCID of the announcement.
:param str opc_request_id: (optional)
The unique Oracle-assigned identifier for the request. If you need to contact Oracle about
a particular request, please provide the complete request ID.
:param obj retry_strategy: (optional)
A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
This should be one of the strategies available in the :py:mod:`~oci.retry` module. This operation will not retry by default, users can also use the convenient :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY` provided by the SDK to enable retries for it.
The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
:return: A :class:`~oci.response.Response` object with data of type :class:`~oci.announcements_service.models.AnnouncementUserStatusDetails`
:rtype: :class:`~oci.response.Response`
:example:
Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/announcementsservice/get_announcement_user_status.py.html>`__ to see an example of how to use get_announcement_user_status API.
"""
resource_path = "/announcements/{announcementId}/userStatus"
method = "GET"
# Don't accept unknown kwargs
expected_kwargs = [
"retry_strategy",
"opc_request_id"
]
extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
if extra_kwargs:
raise ValueError(
"get_announcement_user_status got unknown kwargs: {!r}".format(extra_kwargs))
path_params = {
"announcementId": announcement_id
}
path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}
for (k, v) in six.iteritems(path_params):
if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))
header_params = {
"accept": "application/json",
"content-type": "application/json",
"opc-request-id": kwargs.get("opc_request_id", missing)
}
header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}
retry_strategy = self.base_client.get_preferred_retry_strategy(
operation_retry_strategy=kwargs.get('retry_strategy'),
client_retry_strategy=self.retry_strategy
)
if retry_strategy:
if not isinstance(retry_strategy, retry.NoneRetryStrategy):
self.base_client.add_opc_client_retries_header(header_params)
retry_strategy.add_circuit_breaker_callback(self.circuit_breaker_callback)
return retry_strategy.make_retrying_call(
self.base_client.call_api,
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
response_type="AnnouncementUserStatusDetails")
else:
return self.base_client.call_api(
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
response_type="AnnouncementUserStatusDetails")
def list_announcements(self, compartment_id, **kwargs):
"""
Gets a list of announcements for the current tenancy.
:param str compartment_id: (required)
The OCID of the compartment. Because announcements are specific to a tenancy, this is the
OCID of the root compartment.
:param int limit: (optional)
The maximum number of items to return in a paginated \"List\" call.
:param str page: (optional)
The value of the `opc-next-page` response header from the previous \"List\" call.
:param str announcement_type: (optional)
The type of announcement.
:param str lifecycle_state: (optional)
The announcement's current lifecycle state.
Allowed values are: "ACTIVE", "INACTIVE"
:param bool is_banner: (optional)
Whether the announcement is displayed as a console banner.
:param str sort_by: (optional)
The criteria to sort by. You can specify only one sort order.
Allowed values are: "timeOneValue", "timeTwoValue", "timeCreated", "referenceTicketNumber", "summary", "announcementType"
:param str sort_order: (optional)
The sort order to use. (Sorting by `announcementType` orders the announcements list according to importance.)
Allowed values are: "ASC", "DESC"
:param datetime time_one_earliest_time: (optional)
The boundary for the earliest `timeOneValue` date on announcements that you want to see.
:param datetime time_one_latest_time: (optional)
The boundary for the latest `timeOneValue` date on announcements that you want to see.
:param str opc_request_id: (optional)
The unique Oracle-assigned identifier for the request. If you need to contact Oracle about
a particular request, please provide the complete request ID.
:param obj retry_strategy: (optional)
A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
This should be one of the strategies available in the :py:mod:`~oci.retry` module. This operation will not retry by default, users can also use the convenient :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY` provided by the SDK to enable retries for it.
The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
:return: A :class:`~oci.response.Response` object with data of type :class:`~oci.announcements_service.models.AnnouncementsCollection`
:rtype: :class:`~oci.response.Response`
:example:
Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/announcementsservice/list_announcements.py.html>`__ to see an example of how to use list_announcements API.
"""
resource_path = "/announcements"
method = "GET"
# Don't accept unknown kwargs
expected_kwargs = [
"retry_strategy",
"limit",
"page",
"announcement_type",
"lifecycle_state",
"is_banner",
"sort_by",
"sort_order",
"time_one_earliest_time",
"time_one_latest_time",
"opc_request_id"
]
extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
if extra_kwargs:
raise ValueError(
"list_announcements got unknown kwargs: {!r}".format(extra_kwargs))
if 'lifecycle_state' in kwargs:
lifecycle_state_allowed_values = ["ACTIVE", "INACTIVE"]
if kwargs['lifecycle_state'] not in lifecycle_state_allowed_values:
raise ValueError(
"Invalid value for `lifecycle_state`, must be one of {0}".format(lifecycle_state_allowed_values)
)
if 'sort_by' in kwargs:
sort_by_allowed_values = ["timeOneValue", "timeTwoValue", "timeCreated", "referenceTicketNumber", "summary", "announcementType"]
if kwargs['sort_by'] not in sort_by_allowed_values:
raise ValueError(
"Invalid value for `sort_by`, must be one of {0}".format(sort_by_allowed_values)
)
if 'sort_order' in kwargs:
sort_order_allowed_values = ["ASC", "DESC"]
if kwargs['sort_order'] not in sort_order_allowed_values:
raise ValueError(
"Invalid value for `sort_order`, must be one of {0}".format(sort_order_allowed_values)
)
query_params = {
"limit": kwargs.get("limit", missing),
"page": kwargs.get("page", missing),
"compartmentId": compartment_id,
"announcementType": kwargs.get("announcement_type", missing),
"lifecycleState": kwargs.get("lifecycle_state", missing),
"isBanner": kwargs.get("is_banner", missing),
"sortBy": kwargs.get("sort_by", missing),
"sortOrder": kwargs.get("sort_order", missing),
"timeOneEarliestTime": kwargs.get("time_one_earliest_time", missing),
"timeOneLatestTime": kwargs.get("time_one_latest_time", missing)
}
query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}
header_params = {
"accept": "application/json",
"content-type": "application/json",
"opc-request-id": kwargs.get("opc_request_id", missing)
}
header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}
retry_strategy = self.base_client.get_preferred_retry_strategy(
operation_retry_strategy=kwargs.get('retry_strategy'),
client_retry_strategy=self.retry_strategy
)
if retry_strategy:
if not isinstance(retry_strategy, retry.NoneRetryStrategy):
self.base_client.add_opc_client_retries_header(header_params)
retry_strategy.add_circuit_breaker_callback(self.circuit_breaker_callback)
return retry_strategy.make_retrying_call(
self.base_client.call_api,
resource_path=resource_path,
method=method,
query_params=query_params,
header_params=header_params,
response_type="AnnouncementsCollection")
else:
return self.base_client.call_api(
resource_path=resource_path,
method=method,
query_params=query_params,
header_params=header_params,
response_type="AnnouncementsCollection")
def update_announcement_user_status(self, announcement_id, status_details, **kwargs):
"""
Updates the status of the specified announcement with regard to whether it has been marked as read.
:param str announcement_id: (required)
The OCID of the announcement.
:param oci.announcements_service.models.AnnouncementUserStatusDetails status_details: (required)
The information to use to update the announcement's read status.
:param str if_match: (optional)
The locking version, used for optimistic concurrency control.
:param str opc_request_id: (optional)
The unique Oracle-assigned identifier for the request. If you need to contact Oracle about
a particular request, please provide the complete request ID.
:param obj retry_strategy: (optional)
A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
This should be one of the strategies available in the :py:mod:`~oci.retry` module. This operation will not retry by default, users can also use the convenient :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY` provided by the SDK to enable retries for it.
The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
:return: A :class:`~oci.response.Response` object with data of type None
:rtype: :class:`~oci.response.Response`
:example:
Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/announcementsservice/update_announcement_user_status.py.html>`__ to see an example of how to use update_announcement_user_status API.
"""
resource_path = "/announcements/{announcementId}/userStatus"
method = "PUT"
# Don't accept unknown kwargs
expected_kwargs = [
"retry_strategy",
"if_match",
"opc_request_id"
]
extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
if extra_kwargs:
raise ValueError(
"update_announcement_user_status got unknown kwargs: {!r}".format(extra_kwargs))
path_params = {
"announcementId": announcement_id
}
path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}
for (k, v) in six.iteritems(path_params):
if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))
header_params = {
"accept": "application/json",
"content-type": "application/json",
"if-match": kwargs.get("if_match", missing),
"opc-request-id": kwargs.get("opc_request_id", missing)
}
header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}
retry_strategy = self.base_client.get_preferred_retry_strategy(
operation_retry_strategy=kwargs.get('retry_strategy'),
client_retry_strategy=self.retry_strategy
)
if retry_strategy:
if not isinstance(retry_strategy, retry.NoneRetryStrategy):
self.base_client.add_opc_client_retries_header(header_params)
retry_strategy.add_circuit_breaker_callback(self.circuit_breaker_callback)
return retry_strategy.make_retrying_call(
self.base_client.call_api,
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
body=status_details)
else:
return self.base_client.call_api(
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
body=status_details)
avg_line_length: 50.852823 | max_line_length: 261 | alphanum_fraction: 0.663165
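The docstrings above already describe the call pattern for this generated client; the snippet below is a minimal usage sketch that lists active announcements for a tenancy. It assumes a valid ~/.oci/config default profile, and since announcements are tenancy-scoped it passes the tenancy OCID from that config as the compartment.

```python
from oci.config import from_file
from oci.announcements_service import AnnouncementClient

# Build the client from the default ~/.oci/config profile.
config = from_file()
client = AnnouncementClient(config)

# Announcements are scoped to the tenancy (root compartment).
response = client.list_announcements(
    compartment_id=config["tenancy"],
    lifecycle_state="ACTIVE",
    limit=10,
)
for announcement in response.data.items:
    print(announcement.reference_ticket_number, announcement.summary)
```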
hexsha: c9639a87495a2c8a2033c8a841665f0771693de5 | size: 24,305 | ext: py | lang: Python
max_stars / max_issues / max_forks: src/genie/libs/parser/iosxe/tests/test_show_cdp.py @ svautour/genieparser (7416c9a4b44582be835a0646fb7fad92a5181c7d), licenses ["Apache-2.0"]; star, issue, and fork counts and event datetimes: null
content:
import unittest
from unittest.mock import Mock
from pyats.topology import Device
from genie.metaparser.util.exceptions import SchemaEmptyParserError
from genie.libs.parser.iosxe.show_cdp import ShowCdpNeighbors, ShowCdpNeighborsDetail
class test_show_cdp_neighbors(unittest.TestCase):
device = Device(name='aDevice')
empty_device_output = {'execute.return_value': '''
Device# show cdp neighbors
Capability Codes: R - Router, T - Trans Bridge, B - Source Route Bridge
S - Switch, H - Host, I - IGMP, r - Repeater
Device ID Local Interfce Holdtme Capability Platform ''
Port ID
'''}
expected_parsed_output_1 = {
'cdp': {
'index': {
1: {
'capability': 'R B',
'device_id': 'R5.cisco.com',
'hold_time': 125,
'local_interface': 'GigabitEthernet0/0',
'platform': '',
'port_id': 'GigabitEthernet0/0'},
2: {
'capability': 'T S',
'device_id': 'RX-SWV.cisco.com',
'hold_time': 167,
'local_interface': 'FastEthernet0/1',
'platform': 'WS-C3524-X',
'port_id': 'FastEthernet0/13'},
3: {
'capability': 'S I',
'device_id': 'C2950-1',
'hold_time': 148,
'local_interface': 'FastEthernet0/0',
'platform': 'WS-C2950T-',
'port_id': 'FastEthernet0/15'}
}
}
}
device_output_1 = {'execute.return_value': '''
Device# show cdp neighbors
Capability Codes: R - Router, T - Trans Bridge, B - Source Route Bridge
S - Switch, H - Host, I - IGMP, r - Repeater
Device ID Local Interfce Holdtme Capability Platform ''
Port ID
R5.cisco.com Gig 0/0 125 R B Gig 0/0
RX-SWV.cisco.com Fas 0/1 167 T S WS-C3524-XFas 0/13
C2950-1 Fas 0/0 148 S I WS-C2950T-Fas 0/15
'''}
expected_parsed_output_2 = {
'cdp': {
'index': {
1: {
'capability': 'R',
'device_id': 'device2',
'hold_time': 152,
'local_interface': 'Ethernet0',
'platform': 'AS5200',
'port_id': 'Ethernet0'},
2: {
'capability': 'R',
'device_id': 'device3',
'hold_time': 144,
'local_interface': 'Ethernet0',
'platform': '3640',
'port_id': 'Ethernet0/0'},
3: {
'capability': '',
'device_id': 'device4',
'hold_time': 141,
'local_interface': 'Ethernet0',
'platform': 'RP1',
'port_id': 'Ethernet0/0'}
}
}
}
device_output_2 = {'execute.return_value': '''
Device# show cdp neighbors
Capability Codes: R - Router, T - Trans Bridge, B - Source Route Bridge
S - Switch, H - Host, I - IGMP, r - Repeater
Device ID Local Interfce Holdtme Capability Platform ''
Port ID
device2 Eth 0 152 R AS5200 Eth 0
device3 Eth 0 144 R 3640 Eth0/0
device4 Eth 0 141 RP1 Eth 0/0
'''}
expected_parsed_output_3 = {
'cdp': {
'index': {
1: {
'capability': 'R B',
'device_id': 'R8.cisco.com',
'hold_time': 148,
'local_interface': 'GigabitEthernet0/0',
'platform': '',
'port_id': 'GigabitEthernet0/0'},
2: {
'capability': 'R B',
'device_id': 'R9.cisco.com',
'hold_time': 156,
'local_interface': 'GigabitEthernet0/0',
'platform': '',
'port_id': 'GigabitEthernet0/0'},
3: {
'capability': 'R S I',
'device_id': 'device6',
'hold_time': 157,
'local_interface': 'GigabitEthernet0',
'platform': 'C887VA-W-W',
'port_id': 'GigabitEthernet0'}
}
}
}
device_output_3 = {'execute.return_value': '''
Device# show cdp neighbors
Capability Codes: R - Router, T - Trans Bridge, B - Source Route Bridge
S - Switch, H - Host, I - IGMP, r - Repeater
Device ID Local Interfce Holdtme Capability Platform ''
Port ID
R8.cisco.com Gig 0/0 148 R B Gig 0/0
R9.cisco.com Gig 0/0 156 R B Gig 0/0
device6 Gig 0 157 R S I C887VA-W-W Gi 0
'''}
expected_parsed_output_4 = {
'cdp': {
'index': {
1: {
'capability': '',
'device_id': 'device4',
'hold_time': 141,
'local_interface': 'Ethernet0',
'platform': 'RP1',
'port_id': 'Ethernet0/0'},
2: {
'capability': '',
'device_id': 'device5',
'hold_time': 164,
'local_interface': 'Ethernet0',
'platform': '7206',
'port_id': 'Ethernet1/0'}
}
}
}
device_output_4 = {'execute.return_value': '''
Device# show cdp neighbors
Capability Codes: R - Router, T - Trans Bridge, B - Source Route Bridge
S - Switch, H - Host, I - IGMP, r - Repeater
Device ID Local Interfce Holdtme Capability Platform ''
Port ID
device4 Eth 0 141 RP1 Eth 0/0
device5 Eth 0 164 7206 Eth 1/0
'''}
expected_parsed_output_5 = {
'cdp': {
'index': {
1: {
'capability': 'R S C',
'device_id': 'Device_With_A_Particularly_Long_Name',
'hold_time': 134,
'local_interface': 'GigabitEthernet1',
'platform': 'N9K-9000v',
'port_id': 'Ethernet0/0'}
}
}
}
device_output_5 = {'execute.return_value': '''
Capability Codes: R - Router, T - Trans Bridge, B - Source Route Bridge
S - Switch, H - Host, I - IGMP, r - Repeater, P - Phone,
D - Remote, C - CVTA, M - Two-port Mac Relay
Device ID Local Intrfce Holdtme Capability Platform Port ID
Device_With_A_Particularly_Long_Name
Gig 1 134 R S C N9K-9000v Eth 0/0
'''}
def test_show_cdp_neighbors_1(self):
self.maxDiff = None
self.device = Mock(**self.device_output_1)
obj = ShowCdpNeighbors(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.expected_parsed_output_1)
def test_show_cdp_neighbors_2(self):
self.maxDiff = None
self.device = Mock(**self.device_output_2)
obj = ShowCdpNeighbors(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.expected_parsed_output_2)
def test_show_cdp_neighbors_3(self):
self.maxDiff = None
self.device = Mock(**self.device_output_3)
obj = ShowCdpNeighbors(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.expected_parsed_output_3)
def test_show_cdp_neighbors_4(self):
self.maxDiff = None
self.device = Mock(**self.device_output_4)
obj = ShowCdpNeighbors(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.expected_parsed_output_4)
def test_show_cdp_neighbors_5(self):
self.maxDiff = None
self.device = Mock(**self.device_output_5)
obj = ShowCdpNeighbors(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.expected_parsed_output_5)
def test_show_cdp_neighbors_empty_output(self):
self.maxDiff = None
self.device = Mock(**self.empty_device_output)
obj = ShowCdpNeighbors(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
class test_show_cdp_neighbors_detail(unittest.TestCase):
device = Device(name='aDevice')
expected_parsed_output_1 = {
'total_entries_displayed': 3,
'index': {
1: {
'advertisement_ver': 2,
'capabilities': 'Router Switch CVTA phone port',
'device_id': 'R6(9P57K4EJ8CA)',
'duplex_mode': 'full',
'entry_addresses': {'172.16.1.203': {}},
'hold_time': 133,
'local_interface': 'GigabitEthernet0/0',
'management_addresses': {'172.16.1.203': {}},
'native_vlan': '',
'platform': 'N9K-9000v',
'port_id': 'mgmt0',
'software_version': 'Cisco Nexus Operating System (NX-OS) Software, Version 9.2(1)',
'vtp_management_domain': ''},
2: {
'advertisement_ver': 2,
'capabilities': 'Router Switch CVTA phone port',
'device_id': 'R7(9QBDKB58F76)',
'duplex_mode': 'full',
'entry_addresses': {'172.16.1.204': {}},
'hold_time': 126,
'local_interface': 'GigabitEthernet0/0',
'management_addresses': {'172.16.1.204': {}},
'native_vlan': '',
'platform': 'N9K-9000v',
'port_id': 'mgmt0',
'software_version': 'Cisco Nexus Operating System (NX-OS) Software, Version 9.2(1)',
'vtp_management_domain': ''},
3: {
'advertisement_ver': 2,
'capabilities': 'Router Source-Route-Bridge',
'device_id': 'R5.cisco.com',
'duplex_mode': '',
'entry_addresses': {'172.16.1.202': {}},
'hold_time': 177,
'local_interface': 'GigabitEthernet0/0',
'management_addresses': {'172.16.1.202': {}},
'native_vlan': '',
'platform': 'Cisco ',
'port_id': 'GigabitEthernet0/0',
'software_version': 'Cisco IOS Software, IOSv Software (VIOS-ADVENTERPRISEK9-M), Version 15.7(3)M3, RELEASE SOFTWARE (fc2)\n'
'Technical Support: http://www.cisco.com/techsupport\n'
'Copyright (c) 1986-2018 by Cisco Systems, Inc.\n'
'Compiled Wed 01-Aug-18 16:45 by prod_rel_team',
'vtp_management_domain': ''},
},
}
device_output_1 = {'execute.return_value': '''
Device# show cdp neighbors detail
Device ID: R6(9P57K4EJ8CA)
Entry address(es):
IP address: 172.16.1.203
Platform: N9K-9000v, Capabilities: Router Switch CVTA phone port
Interface: GigabitEthernet0/0, Port ID (outgoing port): mgmt0
Holdtime : 133 sec
Version :
Cisco Nexus Operating System (NX-OS) Software, Version 9.2(1)
advertisement version: 2
Duplex: full
Management address(es):
IP address: 172.16.1.203
-------------------------
Device ID: R7(9QBDKB58F76)
Entry address(es):
IP address: 172.16.1.204
Platform: N9K-9000v, Capabilities: Router Switch CVTA phone port
Interface: GigabitEthernet0/0, Port ID (outgoing port): mgmt0
Holdtime : 126 sec
Version :
Cisco Nexus Operating System (NX-OS) Software, Version 9.2(1)
advertisement version: 2
Duplex: full
Management address(es):
IP address: 172.16.1.204
-------------------------
Device ID: R5.cisco.com
Entry address(es):
IP address: 172.16.1.202
Platform: Cisco , Capabilities: Router Source-Route-Bridge
Interface: GigabitEthernet0/0, Port ID (outgoing port): GigabitEthernet0/0
Holdtime : 177 sec
Version :
Cisco IOS Software, IOSv Software (VIOS-ADVENTERPRISEK9-M), Version 15.7(3)M3, RELEASE SOFTWARE (fc2)
Technical Support: http://www.cisco.com/techsupport
Copyright (c) 1986-2018 by Cisco Systems, Inc.
Compiled Wed 01-Aug-18 16:45 by prod_rel_team
advertisement version: 2
Management address(es):
IP address: 172.16.1.202
Total cdp entries displayed : 3
'''}
expected_parsed_output_2 = {
'total_entries_displayed': 2,
'index': {
1: {
'advertisement_ver': 2,
'capabilities': 'Router Source-Route-Bridge',
'device_id': 'R8.cisco.com',
'duplex_mode': '',
'entry_addresses': {'172.16.1.205': {}},
'hold_time': 143,
'local_interface': 'GigabitEthernet0/0',
'management_addresses': {'172.16.1.205': {}},
'native_vlan': '',
'platform': 'Cisco ',
'port_id': 'GigabitEthernet0/0',
'software_version': 'Cisco IOS Software, IOSv Software (VIOS-ADVENTERPRISEK9-M), Version 15.7(3)M3, RELEASE SOFTWARE (fc2)\n'
'Technical Support: http://www.cisco.com/techsupport\n'
'Copyright (c) 1986-2018 by Cisco Systems, Inc.\n'
'Compiled Wed 01-Aug-18 16:45 by prod_rel_team',
'vtp_management_domain': ''},
2: {
'advertisement_ver': 2,
'capabilities': 'Router Source-Route-Bridge',
'device_id': 'R9.cisco.com',
'duplex_mode': '',
'entry_addresses': {'172.16.1.206': {}},
'hold_time': 151,
'local_interface': 'GigabitEthernet0/0',
'management_addresses': {'172.16.1.206': {}},
'native_vlan': '',
'platform': 'Cisco ',
'port_id': 'GigabitEthernet0/0',
'software_version': 'Cisco IOS Software, IOSv Software (VIOS-ADVENTERPRISEK9-M), Version 15.7(3)M3, RELEASE SOFTWARE (fc2)\n'
'Technical Support: http://www.cisco.com/techsupport\n'
'Copyright (c) 1986-2018 by Cisco Systems, Inc.\n'
'Compiled Wed 01-Aug-18 16:45 by prod_rel_team',
'vtp_management_domain': ''},
},
}
device_output_2 = {'execute.return_value': '''
Device# show cdp neighbors detail
Device ID: R8.cisco.com
Entry address(es):
IP address: 172.16.1.205
Platform: Cisco , Capabilities: Router Source-Route-Bridge
Interface: GigabitEthernet0/0, Port ID (outgoing port): GigabitEthernet0/0
Holdtime : 143 sec
Version :
Cisco IOS Software, IOSv Software (VIOS-ADVENTERPRISEK9-M), Version 15.7(3)M3, RELEASE SOFTWARE (fc2)
Technical Support: http://www.cisco.com/techsupport
Copyright (c) 1986-2018 by Cisco Systems, Inc.
Compiled Wed 01-Aug-18 16:45 by prod_rel_team
advertisement version: 2
Management address(es):
IP address: 172.16.1.205
-------------------------
Device ID: R9.cisco.com
Entry address(es):
IP address: 172.16.1.206
Platform: Cisco , Capabilities: Router Source-Route-Bridge
Interface: GigabitEthernet0/0, Port ID (outgoing port): GigabitEthernet0/0
Holdtime : 151 sec
Version :
Cisco IOS Software, IOSv Software (VIOS-ADVENTERPRISEK9-M), Version 15.7(3)M3, RELEASE SOFTWARE (fc2)
Technical Support: http://www.cisco.com/techsupport
Copyright (c) 1986-2018 by Cisco Systems, Inc.
Compiled Wed 01-Aug-18 16:45 by prod_rel_team
advertisement version: 2
Management address(es):
IP address: 172.16.1.206
Total cdp entries displayed : 2
'''}
expected_parsed_output_3 = {
'total_entries_displayed': 1,
'index': {
1: {
'advertisement_ver': 2,
'capabilities': 'Router',
'device_id': 'device.cisco.com',
'duplex_mode': 'half',
'entry_addresses': {
'2001:DB8:1000:8A10::C0A8:BC06': {'type': 'global unicast'},
'FE80::203:E3FF:FE6A:BF81': {'type': 'link-local'},
},
'hold_time': 160,
'local_interface': 'Ethernet0/1',
'management_addresses': {},
'native_vlan': '42',
'platform': 'cisco 3640',
'port_id': 'Ethernet0/1',
'software_version': 'Cisco Internetwork Operating System Software IOS (tm) 3600 Software (C3640-A2IS-M), Version 12.2(25)SEB4, RELE)',
'vtp_management_domain': 'Accounting Group'},
}
}
device_output_3 = {'execute.return_value': '''
Device# show cdp neighbors detail
Device ID: device.cisco.com
Entry address(es):
IPv6 address: FE80::203:E3FF:FE6A:BF81 (link-local)
IPv6 address: 2001:DB8:1000:8A10::C0A8:BC06 (global unicast)
Platform: cisco 3640, Capabilities: Router
Interface: Ethernet0/1, Port ID (outgoing port): Ethernet0/1
Holdtime : 160 sec
Version :
Cisco Internetwork Operating System Software IOS (tm) 3600 Software (C3640-A2IS-M), Version 12.2(25)SEB4, RELE)
advertisement version: 2
Duplex Mode: half
Native VLAN: 42
VTP Management Domain: ‘Accounting Group’
'''}
device_output_4_empty = {'execute.return_value': ''}
expected_parsed_output_5 = {
'total_entries_displayed': 2,
'index': {
1: {
'advertisement_ver': 2,
'capabilities': 'Router Switch-6506 IGMP',
'device_id': 'R8.cisco.com',
'duplex_mode': '',
'entry_addresses': {'172.16.1.205': {}},
'hold_time': 143,
'local_interface': 'GigabitEthernet1/0/3',
'management_addresses': {'172.16.1.205': {}},
'native_vlan': '',
'platform': 'cisco WS_C6506_E',
'port_id': 'GigabitEthernet1/0/3',
'software_version': 'Cisco IOS Software, IOSv Software (VIOS-ADVENTERPRISEK9-M), Version 15.7(3)M3, RELEASE SOFTWARE (fc2)\n'
'Technical Support: http://www.cisco.com/techsupport\n'
'Copyright (c) 1986-2018 by Cisco Systems, Inc.\n'
'Compiled Wed 01-Aug-18 16:45 by prod_rel_team',
'vtp_management_domain': ''},
2: {
'advertisement_ver': 2,
'capabilities': 'Router Switch_6506 IGMP',
'device_id': 'R9.cisco.com',
'duplex_mode': '',
'entry_addresses': {'172.16.1.206': {}},
'hold_time': 151,
'local_interface': 'GigabitEthernet1/2/5',
'management_addresses': {'172.16.1.206': {}},
'native_vlan': '',
'platform': 'cisco WS-C6506-E',
'port_id': 'GigabitEthernet1/2/5',
'software_version': 'Cisco IOS Software, IOSv Software (VIOS-ADVENTERPRISEK9-M), Version 15.7(3)M3, RELEASE SOFTWARE (fc2)\n'
'Technical Support: http://www.cisco.com/techsupport\n'
'Copyright (c) 1986-2018 by Cisco Systems, Inc.\n'
'Compiled Wed 01-Aug-18 16:45 by prod_rel_team',
'vtp_management_domain': ''},
},
}
device_output_5 = {'execute.return_value': '''
Device# show cdp neighbors detail
Device ID: R8.cisco.com
Entry address(es):
IP address: 172.16.1.205
Platform: cisco WS_C6506_E, Capabilities: Router Switch-6506 IGMP
Interface: GigabitEthernet1/0/3, Port ID (outgoing port): GigabitEthernet1/0/3
Holdtime : 143 sec
Version :
Cisco IOS Software, IOSv Software (VIOS-ADVENTERPRISEK9-M), Version 15.7(3)M3, RELEASE SOFTWARE (fc2)
Technical Support: http://www.cisco.com/techsupport
Copyright (c) 1986-2018 by Cisco Systems, Inc.
Compiled Wed 01-Aug-18 16:45 by prod_rel_team
advertisement version: 2
Management address(es):
IP address: 172.16.1.205
-------------------------
Device ID: R9.cisco.com
Entry address(es):
IP address: 172.16.1.206
Platform: cisco WS-C6506-E, Capabilities: Router Switch_6506 IGMP
Interface: GigabitEthernet1/2/5, Port ID (outgoing port): GigabitEthernet1/2/5
Holdtime : 151 sec
Version :
Cisco IOS Software, IOSv Software (VIOS-ADVENTERPRISEK9-M), Version 15.7(3)M3, RELEASE SOFTWARE (fc2)
Technical Support: http://www.cisco.com/techsupport
Copyright (c) 1986-2018 by Cisco Systems, Inc.
Compiled Wed 01-Aug-18 16:45 by prod_rel_team
advertisement version: 2
Management address(es):
IP address: 172.16.1.206
Total cdp entries displayed : 2
'''}
def test_show_cdp_neighbors_detail_1(self):
self.maxDiff = None
self.device = Mock(**self.device_output_1)
obj = ShowCdpNeighborsDetail(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.expected_parsed_output_1)
def test_show_cdp_neighbors_detail_2(self):
self.maxDiff = None
self.device = Mock(**self.device_output_2)
obj = ShowCdpNeighborsDetail(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.expected_parsed_output_2)
def test_show_cdp_neighbors_detail_3(self):
self.maxDiff = None
self.device = Mock(**self.device_output_3)
obj = ShowCdpNeighborsDetail(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.expected_parsed_output_3)
def test_show_cdp_neighbors_detail_empty(self):
self.maxDiff = None
self.device = Mock(**self.device_output_4_empty)
obj = ShowCdpNeighborsDetail(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
def test_show_cdp_neighbors_detail_4(self):
self.maxDiff = None
self.device = Mock(**self.device_output_5)
obj = ShowCdpNeighborsDetail(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.expected_parsed_output_5)
if __name__ == '__main__':
unittest.main()
| 40.373754
| 154
| 0.51512
|
c8ad5359098156ad9129d3f9d0912a4ee353f2b7
| 11,576
|
py
|
Python
|
melati/wallet/wallet_puzzle_store.py
|
luzofex/melati-blockchain
|
bddc95ed3a8c5631488cd44a9e76b764c19b4568
|
[
"Apache-2.0"
] | 12
|
2021-07-13T15:39:57.000Z
|
2022-02-09T04:32:12.000Z
|
melati/wallet/wallet_puzzle_store.py
|
luzofex/melati-blockchain
|
bddc95ed3a8c5631488cd44a9e76b764c19b4568
|
[
"Apache-2.0"
] | 1
|
2021-07-16T12:41:41.000Z
|
2021-07-16T12:42:48.000Z
|
melati/wallet/wallet_puzzle_store.py
|
luzofex/melati-blockchain
|
bddc95ed3a8c5631488cd44a9e76b764c19b4568
|
[
"Apache-2.0"
] | 3
|
2021-07-13T05:35:30.000Z
|
2021-08-06T13:11:14.000Z
|
import asyncio
import logging
from typing import List, Optional, Set, Tuple
import aiosqlite
from blspy import G1Element
from melati.types.blockchain_format.sized_bytes import bytes32
from melati.util.db_wrapper import DBWrapper
from melati.util.ints import uint32
from melati.wallet.derivation_record import DerivationRecord
from melati.wallet.util.wallet_types import WalletType
log = logging.getLogger(__name__)
class WalletPuzzleStore:
"""
WalletPuzzleStore keeps track of all generated puzzle_hashes and their derivation path / wallet.
This is only used for HD wallets where each address is derived from a public key. Otherwise, use the
WalletInterestedStore to keep track of puzzle hashes which we are interested in.
"""
db_connection: aiosqlite.Connection
lock: asyncio.Lock
cache_size: uint32
all_puzzle_hashes: Set[bytes32]
db_wrapper: DBWrapper
@classmethod
async def create(cls, db_wrapper: DBWrapper, cache_size: uint32 = uint32(600000)):
self = cls()
self.cache_size = cache_size
self.db_wrapper = db_wrapper
self.db_connection = self.db_wrapper.db
await self.db_connection.execute("pragma journal_mode=wal")
await self.db_connection.execute("pragma synchronous=2")
await self.db_connection.execute(
(
"CREATE TABLE IF NOT EXISTS derivation_paths("
"derivation_index int,"
" pubkey text,"
" puzzle_hash text PRIMARY_KEY,"
" wallet_type int,"
" wallet_id int,"
" used tinyint)"
)
)
await self.db_connection.execute(
"CREATE INDEX IF NOT EXISTS derivation_index_index on derivation_paths(derivation_index)"
)
await self.db_connection.execute("CREATE INDEX IF NOT EXISTS ph on derivation_paths(puzzle_hash)")
await self.db_connection.execute("CREATE INDEX IF NOT EXISTS pubkey on derivation_paths(pubkey)")
await self.db_connection.execute("CREATE INDEX IF NOT EXISTS wallet_type on derivation_paths(wallet_type)")
await self.db_connection.execute("CREATE INDEX IF NOT EXISTS wallet_id on derivation_paths(wallet_id)")
await self.db_connection.execute("CREATE INDEX IF NOT EXISTS used on derivation_paths(wallet_type)")
await self.db_connection.commit()
# Lock
self.lock = asyncio.Lock() # external
await self._init_cache()
return self
async def close(self):
await self.db_connection.close()
async def _init_cache(self):
self.all_puzzle_hashes = await self.get_all_puzzle_hashes()
async def _clear_database(self):
cursor = await self.db_connection.execute("DELETE FROM derivation_paths")
await cursor.close()
await self.db_connection.commit()
async def add_derivation_paths(self, records: List[DerivationRecord], in_transaction=False) -> None:
"""
Insert many derivation paths into the database.
"""
if not in_transaction:
await self.db_wrapper.lock.acquire()
try:
sql_records = []
for record in records:
self.all_puzzle_hashes.add(record.puzzle_hash)
sql_records.append(
(
record.index,
bytes(record.pubkey).hex(),
record.puzzle_hash.hex(),
record.wallet_type,
record.wallet_id,
0,
),
)
cursor = await self.db_connection.executemany(
"INSERT OR REPLACE INTO derivation_paths VALUES(?, ?, ?, ?, ?, ?)",
sql_records,
)
await cursor.close()
finally:
if not in_transaction:
await self.db_connection.commit()
self.db_wrapper.lock.release()
async def get_derivation_record(self, index: uint32, wallet_id: uint32) -> Optional[DerivationRecord]:
"""
Returns the derivation record by index and wallet id.
"""
cursor = await self.db_connection.execute(
"SELECT * FROM derivation_paths WHERE derivation_index=? and wallet_id=?;",
(
index,
wallet_id,
),
)
row = await cursor.fetchone()
await cursor.close()
if row is not None and row[0] is not None:
return DerivationRecord(
uint32(row[0]),
bytes32.fromhex(row[2]),
G1Element.from_bytes(bytes.fromhex(row[1])),
WalletType(row[3]),
uint32(row[4]),
)
return None
async def get_derivation_record_for_puzzle_hash(self, puzzle_hash: str) -> Optional[DerivationRecord]:
"""
        Returns the derivation record for the given puzzle hash.
"""
cursor = await self.db_connection.execute(
"SELECT * FROM derivation_paths WHERE puzzle_hash=?;",
(puzzle_hash,),
)
row = await cursor.fetchone()
await cursor.close()
if row is not None and row[0] is not None:
return DerivationRecord(
uint32(row[0]),
bytes32.fromhex(row[2]),
G1Element.from_bytes(bytes.fromhex(row[1])),
WalletType(row[3]),
uint32(row[4]),
)
return None
async def set_used_up_to(self, index: uint32, in_transaction=False) -> None:
"""
        Marks all derivation paths up to and including the given index as used so they are not reused.
"""
if not in_transaction:
await self.db_wrapper.lock.acquire()
try:
cursor = await self.db_connection.execute(
"UPDATE derivation_paths SET used=1 WHERE derivation_index<=?",
(index,),
)
await cursor.close()
finally:
if not in_transaction:
await self.db_connection.commit()
self.db_wrapper.lock.release()
async def puzzle_hash_exists(self, puzzle_hash: bytes32) -> bool:
"""
Checks if passed puzzle_hash is present in the db.
"""
cursor = await self.db_connection.execute(
"SELECT * from derivation_paths WHERE puzzle_hash=?", (puzzle_hash.hex(),)
)
row = await cursor.fetchone()
await cursor.close()
return row is not None
async def one_of_puzzle_hashes_exists(self, puzzle_hashes: List[bytes32]) -> bool:
"""
Checks if one of the passed puzzle_hashes is present in the db.
"""
if len(puzzle_hashes) < 1:
return False
for ph in puzzle_hashes:
if ph in self.all_puzzle_hashes:
return True
return False
async def index_for_pubkey(self, pubkey: G1Element) -> Optional[uint32]:
"""
        Returns the derivation index for the given pubkey.
Returns None if not present.
"""
cursor = await self.db_connection.execute(
"SELECT * from derivation_paths WHERE pubkey=?", (bytes(pubkey).hex(),)
)
row = await cursor.fetchone()
await cursor.close()
if row is not None:
return uint32(row[0])
return None
async def index_for_puzzle_hash(self, puzzle_hash: bytes32) -> Optional[uint32]:
"""
Returns the derivation path for the puzzle_hash.
Returns None if not present.
"""
cursor = await self.db_connection.execute(
"SELECT * from derivation_paths WHERE puzzle_hash=?", (puzzle_hash.hex(),)
)
row = await cursor.fetchone()
await cursor.close()
if row is not None:
return uint32(row[0])
return None
async def index_for_puzzle_hash_and_wallet(self, puzzle_hash: bytes32, wallet_id: uint32) -> Optional[uint32]:
"""
Returns the derivation path for the puzzle_hash.
Returns None if not present.
"""
cursor = await self.db_connection.execute(
"SELECT * from derivation_paths WHERE puzzle_hash=? and wallet_id=?;",
(
puzzle_hash.hex(),
wallet_id,
),
)
row = await cursor.fetchone()
await cursor.close()
if row is not None:
return uint32(row[0])
return None
async def wallet_info_for_puzzle_hash(self, puzzle_hash: bytes32) -> Optional[Tuple[uint32, WalletType]]:
"""
        Returns the wallet id and wallet type for the given puzzle_hash.
Returns None if not present.
"""
cursor = await self.db_connection.execute(
"SELECT * from derivation_paths WHERE puzzle_hash=?", (puzzle_hash.hex(),)
)
row = await cursor.fetchone()
await cursor.close()
if row is not None:
return row[4], WalletType(row[3])
return None
async def get_all_puzzle_hashes(self) -> Set[bytes32]:
"""
Return a set containing all puzzle_hashes we generated.
"""
cursor = await self.db_connection.execute("SELECT * from derivation_paths")
rows = await cursor.fetchall()
await cursor.close()
result: Set[bytes32] = set()
for row in rows:
result.add(bytes32(bytes.fromhex(row[2])))
return result
async def get_last_derivation_path(self) -> Optional[uint32]:
"""
Returns the last derivation path by derivation_index.
"""
cursor = await self.db_connection.execute("SELECT MAX(derivation_index) FROM derivation_paths;")
row = await cursor.fetchone()
await cursor.close()
if row is not None and row[0] is not None:
return uint32(row[0])
return None
async def get_last_derivation_path_for_wallet(self, wallet_id: int) -> Optional[uint32]:
"""
Returns the last derivation path by derivation_index.
"""
cursor = await self.db_connection.execute(
f"SELECT MAX(derivation_index) FROM derivation_paths WHERE wallet_id={wallet_id};"
)
row = await cursor.fetchone()
await cursor.close()
if row is not None and row[0] is not None:
return uint32(row[0])
return None
async def get_current_derivation_record_for_wallet(self, wallet_id: uint32) -> Optional[DerivationRecord]:
"""
Returns the current derivation record by derivation_index.
"""
cursor = await self.db_connection.execute(
f"SELECT MAX(derivation_index) FROM derivation_paths WHERE wallet_id={wallet_id} and used=1;"
)
row = await cursor.fetchone()
await cursor.close()
if row is not None and row[0] is not None:
index = uint32(row[0])
return await self.get_derivation_record(index, wallet_id)
return None
async def get_unused_derivation_path(self) -> Optional[uint32]:
"""
Returns the first unused derivation path by derivation_index.
"""
cursor = await self.db_connection.execute("SELECT MIN(derivation_index) FROM derivation_paths WHERE used=0;")
row = await cursor.fetchone()
await cursor.close()
if row is not None and row[0] is not None:
return uint32(row[0])
return None
| 33.074286
| 117
| 0.600726
|
377cccfdc389d44c13dd2e679a358b2c6004b336
| 1,781
|
py
|
Python
|
samples/generated_samples/aiplatform_generated_aiplatform_v1_dataset_service_import_data_sync.py
|
sakagarwal/python-aiplatform
|
62b4a1ea589235910c6e87f027899a29bf1bacb1
|
[
"Apache-2.0"
] | 1
|
2022-03-30T05:23:29.000Z
|
2022-03-30T05:23:29.000Z
|
samples/generated_samples/aiplatform_generated_aiplatform_v1_dataset_service_import_data_sync.py
|
sakagarwal/python-aiplatform
|
62b4a1ea589235910c6e87f027899a29bf1bacb1
|
[
"Apache-2.0"
] | null | null | null |
samples/generated_samples/aiplatform_generated_aiplatform_v1_dataset_service_import_data_sync.py
|
sakagarwal/python-aiplatform
|
62b4a1ea589235910c6e87f027899a29bf1bacb1
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for ImportData
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-aiplatform
# [START aiplatform_generated_aiplatform_v1_DatasetService_ImportData_sync]
from google.cloud import aiplatform_v1
def sample_import_data():
# Create a client
client = aiplatform_v1.DatasetServiceClient()
# Initialize request argument(s)
import_configs = aiplatform_v1.ImportDataConfig()
import_configs.gcs_source.uris = ['uris_value_1', 'uris_value_2']
import_configs.import_schema_uri = "import_schema_uri_value"
request = aiplatform_v1.ImportDataRequest(
name="name_value",
import_configs=import_configs,
)
# Make the request
operation = client.import_data(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
# [END aiplatform_generated_aiplatform_v1_DatasetService_ImportData_sync]
| 32.381818
| 85
| 0.758563
|
70008805d3ac2bacc70e355e34e30394cbeaf742
| 12,417
|
py
|
Python
|
eve/utils.py
|
rs/eve
|
32fd02f1cea6d1db9192fac8dcf1a38681c0307b
|
[
"BSD-3-Clause"
] | 1
|
2017-10-31T17:36:58.000Z
|
2017-10-31T17:36:58.000Z
|
eve/utils.py
|
rs/eve
|
32fd02f1cea6d1db9192fac8dcf1a38681c0307b
|
[
"BSD-3-Clause"
] | null | null | null |
eve/utils.py
|
rs/eve
|
32fd02f1cea6d1db9192fac8dcf1a38681c0307b
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
eve.utils
~~~~~~~~~
Utility functions and classes.
:copyright: (c) 2014 by Nicola Iarocci.
:license: BSD, see LICENSE for more details.
"""
import eve
import hashlib
import werkzeug.exceptions
from flask import request
from flask import current_app as app
from datetime import datetime, timedelta
from bson.json_util import dumps
from eve import RFC1123_DATE_FORMAT
class Config(object):
""" Helper class used trorough the code to access configuration settings.
If the main flaskapp object is not instantiated yet, returns the default
setting in the eve __init__.py module, otherwise returns the flaskapp
config value (which value might override the static defaults).
"""
def __getattr__(self, name):
try:
# will return 'working outside of application context' if the
# current_app is not available yet
return app.config.get(name)
except:
# fallback to the module-level default value
return getattr(eve, name)
# makes an instance of the Config helper class available to all the modules
# importing eve.utils.
config = Config()
class ParsedRequest(object):
""" This class, by means of its attributes, describes a client request.
    .. versionchanged:: 0.5
'args' keyword.
    .. versionchanged:: 0.1.0
'embedded' keyword.
.. versionchanged:: 0.0.6
Projection queries ('?projection={"name": 1}')
"""
# `where` value of the query string (?where). Defaults to None.
where = None
# `projection` value of the query string (?projection). Defaults to None.
projection = None
# `sort` value of the query string (?sort). Defaults to None.
sort = None
# `page` value of the query string (?page). Defaults to 1.
page = 1
# `max_result` value of the query string (?max_results). Defaults to
# `PAGINATION_DEFAULT` unless pagination is disabled.
max_results = 0
# `If-Modified-Since` request header value. Defaults to None.
if_modified_since = None
    # `If-None-Match` request header value. Defaults to None.
if_none_match = None
    # `If-Match` request header value. Defaults to None.
if_match = None
# `embedded` value of the query string (?embedded). Defaults to None.
embedded = None
# `args` value of the original request. Defaults to None.
args = None
def parse_request(resource):
""" Parses a client request, returning instance of :class:`ParsedRequest`
containing relevant request data.
:param resource: the resource currently being accessed by the client.
.. versionchanged:: 0.5
Support for custom query parameters via configuration settings.
Minor DRY updates.
    .. versionchanged:: 0.1.0
Support for embedded documents.
.. versionchanged:: 0.0.6
projection queries ('?projection={"name": 1}')
    .. versionchanged:: 0.0.5
Support for optional filters, sorting and pagination.
"""
args = request.args
headers = request.headers
r = ParsedRequest()
r.args = args
settings = config.DOMAIN[resource]
if settings['allowed_filters']:
r.where = args.get(config.QUERY_WHERE)
if settings['projection']:
r.projection = args.get(config.QUERY_PROJECTION)
if settings['sorting']:
r.sort = args.get(config.QUERY_SORT)
if settings['embedding']:
r.embedded = args.get(config.QUERY_EMBEDDED)
max_results_default = config.PAGINATION_DEFAULT if \
settings['pagination'] else 0
try:
r.max_results = int(float(args[config.QUERY_MAX_RESULTS]))
assert r.max_results > 0
except (ValueError, werkzeug.exceptions.BadRequestKeyError,
AssertionError):
r.max_results = max_results_default
if settings['pagination']:
# TODO should probably return a 400 if 'page' is < 1 or non-numeric
if config.QUERY_PAGE in args:
try:
r.page = abs(int(args.get(config.QUERY_PAGE))) or 1
except ValueError:
pass
# TODO should probably return a 400 if 'max_results' < 1 or
# non-numeric
if r.max_results > config.PAGINATION_LIMIT:
r.max_results = config.PAGINATION_LIMIT
if headers:
r.if_modified_since = weak_date(headers.get('If-Modified-Since'))
# TODO if_none_match and if_match should probably be validated as
# valid etags, returning 400 on fail. Not sure however since
        # we're just going to use these for string-type comparison
r.if_none_match = headers.get('If-None-Match')
r.if_match = headers.get('If-Match')
return r
def weak_date(date):
""" Returns a RFC-1123 string corresponding to a datetime value plus
a 1 second timedelta. This is needed because when saved, documents
LAST_UPDATED values have higher resolution than If-Modified-Since's, which
is limited to seconds.
:param date: the date to be adjusted.
"""
return datetime.strptime(date, RFC1123_DATE_FORMAT) + \
timedelta(seconds=1) if date else None
def str_to_date(string):
""" Converts a date string formatted as defined in the configuration
to the corresponding datetime value.
:param string: the RFC-1123 string to convert to datetime value.
"""
return datetime.strptime(string, config.DATE_FORMAT) if string else None
def date_to_str(date):
""" Converts a datetime value to the format defined in the configuration file.
:param date: the datetime value to convert.
"""
return datetime.strftime(date, config.DATE_FORMAT) if date else None
def date_to_rfc1123(date):
""" Converts a datetime value to the corresponding RFC-1123 string.
:param date: the datetime value to convert.
"""
return datetime.strftime(date, RFC1123_DATE_FORMAT) if date else None
def home_link():
""" Returns a link to the API entry point/home page.
.. versionchanged:: 0.5
Link is relative to API root.
.. versionchanged:: 0.0.3
Now returning a JSON link.
"""
return {'title': 'home', 'href': '/'}
def api_prefix(url_prefix=None, api_version=None):
""" Returns the prefix to API endpoints, according to the URL_PREFIX and
API_VERSION configuration settings.
:param url_prefix: the prefix string. If `None`, defaults to the current
:class:`~eve.flaskapp` configuration setting.
The class itself will call this function while
initializing. In that case, it will pass its settings
as arguments (as they are not externally available yet)
:param api_version: the api version string. If `None`, defaults to the
current :class:`~eve.flaskapp` configuration setting.
The class itself will call this function while
initializing. In that case, it will pass its settings
as arguments (as they are not externally available yet)
.. versionadded:: 0.0.3
"""
if url_prefix is None:
url_prefix = config.URL_PREFIX
if api_version is None:
api_version = config.API_VERSION
prefix = '/%s' % url_prefix if url_prefix else ''
version = '/%s' % api_version if api_version else ''
return prefix + version
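# A small usage sketch (illustrative values):
#
#     api_prefix('api', 'v1')   # -> '/api/v1'
#     api_prefix()              # falls back to the URL_PREFIX / API_VERSION settings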
def querydef(max_results=config.PAGINATION_DEFAULT, where=None, sort=None,
version=None, page=None):
""" Returns a valid query string.
:param max_results: `max_result` part of the query string. Defaults to
`PAGINATION_DEFAULT`
:param where: `where` part of the query string. Defaults to None.
:param sort: `sort` part of the query string. Defaults to None.
    :param version: `version` part of the query string. Defaults to None.
:param page: `page` part of the query string. Defaults to None.
.. versionchanged:: 0.5
Support for customizable query parameters.
Add version to query string (#475).
"""
where_part = '&%s=%s' % (config.QUERY_WHERE, where) if where else ''
sort_part = '&%s=%s' % (config.QUERY_SORT, sort) if sort else ''
page_part = '&%s=%s' % (config.QUERY_PAGE, page) if page and page > 1 \
else ''
version_part = '&%s=%s' % (config.VERSION_PARAM, version) if version \
else ''
max_results_part = '%s=%s' % (config.QUERY_MAX_RESULTS, max_results) \
if max_results != config.PAGINATION_DEFAULT else ''
# remove sort set by Eve if version is set
if version and sort is not None:
sort_part = '&%s=%s' % (config.QUERY_SORT, sort) \
if sort != '[("%s", 1)]' % config.VERSION else ''
return ('?' + ''.join([max_results_part, where_part, sort_part,
version_part, page_part]).lstrip('&')).rstrip('?')
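# A small usage sketch, assuming Eve's default QUERY_* parameter names and a
# PAGINATION_DEFAULT of 25 (illustrative values, not taken from the original module):
#
#     querydef(50, where='{"lastname": "Doe"}')
#     # -> '?max_results=50&where={"lastname": "Doe"}'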
def document_etag(value):
""" Computes and returns a valid ETag for the input value.
:param value: the value to compute the ETag with.
.. versionchanged:: 0.0.4
Using bson.json_util.dumps over str(value) to make etag computation
consistent between different runs and/or server instances (#16).
"""
h = hashlib.sha1()
h.update(dumps(value, sort_keys=True).encode('utf-8'))
return h.hexdigest()
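# For example (hypothetical document), the ETag is the hex SHA-1 digest of the
# canonical JSON dump:
#
#     etag = document_etag({'firstname': 'John'})   # 40-character hex string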
def extract_key_values(key, d):
""" Extracts all values that match a key, even in nested dicts.
:param key: the lookup key.
:param d: the dict to scan.
.. versionadded: 0.0.7
"""
if key in d:
yield d[key]
for k in d:
if isinstance(d[k], dict):
for j in extract_key_values(key, d[k]):
yield j
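# A small usage sketch (hypothetical data):
#
#     list(extract_key_values('price', {'price': 1, 'nested': {'price': 2}}))
#     # -> [1, 2]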
def request_method():
""" Returns the proper request method, also taking into account the
    possible override requested by the client (via 'X-HTTP-Method-Override'
header).
.. versionchanged: 0.1.0
Supports overriding of any HTTP Method (#95).
.. versionadded: 0.0.7
"""
return request.headers.get('X-HTTP-Method-Override', request.method)
def debug_error_message(msg):
""" Returns the error message `msg` if config.DEBUG is True
otherwise returns `None` which will cause Werkzeug to provide
a generic error message
:param msg: The error message to return if config.DEBUG is True
.. versionadded: 0.0.9
"""
if getattr(config, 'DEBUG', False):
return msg
return None
def validate_filters(where, resource):
""" Report any filter which is not allowed by `allowed_filters`
:param where: the where clause, as a dict.
:param resource: the resource being inspected.
.. versionchanged: 0.5
If the data layer supports a list of allowed operators, take them
into consideration when validating the query string (#388).
Recursively validate the whole query string.
.. versionadded: 0.0.9
"""
operators = getattr(app.data, 'operators', set())
allowed = config.DOMAIN[resource]['allowed_filters'] + list(operators)
def validate_filter(filters):
r = None
for d in filters:
for key, value in d.items():
if key not in allowed:
return "filter on '%s' not allowed" % key
if isinstance(value, dict):
r = validate_filter([value])
elif isinstance(value, list):
r = validate_filter(value)
if r:
break
if r:
break
return r
return validate_filter([where]) if '*' not in allowed else None
def auto_fields(resource):
""" Returns a list of automatically handled fields for a resource.
:param resource: the resource currently being accessed by the client.
.. versionchanged: 0.5
ETAG is now a preserved meta data (#369).
.. versionadded:: 0.4
"""
# preserved meta data
fields = [config.ID_FIELD, config.LAST_UPDATED, config.DATE_CREATED,
config.ETAG]
# on-the-fly meta data (not in data store)
fields += [config.ISSUES, config.STATUS, config.LINKS]
if config.DOMAIN[resource]['versioning'] is True:
fields.append(config.VERSION)
fields.append(config.LATEST_VERSION) # on-the-fly meta data
fields.append(config.ID_FIELD + config.VERSION_ID_SUFFIX)
return fields
| 32.420366
| 82
| 0.646372
|
0869285fe228a715d39728b6e6ddb54ca171517d
| 9,177
|
py
|
Python
|
src/io_utils/read/geo_ts_readers/mixins.py
|
wpreimes/io_utils
|
9ef4161a5bc65ab2fabee0e2c7cf873f19cf7a17
|
[
"MIT"
] | null | null | null |
src/io_utils/read/geo_ts_readers/mixins.py
|
wpreimes/io_utils
|
9ef4161a5bc65ab2fabee0e2c7cf873f19cf7a17
|
[
"MIT"
] | 4
|
2021-07-09T09:03:12.000Z
|
2021-12-20T17:24:32.000Z
|
src/io_utils/read/geo_ts_readers/mixins.py
|
wpreimes/io_utils
|
9ef4161a5bc65ab2fabee0e2c7cf873f19cf7a17
|
[
"MIT"
] | null | null | null |
import warnings
import os
import numpy as np
import pandas as pd
from datetime import datetime, timedelta
import netCDF4 as nc
from pygeogrids.grids import CellGrid
import xarray as xr
class ContiguousRaggedTsCellReaderMixin:
"""
Adds functionality to read whole cells. Can be added to time series
readers that use the cell file structure of pynetcf.
"""
path: str
grid: CellGrid
def read_cell_file(self, cell, param='sm'):
"""
Reads a single variable for all points of a cell.
Parameters
----------
cell: int
Cell number, will look for a file <cell>.nc that must exist.
The file must contain a variable `location_id` and `time`.
Time must have an attribute of form '<unit> since <refdate>'
param: str, optional (default: 'sm')
Variable to extract from files
Returns
-------
df: pd.DataFrame
A data frame holding all data for the cell.
"""
try:
fnformat = getattr(self, 'fn_format') + '.nc'
except AttributeError:
fnformat = "{:04d}.nc"
file_path = os.path.join(self.path, fnformat.format(cell))
with nc.Dataset(file_path) as ncfile:
loc_id = ncfile.variables['location_id'][:]
loc_id = loc_id[~loc_id.mask].data.flatten()
row_size = ncfile.variables['row_size'][:]
row_size = row_size[~row_size.mask].data
time = ncfile.variables['time'][:].data
unit_time = ncfile.variables['time'].units
variable = ncfile.variables[param][:].filled(np.nan)
cutoff_points = np.cumsum(row_size)
index = np.sort(np.unique(time))
times = np.split(time, cutoff_points)[:-1]
datas = np.split(variable, cutoff_points)[:-1]
assert len(times) == len(datas)
filled = np.full((len(datas), len(index)), fill_value=np.nan)
idx = np.array([np.isin(index, t) for t in times])
filled[idx] = variable
delta = lambda t: timedelta(t)
vfunc = np.vectorize(delta)
since = pd.Timestamp(unit_time.split('since ')[1])
index = since + vfunc(index)
filled = np.transpose(np.array(filled))
data = pd.DataFrame(index=index, data=filled, columns=loc_id)
if hasattr(self, 'clip_dates') and self.clip_dates:
if hasattr(self, '_clip_dates'):
data = self._clip_dates(data)
else:
warnings.warn("No method `_clip_dates` found.")
return data
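# A minimal usage sketch for the contiguous-ragged cell reader (the concrete
# `reader` construction is an assumption; it depends on the subclass providing
# `path`, `grid` and the optional `fn_format` / `clip_dates` attributes):
#
#     df = reader.read_cell_file(2244, param='sm')
#     # -> DataFrame indexed by timestamp, one column per location_id in the cell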
class OrthoMultiTsCellReaderMixin:
"""
Adds functionality to read whole cells. Can be added to time series
readers that use the cell file structure of pynetcf.
"""
path: str
grid: CellGrid
def read_cell_file(self, cell, param='sm'):
"""
Reads a single variable for all points of a cell.
Parameters
----------
cell: int
Cell number, will look for a file <cell>.nc that must exist.
The file must contain a variable `location_id` and `time`.
Time must have an attribute of form '<unit> since <refdate>'
param: str, optional (default: 'sm')
Variable to extract from files
Returns
-------
df: pd.DataFrame
A data frame holding all data for the cell.
"""
try:
fnformat = getattr(self, 'fn_format') + '.nc'
except AttributeError:
fnformat = "{:04d}.nc"
file_path = os.path.join(self.path, fnformat.format(cell))
with nc.Dataset(file_path) as ncfile:
loc_id = ncfile.variables['location_id'][:]
time = ncfile.variables['time'][:]
unit_time = ncfile.variables['time'].units
delta = lambda t: timedelta(t)
vfunc = np.vectorize(delta)
since = pd.Timestamp(unit_time.split('since ')[1])
time = since + vfunc(time)
variable = ncfile.variables[param][:]
variable = np.transpose(variable)
data = pd.DataFrame(variable, columns=loc_id, index=time)
if hasattr(self, 'clip_dates') and self.clip_dates:
if hasattr(self, '_clip_dates'):
data = self._clip_dates(data)
else:
warnings.warn("No method `_clip_dates` found.")
return data
def read_cells(self, cells, param=None):
"""
Read all data for one or multiple cells as a data frame.
Can read multiple parameters at once, and will return a dataframe
with a MultiIndex as columns.
This will iterate over each point in each cell. So it is much slower
than the `read_cell_file` function!
Parameters:
-----------
cells: int or np.ndarray
a list of cells to read.
        param: list or str, optional (default: None)
Parameter(s) to read from the file.
If None are passed, all are selected.
"""
cell_data = []
gpis, lons, lats = self.grid.grid_points_for_cell(list(cells))
for gpi, lon, lat in zip(gpis, lons, lats):
df = self.read(lon, lat)
if param is not None:
df = df[np.atleast_1d(param)]
df.columns = pd.MultiIndex.from_tuples((gpi, c) for c in df.columns)
if not df.empty:
cell_data.append(df)
if len(cell_data) == 0:
return pd.DataFrame()
else:
axis = 0
# if hasattr(self, 'exact_index') and self.exact_index:
# axis = 1
return pd.concat(cell_data, axis=axis)
def read_agg_cell_data(self, cell, param, format='pd_multidx_df',
drop_coord_vars=True, to_replace=None,
**kwargs) -> dict or pd.DataFrame:
"""
Read all time series for a single variable in the selected cell.
Parameters
----------
cell: int
Cell number as in the c3s grid
param: list or str
Name of the variable(s) to read.
format : str, optional (default: 'multiindex')
* pd_multidx_df (default):
Returns one data frame with gpi as first, and time as
second index level.
* gpidict : Returns a dictionary of dataframes, with gpis as keys
and time series data as values.
* var_np_arrays : Returns 2d arrays for each variable and a variable
'index' with time stamps.
to_replace : dict of dicts, optional (default: None)
Dict for parameters of values to replace.
e.g. {'sm': {-999999.0: -9999}}
see pandas.to_replace()
        Additional kwargs are given to xarray to open the dataset.
Returns
-------
data : dict or pd.DataFrame
A DataFrame if a single variable was passed, otherwise
a dict of DataFrames with parameter name as key.
"""
if hasattr(self, 'exact_index') and self.exact_index:
warnings.warn("Reading cell with exact index not yet supported. "
"Use read_cells()")
try:
fnformat = getattr(self, 'fn_format') + '.nc'
except AttributeError:
fnformat = "{:04d}.nc"
file_path = os.path.join(self.path, fnformat.format(cell))
params = np.atleast_1d(param)
if 'location_id' not in params:
params = np.append(params, 'location_id')
with xr.open_dataset(file_path, **kwargs) as ds:
gpis = ds['location_id'].values
mask = (gpis >= 0)
gpis = gpis[mask]
df = ds[params].to_dataframe(dim_order=['locations', 'time'])
df = df.loc[ds['locations'].values[mask], :]
df.rename(columns={'location_id': 'gpi'}, inplace=True)
if drop_coord_vars:
df.drop(columns=['alt', 'lon', 'lat'], inplace=True)
if hasattr(self, 'clip_dates') and self.clip_dates:
if hasattr(self, '_clip_dates'):
df = self._clip_dates(df)
else:
warnings.warn("No method `_clip_dates` found.")
if to_replace is not None:
df = df.replace(to_replace=to_replace)
if format.lower() == 'pd_multidx_df':
index = df.index.set_levels(gpis, level=0) \
.set_names('gpi', level=0)
data = df.set_index(index)
elif format.lower() == 'gpidict':
df = df.set_index(df.index.droplevel(0))
data = dict(tuple(df.groupby(df.pop('gpi'))))
elif format.lower() == 'var_np_arrays':
df = df.set_index(df.index.droplevel(0))
index = df.index.unique()
data = {'index': index}
for col in df.columns:
if col == 'gpi': continue
data[col] = df.groupby('gpi')[col].apply(np.array)
else:
raise NotImplementedError
return data
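# A usage sketch for the aggregate cell reader (cell number and variable name are
# hypothetical):
#
#     df = reader.read_agg_cell_data(2244, 'sm', format='pd_multidx_df')
#     per_gpi = reader.read_agg_cell_data(2244, 'sm', format='gpidict')
#     arrays = reader.read_agg_cell_data(2244, 'sm', format='var_np_arrays')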
| 34.115242
| 80
| 0.562275
|
4c8233ac9d0f24c1922c19bdb6400df65c615035
| 62,527
|
py
|
Python
|
code/vox_fluoro/history/vox_fluoro_norm_nadam_elu_act_final/vox_fluoro_norm_nadam_elu_act_final.py
|
john-drago/fluoro
|
b757af60940c4395101a39a15f3ac4213f40fdce
|
[
"MIT"
] | 4
|
2021-03-01T15:34:37.000Z
|
2021-06-03T08:56:39.000Z
|
code/vox_fluoro/history/vox_fluoro_norm_nadam_elu_act_final/vox_fluoro_norm_nadam_elu_act_final.py
|
john-drago/fluoro
|
b757af60940c4395101a39a15f3ac4213f40fdce
|
[
"MIT"
] | null | null | null |
code/vox_fluoro/history/vox_fluoro_norm_nadam_elu_act_final/vox_fluoro_norm_nadam_elu_act_final.py
|
john-drago/fluoro
|
b757af60940c4395101a39a15f3ac4213f40fdce
|
[
"MIT"
] | null | null | null |
import numpy as np
import h5py
import tensorflow as tf
# import keras
import os
import sys
import pickle
# 2019-09-18
# We are continuing the usage of the architecture based on the residual nets
# In this file, we are going to normalize the calibration inputs from -1 to 1.
# We likewise are going to normalize the label dataset based on the training and validation datasets. We are going to normalize for each instance over all of the instances.
# We are going to also do per image normalization between -1 and 1.
# Similar to the previous file, but we are going to try the final layer with an 'elu' activation function, instead of the linear activation function.
# Also going to use MAE for the loss.
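# A minimal sketch of the per-image [-1, 1] normalization described above. This is
# an assumption about the preprocessing pipeline; the actual normalization code is
# not part of this file.
def _normalize_to_unit_range(arr):
    # Linearly rescale an array (e.g. a fluoro image or the calibration vector)
    # so that its values span the range [-1, 1].
    arr = arr.astype('float32')
    arr_min, arr_max = arr.min(), arr.max()
    if arr_max > arr_min:
        return 2.0 * (arr - arr_min) / (arr_max - arr_min) - 1.0
    return np.zeros_like(arr)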
expr_name = sys.argv[0][:-3]
expr_no = '1'
save_dir = os.path.abspath(os.path.join(os.path.expanduser('~/fluoro/code/jupyt/vox_fluoro'), expr_name))
os.makedirs(save_dir, exist_ok=True)
# -----------------------------------------------------------------
def cust_mean_squared_error_var(y_true, y_pred):
base_dir = os.path.expanduser('~/fluoro/data/compilation')
stats_file = h5py.File(os.path.join(base_dir, 'labels_stats.h5py'), 'r')
# mean_dset = stats_file['mean']
# std_dset = stats_file['std']
var_dset = stats_file['var']
# mean_v = mean_dset[:]
# std_v = std_dset[:]
var_v = var_dset[:]
stats_file.close()
return tf.keras.backend.mean(tf.keras.backend.square(y_pred - y_true) / var_v)
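# Usage note (an assumption about downstream wiring, not shown in this excerpt): a
# custom loss like this can be handed to Keras directly, e.g.
#
#     model.compile(optimizer='nadam', loss=cust_mean_squared_error_var, metrics=['mae'])
#
# whereas this run configures the plain 'mae' loss via params['model_loss'] below.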
# -----------------------------------------------------------------
params = {
# ---
# 3D CONV
# ---
# Entry Layers
'v_intra_act_fn': None,
'v_res_act_fn': 'elu',
'v_conv_0_filters': 30,
'v_conv_0_kernel': 9,
'v_conv_0_strides_0': 2,
'v_conv_0_strides_1': 2,
'v_conv_0_strides_2': 2,
'v_conv_0_pad': 'same',
'v_spatial_drop_rate_0': 0.4,
'v_conv_1_filters': 30,
'v_conv_1_kernel': 7,
'v_conv_1_strides_0': 2,
'v_conv_1_strides_1': 2,
'v_conv_1_strides_2': 3,
'v_conv_1_pad': 'same',
# ---
# Pool After Initial Layers
'v_pool_0_size': 2,
'v_pool_0_pad': 'same',
# ---
# Second Run of Entry Layers
'v_conv_2_filters': 30,
'v_conv_2_kernel': 5,
'v_conv_2_strides_0': 2,
'v_conv_2_strides_1': 2,
'v_conv_2_strides_2': 2,
'v_conv_2_pad': 'same',
# ---
# Run of Residual Layers
# 1
'v_conv_3_filters': 30,
'v_conv_3_kernel': 3,
'v_conv_3_strides_0': 1,
'v_conv_3_strides_1': 1,
'v_conv_3_strides_2': 1,
'v_conv_3_pad': 'same',
'v_spatial_drop_rate_2': 0.4,
'v_conv_4_filters': 30,
'v_conv_4_kernel': 3,
'v_conv_4_strides_0': 1,
'v_conv_4_strides_1': 1,
'v_conv_4_strides_2': 1,
'v_conv_4_pad': 'same',
# 2
'v_conv_5_filters': 30,
'v_conv_5_kernel': 3,
'v_conv_5_strides_0': 1,
'v_conv_5_strides_1': 1,
'v_conv_5_strides_2': 1,
'v_conv_5_pad': 'same',
'v_spatial_drop_rate_3': 0.4,
'v_conv_6_filters': 30,
'v_conv_6_kernel': 3,
'v_conv_6_strides_0': 1,
'v_conv_6_strides_1': 1,
'v_conv_6_strides_2': 1,
'v_conv_6_pad': 'same',
# 3
'v_conv_7_filters': 30,
'v_conv_7_kernel': 3,
'v_conv_7_strides_0': 1,
'v_conv_7_strides_1': 1,
'v_conv_7_strides_2': 1,
'v_conv_7_pad': 'same',
'v_spatial_drop_rate_4': 0.4,
'v_conv_8_filters': 30,
'v_conv_8_kernel': 3,
'v_conv_8_strides_0': 1,
'v_conv_8_strides_1': 1,
'v_conv_8_strides_2': 1,
'v_conv_8_pad': 'same',
# 4
'v_conv_9_filters': 40,
'v_conv_9_kernel': 3,
'v_conv_9_strides_0': 2,
'v_conv_9_strides_1': 2,
'v_conv_9_strides_2': 2,
'v_conv_9_pad': 'same',
'v_spatial_drop_rate_5': 0.,
'v_conv_10_filters': 40,
'v_conv_10_kernel': 3,
'v_conv_10_strides_0': 1,
'v_conv_10_strides_1': 1,
'v_conv_10_strides_2': 1,
'v_conv_10_pad': 'same',
'v_conv_11_filters': 40,
'v_conv_11_kernel': 3,
'v_conv_11_strides_0': 2,
'v_conv_11_strides_1': 2,
'v_conv_11_strides_2': 2,
'v_conv_11_pad': 'same',
# 5
'v_conv_12_filters': 50,
'v_conv_12_kernel': 2,
'v_conv_12_strides_0': 2,
'v_conv_12_strides_1': 2,
'v_conv_12_strides_2': 2,
'v_conv_12_pad': 'same',
'v_spatial_drop_rate_6': 0.4,
'v_conv_13_filters': 50,
'v_conv_13_kernel': 2,
'v_conv_13_strides_0': 1,
'v_conv_13_strides_1': 1,
'v_conv_13_strides_2': 1,
'v_conv_13_pad': 'same',
'v_conv_14_filters': 50,
'v_conv_14_kernel': 1,
'v_conv_14_strides_0': 2,
'v_conv_14_strides_1': 2,
'v_conv_14_strides_2': 2,
'v_conv_14_pad': 'same',
# 6
'v_conv_15_filters': 50,
'v_conv_15_kernel': 2,
'v_conv_15_strides_0': 2,
'v_conv_15_strides_1': 2,
'v_conv_15_strides_2': 2,
'v_conv_15_pad': 'same',
'v_spatial_drop_rate_7': 0.4,
'v_conv_16_filters': 50,
'v_conv_16_kernel': 2,
'v_conv_16_strides_0': 1,
'v_conv_16_strides_1': 1,
'v_conv_16_strides_2': 1,
'v_conv_16_pad': 'same',
'v_conv_17_filters': 50,
'v_conv_17_kernel': 1,
'v_conv_17_strides_0': 2,
'v_conv_17_strides_1': 2,
'v_conv_17_strides_2': 2,
'v_conv_17_pad': 'same',
# ---
# Final Convs
'v_spatial_drop_rate_8': 0.4,
'v_conv_18_filters': 50,
'v_conv_18_kernel': 2,
'v_conv_18_strides_0': 1,
'v_conv_18_strides_1': 1,
'v_conv_18_strides_2': 1,
'v_conv_18_pad': 'valid',
'drop_1_v_rate': 0.3,
'dense_1_v_units': 75,
'drop_2_v_rate': 0.3,
'dense_2_v_units': 50,
# ---
# 2D CONV
# ---
'intra_act_fn': None,
'res_act_fn': 'elu',
# Entry Fluoro Layers
'conv_0_filters': 30,
'conv_0_kernel': 5,
'conv_0_strides': 2,
'conv_0_pad': 'same',
'spatial_drop_rate_0': 0.4,
'conv_1_filters': 30,
'conv_1_kernel': 5,
'conv_1_strides': 2,
'conv_1_pad': 'same',
# ---
# Pool After Initial Layers
'pool_0_size': 2,
'pool_0_pad': 'same',
# ---
# Run Of Residual Layers
# 1
'conv_2_filters': 30,
'conv_2_kernel': 3,
'conv_2_strides': 1,
'conv_2_pad': 'same',
'spatial_drop_rate_1': 0.4,
'conv_3_filters': 30,
'conv_3_kernel': 3,
'conv_3_strides': 1,
'conv_3_pad': 'same',
# 2
'conv_4_filters': 30,
'conv_4_kernel': 3,
'conv_4_strides': 1,
'conv_4_pad': 'same',
'spatial_drop_rate_2': 0.4,
'conv_5_filters': 30,
'conv_5_kernel': 3,
'conv_5_strides': 1,
'conv_5_pad': 'same',
# 3
'conv_6_filters': 30,
'conv_6_kernel': 3,
'conv_6_strides': 1,
'conv_6_pad': 'same',
'spatial_drop_rate_3': 0.4,
'conv_7_filters': 30,
'conv_7_kernel': 3,
'conv_7_strides': 1,
'conv_7_pad': 'same',
# 4
'conv_8_filters': 30,
'conv_8_kernel': 3,
'conv_8_strides': 1,
'conv_8_pad': 'same',
'spatial_drop_rate_4': 0.4,
'conv_9_filters': 30,
'conv_9_kernel': 3,
'conv_9_strides': 1,
'conv_9_pad': 'same',
# 5
'conv_10_filters': 30,
'conv_10_kernel': 3,
'conv_10_strides': 1,
'conv_10_pad': 'same',
'spatial_drop_rate_5': 0.4,
'conv_11_filters': 30,
'conv_11_kernel': 3,
'conv_11_strides': 1,
'conv_11_pad': 'same',
# 6
'conv_12_filters': 30,
'conv_12_kernel': 3,
'conv_12_strides': 1,
'conv_12_pad': 'same',
'spatial_drop_rate_6': 0.4,
'conv_13_filters': 30,
'conv_13_kernel': 3,
'conv_13_strides': 1,
'conv_13_pad': 'same',
# ---
# COMB FLUOROS
# ---
# ---
# RES NET AFTER COMB FLUORO
# ---
'c_intra_act_fn': None,
'c_res_act_fn': 'elu',
# 0
'comb_0_filters': 60,
'comb_0_kernel': 3,
'comb_0_strides': 1,
'comb_0_pad': 'same',
'comb_spatial_0': 0.4,
'comb_1_filters': 60,
'comb_1_kernel': 3,
'comb_1_strides': 1,
'comb_1_pad': 'same',
# 1
'comb_2_filters': 60,
'comb_2_kernel': 3,
'comb_2_strides': 1,
'comb_2_pad': 'same',
'comb_spatial_1': 0.4,
'comb_3_filters': 60,
'comb_3_kernel': 3,
'comb_3_strides': 1,
'comb_3_pad': 'same',
# 2
'comb_4_filters': 60,
'comb_4_kernel': 3,
'comb_4_strides': 1,
'comb_4_pad': 'same',
'comb_spatial_2': 0.4,
'comb_5_filters': 60,
'comb_5_kernel': 3,
'comb_5_strides': 1,
'comb_5_pad': 'same',
# 3
'comb_6_filters': 60,
'comb_6_kernel': 3,
'comb_6_strides': 1,
'comb_6_pad': 'same',
'comb_spatial_3': 0.4,
'comb_7_filters': 60,
'comb_7_kernel': 3,
'comb_7_strides': 1,
'comb_7_pad': 'same',
# 4
'comb_8_filters': 60,
'comb_8_kernel': 3,
'comb_8_strides': 1,
'comb_8_pad': 'same',
'comb_spatial_4': 0.4,
'comb_9_filters': 60,
'comb_9_kernel': 3,
'comb_9_strides': 1,
'comb_9_pad': 'same',
# 5
'comb_10_filters': 60,
'comb_10_kernel': 2,
'comb_10_strides': 2,
'comb_10_pad': 'same',
'comb_spatial_5': 0.4,
'comb_11_filters': 60,
'comb_11_kernel': 2,
'comb_11_strides': 1,
'comb_11_pad': 'same',
'comb_12_filters': 60,
'comb_12_kernel': 1,
'comb_12_strides': 2,
'comb_12_pad': 'same',
# 6
'comb_13_filters': 60,
'comb_13_kernel': 2,
'comb_13_strides': 2,
'comb_13_pad': 'same',
'comb_spatial_6': 0.4,
'comb_14_filters': 60,
'comb_14_kernel': 2,
'comb_14_strides': 1,
'comb_14_pad': 'same',
'comb_15_filters': 60,
'comb_15_kernel': 1,
'comb_15_strides': 2,
'comb_15_pad': 'same',
# 7
'comb_16_filters': 60,
'comb_16_kernel': 2,
'comb_16_strides': 2,
'comb_16_pad': 'same',
'comb_spatial_7': 0.4,
'comb_17_filters': 60,
'comb_17_kernel': 2,
'comb_17_strides': 1,
'comb_17_pad': 'same',
'comb_18_filters': 60,
'comb_18_kernel': 1,
'comb_18_strides': 2,
'comb_18_pad': 'same',
# ---
# Final Convs After Fluoro
'comb_19_filters': 60,
'comb_19_kernel': 2,
'comb_19_strides': 1,
'comb_19_pad': 'valid',
# ---
# Dense After Fluoro Convs
'dense_comb_0_units': 50,
'drop_1_comb': 0.3,
'dense_comb_1_units': 50,
# ---
# Activation Function for Fluoro Vox Comb
'flu_vox_act_fn': 'elu',
# ---
# Combine Fluoro and Vox
'vox_flu_units_0': 100,
'vox_flu_drop_1': 0.3,
'vox_flu_units_1': 75,
'vox_flu_drop_2': 0.3,
'vox_flu_units_2': 50,
'vox_flu_drop_3': 0.3,
'vox_flu_units_3': 15,
'vox_flu_units_4': 6,
# ---
# Cali Units
'cali_0_units': 20,
'drop_1_cali': 0.3,
'cali_1_units': 20,
'drop_2_cali': 0.3,
'cali_2_units': 20,
'cali_3_units': 6,
# ---
# Activation Function for Top Level Comb
'top_level_act_fn': 'elu',
'top_level_intra': None,
# ---
# Top Level Dense
'top_drop_0': 0.2,
'top_dense_0': 6,
'top_dense_1': 6,
'top_dense_2': 6,
'top_drop_1': 0.2,
'top_dense_3': 6,
'top_dense_4': 6,
'top_drop_2': 0.2,
'top_dense_5': 6,
'top_dense_6': 6,
'top_drop_3': 0.2,
'top_dense_7': 6,
'top_dense_8': 6,
'top_drop_4': 0.2,
'top_dense_9': 6,
'top_dense_10': 6,
'top_drop_5': 0.2,
'top_dense_11': 6,
'top_dense_12': 6,
# Main Output
'main_output_units': 6,
'main_output_act': 'linear',
# General Housekeeping
'v_conv_regularizer': None,
'conv_regularizer': None,
'dense_regularizer_1': None,
'dense_regularizer_2': None,
# 'v_conv_regularizer': tf.keras.regularizers.l1(1e-7),
# 'conv_regularizer': tf.keras.regularizers.l1(1e-7),
# 'dense_regularizer_1': tf.keras.regularizers.l1(1e-7),
# 'dense_regularizer_2': tf.keras.regularizers.l1(1e-7),
'activation_fn': 'elu',
'kern_init': 'he_uniform',
'model_opt': tf.keras.optimizers.Nadam,
'learning_rate': 0.01,
'model_epochs': 30,
'model_batchsize': 6,
'model_loss': 'mae',
'model_metric': 'mae'
}
# -----------------------------------------------------------------
channel_order = 'channels_last'
img_input_shape = (128, 128, 1)
vox_input_shape = (197, 162, 564, 1)
cali_input_shape = (6,)
# Input Layers
input_vox = tf.keras.Input(shape=vox_input_shape, name='input_vox', dtype='float32')
input_fluoro_1 = tf.keras.Input(shape=img_input_shape, name='input_fluoro_1', dtype='float32')
input_fluoro_2 = tf.keras.Input(shape=img_input_shape, name='input_fluoro_2', dtype='float32')
input_cali = tf.keras.Input(shape=cali_input_shape, name='input_cali', dtype='float32')
# -----------------------------------------------------------------
# VOXEL CONVS
# -----------------------------------------------------------------
# ---
# Entry Layers
v_conv_0 = tf.keras.layers.Conv3D(filters=params['v_conv_0_filters'], kernel_size=params['v_conv_0_kernel'], strides=(params['v_conv_0_strides_0'], params['v_conv_0_strides_1'], params['v_conv_0_strides_2']), padding=params['v_conv_0_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['v_conv_regularizer'])(input_vox)
bn_0 = tf.keras.layers.BatchNormalization()(v_conv_0)
v_spat_0 = tf.keras.layers.SpatialDropout3D(rate=params['v_spatial_drop_rate_0'])(bn_0)
v_conv_1 = tf.keras.layers.Conv3D(filters=params['v_conv_1_filters'], kernel_size=params['v_conv_1_kernel'], strides=(params['v_conv_1_strides_0'], params['v_conv_1_strides_1'], params['v_conv_1_strides_2']), padding=params['v_conv_1_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['v_conv_regularizer'])(v_spat_0)
# ---
# Pool After Initial Layers
v_pool_0 = tf.keras.layers.MaxPooling3D(pool_size=params['v_pool_0_size'], padding=params['v_pool_0_pad'], data_format=channel_order)(v_conv_1)
# ---
# Second Run of Entry Layers
bn_1 = tf.keras.layers.BatchNormalization()(v_pool_0)
v_conv_2 = tf.keras.layers.Conv3D(filters=params['v_conv_2_filters'], kernel_size=params['v_conv_2_kernel'], strides=(params['v_conv_2_strides_0'], params['v_conv_2_strides_1'], params['v_conv_2_strides_2']), padding=params['v_conv_2_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['v_conv_regularizer'])(bn_1)
# ---
# Run of Residual Layers
bn_2 = tf.keras.layers.BatchNormalization()(v_conv_2)
# 1
v_conv_3 = tf.keras.layers.Conv3D(filters=params['v_conv_3_filters'], kernel_size=params['v_conv_3_kernel'], strides=(params['v_conv_3_strides_0'], params['v_conv_3_strides_1'], params['v_conv_3_strides_2']), padding=params['v_conv_3_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['v_conv_regularizer'])(bn_2)
bn_3 = tf.keras.layers.BatchNormalization()(v_conv_3)
v_spat_2 = tf.keras.layers.SpatialDropout3D(rate=params['v_spatial_drop_rate_2'])(bn_3)
v_conv_4 = tf.keras.layers.Conv3D(filters=params['v_conv_4_filters'], kernel_size=params['v_conv_4_kernel'], strides=(params['v_conv_4_strides_0'], params['v_conv_4_strides_1'], params['v_conv_4_strides_2']), padding=params['v_conv_4_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['v_intra_act_fn'])(v_spat_2)
bn_4 = tf.keras.layers.BatchNormalization()(v_conv_4)
v_add_0 = tf.keras.layers.Add()([bn_4, bn_2])
v_act_0 = tf.keras.layers.Activation(activation=params['v_res_act_fn'])(v_add_0)
# 2
v_conv_5 = tf.keras.layers.Conv3D(filters=params['v_conv_5_filters'], kernel_size=params['v_conv_5_kernel'], strides=(params['v_conv_5_strides_0'], params['v_conv_5_strides_1'], params['v_conv_5_strides_2']), padding=params['v_conv_5_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['v_conv_regularizer'])(v_act_0)
bn_5 = tf.keras.layers.BatchNormalization()(v_conv_5)
v_spat_3 = tf.keras.layers.SpatialDropout3D(rate=params['v_spatial_drop_rate_3'])(bn_5)
v_conv_6 = tf.keras.layers.Conv3D(filters=params['v_conv_6_filters'], kernel_size=params['v_conv_6_kernel'], strides=(params['v_conv_6_strides_0'], params['v_conv_6_strides_1'], params['v_conv_6_strides_2']), padding=params['v_conv_6_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['v_intra_act_fn'])(v_spat_3)
bn_6 = tf.keras.layers.BatchNormalization()(v_conv_6)
v_add_1 = tf.keras.layers.Add()([bn_6, v_act_0])
v_act_1 = tf.keras.layers.Activation(activation=params['v_res_act_fn'])(v_add_1)
# 3
v_conv_7 = tf.keras.layers.Conv3D(filters=params['v_conv_7_filters'], kernel_size=params['v_conv_7_kernel'], strides=(params['v_conv_7_strides_0'], params['v_conv_7_strides_1'], params['v_conv_7_strides_2']), padding=params['v_conv_7_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['v_conv_regularizer'])(v_act_1)
bn_7 = tf.keras.layers.BatchNormalization()(v_conv_7)
v_spat_4 = tf.keras.layers.SpatialDropout3D(rate=params['v_spatial_drop_rate_4'])(bn_7)
v_conv_8 = tf.keras.layers.Conv3D(filters=params['v_conv_8_filters'], kernel_size=params['v_conv_8_kernel'], strides=(params['v_conv_8_strides_0'], params['v_conv_8_strides_1'], params['v_conv_8_strides_2']), padding=params['v_conv_8_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['v_intra_act_fn'])(v_spat_4)
bn_8 = tf.keras.layers.BatchNormalization()(v_conv_8)
v_add_2 = tf.keras.layers.Add()([bn_8, v_act_1])
v_act_2 = tf.keras.layers.Activation(activation=params['v_res_act_fn'])(v_add_2)
# 4
v_conv_9 = tf.keras.layers.Conv3D(filters=params['v_conv_9_filters'], kernel_size=params['v_conv_9_kernel'], strides=(params['v_conv_9_strides_0'], params['v_conv_9_strides_1'], params['v_conv_9_strides_2']), padding=params['v_conv_9_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['v_conv_regularizer'])(v_act_2)
bn_9 = tf.keras.layers.BatchNormalization()(v_conv_9)
v_spat_5 = tf.keras.layers.SpatialDropout3D(rate=params['v_spatial_drop_rate_5'])(bn_9)
v_conv_10 = tf.keras.layers.Conv3D(filters=params['v_conv_10_filters'], kernel_size=params['v_conv_10_kernel'], strides=(params['v_conv_10_strides_0'], params['v_conv_10_strides_1'], params['v_conv_10_strides_2']), padding=params['v_conv_10_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['v_intra_act_fn'])(v_spat_5)
bn_10 = tf.keras.layers.BatchNormalization()(v_conv_10)
v_conv_11 = tf.keras.layers.Conv3D(filters=params['v_conv_11_filters'], kernel_size=params['v_conv_11_kernel'], strides=(params['v_conv_11_strides_0'], params['v_conv_11_strides_1'], params['v_conv_11_strides_2']), padding=params['v_conv_11_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['v_intra_act_fn'])(v_act_2)
bn_11 = tf.keras.layers.BatchNormalization()(v_conv_11)
v_add_3 = tf.keras.layers.Add()([bn_10, bn_11])
v_act_3 = tf.keras.layers.Activation(activation=params['v_res_act_fn'])(v_add_3)
# 5
v_conv_12 = tf.keras.layers.Conv3D(filters=params['v_conv_12_filters'], kernel_size=params['v_conv_12_kernel'], strides=(params['v_conv_12_strides_0'], params['v_conv_12_strides_1'], params['v_conv_12_strides_2']), padding=params['v_conv_12_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['v_conv_regularizer'])(v_act_3)
bn_12 = tf.keras.layers.BatchNormalization()(v_conv_12)
v_spat_6 = tf.keras.layers.SpatialDropout3D(rate=params['v_spatial_drop_rate_6'])(bn_12)
v_conv_13 = tf.keras.layers.Conv3D(filters=params['v_conv_13_filters'], kernel_size=params['v_conv_13_kernel'], strides=(params['v_conv_13_strides_0'], params['v_conv_13_strides_1'], params['v_conv_13_strides_2']), padding=params['v_conv_13_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['v_intra_act_fn'])(v_spat_6)
bn_13 = tf.keras.layers.BatchNormalization()(v_conv_13)
v_conv_14 = tf.keras.layers.Conv3D(filters=params['v_conv_14_filters'], kernel_size=params['v_conv_14_kernel'], strides=(params['v_conv_14_strides_0'], params['v_conv_14_strides_1'], params['v_conv_14_strides_2']), padding=params['v_conv_14_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['v_intra_act_fn'])(v_act_3)
bn_14 = tf.keras.layers.BatchNormalization()(v_conv_14)
v_add_4 = tf.keras.layers.Add()([bn_13, bn_14])
v_act_4 = tf.keras.layers.Activation(activation=params['v_res_act_fn'])(v_add_4)
# 6
v_conv_15 = tf.keras.layers.Conv3D(filters=params['v_conv_12_filters'], kernel_size=params['v_conv_12_kernel'], strides=(params['v_conv_12_strides_0'], params['v_conv_12_strides_1'], params['v_conv_12_strides_2']), padding=params['v_conv_12_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['v_conv_regularizer'])(v_act_4)
bn_15 = tf.keras.layers.BatchNormalization()(v_conv_15)
v_spat_7 = tf.keras.layers.SpatialDropout3D(rate=params['v_spatial_drop_rate_6'])(bn_15)
v_conv_16 = tf.keras.layers.Conv3D(filters=params['v_conv_13_filters'], kernel_size=params['v_conv_13_kernel'], strides=(params['v_conv_13_strides_0'], params['v_conv_13_strides_1'], params['v_conv_13_strides_2']), padding=params['v_conv_13_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['v_intra_act_fn'])(v_spat_7)
bn_16 = tf.keras.layers.BatchNormalization()(v_conv_16)
v_conv_17 = tf.keras.layers.Conv3D(filters=params['v_conv_14_filters'], kernel_size=params['v_conv_14_kernel'], strides=(params['v_conv_14_strides_0'], params['v_conv_14_strides_1'], params['v_conv_14_strides_2']), padding=params['v_conv_14_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['v_intra_act_fn'])(v_act_4)
bn_17 = tf.keras.layers.BatchNormalization()(v_conv_17)
v_add_5 = tf.keras.layers.Add()([bn_16, bn_17])
v_act_5 = tf.keras.layers.Activation(activation=params['v_res_act_fn'])(v_add_5)
# ---
# Final Conv Layers
bn_18 = tf.keras.layers.BatchNormalization()(v_act_5)
v_spat_8 = tf.keras.layers.SpatialDropout3D(rate=params['v_spatial_drop_rate_8'])(bn_18)
v_conv_18 = tf.keras.layers.Conv3D(filters=params['v_conv_18_filters'], kernel_size=params['v_conv_18_kernel'], strides=(params['v_conv_18_strides_0'], params['v_conv_18_strides_1'], params['v_conv_18_strides_2']), padding=params['v_conv_18_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['v_conv_regularizer'])(v_spat_8)
# ---
# Dense Layers
v_flatten_0 = tf.keras.layers.Flatten()(v_conv_18)
bn_19 = tf.keras.layers.BatchNormalization()(v_flatten_0)
dense_1_v_drop = tf.keras.layers.Dropout(params['drop_1_v_rate'])(bn_19)
dense_1_v = tf.keras.layers.Dense(units=params['dense_1_v_units'], activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['dense_regularizer_1'])(dense_1_v_drop)
bn_20 = tf.keras.layers.BatchNormalization()(dense_1_v)
dense_2_v_drop = tf.keras.layers.Dropout(params['drop_2_v_rate'])(bn_20)
dense_2_v = tf.keras.layers.Dense(units=params['dense_2_v_units'], activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['dense_regularizer_1'])(dense_2_v_drop)
bn_21_v = tf.keras.layers.BatchNormalization()(dense_2_v)
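# bn_21_v is the flattened voxel-branch embedding; it is merged with the fluoro
# branch embedding (bn_21_f) further below at fluoro_vox_comb.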
# -----------------------------------------------------------------
# FLUORO ANALYSIS 1
# -----------------------------------------------------------------
# ---
# Entry Fluoro Layers
# per_image_stand_1 = tf.keras.layers.Lambda(lambda frame: tf.image.per_image_standardization(frame))(input_fluoro_1)
# bn_0 = tf.keras.layers.BatchNormalization()(per_image_stand_1)
conv_0_1 = tf.keras.layers.Conv2D(filters=params['conv_0_filters'], kernel_size=params['conv_0_kernel'], strides=params['conv_0_strides'], padding=params['conv_0_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(input_fluoro_1)
bn_1 = tf.keras.layers.BatchNormalization()(conv_0_1)
spat_0_1 = tf.keras.layers.SpatialDropout2D(rate=params['spatial_drop_rate_0'])(bn_1)
conv_1_1 = tf.keras.layers.Conv2D(filters=params['conv_1_filters'], kernel_size=params['conv_1_kernel'], strides=params['conv_1_strides'], padding=params['conv_1_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(spat_0_1)
# ---
# Pool After Initial Layers
pool_0_1 = tf.keras.layers.AveragePooling2D(pool_size=params['pool_0_size'], padding=params['pool_0_pad'])(conv_1_1)
# ---
# Run of Residual Layers
bn_2 = tf.keras.layers.BatchNormalization()(pool_0_1)
# 1
conv_2_1 = tf.keras.layers.Conv2D(filters=params['conv_2_filters'], kernel_size=params['conv_2_kernel'], strides=params['conv_2_strides'], padding=params['conv_2_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(bn_2)
bn_3 = tf.keras.layers.BatchNormalization()(conv_2_1)
spat_1_1 = tf.keras.layers.SpatialDropout2D(rate=params['spatial_drop_rate_1'])(bn_3)
conv_3_1 = tf.keras.layers.Conv2D(filters=params['conv_3_filters'], kernel_size=params['conv_3_kernel'], strides=params['conv_3_strides'], padding=params['conv_3_pad'], data_format=channel_order, activation=params['intra_act_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(spat_1_1)
bn_4 = tf.keras.layers.BatchNormalization()(conv_3_1)
add_0 = tf.keras.layers.Add()([bn_4, bn_2])
act_0 = tf.keras.layers.Activation(activation=params['res_act_fn'])(add_0)
# 2
conv_4_1 = tf.keras.layers.Conv2D(filters=params['conv_4_filters'], kernel_size=params['conv_4_kernel'], strides=params['conv_4_strides'], padding=params['conv_4_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(act_0)
bn_5 = tf.keras.layers.BatchNormalization()(conv_4_1)
spat_2_1 = tf.keras.layers.SpatialDropout2D(rate=params['spatial_drop_rate_1'])(bn_5)
conv_5_1 = tf.keras.layers.Conv2D(filters=params['conv_3_filters'], kernel_size=params['conv_3_kernel'], strides=params['conv_3_strides'], padding=params['conv_3_pad'], data_format=channel_order, activation=params['intra_act_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(spat_2_1)
bn_6 = tf.keras.layers.BatchNormalization()(conv_5_1)
add_1 = tf.keras.layers.Add()([act_0, bn_6])
act_1 = tf.keras.layers.Activation(activation=params['res_act_fn'])(add_1)
# 3
conv_6_1 = tf.keras.layers.Conv2D(filters=params['conv_6_filters'], kernel_size=params['conv_6_kernel'], strides=params['conv_6_strides'], padding=params['conv_6_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(act_1)
bn_7 = tf.keras.layers.BatchNormalization()(conv_6_1)
spat_3_1 = tf.keras.layers.SpatialDropout2D(rate=params['spatial_drop_rate_3'])(bn_7)
conv_7_1 = tf.keras.layers.Conv2D(filters=params['conv_7_filters'], kernel_size=params['conv_7_kernel'], strides=params['conv_7_strides'], padding=params['conv_7_pad'], data_format=channel_order, activation=params['intra_act_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(spat_3_1)
bn_8 = tf.keras.layers.BatchNormalization()(conv_7_1)
add_2 = tf.keras.layers.Add()([act_1, bn_8])
act_2 = tf.keras.layers.Activation(activation=params['res_act_fn'])(add_2)
# 4
conv_8_1 = tf.keras.layers.Conv2D(filters=params['conv_8_filters'], kernel_size=params['conv_8_kernel'], strides=params['conv_8_strides'], padding=params['conv_8_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(act_2)
bn_9 = tf.keras.layers.BatchNormalization()(conv_8_1)
spat_4_1 = tf.keras.layers.SpatialDropout2D(rate=params['spatial_drop_rate_4'])(bn_9)
conv_9_1 = tf.keras.layers.Conv2D(filters=params['conv_9_filters'], kernel_size=params['conv_9_kernel'], strides=params['conv_9_strides'], padding=params['conv_9_pad'], data_format=channel_order, activation=params['intra_act_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(spat_4_1)
bn_10 = tf.keras.layers.BatchNormalization()(conv_9_1)
add_3 = tf.keras.layers.Add()([act_2, bn_10])
act_3 = tf.keras.layers.Activation(activation=params['res_act_fn'])(add_3)
# 5
conv_10_1 = tf.keras.layers.Conv2D(filters=params['conv_10_filters'], kernel_size=params['conv_10_kernel'], strides=params['conv_10_strides'], padding=params['conv_10_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(act_3)
bn_11 = tf.keras.layers.BatchNormalization()(conv_10_1)
spat_5_1 = tf.keras.layers.SpatialDropout2D(rate=params['spatial_drop_rate_5'])(bn_11)
conv_11_1 = tf.keras.layers.Conv2D(filters=params['conv_11_filters'], kernel_size=params['conv_11_kernel'], strides=params['conv_11_strides'], padding=params['conv_11_pad'], data_format=channel_order, activation=params['intra_act_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(spat_5_1)
bn_12 = tf.keras.layers.BatchNormalization()(conv_11_1)
add_4 = tf.keras.layers.Add()([act_3, bn_12])
act_4 = tf.keras.layers.Activation(activation=params['res_act_fn'])(add_4)
# 6
conv_12_1 = tf.keras.layers.Conv2D(filters=params['conv_12_filters'], kernel_size=params['conv_12_kernel'], strides=params['conv_12_strides'], padding=params['conv_12_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(act_4)
bn_13 = tf.keras.layers.BatchNormalization()(conv_12_1)
spat_6_1 = tf.keras.layers.SpatialDropout2D(rate=params['spatial_drop_rate_6'])(bn_13)
conv_13_1 = tf.keras.layers.Conv2D(filters=params['conv_13_filters'], kernel_size=params['conv_13_kernel'], strides=params['conv_13_strides'], padding=params['conv_13_pad'], data_format=channel_order, activation=params['intra_act_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(spat_6_1)
bn_14 = tf.keras.layers.BatchNormalization()(conv_13_1)
add_5 = tf.keras.layers.Add()([act_4, bn_14])
act_5_1 = tf.keras.layers.Activation(activation=params['res_act_fn'])(add_5)
# -----------------------------------------------------------------
# FLUORO ANALYSIS 2
# -----------------------------------------------------------------
# ---
# Entry Fluoro Layers
# per_image_stand_2 = tf.keras.layers.Lambda(lambda frame: tf.image.per_image_standardization(frame))(input_fluoro_2)
# bn_0 = tf.keras.layers.BatchNormalization()(per_image_stand_2)
conv_0_1 = tf.keras.layers.Conv2D(filters=params['conv_0_filters'], kernel_size=params['conv_0_kernel'], strides=params['conv_0_strides'], padding=params['conv_0_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(input_fluoro_2)
bn_1 = tf.keras.layers.BatchNormalization()(conv_0_1)
spat_0_1 = tf.keras.layers.SpatialDropout2D(rate=params['spatial_drop_rate_0'])(bn_1)
conv_1_1 = tf.keras.layers.Conv2D(filters=params['conv_1_filters'], kernel_size=params['conv_1_kernel'], strides=params['conv_1_strides'], padding=params['conv_1_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(spat_0_1)
# ---
# Pool After Initial Layers
pool_0_1 = tf.keras.layers.AveragePooling2D(pool_size=params['pool_0_size'], padding=params['pool_0_pad'])(conv_1_1)
# ---
# Run of Residual Layers
bn_2 = tf.keras.layers.BatchNormalization()(pool_0_1)
# 1
conv_2_1 = tf.keras.layers.Conv2D(filters=params['conv_2_filters'], kernel_size=params['conv_2_kernel'], strides=params['conv_2_strides'], padding=params['conv_2_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(bn_2)
bn_3 = tf.keras.layers.BatchNormalization()(conv_2_1)
spat_1_1 = tf.keras.layers.SpatialDropout2D(rate=params['spatial_drop_rate_1'])(bn_3)
conv_3_1 = tf.keras.layers.Conv2D(filters=params['conv_3_filters'], kernel_size=params['conv_3_kernel'], strides=params['conv_3_strides'], padding=params['conv_3_pad'], data_format=channel_order, activation=params['intra_act_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(spat_1_1)
bn_4 = tf.keras.layers.BatchNormalization()(conv_3_1)
add_0 = tf.keras.layers.Add()([bn_4, bn_2])
act_0 = tf.keras.layers.Activation(activation=params['res_act_fn'])(add_0)
# 2
conv_4_1 = tf.keras.layers.Conv2D(filters=params['conv_4_filters'], kernel_size=params['conv_4_kernel'], strides=params['conv_4_strides'], padding=params['conv_4_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(act_0)
bn_5 = tf.keras.layers.BatchNormalization()(conv_4_1)
spat_2_1 = tf.keras.layers.SpatialDropout2D(rate=params['spatial_drop_rate_1'])(bn_5)
conv_5_1 = tf.keras.layers.Conv2D(filters=params['conv_3_filters'], kernel_size=params['conv_3_kernel'], strides=params['conv_3_strides'], padding=params['conv_3_pad'], data_format=channel_order, activation=params['intra_act_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(spat_2_1)
bn_6 = tf.keras.layers.BatchNormalization()(conv_5_1)
add_1 = tf.keras.layers.Add()([act_0, bn_6])
act_1 = tf.keras.layers.Activation(activation=params['res_act_fn'])(add_1)
# 3
conv_6_1 = tf.keras.layers.Conv2D(filters=params['conv_6_filters'], kernel_size=params['conv_6_kernel'], strides=params['conv_6_strides'], padding=params['conv_6_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(act_1)
bn_7 = tf.keras.layers.BatchNormalization()(conv_6_1)
spat_3_1 = tf.keras.layers.SpatialDropout2D(rate=params['spatial_drop_rate_3'])(bn_7)
conv_7_1 = tf.keras.layers.Conv2D(filters=params['conv_7_filters'], kernel_size=params['conv_7_kernel'], strides=params['conv_7_strides'], padding=params['conv_7_pad'], data_format=channel_order, activation=params['intra_act_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(spat_3_1)
bn_8 = tf.keras.layers.BatchNormalization()(conv_7_1)
add_2 = tf.keras.layers.Add()([act_1, bn_8])
act_2 = tf.keras.layers.Activation(activation=params['res_act_fn'])(add_2)
# 4
conv_8_1 = tf.keras.layers.Conv2D(filters=params['conv_8_filters'], kernel_size=params['conv_8_kernel'], strides=params['conv_8_strides'], padding=params['conv_8_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(act_2)
bn_9 = tf.keras.layers.BatchNormalization()(conv_8_1)
spat_4_1 = tf.keras.layers.SpatialDropout2D(rate=params['spatial_drop_rate_4'])(bn_9)
conv_9_1 = tf.keras.layers.Conv2D(filters=params['conv_9_filters'], kernel_size=params['conv_9_kernel'], strides=params['conv_9_strides'], padding=params['conv_9_pad'], data_format=channel_order, activation=params['intra_act_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(spat_4_1)
bn_10 = tf.keras.layers.BatchNormalization()(conv_9_1)
add_3 = tf.keras.layers.Add()([act_2, bn_10])
act_3 = tf.keras.layers.Activation(activation=params['res_act_fn'])(add_3)
# 5
conv_10_1 = tf.keras.layers.Conv2D(filters=params['conv_10_filters'], kernel_size=params['conv_10_kernel'], strides=params['conv_10_strides'], padding=params['conv_10_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(act_3)
bn_11 = tf.keras.layers.BatchNormalization()(conv_10_1)
spat_5_1 = tf.keras.layers.SpatialDropout2D(rate=params['spatial_drop_rate_5'])(bn_11)
conv_11_1 = tf.keras.layers.Conv2D(filters=params['conv_11_filters'], kernel_size=params['conv_11_kernel'], strides=params['conv_11_strides'], padding=params['conv_11_pad'], data_format=channel_order, activation=params['intra_act_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(spat_5_1)
bn_12 = tf.keras.layers.BatchNormalization()(conv_11_1)
add_4 = tf.keras.layers.Add()([act_3, bn_12])
act_4 = tf.keras.layers.Activation(activation=params['res_act_fn'])(add_4)
# 6
conv_12_1 = tf.keras.layers.Conv2D(filters=params['conv_12_filters'], kernel_size=params['conv_12_kernel'], strides=params['conv_12_strides'], padding=params['conv_12_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(act_4)
bn_13 = tf.keras.layers.BatchNormalization()(conv_12_1)
spat_6_1 = tf.keras.layers.SpatialDropout2D(rate=params['spatial_drop_rate_6'])(bn_13)
conv_13_1 = tf.keras.layers.Conv2D(filters=params['conv_13_filters'], kernel_size=params['conv_13_kernel'], strides=params['conv_13_strides'], padding=params['conv_13_pad'], data_format=channel_order, activation=params['intra_act_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(spat_6_1)
bn_14 = tf.keras.layers.BatchNormalization()(conv_13_1)
add_5 = tf.keras.layers.Add()([act_4, bn_14])
act_5_2 = tf.keras.layers.Activation(activation=params['res_act_fn'])(add_5)
# -----------------------------------------------------------------
# COMBINE FLUOROS
# -----------------------------------------------------------------
comb_fluoro_0 = tf.keras.layers.concatenate([act_5_1, act_5_2])
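# The two per-view residual towers are fused here; every layer below this point
# works on the concatenated feature maps of both fluoro images.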
# -----------------------------------------------------------------
# RES NETS AFTER COMBINED FLUORO
# -----------------------------------------------------------------
# 0
comb_0 = tf.keras.layers.Conv2D(filters=params['comb_0_filters'], kernel_size=params['comb_0_kernel'], strides=params['comb_0_strides'], padding=params['comb_0_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(comb_fluoro_0)
bn_0 = tf.keras.layers.BatchNormalization()(comb_0)
spat_0 = tf.keras.layers.SpatialDropout2D(rate=params['comb_spatial_0'])(bn_0)
comb_1 = tf.keras.layers.Conv2D(filters=params['comb_1_filters'], kernel_size=params['comb_1_kernel'], strides=params['comb_1_strides'], padding=params['comb_1_pad'], data_format=channel_order, activation=params['c_intra_act_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(spat_0)
bn_1 = tf.keras.layers.BatchNormalization()(comb_1)
add_0 = tf.keras.layers.Add()([comb_fluoro_0, bn_1])
act_0 = tf.keras.layers.Activation(activation=params['c_res_act_fn'])(add_0)
# 1
comb_2 = tf.keras.layers.Conv2D(filters=params['comb_2_filters'], kernel_size=params['comb_2_kernel'], strides=params['comb_2_strides'], padding=params['comb_2_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(act_0)
bn_2 = tf.keras.layers.BatchNormalization()(comb_2)
spat_1 = tf.keras.layers.SpatialDropout2D(rate=params['comb_spatial_1'])(bn_2)
comb_3 = tf.keras.layers.Conv2D(filters=params['comb_3_filters'], kernel_size=params['comb_3_kernel'], strides=params['comb_3_strides'], padding=params['comb_3_pad'], data_format=channel_order, activation=params['c_intra_act_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(spat_1)
bn_3 = tf.keras.layers.BatchNormalization()(comb_3)
add_1 = tf.keras.layers.Add()([act_0, bn_3])
act_1 = tf.keras.layers.Activation(activation=params['c_res_act_fn'])(add_1)
# 2
comb_4 = tf.keras.layers.Conv2D(filters=params['comb_4_filters'], kernel_size=params['comb_4_kernel'], strides=params['comb_4_strides'], padding=params['comb_4_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(act_1)
bn_4 = tf.keras.layers.BatchNormalization()(comb_4)
spat_2 = tf.keras.layers.SpatialDropout2D(rate=params['comb_spatial_2'])(bn_4)
comb_5 = tf.keras.layers.Conv2D(filters=params['comb_5_filters'], kernel_size=params['comb_5_kernel'], strides=params['comb_5_strides'], padding=params['comb_5_pad'], data_format=channel_order, activation=params['c_intra_act_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(spat_2)
bn_5 = tf.keras.layers.BatchNormalization()(comb_5)
add_2 = tf.keras.layers.Add()([act_1, bn_5])
act_2 = tf.keras.layers.Activation(activation=params['c_res_act_fn'])(add_2)
# 3
comb_6 = tf.keras.layers.Conv2D(filters=params['comb_6_filters'], kernel_size=params['comb_6_kernel'], strides=params['comb_6_strides'], padding=params['comb_6_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(act_2)
bn_6 = tf.keras.layers.BatchNormalization()(comb_6)
spat_3 = tf.keras.layers.SpatialDropout2D(rate=params['comb_spatial_3'])(bn_6)
comb_7 = tf.keras.layers.Conv2D(filters=params['comb_7_filters'], kernel_size=params['comb_7_kernel'], strides=params['comb_7_strides'], padding=params['comb_7_pad'], data_format=channel_order, activation=params['c_intra_act_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(spat_3)
bn_7 = tf.keras.layers.BatchNormalization()(comb_7)
add_3 = tf.keras.layers.Add()([act_2, bn_7])
act_3 = tf.keras.layers.Activation(activation=params['c_res_act_fn'])(add_3)
# 4
comb_8 = tf.keras.layers.Conv2D(filters=params['comb_8_filters'], kernel_size=params['comb_8_kernel'], strides=params['comb_8_strides'], padding=params['comb_8_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(act_3)
bn_8 = tf.keras.layers.BatchNormalization()(comb_8)
spat_4 = tf.keras.layers.SpatialDropout2D(rate=params['comb_spatial_4'])(bn_8)
comb_9 = tf.keras.layers.Conv2D(filters=params['comb_9_filters'], kernel_size=params['comb_9_kernel'], strides=params['comb_9_strides'], padding=params['comb_9_pad'], data_format=channel_order, activation=params['c_intra_act_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(spat_4)
bn_9 = tf.keras.layers.BatchNormalization()(comb_9)
add_4 = tf.keras.layers.Add()([act_3, bn_9])
act_4 = tf.keras.layers.Activation(activation=params['c_res_act_fn'])(add_4)
# 5
comb_10 = tf.keras.layers.Conv2D(filters=params['comb_10_filters'], kernel_size=params['comb_10_kernel'], strides=params['comb_10_strides'], padding=params['comb_10_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(act_4)
bn_10 = tf.keras.layers.BatchNormalization()(comb_10)
spat_5 = tf.keras.layers.SpatialDropout2D(rate=params['comb_spatial_5'])(bn_10)
comb_11 = tf.keras.layers.Conv2D(filters=params['comb_11_filters'], kernel_size=params['comb_11_kernel'], strides=params['comb_11_strides'], padding=params['comb_11_pad'], data_format=channel_order, activation=params['c_intra_act_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(spat_5)
bn_11 = tf.keras.layers.BatchNormalization()(comb_11)
comb_12 = tf.keras.layers.Conv2D(filters=params['comb_12_filters'], kernel_size=params['comb_12_kernel'], strides=params['comb_12_strides'], padding=params['comb_12_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(act_4)
bn_12 = tf.keras.layers.BatchNormalization()(comb_12)
add_5 = tf.keras.layers.Add()([bn_11, bn_12])
act_5 = tf.keras.layers.Activation(activation=params['c_res_act_fn'])(add_5)
# 6
comb_13 = tf.keras.layers.Conv2D(filters=params['comb_13_filters'], kernel_size=params['comb_13_kernel'], strides=params['comb_13_strides'], padding=params['comb_13_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(act_5)
bn_13 = tf.keras.layers.BatchNormalization()(comb_13)
spat_6 = tf.keras.layers.SpatialDropout2D(rate=params['comb_spatial_6'])(bn_13)
comb_14 = tf.keras.layers.Conv2D(filters=params['comb_14_filters'], kernel_size=params['comb_14_kernel'], strides=params['comb_14_strides'], padding=params['comb_14_pad'], data_format=channel_order, activation=params['c_intra_act_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(spat_6)
bn_14 = tf.keras.layers.BatchNormalization()(comb_14)
comb_15 = tf.keras.layers.Conv2D(filters=params['comb_15_filters'], kernel_size=params['comb_15_kernel'], strides=params['comb_15_strides'], padding=params['comb_15_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(act_5)
bn_15 = tf.keras.layers.BatchNormalization()(comb_15)
add_6 = tf.keras.layers.Add()([bn_14, bn_15])
act_6 = tf.keras.layers.Activation(activation=params['c_res_act_fn'])(add_6)
# 7
comb_16 = tf.keras.layers.Conv2D(filters=params['comb_16_filters'], kernel_size=params['comb_16_kernel'], strides=params['comb_16_strides'], padding=params['comb_16_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(act_6)
bn_16 = tf.keras.layers.BatchNormalization()(comb_16)
spat_7 = tf.keras.layers.SpatialDropout2D(rate=params['comb_spatial_7'])(bn_16)
comb_17 = tf.keras.layers.Conv2D(filters=params['comb_17_filters'], kernel_size=params['comb_17_kernel'], strides=params['comb_17_strides'], padding=params['comb_17_pad'], data_format=channel_order, activation=params['c_intra_act_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(spat_7)
bn_17 = tf.keras.layers.BatchNormalization()(comb_17)
comb_18 = tf.keras.layers.Conv2D(filters=params['comb_18_filters'], kernel_size=params['comb_18_kernel'], strides=params['comb_18_strides'], padding=params['comb_18_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(act_6)
bn_18 = tf.keras.layers.BatchNormalization()(comb_18)
add_7 = tf.keras.layers.Add()([bn_17, bn_18])
act_7 = tf.keras.layers.Activation(activation=params['c_res_act_fn'])(add_7)
# ---
# Conv After End of Res Net
comb_19 = tf.keras.layers.Conv2D(filters=params['comb_19_filters'], kernel_size=params['comb_19_kernel'], strides=params['comb_19_strides'], padding=params['comb_19_pad'], data_format=channel_order, activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['conv_regularizer'])(act_7)
# ---
# Dense At End of Convs
comb_flatten_1 = tf.keras.layers.Flatten()(comb_19)
bn_19 = tf.keras.layers.BatchNormalization()(comb_flatten_1)
dense_0_comb = tf.keras.layers.Dense(units=params['dense_comb_0_units'], activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['dense_regularizer_1'])(bn_19)
bn_20 = tf.keras.layers.BatchNormalization()(dense_0_comb)
dense_comb_1 = tf.keras.layers.Dropout(params['drop_1_comb'])(bn_20)
dense_1_comb = tf.keras.layers.Dense(units=params['dense_comb_1_units'], activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['dense_regularizer_1'])(dense_comb_1)
bn_21_f = tf.keras.layers.BatchNormalization()(dense_1_comb)
# -----------------------------------------------------------------
# COMBINE FLUORO NETS AND VOXEL NETS
# -----------------------------------------------------------------
fluoro_vox_comb = tf.keras.layers.concatenate([bn_21_f, bn_21_v])
# fluoro_vox_comb = tf.keras.layers.Add()([bn_21_f, bn_21_v])
# fluoro_vox_act = tf.keras.layers.Activation(activation=params['flu_vox_act_fn'])(fluoro_vox_comb)
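# Concatenation keeps the two embeddings side by side; the commented-out Add
# variant above would instead sum them elementwise, which would require
# dense_comb_1_units and dense_2_v_units to be equal.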
# -----------------------------------------------------------------
# DENSE AFTER FLUORO AND VOXEL
# -----------------------------------------------------------------
bn_0 = tf.keras.layers.BatchNormalization()(fluoro_vox_comb)
vox_flu_0 = tf.keras.layers.Dense(units=params['vox_flu_units_0'], activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['dense_regularizer_1'])(fluoro_vox_comb)
bn_1 = tf.keras.layers.BatchNormalization()(vox_flu_0)
vox_flu_drop_1 = tf.keras.layers.Dropout(params['vox_flu_drop_1'])(bn_1)
vox_flu_1 = tf.keras.layers.Dense(units=params['vox_flu_units_1'], activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['dense_regularizer_1'])(vox_flu_drop_1)
bn_2 = tf.keras.layers.BatchNormalization()(vox_flu_1)
vox_flu_drop_2 = tf.keras.layers.Dropout(params['vox_flu_drop_2'])(bn_2)
vox_flu_2 = tf.keras.layers.Dense(units=params['vox_flu_units_2'], activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['dense_regularizer_1'])(vox_flu_drop_2)
bn_3 = tf.keras.layers.BatchNormalization()(vox_flu_2)
vox_flu_drop_3 = tf.keras.layers.Dropout(params['vox_flu_drop_3'])(bn_3)
vox_flu_3 = tf.keras.layers.Dense(units=params['vox_flu_units_3'], activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['dense_regularizer_1'])(vox_flu_drop_3)
bn_4 = tf.keras.layers.BatchNormalization()(vox_flu_3)
vox_flu_4 = tf.keras.layers.Dense(units=params['vox_flu_units_4'], activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['dense_regularizer_1'])(bn_4)
bn_5_comb = tf.keras.layers.BatchNormalization()(vox_flu_4)
# -----------------------------------------------------------------
# CALIBRATION DENSE
# -----------------------------------------------------------------
# bn_0 = tf.keras.layers.BatchNormalization()(input_cali)
cali_0 = tf.keras.layers.Dense(units=params['cali_0_units'], activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['dense_regularizer_1'])(input_cali)
bn_1 = tf.keras.layers.BatchNormalization()(cali_0)
drop_1_cali = tf.keras.layers.Dropout(params['drop_1_cali'])(bn_1)
cali_1 = tf.keras.layers.Dense(units=params['cali_1_units'], activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['dense_regularizer_1'])(drop_1_cali)
bn_2 = tf.keras.layers.BatchNormalization()(cali_1)
drop_2_cali = tf.keras.layers.Dropout(params['drop_2_cali'])(bn_2)
cali_2 = tf.keras.layers.Dense(units=params['cali_2_units'], activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['dense_regularizer_1'])(drop_2_cali)
bn_3 = tf.keras.layers.BatchNormalization()(cali_2)
cali_3 = tf.keras.layers.Dense(units=params['cali_3_units'], activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['dense_regularizer_1'])(bn_3)
bn_4_c = tf.keras.layers.BatchNormalization()(cali_3)
# -----------------------------------------------------------------
# COMBINE CALI AND VOX/FLUORO
# -----------------------------------------------------------------
top_level_comb = tf.keras.layers.Add()([bn_4_c, bn_5_comb])
top_level_act = tf.keras.layers.Activation(activation=params['top_level_act_fn'])(top_level_comb)
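# Note: the Add above only works if the calibration embedding (cali_3_units) and
# the fused voxel/fluoro embedding (vox_flu_units_4) have the same width.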
# -----------------------------------------------------------------
# TOP LEVEL DENSE TO OUTPUT
# -----------------------------------------------------------------
top_dense_0 = tf.keras.layers.Dense(units=params['top_dense_0'], activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['dense_regularizer_2'])(top_level_act)
bn_0 = tf.keras.layers.BatchNormalization()(top_dense_0)
top_drop_0 = tf.keras.layers.Dropout(params['top_drop_0'])(bn_0)
top_dense_1 = tf.keras.layers.Dense(units=params['top_dense_1'], activation=params['top_level_intra'], kernel_initializer=params['kern_init'], activity_regularizer=params['dense_regularizer_2'])(top_drop_0)
bn_1 = tf.keras.layers.BatchNormalization()(top_dense_1)
add_0 = tf.keras.layers.Add()([bn_1, bn_4_c])
act_0 = tf.keras.layers.Activation(activation=params['top_level_act_fn'])(add_0)
top_dense_2 = tf.keras.layers.Dense(units=params['top_dense_2'], activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['dense_regularizer_2'])(act_0)
bn_2 = tf.keras.layers.BatchNormalization()(top_dense_2)
top_drop_1 = tf.keras.layers.Dropout(params['top_drop_1'])(bn_2)
top_dense_3 = tf.keras.layers.Dense(units=params['top_dense_3'], activation=params['top_level_intra'], kernel_initializer=params['kern_init'], activity_regularizer=params['dense_regularizer_2'])(top_drop_1)
bn_3 = tf.keras.layers.BatchNormalization()(top_dense_3)
add_1 = tf.keras.layers.Add()([bn_3, act_0])
act_1 = tf.keras.layers.Activation(activation=params['top_level_act_fn'])(add_1)
top_dense_4 = tf.keras.layers.Dense(units=params['top_dense_4'], activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['dense_regularizer_2'])(act_1)
bn_4 = tf.keras.layers.BatchNormalization()(top_dense_4)
top_drop_2 = tf.keras.layers.Dropout(params['top_drop_2'])(bn_4)
top_dense_5 = tf.keras.layers.Dense(units=params['top_dense_5'], activation=params['top_level_intra'], kernel_initializer=params['kern_init'], activity_regularizer=params['dense_regularizer_2'])(top_drop_2)
bn_5 = tf.keras.layers.BatchNormalization()(top_dense_5)
add_2 = tf.keras.layers.Add()([bn_5, act_1])
act_2 = tf.keras.layers.Activation(activation=params['top_level_act_fn'])(add_2)
top_dense_6 = tf.keras.layers.Dense(units=params['top_dense_6'], activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['dense_regularizer_2'])(act_2)
bn_6 = tf.keras.layers.BatchNormalization()(top_dense_6)
top_drop_3 = tf.keras.layers.Dropout(params['top_drop_3'])(bn_6)
top_dense_7 = tf.keras.layers.Dense(units=params['top_dense_7'], activation=params['top_level_intra'], kernel_initializer=params['kern_init'], activity_regularizer=params['dense_regularizer_2'])(top_drop_3)
bn_7 = tf.keras.layers.BatchNormalization()(top_dense_7)
add_3 = tf.keras.layers.Add()([bn_7, act_2])
act_3 = tf.keras.layers.Activation(activation=params['top_level_act_fn'])(add_3)
top_dense_8 = tf.keras.layers.Dense(units=params['top_dense_8'], activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['dense_regularizer_2'])(act_3)
bn_8 = tf.keras.layers.BatchNormalization()(top_dense_8)
top_drop_4 = tf.keras.layers.Dropout(params['top_drop_4'])(bn_8)
top_dense_9 = tf.keras.layers.Dense(units=params['top_dense_9'], activation=params['top_level_intra'], kernel_initializer=params['kern_init'], activity_regularizer=params['dense_regularizer_2'])(top_drop_4)
bn_9 = tf.keras.layers.BatchNormalization()(top_dense_9)
add_4 = tf.keras.layers.Add()([bn_9, act_3])
act_4 = tf.keras.layers.Activation(activation=params['top_level_act_fn'])(add_4)
top_dense_10 = tf.keras.layers.Dense(units=params['top_dense_10'], activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=params['dense_regularizer_2'])(act_4)
bn_10 = tf.keras.layers.BatchNormalization()(top_dense_10)
top_dense_11 = tf.keras.layers.Dense(units=params['top_dense_11'], activation=params['top_level_intra'], kernel_initializer=params['kern_init'], activity_regularizer=params['dense_regularizer_2'])(bn_10)
bn_11 = tf.keras.layers.BatchNormalization()(top_dense_11)
add_5 = tf.keras.layers.Add()([bn_11, act_4])
act_5 = tf.keras.layers.Activation(activation=params['top_level_act_fn'])(add_5)
top_dense_12 = tf.keras.layers.Dense(units=params['top_dense_4'], activation=params['activation_fn'], kernel_initializer=params['kern_init'], activity_regularizer=None)(act_5)
# -----------------------------------------------------------------
# Main Output
main_output = tf.keras.layers.Dense(units=params['main_output_units'], activation='elu', kernel_initializer=params['kern_init'], name='main_output')(top_dense_12)
# -----------------------------------------------------------------
# Model Housekeeping
model = tf.keras.Model(inputs=[input_vox, input_fluoro_1, input_fluoro_2, input_cali], outputs=main_output)
model.compile(optimizer=params['model_opt'](lr=params['learning_rate']), loss=params['model_loss'], metrics=[params['model_metric']])
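# The plot_model call below needs the optional pydot and graphviz packages
# installed; without them it will fail.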
tf.keras.utils.plot_model(model, os.path.abspath(os.path.join(save_dir, expr_name + '_' + expr_no + '.png')), show_shapes=True)
model.summary()
# -----------------------------------------------------------------
def min_max_norm(data_set, feature_range=(-1, 1), axis=0):
data_min = np.min(data_set, axis=axis)
data_max = np.max(data_set, axis=axis)
data_in_std_range = (data_set - data_min) / (data_max - data_min)
data_scaled = data_in_std_range * (feature_range[1] - feature_range[0]) + feature_range[0]
return data_scaled
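# Illustrative example (not used by the script): with feature_range=(-1, 1) a
# column spanning [2, 6] is rescaled via (x - 2) / (6 - 2) * 2 - 1, so
# 2 -> -1, 4 -> 0 and 6 -> 1.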
# -----------------------------------------------------------------
def split_train_test(shape, num_of_samples=None, ratio=0.2):
if num_of_samples is None:
shuffled_indices = np.random.choice(shape, size=shape, replace=False)
else:
shuffled_indices = np.random.choice(shape, size=num_of_samples, replace=False)
    test_set_size = int(len(shuffled_indices) * ratio)
test_indx = shuffled_indices[:test_set_size]
train_indx = shuffled_indices[test_set_size:]
return test_indx, train_indx
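# Illustrative example (not used by the script): split_train_test(100) shuffles
# the indices 0..99 and returns 20 of them as the test split and the remaining
# 80 as the training split.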
vox_file = h5py.File(os.path.expanduser('~/fluoro/data/compilation/voxels_mark_origin_comp.h5py'), 'r')
vox_init = vox_file['vox_dset']
image_file = h5py.File(os.path.expanduser('~/fluoro/data/compilation/images_norm_std.h5py'), 'r')
image_grp_1 = image_file['image_1']
image_grp_2 = image_file['image_2']
image_init_1 = image_grp_1['min_max_dset_per_image']
image_init_2 = image_grp_2['min_max_dset_per_image']
cali_file = h5py.File(os.path.expanduser('~/fluoro/data/compilation/calibration_norm_std.h5py'), 'r')
cali_init = cali_file['min_max_dset']
label_file = h5py.File(os.path.expanduser('~/fluoro/data/compilation/labels.h5py'), 'r')
label_init = label_file['labels_dset']
num_of_samples = None
test_indxs, train_sup_indxs = split_train_test(len(label_init), num_of_samples=num_of_samples)
val_indxs, train_indxs = split_train_test(len(train_sup_indxs))
val_indxs = train_sup_indxs[val_indxs]
train_indxs = train_sup_indxs[train_indxs]
test_indxs = sorted(list(test_indxs))
val_indxs = sorted(list(val_indxs))
train_indxs = sorted(list(train_indxs))
hist_file = open(os.path.join(save_dir, expr_name + '_hist_objects_' + expr_no + '.pkl'), 'wb')
var_dict = {}
var_dict['test_indxs'] = test_indxs
var_dict['val_indxs'] = val_indxs
var_dict['train_indxs'] = train_indxs
vox_mat_train = vox_init[:]
vox_mat_val = vox_mat_train[val_indxs]
vox_mat_train = vox_mat_train[train_indxs]
vox_file.close()
image_mat_train_1 = image_init_1[:]
image_mat_val_1 = image_mat_train_1[val_indxs]
image_mat_train_1 = image_mat_train_1[train_indxs]
image_mat_train_2 = image_init_2[:]
image_mat_val_2 = image_mat_train_2[val_indxs]
image_mat_train_2 = image_mat_train_2[train_indxs]
image_file.close()
cali_mat_train = cali_init[:]
cali_mat_val = cali_mat_train[val_indxs]
cali_mat_train = cali_mat_train[train_indxs]
cali_file.close()
label_mat_sup = label_init[:]
label_mat_sup = label_mat_sup[list(list(train_indxs) + list(val_indxs))]
label_mat_sup_norm = min_max_norm(label_mat_sup)
label_mat_train = label_mat_sup_norm[:len(train_indxs)]
label_mat_val = label_mat_sup_norm[-len(val_indxs):]
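# These slices line up because label_mat_sup was assembled as train_indxs
# followed by val_indxs, so the first len(train_indxs) rows are training labels
# and the trailing len(val_indxs) rows are validation labels.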
label_t_mean = np.mean(label_mat_sup, axis=0)
label_t_std = np.std(label_mat_sup, axis=0)
label_t_min = np.min(label_mat_sup, axis=0)
label_t_max = np.max(label_mat_sup, axis=0)
var_dict['label_train_val_mean'] = label_t_mean
var_dict['label_train_val_std'] = label_t_std
var_dict['label_train_val_min'] = label_t_min
var_dict['label_train_val_max'] = label_t_max
label_file.close()
# -----------------------------------------------------------------
print('\n\ncompletely loaded...\n\n')
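# The voxel volumes get an explicit trailing channel axis via expand_dims so
# each sample has the rank-4 per-sample shape that the Conv3D stack expects.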
result = model.fit(x={'input_vox': np.expand_dims(vox_mat_train, axis=-1), 'input_fluoro_1': image_mat_train_1, 'input_fluoro_2': image_mat_train_2, 'input_cali': cali_mat_train}, y=label_mat_train, validation_data=([np.expand_dims(vox_mat_val, axis=-1), image_mat_val_1, image_mat_val_2, cali_mat_val], label_mat_val), epochs=params['model_epochs'], batch_size=params['model_batchsize'], shuffle=True, verbose=2)
model.save(os.path.abspath(os.path.join(save_dir, expr_name + '_' + expr_no + '.h5')))
var_dict['result'] = result.history
pickle.dump(var_dict, hist_file)
hist_file.close()
| 50.876322
| 413
| 0.737377
|
cba5312298851d3e2bc36cfb9dd566d07d38755e
| 5,951
|
py
|
Python
|
main.py
|
tomimara52/ConsoleSnake
|
023ab0bece1342d3efa70f02ae7f3609baaa7da1
|
[
"MIT"
] | 10
|
2021-01-29T07:50:49.000Z
|
2021-01-29T19:58:06.000Z
|
main.py
|
tomimara52/ConsoleSnake
|
023ab0bece1342d3efa70f02ae7f3609baaa7da1
|
[
"MIT"
] | null | null | null |
main.py
|
tomimara52/ConsoleSnake
|
023ab0bece1342d3efa70f02ae7f3609baaa7da1
|
[
"MIT"
] | 3
|
2021-01-29T16:27:53.000Z
|
2021-01-29T17:05:31.000Z
|
# -*- coding: UTF-8 -*-
import time
import os
from random import randint
from getch import KBHit
class Snake:
def __init__(self, x, y):
self.parts = [[1, 1]]
self.length = 1
self.dir = 'd'
self.skins = ['O']
self.fruit = [randint(2, x), randint(2, y)]
self.size = [x, y]
self.print_in_coords()
def get_opposites(self):
return {"w": "s", "s": "w", "d": "a", "a":"d"}
def set_skins(self):
"""
        Iterates over each snake part and, based on where the adjacent parts are,
        gives it a skin from the following: │ ─ └ ┐ ┌ ┘
"""
skins = ['O']
coords_subtraction = lambda a, b: [x1 - x2 for (x1, x2) in zip(a, b)]
for i in range(1, len(self.parts)):
if i == len(self.parts)-1:
a = self.parts[-2]
b = self.parts[-1]
else:
b = self.parts[i+1]
a = self.parts[i-1]
diff = coords_subtraction(a, b)
if diff[0] == 0:
skins.append('│')
elif diff[1] == 0:
skins.append('─')
else:
a = self.parts[i-1]
b = self.parts[i]
diff2 = coords_subtraction(a, b)
if sum(diff) == 0:
if sum(diff2) == 1:
skins.append('└')
else:
skins.append('┐')
else:
if diff2[1] == -1 or diff2[0] == 1:
skins.append('┌')
else:
skins.append('┘')
self.skins = skins
def print_in_coords(self):
"""
        Prints the game field with '·', the snake body parts,
        and the fruit ('X').
"""
coords = self.parts
os.system('cls' if os.name == 'nt' else 'clear')
for i in range(self.size[1], 0, -1):
for j in range(1, self.size[0]+1):
if [j, i] in coords:
print(self.skins[coords.index([j, i])], end=' ')
elif [j, i] == self.fruit:
print('X', end=' ')
else:
print('·', end=' ')
print('')
def update_coors(self):
"""
        Makes every part of the snake move to where the part ahead of it was,
        except the head, which moves in the direction the user input.
"""
for i in range(len(self.parts)-1, 0, -1):
self.parts[i] = self.parts[i-1][:]
if self.dir == 'w':
self.parts[0][1] += 1
elif self.dir == 'd':
self.parts[0][0] += 1
elif self.dir == 's':
self.parts[0][1] -= 1
elif self.dir == 'a':
self.parts[0][0] -= 1
def check_fruit(self):
"""
        Checks if the snake's head is in the same place as the fruit;
        if so, the snake grows and another fruit is spawned.
"""
if self.parts[0] == self.fruit:
self.grow()
self.generate_fruit()
def alive(self):
"""
        Checks whether the head hit a body part or crossed the field limits.
"""
head = self.parts[0]
if (head in self.parts[1:]) or (not(0 < head[0] <= self.size[0])) or (not(0 < head[1] <= self.size[1])):
return False
return True
def get_action(self, character):
if (character in 'wasd') and (self.get_opposites()[character] != self.dir or len(self.parts) == 1):
self.dir = character
self.update_coors()
self.check_fruit()
self.set_skins()
self.print_in_coords()
return self.alive()
def generate_fruit(self):
new_coords = [randint(1,self.size[0]), randint(1,self.size[1])]
if new_coords in self.parts:
self.generate_fruit()
else:
self.fruit = new_coords
def grow(self):
if len(self.parts) > 1:
last = self.parts[-1]
sec_last = self.parts[-2]
diff = [x1 - x2 for (x1, x2) in zip(sec_last, last)]
if diff[0] == 0:
if diff[1] > 0:
self.parts.append([last[0], last[1]-1])
else:
self.parts.append([last[0], last[1]+1])
elif diff[0] > 0:
self.parts.append([last[0]-1, last[1]])
else:
self.parts.append([last[0]+1, last[1]])
else:
head = self.parts[0]
if self.dir == 'w':
self.parts.append([head[0], head[1]-1])
elif self.dir == 'd':
self.parts.append([head[0]-1, head[1]])
elif self.dir == 's':
self.parts.append([head[0], head[1]+1])
elif self.dir == 'a':
self.parts.append([head[0]+1, head[1]])
self.length += 1
def main():
snake = Snake(15, 10) # This means the game field is 15x10
update_time = .125 # This is how much time there is between updates, 1/update_time = fps
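    # For example, the default update_time of .125 s corresponds to
    # 1 / 0.125 = 8 updates (frames) per second.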
keep_playing = True
kb = KBHit()
while keep_playing:
t = 0
key_stroke = ' '
while t < update_time:
start = time.time()
if kb.kbhit():
key_stroke = kb.getch()
end = time.time()
t += end - start
keep_playing = snake.get_action(key_stroke)
if snake.size[0] * snake.size[1] <= snake.length:
print('You win!')
break
kb.set_normal_term()
print('Score:', snake.length)
while True:
again = input('Keep playing? (y/n) ')
if again.lower() == 'y':
main()
break
elif again.lower() == 'n':
print('Bye')
break
else:
print('Input a valid answer')
if __name__ == "__main__":
main()
| 31.321053
| 112
| 0.460259
|
ec73e057837951ae6c635c834fda609db9ff8268
| 1,421
|
py
|
Python
|
app.py
|
ArtyDev57/face_recon
|
c0a79b3fe41e0db37cb13ce54e17bef8f8dbf685
|
[
"MIT"
] | 4
|
2020-05-22T03:17:03.000Z
|
2021-07-29T04:24:02.000Z
|
app.py
|
ArtyDev57/face_recon
|
c0a79b3fe41e0db37cb13ce54e17bef8f8dbf685
|
[
"MIT"
] | null | null | null |
app.py
|
ArtyDev57/face_recon
|
c0a79b3fe41e0db37cb13ce54e17bef8f8dbf685
|
[
"MIT"
] | 1
|
2020-10-01T11:58:05.000Z
|
2020-10-01T11:58:05.000Z
|
#!/usr/bin/env python3
from flask import Flask, jsonify
from flask_jwt_extended import JWTManager
from routers import *
from werkzeug.exceptions import HTTPException
import flask_cors
import appconfig as conf
app = Flask(__name__)
# Cross-Origin Resource Sharing (CORS)
flask_cors.CORS(app, supports_credentials=True, resources={r"/api/*": {"origins": "*"}})
# Upload file config: cap request bodies at 16 MiB
app.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024
# JWT Config
app.config['JWT_SECRET_KEY'] = "hcoirie83748374834"
app.config['JWT_TOKEN_LOCATION'] = ['headers', 'query_string']
app.config['JWT_QUERY_STRING_NAME'] = 'token'
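# With JWT_TOKEN_LOCATION above, protected routes accept the token either from
# the usual Authorization header or as a ?token=<jwt> query parameter (the name
# set by JWT_QUERY_STRING_NAME).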
jwt = JWTManager(app)
# register Routers
url_api_prefix = conf.api_url_prefix
app.register_blueprint(users.users, url_prefix=url_api_prefix)
app.register_blueprint(monitor.monitor, url_prefix=url_api_prefix + "/monitor")
app.register_blueprint(camera.camera, url_prefix=url_api_prefix + '/camera')
app.register_blueprint(knowPeople.know_people, url_prefix=url_api_prefix + '/know')
app.register_blueprint(access.access, url_prefix=url_api_prefix + '/access')
@app.errorhandler(HTTPException)
def handle_error(e):
return jsonify({
"code": e.code,
"name": e.name,
"description": e.description,
}), e.code
if __name__ == "__main__":
app.run(host="0.0.0.0", port=conf.api_server.get('port', 8080), debug=conf.api_server.get('debug', False),
use_reloader=False, threaded=True)
| 31.577778
| 110
| 0.746657
|
1f53ce9471aa42602780d9169b575924aaa6b7fc
| 50,286
|
py
|
Python
|
openstackclient/tests/unit/volume/v2/test_volume.py
|
alvarosimon/python-openstackclient
|
2ab3396f19796935ddcb281b865d37839a4f84f7
|
[
"Apache-2.0"
] | null | null | null |
openstackclient/tests/unit/volume/v2/test_volume.py
|
alvarosimon/python-openstackclient
|
2ab3396f19796935ddcb281b865d37839a4f84f7
|
[
"Apache-2.0"
] | null | null | null |
openstackclient/tests/unit/volume/v2/test_volume.py
|
alvarosimon/python-openstackclient
|
2ab3396f19796935ddcb281b865d37839a4f84f7
|
[
"Apache-2.0"
] | null | null | null |
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import argparse
import mock
from mock import call
from osc_lib import exceptions
from osc_lib import utils
from openstackclient.tests.unit.identity.v3 import fakes as identity_fakes
from openstackclient.tests.unit.image.v2 import fakes as image_fakes
from openstackclient.tests.unit import utils as tests_utils
from openstackclient.tests.unit.volume.v2 import fakes as volume_fakes
from openstackclient.volume.v2 import volume
class TestVolume(volume_fakes.TestVolume):
def setUp(self):
super(TestVolume, self).setUp()
self.volumes_mock = self.app.client_manager.volume.volumes
self.volumes_mock.reset_mock()
self.projects_mock = self.app.client_manager.identity.projects
self.projects_mock.reset_mock()
self.users_mock = self.app.client_manager.identity.users
self.users_mock.reset_mock()
self.images_mock = self.app.client_manager.image.images
self.images_mock.reset_mock()
self.snapshots_mock = self.app.client_manager.volume.volume_snapshots
self.snapshots_mock.reset_mock()
self.types_mock = self.app.client_manager.volume.volume_types
self.types_mock.reset_mock()
self.consistencygroups_mock = (
self.app.client_manager.volume.consistencygroups)
self.consistencygroups_mock.reset_mock()
def setup_volumes_mock(self, count):
volumes = volume_fakes.FakeVolume.create_volumes(count=count)
self.volumes_mock.get = volume_fakes.FakeVolume.get_volumes(
volumes,
0)
return volumes
class TestVolumeCreate(TestVolume):
project = identity_fakes.FakeProject.create_one_project()
user = identity_fakes.FakeUser.create_one_user()
columns = (
'attachments',
'availability_zone',
'bootable',
'description',
'id',
'name',
'properties',
'size',
'snapshot_id',
'status',
'type',
)
def setUp(self):
super(TestVolumeCreate, self).setUp()
self.new_volume = volume_fakes.FakeVolume.create_one_volume()
self.volumes_mock.create.return_value = self.new_volume
self.datalist = (
self.new_volume.attachments,
self.new_volume.availability_zone,
self.new_volume.bootable,
self.new_volume.description,
self.new_volume.id,
self.new_volume.name,
utils.format_dict(self.new_volume.metadata),
self.new_volume.size,
self.new_volume.snapshot_id,
self.new_volume.status,
self.new_volume.volume_type,
)
# Get the command object to test
self.cmd = volume.CreateVolume(self.app, None)
def test_volume_create_min_options(self):
arglist = [
'--size', str(self.new_volume.size),
self.new_volume.name,
]
verifylist = [
('size', self.new_volume.size),
('name', self.new_volume.name),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# In base command class ShowOne in cliff, abstract method take_action()
# returns a two-part tuple with a tuple of column names and a tuple of
# data to be shown.
columns, data = self.cmd.take_action(parsed_args)
self.volumes_mock.create.assert_called_with(
size=self.new_volume.size,
snapshot_id=None,
name=self.new_volume.name,
description=None,
volume_type=None,
availability_zone=None,
metadata=None,
imageRef=None,
source_volid=None,
consistencygroup_id=None,
scheduler_hints=None,
)
self.assertEqual(self.columns, columns)
self.assertEqual(self.datalist, data)
def test_volume_create_options(self):
consistency_group = (
volume_fakes.FakeConsistencyGroup.create_one_consistency_group())
self.consistencygroups_mock.get.return_value = consistency_group
arglist = [
'--size', str(self.new_volume.size),
'--description', self.new_volume.description,
'--type', self.new_volume.volume_type,
'--availability-zone', self.new_volume.availability_zone,
'--consistency-group', consistency_group.id,
'--hint', 'k=v',
self.new_volume.name,
]
verifylist = [
('size', self.new_volume.size),
('description', self.new_volume.description),
('type', self.new_volume.volume_type),
('availability_zone', self.new_volume.availability_zone),
('consistency_group', consistency_group.id),
('hint', {'k': 'v'}),
('name', self.new_volume.name),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# In base command class ShowOne in cliff, abstract method take_action()
# returns a two-part tuple with a tuple of column names and a tuple of
# data to be shown.
columns, data = self.cmd.take_action(parsed_args)
self.volumes_mock.create.assert_called_with(
size=self.new_volume.size,
snapshot_id=None,
name=self.new_volume.name,
description=self.new_volume.description,
volume_type=self.new_volume.volume_type,
availability_zone=self.new_volume.availability_zone,
metadata=None,
imageRef=None,
source_volid=None,
consistencygroup_id=consistency_group.id,
scheduler_hints={'k': 'v'},
)
self.assertEqual(self.columns, columns)
self.assertEqual(self.datalist, data)
def test_volume_create_user(self):
arglist = [
'--size', str(self.new_volume.size),
'--user', self.user.id,
self.new_volume.name,
]
verifylist = [
('size', self.new_volume.size),
('user', self.user.id),
('name', self.new_volume.name),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
self.assertRaises(exceptions.CommandError, self.cmd.take_action,
parsed_args)
self.volumes_mock.create.assert_not_called()
def test_volume_create_project(self):
arglist = [
'--size', str(self.new_volume.size),
'--project', self.project.id,
self.new_volume.name,
]
verifylist = [
('size', self.new_volume.size),
('project', self.project.id),
('name', self.new_volume.name),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
self.assertRaises(exceptions.CommandError, self.cmd.take_action,
parsed_args)
self.volumes_mock.create.assert_not_called()
def test_volume_create_properties(self):
arglist = [
'--property', 'Alpha=a',
'--property', 'Beta=b',
'--size', str(self.new_volume.size),
self.new_volume.name,
]
verifylist = [
('property', {'Alpha': 'a', 'Beta': 'b'}),
('size', self.new_volume.size),
('name', self.new_volume.name),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# In base command class ShowOne in cliff, abstract method take_action()
# returns a two-part tuple with a tuple of column names and a tuple of
# data to be shown.
columns, data = self.cmd.take_action(parsed_args)
self.volumes_mock.create.assert_called_with(
size=self.new_volume.size,
snapshot_id=None,
name=self.new_volume.name,
description=None,
volume_type=None,
availability_zone=None,
metadata={'Alpha': 'a', 'Beta': 'b'},
imageRef=None,
source_volid=None,
consistencygroup_id=None,
scheduler_hints=None,
)
self.assertEqual(self.columns, columns)
self.assertEqual(self.datalist, data)
def test_volume_create_image_id(self):
image = image_fakes.FakeImage.create_one_image()
self.images_mock.get.return_value = image
arglist = [
'--image', image.id,
'--size', str(self.new_volume.size),
self.new_volume.name,
]
verifylist = [
('image', image.id),
('size', self.new_volume.size),
('name', self.new_volume.name),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# In base command class ShowOne in cliff, abstract method take_action()
# returns a two-part tuple with a tuple of column names and a tuple of
# data to be shown.
columns, data = self.cmd.take_action(parsed_args)
self.volumes_mock.create.assert_called_with(
size=self.new_volume.size,
snapshot_id=None,
name=self.new_volume.name,
description=None,
volume_type=None,
availability_zone=None,
metadata=None,
imageRef=image.id,
source_volid=None,
consistencygroup_id=None,
scheduler_hints=None,
)
self.assertEqual(self.columns, columns)
self.assertEqual(self.datalist, data)
def test_volume_create_image_name(self):
image = image_fakes.FakeImage.create_one_image()
self.images_mock.get.return_value = image
arglist = [
'--image', image.name,
'--size', str(self.new_volume.size),
self.new_volume.name,
]
verifylist = [
('image', image.name),
('size', self.new_volume.size),
('name', self.new_volume.name),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# In base command class ShowOne in cliff, abstract method take_action()
# returns a two-part tuple with a tuple of column names and a tuple of
# data to be shown.
columns, data = self.cmd.take_action(parsed_args)
self.volumes_mock.create.assert_called_with(
size=self.new_volume.size,
snapshot_id=None,
name=self.new_volume.name,
description=None,
volume_type=None,
availability_zone=None,
metadata=None,
imageRef=image.id,
source_volid=None,
consistencygroup_id=None,
scheduler_hints=None,
)
self.assertEqual(self.columns, columns)
self.assertEqual(self.datalist, data)
def test_volume_create_with_snapshot(self):
snapshot = volume_fakes.FakeSnapshot.create_one_snapshot()
self.new_volume.snapshot_id = snapshot.id
arglist = [
'--snapshot', self.new_volume.snapshot_id,
self.new_volume.name,
]
verifylist = [
('snapshot', self.new_volume.snapshot_id),
('name', self.new_volume.name),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
self.snapshots_mock.get.return_value = snapshot
# In base command class ShowOne in cliff, abstract method take_action()
# returns a two-part tuple with a tuple of column names and a tuple of
# data to be shown.
columns, data = self.cmd.take_action(parsed_args)
self.volumes_mock.create.assert_called_once_with(
size=snapshot.size,
snapshot_id=snapshot.id,
name=self.new_volume.name,
description=None,
volume_type=None,
availability_zone=None,
metadata=None,
imageRef=None,
source_volid=None,
consistencygroup_id=None,
scheduler_hints=None,
)
self.assertEqual(self.columns, columns)
self.assertEqual(self.datalist, data)
def test_volume_create_with_bootable_and_readonly(self):
arglist = [
'--bootable',
'--read-only',
'--size', str(self.new_volume.size),
self.new_volume.name,
]
verifylist = [
('bootable', True),
('non_bootable', False),
('read_only', True),
('read_write', False),
('size', self.new_volume.size),
('name', self.new_volume.name),
]
parsed_args = self.check_parser(
self.cmd, arglist, verifylist)
columns, data = self.cmd.take_action(parsed_args)
self.volumes_mock.create.assert_called_with(
size=self.new_volume.size,
snapshot_id=None,
name=self.new_volume.name,
description=None,
volume_type=None,
availability_zone=None,
metadata=None,
imageRef=None,
source_volid=None,
consistencygroup_id=None,
scheduler_hints=None,
)
self.assertEqual(self.columns, columns)
self.assertEqual(self.datalist, data)
self.volumes_mock.set_bootable.assert_called_with(
self.new_volume.id, True)
self.volumes_mock.update_readonly_flag.assert_called_with(
self.new_volume.id, True)
def test_volume_create_with_nonbootable_and_readwrite(self):
arglist = [
'--non-bootable',
'--read-write',
'--size', str(self.new_volume.size),
self.new_volume.name,
]
verifylist = [
('bootable', False),
('non_bootable', True),
('read_only', False),
('read_write', True),
('size', self.new_volume.size),
('name', self.new_volume.name),
]
parsed_args = self.check_parser(
self.cmd, arglist, verifylist)
columns, data = self.cmd.take_action(parsed_args)
self.volumes_mock.create.assert_called_with(
size=self.new_volume.size,
snapshot_id=None,
name=self.new_volume.name,
description=None,
volume_type=None,
availability_zone=None,
metadata=None,
imageRef=None,
source_volid=None,
consistencygroup_id=None,
scheduler_hints=None,
)
self.assertEqual(self.columns, columns)
self.assertEqual(self.datalist, data)
self.volumes_mock.set_bootable.assert_called_with(
self.new_volume.id, False)
self.volumes_mock.update_readonly_flag.assert_called_with(
self.new_volume.id, False)
@mock.patch.object(volume.LOG, 'error')
def test_volume_create_with_bootable_and_readonly_fail(
self, mock_error):
self.volumes_mock.set_bootable.side_effect = (
exceptions.CommandError())
self.volumes_mock.update_readonly_flag.side_effect = (
exceptions.CommandError())
arglist = [
'--bootable',
'--read-only',
'--size', str(self.new_volume.size),
self.new_volume.name,
]
verifylist = [
('bootable', True),
('non_bootable', False),
('read_only', True),
('read_write', False),
('size', self.new_volume.size),
('name', self.new_volume.name),
]
parsed_args = self.check_parser(
self.cmd, arglist, verifylist)
columns, data = self.cmd.take_action(parsed_args)
self.volumes_mock.create.assert_called_with(
size=self.new_volume.size,
snapshot_id=None,
name=self.new_volume.name,
description=None,
volume_type=None,
availability_zone=None,
metadata=None,
imageRef=None,
source_volid=None,
consistencygroup_id=None,
scheduler_hints=None,
)
self.assertEqual(2, mock_error.call_count)
self.assertEqual(self.columns, columns)
self.assertEqual(self.datalist, data)
self.volumes_mock.set_bootable.assert_called_with(
self.new_volume.id, True)
self.volumes_mock.update_readonly_flag.assert_called_with(
self.new_volume.id, True)
def test_volume_create_without_size(self):
arglist = [
self.new_volume.name,
]
verifylist = [
('name', self.new_volume.name),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
self.assertRaises(exceptions.CommandError, self.cmd.take_action,
parsed_args)
def test_volume_create_with_multi_source(self):
arglist = [
'--image', 'source_image',
'--source', 'source_volume',
'--snapshot', 'source_snapshot',
'--size', str(self.new_volume.size),
self.new_volume.name,
]
verifylist = [
('image', 'source_image'),
('source', 'source_volume'),
('snapshot', 'source_snapshot'),
('size', self.new_volume.size),
('name', self.new_volume.name),
]
self.assertRaises(tests_utils.ParserException, self.check_parser,
self.cmd, arglist, verifylist)
class TestVolumeDelete(TestVolume):
def setUp(self):
super(TestVolumeDelete, self).setUp()
self.volumes_mock.delete.return_value = None
# Get the command object to mock
self.cmd = volume.DeleteVolume(self.app, None)
def test_volume_delete_one_volume(self):
volumes = self.setup_volumes_mock(count=1)
arglist = [
volumes[0].id
]
verifylist = [
("force", False),
("purge", False),
("volumes", [volumes[0].id]),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
self.volumes_mock.delete.assert_called_once_with(
volumes[0].id, cascade=False)
self.assertIsNone(result)
def test_volume_delete_multi_volumes(self):
volumes = self.setup_volumes_mock(count=3)
arglist = [v.id for v in volumes]
verifylist = [
('force', False),
('purge', False),
('volumes', arglist),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
calls = [call(v.id, cascade=False) for v in volumes]
self.volumes_mock.delete.assert_has_calls(calls)
self.assertIsNone(result)
def test_volume_delete_multi_volumes_with_exception(self):
volumes = self.setup_volumes_mock(count=2)
arglist = [
volumes[0].id,
'unexist_volume',
]
verifylist = [
('force', False),
('purge', False),
('volumes', arglist),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
find_mock_result = [volumes[0], exceptions.CommandError]
with mock.patch.object(utils, 'find_resource',
side_effect=find_mock_result) as find_mock:
try:
self.cmd.take_action(parsed_args)
self.fail('CommandError should be raised.')
except exceptions.CommandError as e:
self.assertEqual('1 of 2 volumes failed to delete.',
str(e))
find_mock.assert_any_call(self.volumes_mock, volumes[0].id)
find_mock.assert_any_call(self.volumes_mock, 'unexist_volume')
self.assertEqual(2, find_mock.call_count)
self.volumes_mock.delete.assert_called_once_with(
volumes[0].id, cascade=False)
def test_volume_delete_with_purge(self):
volumes = self.setup_volumes_mock(count=1)
arglist = [
'--purge',
volumes[0].id,
]
verifylist = [
('force', False),
('purge', True),
('volumes', [volumes[0].id]),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
self.volumes_mock.delete.assert_called_once_with(
volumes[0].id, cascade=True)
self.assertIsNone(result)
def test_volume_delete_with_force(self):
volumes = self.setup_volumes_mock(count=1)
arglist = [
'--force',
volumes[0].id,
]
verifylist = [
('force', True),
('purge', False),
('volumes', [volumes[0].id]),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
self.volumes_mock.force_delete.assert_called_once_with(volumes[0].id)
self.assertIsNone(result)
class TestVolumeList(TestVolume):
project = identity_fakes.FakeProject.create_one_project()
user = identity_fakes.FakeUser.create_one_user()
columns = [
'ID',
'Name',
'Status',
'Size',
'Attached to',
]
def setUp(self):
super(TestVolumeList, self).setUp()
self.mock_volume = volume_fakes.FakeVolume.create_one_volume()
self.volumes_mock.list.return_value = [self.mock_volume]
self.users_mock.get.return_value = self.user
self.projects_mock.get.return_value = self.project
# Get the command object to test
self.cmd = volume.ListVolume(self.app, None)
def test_volume_list_no_options(self):
arglist = []
verifylist = [
('long', False),
('all_projects', False),
('name', None),
('status', None),
('marker', None),
('limit', None),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
columns, data = self.cmd.take_action(parsed_args)
search_opts = {
'all_tenants': False,
'project_id': None,
'user_id': None,
'name': None,
'status': None,
}
self.volumes_mock.list.assert_called_once_with(
search_opts=search_opts,
marker=None,
limit=None,
)
self.assertEqual(self.columns, columns)
server = self.mock_volume.attachments[0]['server_id']
device = self.mock_volume.attachments[0]['device']
msg = 'Attached to %s on %s ' % (server, device)
datalist = ((
self.mock_volume.id,
self.mock_volume.name,
self.mock_volume.status,
self.mock_volume.size,
msg,
), )
self.assertEqual(datalist, tuple(data))
def test_volume_list_project(self):
arglist = [
'--project', self.project.name,
]
verifylist = [
('project', self.project.name),
('long', False),
('all_projects', False),
('status', None),
('marker', None),
('limit', None),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
columns, data = self.cmd.take_action(parsed_args)
search_opts = {
'all_tenants': True,
'project_id': self.project.id,
'user_id': None,
'name': None,
'status': None,
}
self.volumes_mock.list.assert_called_once_with(
search_opts=search_opts,
marker=None,
limit=None,
)
self.assertEqual(self.columns, columns)
server = self.mock_volume.attachments[0]['server_id']
device = self.mock_volume.attachments[0]['device']
msg = 'Attached to %s on %s ' % (server, device)
datalist = ((
self.mock_volume.id,
self.mock_volume.name,
self.mock_volume.status,
self.mock_volume.size,
msg,
), )
self.assertEqual(datalist, tuple(data))
def test_volume_list_project_domain(self):
arglist = [
'--project', self.project.name,
'--project-domain', self.project.domain_id,
]
verifylist = [
('project', self.project.name),
('project_domain', self.project.domain_id),
('long', False),
('all_projects', False),
('status', None),
('marker', None),
('limit', None),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
columns, data = self.cmd.take_action(parsed_args)
search_opts = {
'all_tenants': True,
'project_id': self.project.id,
'user_id': None,
'name': None,
'status': None,
}
self.volumes_mock.list.assert_called_once_with(
search_opts=search_opts,
marker=None,
limit=None,
)
self.assertEqual(self.columns, columns)
server = self.mock_volume.attachments[0]['server_id']
device = self.mock_volume.attachments[0]['device']
msg = 'Attached to %s on %s ' % (server, device)
datalist = ((
self.mock_volume.id,
self.mock_volume.name,
self.mock_volume.status,
self.mock_volume.size,
msg,
), )
self.assertEqual(datalist, tuple(data))
def test_volume_list_user(self):
arglist = [
'--user', self.user.name,
]
verifylist = [
('user', self.user.name),
('long', False),
('all_projects', False),
('status', None),
('marker', None),
('limit', None),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
columns, data = self.cmd.take_action(parsed_args)
search_opts = {
'all_tenants': False,
'project_id': None,
'user_id': self.user.id,
'name': None,
'status': None,
}
self.volumes_mock.list.assert_called_once_with(
search_opts=search_opts,
marker=None,
limit=None,
)
self.assertEqual(self.columns, columns)
server = self.mock_volume.attachments[0]['server_id']
device = self.mock_volume.attachments[0]['device']
msg = 'Attached to %s on %s ' % (server, device)
datalist = ((
self.mock_volume.id,
self.mock_volume.name,
self.mock_volume.status,
self.mock_volume.size,
msg,
), )
self.assertEqual(datalist, tuple(data))
def test_volume_list_user_domain(self):
arglist = [
'--user', self.user.name,
'--user-domain', self.user.domain_id,
]
verifylist = [
('user', self.user.name),
('user_domain', self.user.domain_id),
('long', False),
('all_projects', False),
('status', None),
('marker', None),
('limit', None),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
columns, data = self.cmd.take_action(parsed_args)
search_opts = {
'all_tenants': False,
'project_id': None,
'user_id': self.user.id,
'name': None,
'status': None,
}
self.volumes_mock.list.assert_called_once_with(
search_opts=search_opts,
marker=None,
limit=None,
)
self.assertEqual(self.columns, columns)
server = self.mock_volume.attachments[0]['server_id']
device = self.mock_volume.attachments[0]['device']
msg = 'Attached to %s on %s ' % (server, device)
datalist = ((
self.mock_volume.id,
self.mock_volume.name,
self.mock_volume.status,
self.mock_volume.size,
msg,
), )
self.assertEqual(datalist, tuple(data))
def test_volume_list_name(self):
arglist = [
'--name', self.mock_volume.name,
]
verifylist = [
('long', False),
('all_projects', False),
('name', self.mock_volume.name),
('status', None),
('marker', None),
('limit', None),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
columns, data = self.cmd.take_action(parsed_args)
search_opts = {
'all_tenants': False,
'project_id': None,
'user_id': None,
'name': self.mock_volume.name,
'status': None,
}
self.volumes_mock.list.assert_called_once_with(
search_opts=search_opts,
marker=None,
limit=None,
)
self.assertEqual(self.columns, columns)
server = self.mock_volume.attachments[0]['server_id']
device = self.mock_volume.attachments[0]['device']
msg = 'Attached to %s on %s ' % (server, device)
datalist = ((
self.mock_volume.id,
self.mock_volume.name,
self.mock_volume.status,
self.mock_volume.size,
msg,
), )
self.assertEqual(datalist, tuple(data))
def test_volume_list_status(self):
arglist = [
'--status', self.mock_volume.status,
]
verifylist = [
('long', False),
('all_projects', False),
('name', None),
('status', self.mock_volume.status),
('marker', None),
('limit', None),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
columns, data = self.cmd.take_action(parsed_args)
search_opts = {
'all_tenants': False,
'project_id': None,
'user_id': None,
'name': None,
'status': self.mock_volume.status,
}
self.volumes_mock.list.assert_called_once_with(
search_opts=search_opts,
marker=None,
limit=None,
)
self.assertEqual(self.columns, columns)
server = self.mock_volume.attachments[0]['server_id']
device = self.mock_volume.attachments[0]['device']
msg = 'Attached to %s on %s ' % (server, device)
datalist = ((
self.mock_volume.id,
self.mock_volume.name,
self.mock_volume.status,
self.mock_volume.size,
msg,
), )
self.assertEqual(datalist, tuple(data))
def test_volume_list_all_projects(self):
arglist = [
'--all-projects',
]
verifylist = [
('long', False),
('all_projects', True),
('name', None),
('status', None),
('marker', None),
('limit', None),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
columns, data = self.cmd.take_action(parsed_args)
search_opts = {
'all_tenants': True,
'project_id': None,
'user_id': None,
'name': None,
'status': None,
}
self.volumes_mock.list.assert_called_once_with(
search_opts=search_opts,
marker=None,
limit=None,
)
self.assertEqual(self.columns, columns)
server = self.mock_volume.attachments[0]['server_id']
device = self.mock_volume.attachments[0]['device']
msg = 'Attached to %s on %s ' % (server, device)
datalist = ((
self.mock_volume.id,
self.mock_volume.name,
self.mock_volume.status,
self.mock_volume.size,
msg,
), )
self.assertEqual(datalist, tuple(data))
def test_volume_list_long(self):
arglist = [
'--long',
]
verifylist = [
('long', True),
('all_projects', False),
('name', None),
('status', None),
('marker', None),
('limit', None),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
columns, data = self.cmd.take_action(parsed_args)
search_opts = {
'all_tenants': False,
'project_id': None,
'user_id': None,
'name': None,
'status': None,
}
self.volumes_mock.list.assert_called_once_with(
search_opts=search_opts,
marker=None,
limit=None,
)
collist = [
'ID',
'Name',
'Status',
'Size',
'Type',
'Bootable',
'Attached to',
'Properties',
]
self.assertEqual(collist, columns)
server = self.mock_volume.attachments[0]['server_id']
device = self.mock_volume.attachments[0]['device']
msg = 'Attached to %s on %s ' % (server, device)
datalist = ((
self.mock_volume.id,
self.mock_volume.name,
self.mock_volume.status,
self.mock_volume.size,
self.mock_volume.volume_type,
self.mock_volume.bootable,
msg,
utils.format_dict(self.mock_volume.metadata),
), )
self.assertEqual(datalist, tuple(data))
def test_volume_list_with_marker_and_limit(self):
arglist = [
"--marker", self.mock_volume.id,
"--limit", "2",
]
verifylist = [
('long', False),
('all_projects', False),
('name', None),
('status', None),
('marker', self.mock_volume.id),
('limit', 2),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
columns, data = self.cmd.take_action(parsed_args)
self.assertEqual(self.columns, columns)
server = self.mock_volume.attachments[0]['server_id']
device = self.mock_volume.attachments[0]['device']
msg = 'Attached to %s on %s ' % (server, device)
datalist = ((
self.mock_volume.id,
self.mock_volume.name,
self.mock_volume.status,
self.mock_volume.size,
msg,
), )
self.volumes_mock.list.assert_called_once_with(
marker=self.mock_volume.id,
limit=2,
search_opts={
'status': None,
'project_id': None,
'user_id': None,
'name': None,
'all_tenants': False, }
)
self.assertEqual(datalist, tuple(data))
def test_volume_list_negative_limit(self):
arglist = [
"--limit", "-2",
]
verifylist = [
("limit", -2),
]
self.assertRaises(argparse.ArgumentTypeError, self.check_parser,
self.cmd, arglist, verifylist)
def test_volume_list_backward_compatibility(self):
arglist = [
'-c', 'Display Name',
]
verifylist = [
('columns', ['Display Name']),
('long', False),
('all_projects', False),
('name', None),
('status', None),
('marker', None),
('limit', None),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
columns, data = self.cmd.take_action(parsed_args)
search_opts = {
'all_tenants': False,
'project_id': None,
'user_id': None,
'name': None,
'status': None,
}
self.volumes_mock.list.assert_called_once_with(
search_opts=search_opts,
marker=None,
limit=None,
)
self.assertIn('Display Name', columns)
self.assertNotIn('Name', columns)
for each_volume in data:
self.assertIn(self.mock_volume.name, each_volume)
class TestVolumeMigrate(TestVolume):
_volume = volume_fakes.FakeVolume.create_one_volume()
def setUp(self):
super(TestVolumeMigrate, self).setUp()
self.volumes_mock.get.return_value = self._volume
self.volumes_mock.migrate_volume.return_value = None
# Get the command object to test
self.cmd = volume.MigrateVolume(self.app, None)
def test_volume_migrate(self):
arglist = [
"--host", "host@backend-name#pool",
self._volume.id,
]
verifylist = [
("force_host_copy", False),
("lock_volume", False),
("host", "host@backend-name#pool"),
("volume", self._volume.id),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
self.volumes_mock.get.assert_called_once_with(self._volume.id)
self.volumes_mock.migrate_volume.assert_called_once_with(
self._volume.id, "host@backend-name#pool", False, False)
self.assertIsNone(result)
def test_volume_migrate_with_option(self):
arglist = [
"--force-host-copy",
"--lock-volume",
"--host", "host@backend-name#pool",
self._volume.id,
]
verifylist = [
("force_host_copy", True),
("lock_volume", True),
("host", "host@backend-name#pool"),
("volume", self._volume.id),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
self.volumes_mock.get.assert_called_once_with(self._volume.id)
self.volumes_mock.migrate_volume.assert_called_once_with(
self._volume.id, "host@backend-name#pool", True, True)
self.assertIsNone(result)
def test_volume_migrate_without_host(self):
arglist = [
self._volume.id,
]
verifylist = [
("force_host_copy", False),
("lock_volume", False),
("volume", self._volume.id),
]
self.assertRaises(tests_utils.ParserException, self.check_parser,
self.cmd, arglist, verifylist)
class TestVolumeSet(TestVolume):
volume_type = volume_fakes.FakeType.create_one_type()
def setUp(self):
super(TestVolumeSet, self).setUp()
self.new_volume = volume_fakes.FakeVolume.create_one_volume()
self.volumes_mock.get.return_value = self.new_volume
self.types_mock.get.return_value = self.volume_type
# Get the command object to test
self.cmd = volume.SetVolume(self.app, None)
def test_volume_set_property(self):
arglist = [
'--property', 'a=b',
'--property', 'c=d',
self.new_volume.id,
]
verifylist = [
('property', {'a': 'b', 'c': 'd'}),
('volume', self.new_volume.id),
('bootable', False),
('non_bootable', False)
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
self.cmd.take_action(parsed_args)
self.volumes_mock.set_metadata.assert_called_with(
self.new_volume.id, parsed_args.property)
def test_volume_set_image_property(self):
arglist = [
'--image-property', 'Alpha=a',
'--image-property', 'Beta=b',
self.new_volume.id,
]
verifylist = [
('image_property', {'Alpha': 'a', 'Beta': 'b'}),
('volume', self.new_volume.id),
('bootable', False),
('non_bootable', False)
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# In base command class ShowOne in cliff, abstract method take_action()
# returns nothing
self.cmd.take_action(parsed_args)
self.volumes_mock.set_image_metadata.assert_called_with(
self.new_volume.id, parsed_args.image_property)
def test_volume_set_state(self):
arglist = [
'--state', 'error',
self.new_volume.id
]
verifylist = [
('read_only', False),
('read_write', False),
('state', 'error'),
('volume', self.new_volume.id)
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
self.volumes_mock.reset_state.assert_called_with(
self.new_volume.id, 'error')
self.volumes_mock.update_readonly_flag.assert_not_called()
self.assertIsNone(result)
def test_volume_set_state_failed(self):
self.volumes_mock.reset_state.side_effect = exceptions.CommandError()
arglist = [
'--state', 'error',
self.new_volume.id
]
verifylist = [
('state', 'error'),
('volume', self.new_volume.id)
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
try:
self.cmd.take_action(parsed_args)
self.fail('CommandError should be raised.')
except exceptions.CommandError as e:
self.assertEqual('One or more of the set operations failed',
str(e))
self.volumes_mock.reset_state.assert_called_with(
self.new_volume.id, 'error')
def test_volume_set_bootable(self):
arglist = [
['--bootable', self.new_volume.id],
['--non-bootable', self.new_volume.id]
]
verifylist = [
[
('bootable', True),
('non_bootable', False),
('volume', self.new_volume.id)
],
[
('bootable', False),
('non_bootable', True),
('volume', self.new_volume.id)
]
]
for index in range(len(arglist)):
parsed_args = self.check_parser(
self.cmd, arglist[index], verifylist[index])
self.cmd.take_action(parsed_args)
self.volumes_mock.set_bootable.assert_called_with(
self.new_volume.id, verifylist[index][0][1])
def test_volume_set_readonly(self):
arglist = [
'--read-only',
self.new_volume.id
]
verifylist = [
('read_only', True),
('read_write', False),
('volume', self.new_volume.id)
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
self.volumes_mock.update_readonly_flag.assert_called_once_with(
self.new_volume.id,
True)
self.assertIsNone(result)
def test_volume_set_read_write(self):
arglist = [
'--read-write',
self.new_volume.id
]
verifylist = [
('read_only', False),
('read_write', True),
('volume', self.new_volume.id)
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
self.volumes_mock.update_readonly_flag.assert_called_once_with(
self.new_volume.id,
False)
self.assertIsNone(result)
def test_volume_set_type(self):
arglist = [
'--type', self.volume_type.id,
self.new_volume.id
]
verifylist = [
('retype_policy', None),
('type', self.volume_type.id),
('volume', self.new_volume.id)
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
self.volumes_mock.retype.assert_called_once_with(
self.new_volume.id,
self.volume_type.id,
'never')
self.assertIsNone(result)
def test_volume_set_type_with_policy(self):
arglist = [
'--retype-policy', 'on-demand',
'--type', self.volume_type.id,
self.new_volume.id
]
verifylist = [
('retype_policy', 'on-demand'),
('type', self.volume_type.id),
('volume', self.new_volume.id)
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
self.volumes_mock.retype.assert_called_once_with(
self.new_volume.id,
self.volume_type.id,
'on-demand')
self.assertIsNone(result)
@mock.patch.object(volume.LOG, 'warning')
def test_volume_set_with_only_retype_policy(self, mock_warning):
arglist = [
'--retype-policy', 'on-demand',
self.new_volume.id
]
verifylist = [
('retype_policy', 'on-demand'),
('volume', self.new_volume.id)
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
self.volumes_mock.retype.assert_not_called()
mock_warning.assert_called_with("'--retype-policy' option will "
"not work without '--type' option")
self.assertIsNone(result)
class TestVolumeShow(TestVolume):
def setUp(self):
super(TestVolumeShow, self).setUp()
self._volume = volume_fakes.FakeVolume.create_one_volume()
self.volumes_mock.get.return_value = self._volume
# Get the command object to test
self.cmd = volume.ShowVolume(self.app, None)
def test_volume_show(self):
arglist = [
self._volume.id
]
verifylist = [
("volume", self._volume.id)
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
columns, data = self.cmd.take_action(parsed_args)
self.volumes_mock.get.assert_called_with(self._volume.id)
self.assertEqual(
volume_fakes.FakeVolume.get_volume_columns(self._volume),
columns)
self.assertEqual(
volume_fakes.FakeVolume.get_volume_data(self._volume),
data)
class TestVolumeUnset(TestVolume):
def setUp(self):
super(TestVolumeUnset, self).setUp()
self.new_volume = volume_fakes.FakeVolume.create_one_volume()
self.volumes_mock.get.return_value = self.new_volume
# Get the command object to set property
self.cmd_set = volume.SetVolume(self.app, None)
# Get the command object to unset property
self.cmd_unset = volume.UnsetVolume(self.app, None)
def test_volume_unset_image_property(self):
# Arguments for setting image properties
arglist = [
'--image-property', 'Alpha=a',
'--image-property', 'Beta=b',
self.new_volume.id,
]
verifylist = [
('image_property', {'Alpha': 'a', 'Beta': 'b'}),
('volume', self.new_volume.id),
]
parsed_args = self.check_parser(self.cmd_set, arglist, verifylist)
# In base command class ShowOne in cliff, abstract method take_action()
# returns nothing
self.cmd_set.take_action(parsed_args)
# Arguments for unsetting image properties
arglist_unset = [
'--image-property', 'Alpha',
self.new_volume.id,
]
verifylist_unset = [
('image_property', ['Alpha']),
('volume', self.new_volume.id),
]
parsed_args_unset = self.check_parser(self.cmd_unset,
arglist_unset,
verifylist_unset)
# In base command class ShowOne in cliff, abstract method take_action()
# returns nothing
self.cmd_unset.take_action(parsed_args_unset)
self.volumes_mock.delete_image_metadata.assert_called_with(
self.new_volume.id, parsed_args_unset.image_property)
def test_volume_unset_image_property_fail(self):
self.volumes_mock.delete_image_metadata.side_effect = (
exceptions.CommandError())
arglist = [
'--image-property', 'Alpha',
'--property', 'Beta',
self.new_volume.id,
]
verifylist = [
('image_property', ['Alpha']),
('property', ['Beta']),
('volume', self.new_volume.id),
]
parsed_args = self.check_parser(
self.cmd_unset, arglist, verifylist)
try:
self.cmd_unset.take_action(parsed_args)
self.fail('CommandError should be raised.')
except exceptions.CommandError as e:
self.assertEqual('One or more of the unset operations failed',
str(e))
self.volumes_mock.delete_image_metadata.assert_called_with(
self.new_volume.id, parsed_args.image_property)
self.volumes_mock.delete_metadata.assert_called_with(
self.new_volume.id, parsed_args.property)
| 32.172745
| 79
| 0.564869
|
b25095282f3612ca9bdf56971b8731ba9c4b65f1
| 2,743
|
py
|
Python
|
Scanner/SensorLogger.py
|
AntonSh/PiProjects
|
e6971bfbc53c35df5c444de7753e4675fa593902
|
[
"MIT"
] | 1
|
2015-03-16T04:19:02.000Z
|
2015-03-16T04:19:02.000Z
|
Scanner/SensorLogger.py
|
AntonSh/RaspberryFridge
|
e6971bfbc53c35df5c444de7753e4675fa593902
|
[
"MIT"
] | null | null | null |
Scanner/SensorLogger.py
|
AntonSh/RaspberryFridge
|
e6971bfbc53c35df5c444de7753e4675fa593902
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
import subprocess
import time
import sys
import signal
from datetime import datetime, date
import argparse
import sqlite3 as lite
import os
import os.path
import GPIOControl
import SensorReader
import Utils
# Constants
logFile = "../logs/control.log"
databaseFile = "../Data/sensor_stream.db"
# Graceful shutdown
def signal_handler(signal, frame):
print('Caught signal, exiting')
sys.exit(0)
def ps(pid):
return subprocess.check_output(["/bin/ps", "hc", pid])
def handle_locked_gpio():
try:
print "Handling exec error"
lockFile = "/var/run/pigpio.pid"
if not os.path.isfile(lockFile):
return
pid = None
with open(lockFile) as f:
pid = f.readline().rstrip()
if pid == None :
return
print "GPIO was locked by {}".format(pid)
if os.path.exists("/proc/{}".format(pid)) :
print "GPIO is legitimely locked by {}".format(ps(pid))
return
print "Process {} is not running, removing lock file".format(pid)
os.remove("/var/run/pigpio.pid")
except:
type, value, tb = sys.exc_info()
print ("Failed to handle exec error", type, value.cmd, value.output)
parser = argparse.ArgumentParser(description='Polls environment sensors and stores data in Sqlite database')
parser.add_argument('-pins', type=int, nargs='+', help='List of GPIO port numbers to which sensors are connected', required = True)
parser.add_argument('-refreshSec', type=int, help='Delays between sensor polling, sec', default=10)
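# Hypothetical usage sketch (the pin numbers below are illustrative, not taken
# from this repo): the logger would typically be started as
#   python SensorLogger.py -pins 4 17 -refreshSec 30
# polling the temperature/humidity sensors on GPIO 4 and 17 every 30 seconds
# and appending rows to ../Data/sensor_stream.db.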
args = parser.parse_known_args()[0]
sensorPins = args.pins
refreshSec = args.refreshSec
print "Running sensor logging on pins {} refresh delay {} sec.".format(sensorPins, refreshSec)
SensorReader.setup(sensorPins)
signal.signal(signal.SIGINT, signal_handler)
con = None
while True:
try:
con = lite.connect(databaseFile)
value = SensorReader.getReadings()
timestamp = int(time.time())
cur = con.cursor()
for data in value:
row = (timestamp, data['pin'], data['temp'], data['humidity'])
print row
cur.execute("insert into sensor_log values(?,?,?,?)", row)
con.commit()
except lite.Error, e:
if con:
con.rollback()
print "Error %s:" % e.args[0]
except:
type, value, tb = sys.exc_info()
if isinstance(value, subprocess.CalledProcessError):
if "readtemp" in value.cmd[0] and value.returncode == 1:
handle_locked_gpio()
continue
print (type, value, tb)
finally:
if con:
con.close()
time.sleep(refreshSec)
| 25.877358
| 127
| 0.620489
|
f401cb1b5a1819e93bb565dd8a30eb8b49f2d188
| 2,930
|
py
|
Python
|
gbe/scheduling/views/delete_event_view.py
|
bethlakshmi/gbe-divio-djangocms-python2.7
|
6e9b2c894162524bbbaaf73dcbe927988707231d
|
[
"Apache-2.0"
] | 1
|
2021-03-14T11:56:47.000Z
|
2021-03-14T11:56:47.000Z
|
gbe/scheduling/views/delete_event_view.py
|
bethlakshmi/gbe-divio-djangocms-python2.7
|
6e9b2c894162524bbbaaf73dcbe927988707231d
|
[
"Apache-2.0"
] | 180
|
2019-09-15T19:52:46.000Z
|
2021-11-06T23:48:01.000Z
|
gbe/scheduling/views/delete_event_view.py
|
bethlakshmi/gbe-divio-djangocms-python2.7
|
6e9b2c894162524bbbaaf73dcbe927988707231d
|
[
"Apache-2.0"
] | null | null | null |
from django.views.generic import View
from django.views.decorators.cache import never_cache
from django.utils.decorators import method_decorator
from django.contrib.auth.decorators import login_required
from django.contrib import messages
from django.http import HttpResponseRedirect
from django.urls import reverse
from scheduler.idd import (
get_occurrence,
get_occurrences,
delete_occurrence,
)
from gbe.scheduling.views.functions import show_general_status
from gbe.models import UserMessage
from gbe.functions import validate_perms
from settings import GBE_DATETIME_FORMAT
class DeleteEventView(View):
permissions = ('Scheduling Mavens',)
def groundwork(self, request, args, kwargs):
self.profile = validate_perms(request, self.permissions)
if request.GET.get('next', None):
self.redirect_to = request.GET['next']
else:
self.redirect_to = reverse('manage_event_list',
urlconf='gbe.scheduling.urls')
if "occurrence_id" in kwargs:
result = get_occurrence(int(kwargs['occurrence_id']))
if result.errors and len(result.errors) > 0:
show_general_status(
request,
result,
self.__class__.__name__)
return HttpResponseRedirect(self.redirect_to)
else:
self.occurrence = result.occurrence
@never_cache
@method_decorator(login_required)
def get(self, request, *args, **kwargs):
error_url = self.groundwork(request, args, kwargs)
if error_url:
return error_url
title = str(self.occurrence)
start_time = self.occurrence.start_time
gbe_event = self.occurrence.eventitem
result = delete_occurrence(self.occurrence.pk)
show_general_status(request, result, self.__class__.__name__)
if len(result.errors) == 0:
result = get_occurrences(
foreign_event_ids=[self.occurrence.foreign_event_id])
if len(result.occurrences) == 0:
gbe_event.visible = False
gbe_event.save()
user_message = UserMessage.objects.get_or_create(
view=self.__class__.__name__,
code="DELETE_SUCCESS",
defaults={
'summary': "Occurrence Deletion Completed",
'description': "This event has been deleted."})
messages.success(
request,
'%s<br>Title: %s,<br> Start Time: %s' % (
user_message[0].description,
title,
start_time.strftime(
GBE_DATETIME_FORMAT)))
return HttpResponseRedirect(self.redirect_to)
def dispatch(self, *args, **kwargs):
return super(DeleteEventView, self).dispatch(*args, **kwargs)
| 37.564103
| 69
| 0.621502
|
a6755b0dc090f1669a31529aaad139d4076a1efc
| 68
|
py
|
Python
|
src/lib/post_processors/face_recognition/dlib/defaults.py
|
l-maia/viseron
|
d762be93db74f780db13ac332bf8673c41592aa9
|
[
"MIT"
] | null | null | null |
src/lib/post_processors/face_recognition/dlib/defaults.py
|
l-maia/viseron
|
d762be93db74f780db13ac332bf8673c41592aa9
|
[
"MIT"
] | null | null | null |
src/lib/post_processors/face_recognition/dlib/defaults.py
|
l-maia/viseron
|
d762be93db74f780db13ac332bf8673c41592aa9
|
[
"MIT"
] | null | null | null |
FACE_RECOGNITION_PATH = "/config/face_recognition"
EXPIRE_AFTER = 5
| 22.666667
| 50
| 0.823529
|
9330bc7f76f3b61047d412a825b55aff83a7d939
| 5,864
|
py
|
Python
|
nextdl/extractor/sky.py
|
devenu85/nextdl
|
0b458f556e2e0be80cb94bd9a9b1405ad2e9182d
|
[
"MIT"
] | 1
|
2021-12-19T13:55:20.000Z
|
2021-12-19T13:55:20.000Z
|
nextdl/extractor/sky.py
|
devenu85/nextdl
|
0b458f556e2e0be80cb94bd9a9b1405ad2e9182d
|
[
"MIT"
] | null | null | null |
nextdl/extractor/sky.py
|
devenu85/nextdl
|
0b458f556e2e0be80cb94bd9a9b1405ad2e9182d
|
[
"MIT"
] | null | null | null |
# coding: utf-8
from __future__ import unicode_literals
import re
from ..utils import extract_attributes, smuggle_url, strip_or_none, urljoin
from .common import InfoExtractor
class SkyBaseIE(InfoExtractor):
BRIGHTCOVE_URL_TEMPLATE = (
"http://players.brightcove.net/%s/%s_default/index.html?videoId=%s"
)
_SDC_EL_REGEX = (
r'(?s)(<div[^>]+data-(?:component-name|fn)="sdc-(?:articl|sit)e-video"[^>]*>)'
)
def _process_ooyala_element(self, webpage, sdc_el, url):
sdc = extract_attributes(sdc_el)
provider = sdc.get("data-provider")
if provider == "ooyala":
video_id = sdc["data-sdc-video-id"]
video_url = "ooyala:%s" % video_id
ie_key = "Ooyala"
ooyala_el = self._search_regex(
r'(<div[^>]+class="[^"]*\bsdc-article-video__media-ooyala\b[^"]*"[^>]+data-video-id="%s"[^>]*>)'
% video_id,
webpage,
"video data",
fatal=False,
)
if ooyala_el:
ooyala_attrs = extract_attributes(ooyala_el) or {}
if ooyala_attrs.get("data-token-required") == "true":
token_fetch_url = (
self._parse_json(
ooyala_attrs.get("data-token-fetch-options", "{}"),
video_id,
fatal=False,
)
or {}
).get("url")
if token_fetch_url:
embed_token = self._download_json(
urljoin(url, token_fetch_url), video_id, fatal=False
)
if embed_token:
video_url = smuggle_url(
video_url, {"embed_token": embed_token}
)
elif provider == "brightcove":
video_id = sdc["data-video-id"]
account_id = sdc.get("data-account-id") or "6058004172001"
player_id = sdc.get("data-player-id") or "RC9PQUaJ6"
video_url = self.BRIGHTCOVE_URL_TEMPLATE % (account_id, player_id, video_id)
ie_key = "BrightcoveNew"
return {
"_type": "url_transparent",
"id": video_id,
"url": video_url,
"ie_key": ie_key,
}
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
info = self._process_ooyala_element(
webpage, self._search_regex(self._SDC_EL_REGEX, webpage, "sdc element"), url
)
info.update(
{
"title": self._og_search_title(webpage),
"description": strip_or_none(self._og_search_description(webpage)),
}
)
return info
class SkySportsIE(SkyBaseIE):
IE_NAME = "sky:sports"
_VALID_URL = (
r"https?://(?:www\.)?skysports\.com/watch/video/([^/]+/)*(?P<id>[0-9]+)"
)
_TESTS = [
{
"url": "http://www.skysports.com/watch/video/10328419/bale-its-our-time-to-shine",
"md5": "77d59166cddc8d3cb7b13e35eaf0f5ec",
"info_dict": {
"id": "o3eWJnNDE6l7kfNO8BOoBlRxXRQ4ANNQ",
"ext": "mp4",
"title": "Bale: It's our time to shine",
"description": "md5:e88bda94ae15f7720c5cb467e777bb6d",
},
"add_ie": ["Ooyala"],
},
{
"url": "https://www.skysports.com/watch/video/sports/f1/12160544/abu-dhabi-gp-the-notebook",
"only_matching": True,
},
{
"url": "https://www.skysports.com/watch/video/tv-shows/12118508/rainford-brent-how-ace-programme-helps",
"only_matching": True,
},
]
class SkyNewsIE(SkyBaseIE):
IE_NAME = "sky:news"
_VALID_URL = r"https?://news\.sky\.com/video/[0-9a-z-]+-(?P<id>[0-9]+)"
_TEST = {
"url": "https://news.sky.com/video/russian-plane-inspected-after-deadly-fire-11712962",
"md5": "411e8893fd216c75eaf7e4c65d364115",
"info_dict": {
"id": "ref:1ua21xaDE6lCtZDmbYfl8kwsKLooJbNM",
"ext": "mp4",
"title": "Russian plane inspected after deadly fire",
"description": "The Russian Investigative Committee has released video of the wreckage of a passenger plane which caught fire near Moscow.",
"uploader_id": "6058004172001",
"timestamp": 1567112345,
"upload_date": "20190829",
},
"add_ie": ["BrightcoveNew"],
}
class SkySportsNewsIE(SkyBaseIE):
IE_NAME = "sky:sports:news"
_VALID_URL = r"https?://(?:www\.)?skysports\.com/([^/]+/)*news/\d+/(?P<id>\d+)"
_TEST = {
"url": "http://www.skysports.com/golf/news/12176/10871916/dustin-johnson-ready-to-conquer-players-championship-at-tpc-sawgrass",
"info_dict": {
"id": "10871916",
"title": "Dustin Johnson ready to conquer Players Championship at TPC Sawgrass",
"description": "Dustin Johnson is confident he can continue his dominant form in 2017 by adding the Players Championship to his list of victories.",
},
"playlist_count": 2,
}
def _real_extract(self, url):
article_id = self._match_id(url)
webpage = self._download_webpage(url, article_id)
entries = []
for sdc_el in re.findall(self._SDC_EL_REGEX, webpage):
entries.append(self._process_ooyala_element(webpage, sdc_el, url))
return self.playlist_result(
entries,
article_id,
self._og_search_title(webpage),
self._html_search_meta(["og:description", "description"], webpage),
)
| 38.077922
| 160
| 0.543997
|
977b250fa6c7c91cbd3ec25e5f6396bd7f6616c1
| 3,060
|
py
|
Python
|
forcePlates/MayaIntegration/plugin/maya_interaction.py
|
MontyThibault/centre-of-mass-awareness
|
58778f148e65749e1dfc443043e9fc054ca3ff4d
|
[
"MIT"
] | null | null | null |
forcePlates/MayaIntegration/plugin/maya_interaction.py
|
MontyThibault/centre-of-mass-awareness
|
58778f148e65749e1dfc443043e9fc054ca3ff4d
|
[
"MIT"
] | null | null | null |
forcePlates/MayaIntegration/plugin/maya_interaction.py
|
MontyThibault/centre-of-mass-awareness
|
58778f148e65749e1dfc443043e9fc054ca3ff4d
|
[
"MIT"
] | null | null | null |
## Centre of Pressure Uncertainty for Virtual Character Control
## McGill Computer Graphics Lab
##
## Released under the MIT license. This code is free to be modified
## and distributed.
##
## Author: Monty Thibault, montythibault@gmail.com
## Last Updated: Sep 02, 2016
## ------------------------------------------------------------------------
"""
This module is the bridge between the calibration program thread, the force plates,
and Maya.
"""
import maya_utils as mu
import maya_socket_connection as msc
# Out stream for the sampling status
PRINT_IN_MAYA_TERMINAL = True
def create_sampling_locator():
msc.call_func(mu.createLocatorTransformPair, 'sampling_marker')
def _move_sampling_locator_maya(currently_sampling, current_point):
"""
Move the Maya sampling marker to the current sampling location on
state change.
"""
p = current_point
msc.call_func(mu.moveObject, [p[0], 0, p[1]], 'sampling_marker')
def _print_sampling_status(currently_sampling, current_point):
"""
This function adds a print-out of the coordinates of the current sampling
points. It is generic in that it can be called within the Maya Python environment
or within the current Python environment.
"""
p = current_point
# Terminal printout
if currently_sampling:
print "Sampling started at %s" % str(p)
elif not currently_sampling:
print "Sampling stopped. Next point is %s" % str(p)
def _update_forces(forces_after_calibration):
"""
This is called when the LabPro receives a new measurement; we want to update
Maya's marker sizes, the interpolation of the center marker, etc.
"""
msc.call_func(mu.move_markers, forces_after_calibration)
def _create_current_point_obj(kpt):
"""
The way the grid class works is by replacing grid.currentPoint with immutable
tuples, but the grid object itself cannot be pickled to be sent into Maya.
Here we define a list object that is updated automatically with the values of the
tuples, so that we can simply send the list instead of the grid object.
"""
l = [0, 0]
update_callable = lambda _: _update_current_point_obj(l, kpt.generator.grid)
kpt._currently_sampling.add_listener(update_callable)
return l
def _update_current_point_obj(l, grid):
p = grid.currentPoint
l[0] = p[0]
l[1] = p[1]
def bind_listeners(kpt, fpt):
"""
Bind all the above to the calibration program thread (kpt) through listeners on
the _currently_sampling attribute.
"""
l = _create_current_point_obj(kpt)
# Sampling locator relocation
move_locator_callable = lambda cs: _move_sampling_locator_maya(cs, l)
kpt._currently_sampling.add_listener(move_locator_callable)
# Sampling printouts
if PRINT_IN_MAYA_TERMINAL:
maya_callable = lambda cs: msc.call_func(_print_sampling_status, cs, l)
kpt._currently_sampling.add_listener(maya_callable)
else:
shell_callable = lambda cs: _print_sampling_status(cs, l)
kpt._currently_sampling.add_listener(shell_callable)
# Force updates
fpt.fp.forces_after_calibration.add_listener(_update_forces)
| 20.816327
| 84
| 0.742484
|
0a6a5d13324d6c663e9142d8a70d655328b1ea65
| 6,428
|
py
|
Python
|
sdk/python/kfp/components/for_loop_test.py
|
votti/pipelines
|
1c3e2768e6177d5d6e3f4b8eff8fafb9a3b76c1f
|
[
"Apache-2.0"
] | 1
|
2022-03-30T05:22:19.000Z
|
2022-03-30T05:22:19.000Z
|
sdk/python/kfp/components/for_loop_test.py
|
votti/pipelines
|
1c3e2768e6177d5d6e3f4b8eff8fafb9a3b76c1f
|
[
"Apache-2.0"
] | 1
|
2020-02-06T12:53:44.000Z
|
2020-02-06T12:53:44.000Z
|
sdk/python/kfp/components/for_loop_test.py
|
votti/pipelines
|
1c3e2768e6177d5d6e3f4b8eff8fafb9a3b76c1f
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2021 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for kfpmponents.for_loop."""
import unittest
from absl.testing import parameterized
from kfp.components import pipeline_channel
from kfp.components import for_loop
class ForLoopTest(parameterized.TestCase):
@parameterized.parameters(
{
'collection_type': 'List[int]',
'item_type': 'int',
},
{
'collection_type': 'typing.List[str]',
'item_type': 'str',
},
{
'collection_type': 'typing.Tuple[ float ]',
'item_type': 'float',
},
{
'collection_type': 'typing.Sequence[Dict[str, str]]',
'item_type': 'Dict[str, str]',
},
{
'collection_type': 'List',
'item_type': None,
},
)
def test_get_loop_item_type(self, collection_type, item_type):
self.assertEqual(
for_loop._get_loop_item_type(collection_type), item_type)
@parameterized.parameters(
{
'dict_type': 'Dict[str, int]',
'value_type': 'int',
},
{
'dict_type': 'typing.Mapping[str,float]',
'value_type': 'float',
},
{
'dict_type': 'typing.Mapping[str, Dict[str, str] ]',
'value_type': 'Dict[str, str]',
},
{
'dict_type': 'dict',
'value_type': None,
},
)
def test_get_subvar_type(self, dict_type, value_type):
self.assertEqual(for_loop._get_subvar_type(dict_type), value_type)
@parameterized.parameters(
{
'channel':
pipeline_channel.PipelineParameterChannel(
name='param1',
channel_type='List[str]',
),
'expected_serialization_value':
'{{channel:task=;name=param1-loop-item;type=str;}}',
},
{
'channel':
pipeline_channel.PipelineParameterChannel(
name='output1',
channel_type='List[Dict[str, str]]',
task_name='task1',
),
'expected_serialization_value':
'{{channel:task=task1;name=output1-loop-item;type=Dict[str, str];}}',
},
)
def test_loop_argument_from_pipeline_channel(self, channel,
expected_serialization_value):
loop_argument = for_loop.LoopArgument.from_pipeline_channel(channel)
self.assertEqual(loop_argument.items_or_pipeline_channel, channel)
self.assertEqual(str(loop_argument), expected_serialization_value)
@parameterized.parameters(
{
'raw_items': ['a', 'b', 'c'],
'name_code':
'1',
'expected_serialization_value':
'{{channel:task=;name=loop-item-param-1;type=str;}}',
},
{
'raw_items': [
{
'A_a': 1
},
{
'A_a': 2
},
],
'name_code':
'2',
'expected_serialization_value':
'{{channel:task=;name=loop-item-param-2;type=dict;}}',
},
)
def test_loop_argument_from_raw_items(self, raw_items, name_code,
expected_serialization_value):
loop_argument = for_loop.LoopArgument.from_raw_items(
raw_items, name_code)
self.assertEqual(loop_argument.items_or_pipeline_channel, raw_items)
self.assertEqual(str(loop_argument), expected_serialization_value)
@parameterized.parameters(
{
'name': 'abc-loop-item',
'expected_result': True
},
{
'name': 'abc-loop-item-subvar-a',
'expected_result': True
},
{
'name': 'loop-item-param-1',
'expected_result': True
},
{
'name': 'loop-item-param-1-subvar-a',
'expected_result': True
},
{
'name': 'param1',
'expected_result': False
},
)
def test_name_is_loop_argument(self, name, expected_result):
self.assertEqual(
for_loop.LoopArgument.name_is_loop_argument(name), expected_result)
@parameterized.parameters(
{
'subvar_name': 'a',
'valid': True
},
{
'subvar_name': 'A_a',
'valid': True
},
{
'subvar_name': 'a0',
'valid': True
},
{
'subvar_name': 'a-b',
'valid': False
},
{
'subvar_name': '0',
'valid': False
},
{
'subvar_name': 'a#',
'valid': False
},
)
def test_create_loop_argument_variable(self, subvar_name, valid):
loop_argument = for_loop.LoopArgument.from_pipeline_channel(
pipeline_channel.PipelineParameterChannel(
name='param1',
channel_type='List[Dict[str, str]]',
))
if valid:
loop_arg_var = for_loop.LoopArgumentVariable(
loop_argument=loop_argument,
subvar_name=subvar_name,
)
self.assertEqual(loop_arg_var.loop_argument, loop_argument)
self.assertEqual(loop_arg_var.subvar_name, subvar_name)
else:
with self.assertRaisesRegex(ValueError,
'Tried to create subvariable'):
for_loop.LoopArgumentVariable(
loop_argument=loop_argument,
subvar_name=subvar_name,
)
if __name__ == '__main__':
unittest.main()
| 31.509804
| 85
| 0.526602
|
d6f1d9496d00491686a91327e147ab0b39254720
| 1,582
|
py
|
Python
|
-MakeSenseofCensus-/code.py
|
hemangi44/greyatom-python-for-data-science
|
2598b537e8bd720de78c02bae51ed770c6483b9e
|
[
"MIT"
] | 1
|
2020-04-10T10:32:59.000Z
|
2020-04-10T10:32:59.000Z
|
-MakeSenseofCensus-/code.py
|
hemangi44/greyatom-python-for-data-science
|
2598b537e8bd720de78c02bae51ed770c6483b9e
|
[
"MIT"
] | null | null | null |
-MakeSenseofCensus-/code.py
|
hemangi44/greyatom-python-for-data-science
|
2598b537e8bd720de78c02bae51ed770c6483b9e
|
[
"MIT"
] | null | null | null |
# --------------
# Importing header files
import numpy as np
# Path of the file has been stored in variable called 'path'
#New record
new_record=[[50, 9, 4, 1, 0, 0, 40, 0]]
#Code starts here
data=np.genfromtxt(path,delimiter=",",skip_header=1)
census=np.concatenate((data,new_record))
print(census)
# --------------
#Code starts here
age=census[:,0]
max_age=age.max()
min_age=age.min()
age_mean=age.mean()
age_std=np.std(age)
print(age_std)
# --------------
#Code starts here
race_0 = census[census[:,2]==0]
race_1 = census[census[:,2]==1]
race_2 = census[census[:,2]==2]
race_3 = census[census[:,2]==3]
race_4 = census[census[:,2]==4]
len_0 = len(race_0)
len_1 = len(race_1)
len_2 = len(race_2)
len_3 = len(race_3)
len_4 = len(race_4)
lenn = np.array([len_0 , len_1 , len_2 , len_3 , len_4])
for i in range(0, len(lenn)):
if lenn[i] == lenn.min():
minority_race = i
print(minority_race)
# --------------
#Code starts here
census = np.genfromtxt(path, delimiter = "," , skip_header = 1)
senior_citizens=census[census[:,0] > 60]
seniors_hours = census[census[:,0] > 60] [:, np.array([False,False,False,False,False,False,True,False])]
working_hours_sum = seniors_hours.sum()
senior_citizens_len = len(senior_citizens)
avg_working_hours = working_hours_sum / senior_citizens_len
print(avg_working_hours)
# --------------
#Code starts here
high=census[census[:,1]>10]
low=census[census[:,1]<=10]
avg_pay_high=high[:,7].mean()
avg_pay_low=low[:,7].mean()
print(avg_pay_high>avg_pay_low)
| 19.530864
| 105
| 0.639064
|
4ddcecb56ffb2d4978ad4ae4a4c97873169af790
| 9,337
|
py
|
Python
|
numba/cuda/simulator/kernelapi.py
|
avdul-q101/numba
|
199798e2c849b5e63eeef36972566fda7b84625c
|
[
"BSD-2-Clause",
"BSD-3-Clause"
] | 3
|
2017-08-11T13:05:44.000Z
|
2021-08-10T07:47:13.000Z
|
numba/cuda/simulator/kernelapi.py
|
avdul-q101/numba
|
199798e2c849b5e63eeef36972566fda7b84625c
|
[
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null |
numba/cuda/simulator/kernelapi.py
|
avdul-q101/numba
|
199798e2c849b5e63eeef36972566fda7b84625c
|
[
"BSD-2-Clause",
"BSD-3-Clause"
] | 1
|
2017-05-09T04:22:43.000Z
|
2017-05-09T04:22:43.000Z
|
'''
Implements the cuda module as called from within an executing kernel
(@cuda.jit-decorated function).
'''
from contextlib import contextmanager
import sys
import threading
import traceback
import numpy as np
from numba.np import numpy_support
class Dim3(object):
'''
Used to implement thread/block indices/dimensions
'''
def __init__(self, x, y, z):
self.x = x
self.y = y
self.z = z
def __str__(self):
return '(%s, %s, %s)' % (self.x, self.y, self.z)
def __repr__(self):
return 'Dim3(%s, %s, %s)' % (self.x, self.y, self.z)
def __iter__(self):
yield self.x
yield self.y
yield self.z
class GridGroup:
'''
Used to implement the grid group.
'''
def sync(self):
# Synchronization of the grid group is equivalent to synchronization of
# the thread block, because we only support cooperative grids with one
# block.
threading.current_thread().syncthreads()
class FakeCUDACg:
'''
CUDA Cooperative Groups
'''
def this_grid(self):
return GridGroup()
class FakeCUDALocal(object):
'''
CUDA Local arrays
'''
def array(self, shape, dtype):
dtype = numpy_support.as_dtype(dtype)
return np.empty(shape, dtype)
class FakeCUDAConst(object):
'''
CUDA Const arrays
'''
def array_like(self, ary):
return ary
class FakeCUDAShared(object):
'''
CUDA Shared arrays.
Limitations: assumes that only one call to cuda.shared.array is on a line,
and that that line is only executed once per thread. i.e.::
a = cuda.shared.array(...); b = cuda.shared.array(...)
will erroneously alias a and b, and::
for i in range(10):
sharedarrs[i] = cuda.shared.array(...)
will alias all arrays created at that point (though it is not certain that
this would be supported by Numba anyway).
'''
def __init__(self, dynshared_size):
self._allocations = {}
self._dynshared_size = dynshared_size
self._dynshared = np.zeros(dynshared_size, dtype=np.byte)
def array(self, shape, dtype):
dtype = numpy_support.as_dtype(dtype)
# Dynamic shared memory is requested with size 0 - this all shares the
# same underlying memory
if shape == 0:
# Count must be the maximum number of whole elements that fit in the
# buffer (Numpy complains if the buffer is not a multiple of the
# element size)
count = self._dynshared_size // dtype.itemsize
return np.frombuffer(self._dynshared.data, dtype=dtype, count=count)
# Otherwise, identify allocations by source file and line number
# We pass the reference frame explicitly to work around
# http://bugs.python.org/issue25108
stack = traceback.extract_stack(sys._getframe())
caller = stack[-2][0:2]
res = self._allocations.get(caller)
if res is None:
res = np.empty(shape, dtype)
self._allocations[caller] = res
return res
addlock = threading.Lock()
sublock = threading.Lock()
andlock = threading.Lock()
orlock = threading.Lock()
xorlock = threading.Lock()
maxlock = threading.Lock()
minlock = threading.Lock()
caslock = threading.Lock()
inclock = threading.Lock()
declock = threading.Lock()
exchlock = threading.Lock()
class FakeCUDAAtomic(object):
def add(self, array, index, val):
with addlock:
old = array[index]
array[index] += val
return old
def sub(self, array, index, val):
with sublock:
old = array[index]
array[index] -= val
return old
def and_(self, array, index, val):
with andlock:
old = array[index]
array[index] &= val
return old
def or_(self, array, index, val):
with orlock:
old = array[index]
array[index] |= val
return old
def xor(self, array, index, val):
with xorlock:
old = array[index]
array[index] ^= val
return old
def inc(self, array, index, val):
with inclock:
old = array[index]
if old >= val:
array[index] = 0
else:
array[index] += 1
return old
def dec(self, array, index, val):
with declock:
old = array[index]
if (old == 0) or (old > val):
array[index] = val
else:
array[index] -= 1
return old
def exch(self, array, index, val):
with exchlock:
old = array[index]
array[index] = val
return old
def max(self, array, index, val):
with maxlock:
old = array[index]
array[index] = max(old, val)
return old
def min(self, array, index, val):
with minlock:
old = array[index]
array[index] = min(old, val)
return old
def nanmax(self, array, index, val):
with maxlock:
old = array[index]
array[index] = np.nanmax([array[index], val])
return old
def nanmin(self, array, index, val):
with minlock:
old = array[index]
array[index] = np.nanmin([array[index], val])
return old
def compare_and_swap(self, array, old, val):
with caslock:
index = (0,) * array.ndim
loaded = array[index]
if loaded == old:
array[index] = val
return loaded
class FakeCUDAModule(object):
'''
An instance of this class will be injected into the __globals__ for an
executing function in order to implement calls to cuda.*. This will fail to
work correctly if the user code does::
from numba import cuda as something_else
In other words, the CUDA module must be called cuda.
'''
def __init__(self, grid_dim, block_dim, dynshared_size):
self.gridDim = Dim3(*grid_dim)
self.blockDim = Dim3(*block_dim)
self._cg = FakeCUDACg()
self._local = FakeCUDALocal()
self._shared = FakeCUDAShared(dynshared_size)
self._const = FakeCUDAConst()
self._atomic = FakeCUDAAtomic()
@property
def cg(self):
return self._cg
@property
def local(self):
return self._local
@property
def shared(self):
return self._shared
@property
def const(self):
return self._const
@property
def atomic(self):
return self._atomic
@property
def threadIdx(self):
return threading.current_thread().threadIdx
@property
def blockIdx(self):
return threading.current_thread().blockIdx
@property
def warpsize(self):
return 32
@property
def laneid(self):
return threading.current_thread().thread_id % 32
def syncthreads(self):
threading.current_thread().syncthreads()
def threadfence(self):
# No-op
pass
def threadfence_block(self):
# No-op
pass
def threadfence_system(self):
# No-op
pass
def syncthreads_count(self, val):
return threading.current_thread().syncthreads_count(val)
def syncthreads_and(self, val):
return threading.current_thread().syncthreads_and(val)
def syncthreads_or(self, val):
return threading.current_thread().syncthreads_or(val)
def popc(self, val):
return bin(val).count("1")
def fma(self, a, b, c):
return a * b + c
def cbrt(self, a):
return a ** (1 / 3)
def brev(self, val):
return int('{:032b}'.format(val)[::-1], 2)
def clz(self, val):
s = '{:032b}'.format(val)
return len(s) - len(s.lstrip('0'))
def ffs(self, val):
s = '{:032b}'.format(val)
return len(s) - len(s.rstrip('0'))
def selp(self, a, b, c):
return b if a else c
def grid(self, n):
bdim = self.blockDim
bid = self.blockIdx
tid = self.threadIdx
x = bid.x * bdim.x + tid.x
if n == 1:
return x
y = bid.y * bdim.y + tid.y
if n == 2:
return (x, y)
z = bid.z * bdim.z + tid.z
if n == 3:
return (x, y, z)
raise RuntimeError("Global ID has 1-3 dimensions. %d requested" % n)
def gridsize(self, n):
bdim = self.blockDim
gdim = self.gridDim
x = bdim.x * gdim.x
if n == 1:
return x
y = bdim.y * gdim.y
if n == 2:
return (x, y)
z = bdim.z * gdim.z
if n == 3:
return (x, y, z)
raise RuntimeError("Global grid has 1-3 dimensions. %d requested" % n)
@contextmanager
def swapped_cuda_module(fn, fake_cuda_module):
from numba import cuda
fn_globs = fn.__globals__
# get all globals that is the "cuda" module
orig = dict((k, v) for k, v in fn_globs.items() if v is cuda)
# build replacement dict
repl = dict((k, fake_cuda_module) for k, v in orig.items())
# replace
fn_globs.update(repl)
try:
yield
finally:
# revert
fn_globs.update(orig)
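# --- Editorial sketch (not part of the numba sources): a minimal host-side
# use of FakeCUDAModule, limited to features that do not require the
# simulator's special thread objects (atomics and intrinsic helpers). The
# helper name and sample values are hypothetical.
def _demo_fake_cuda_module():
    fake = FakeCUDAModule(grid_dim=(1, 1, 1), block_dim=(1, 1, 1),
                          dynshared_size=0)
    out = np.zeros(1, dtype=np.int32)
    previous = fake.atomic.add(out, 0, 5)    # returns the value before the add
    assert previous == 0 and out[0] == 5
    assert fake.popc(0b1011) == 3            # population count of set bits
    assert fake.selp(True, 'a', 'b') == 'a'  # predicated select
    return fake.gridsize(3)                  # (1, 1, 1) for a single block/thread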
| 25.099462
| 80
| 0.573203
|
7ec40d3105db2ba6ba1079af0a0261f33329d482
| 5,185
|
py
|
Python
|
entropytriangle/coordsentropic.py
|
Jaimedlrm/entropytriangle
|
46076aa6e9e06777df4dcf885cd951afdf1de168
|
[
"MIT"
] | 2
|
2019-08-07T07:13:38.000Z
|
2019-08-07T07:13:41.000Z
|
entropytriangle/coordsentropic.py
|
Jaimedlrm/entropytriangle
|
46076aa6e9e06777df4dcf885cd951afdf1de168
|
[
"MIT"
] | null | null | null |
entropytriangle/coordsentropic.py
|
Jaimedlrm/entropytriangle
|
46076aa6e9e06777df4dcf885cd951afdf1de168
|
[
"MIT"
] | 1
|
2021-03-13T18:24:14.000Z
|
2021-03-13T18:24:14.000Z
|
'''
Functions for calculating the variables used in the Entropy Triangle plotting phase
'''
from numpy import nan_to_num as nan_to_num
from warnings import warn as warning
import pandas as pd # DataFrames manipulation
import matplotlib.pyplot as plt
#Definition of the variables
#SMET
derivedSplitSmetCoords = ["DeltaH_Pxi", "M_Pxi", "VI_Pxi"] # Multivariate entropic coordinates
aggregateSmetCoords = ["H_Ux", "DeltaH_Px", "M_Px", "VI_Px"] # Source multivariate aggregate coordinates (Sum of derivedSplitSmetCoords)
dualAggregateSmetCoords = ["H_Ux", "DeltaH_Px", "D_Px", "VI_Px"] # SMET coords without C_P_X
#CBET
cbetEntropicCoords = ["H_U2", "H_P2", "DeltaH_P2", "M_P2", "VI_P2"] # Characterization of the variables in a DataFrame of channel binary entropies
#CMET
cmetEntropicCoords = ["H_U", "H_P", "DeltaH_P", "M_P", "VI_P"] # Characterization of the variables in a DataFrame of channel multivariate entropies
#' Functions to detect SMET coordinates
def hasSplitSmetCoords(df):
"""
A function to detect if the source multivariate split entropies are present: this enables working out the multivariate split entropic coordinates (SMET)
    derivedSplitSmetCoords = ["DeltaH_Pxi", "M_Pxi", "VI_Pxi"]
    > check = hasSplitSmetCoords(df)
Parameters
----------
df : DataFrame with entropic variables
Returns
----------
    Boolean: True if all the variables of this coordinate type are present in the input DataFrame
"""
return (df.columns.isin(derivedSplitSmetCoords).sum() == len(derivedSplitSmetCoords))
def hasAggregateSmetCoords(df):
"""
A function to detect if the source multivariate aggregate entropic coordinates are present (SMET)
aggregateSmetCoords = ["H_Ux", "DeltaH_Px", "M_Px", "VI_Px"]
    > check = hasAggregateSmetCoords(df)
Parameters
----------
df : DataFrame with entropic variables
Returns
----------
    Boolean: True if all the variables of this coordinate type are present in the input DataFrame
"""
return (df.columns.isin(aggregateSmetCoords).sum() == len(aggregateSmetCoords))
def hasDualAggregateSmetCoords(df):
"""
A function to detect if the source multivariate dual aggregate entropic coordinates are present (SMET)
dualAggregateSmetCoords = ["H_Ux", "DeltaH_Px", "D_Px", "VI_Px"]
    > check = hasDualAggregateSmetCoords(df)
Parameters
----------
df : DataFrame with entropic variables
Returns
----------
    Boolean: True if all the variables of this coordinate type are present in the input DataFrame
"""
return (df.columns.isin(dualAggregateSmetCoords).sum() == len(dualAggregateSmetCoords))
#' Functions to detect CMET coordinates
def hasCbetEntropicCoords(df):
"""
    A function to detect if the channel binary entropic coordinates are present (CBET)
    cbetEntropicCoords = ["H_U2", "H_P2", "DeltaH_P2", "M_P2", "VI_P2"]
    > check = hasCbetEntropicCoords(df)
Parameters
----------
df : DataFrame with entropic variables
Returns
----------
    Boolean: True if all the variables of this coordinate type are present in the input DataFrame
"""
#return all(df.columns.isin(cmetEntropicCoords))
return (df.columns.isin(cbetEntropicCoords).sum() == len(cbetEntropicCoords))
#' Functions to detect CMET coordinates
def hasCmetEntropicCoords(df):
"""
    A function to detect if the channel multivariate entropic coordinates are present (CMET)
    cmetEntropicCoords = ["H_U", "H_P", "DeltaH_P", "M_P", "VI_P"]
    > check = hasCmetEntropicCoords(df)
Parameters
----------
df : DataFrame with entropic variables
Returns
----------
    Boolean: True if all the variables of this coordinate type are present in the input DataFrame
"""
#return all(df.columns.isin(cmetEntropicCoords))
return (df.columns.isin(cmetEntropicCoords).sum() == len(cmetEntropicCoords))
def entcoords(df,scale=100):
"""
    A function for calculating the normalized coordinates of the entropic measures of a DataFrame.
    It can be used for the SMET and CMET cases, returning a list of arrays with the normalized, scaled measures.
> entropies_coordinates = entcoords(df)
>
> Example:
>
> entropies_coordinates = [array([ 9.24959601, 142.19609928, 2.96110915]), array([ 15.4082864 , 119.15900389, 1.60549331])]
>
Parameters
----------
df : DataFrame with entropic variables
scale : The scale used for plotting the triangle
Returns
----------
    coor : List with the entropic measures to plot in the De Finetti diagram
"""
coor = list()
for i in range(df.shape[0]):
if(nan_to_num(df.iloc[i].values[2:6]).sum()>1):
coor.append((df.iloc[i].values[2:6])/df.iloc[i].values[0])
else:
coor.append(df.iloc[i].values[2:6])
coor = list(map(lambda x: x * scale, coor))
return coor
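# --- Editorial sketch (not part of the original module): a minimal check of
# the CMET detection and coordinate scaling above on a made-up one-row frame.
if __name__ == "__main__":
    demo = pd.DataFrame([[1.0, 0.9, 0.2, 0.5, 0.3]], columns=cmetEntropicCoords)
    print(hasCmetEntropicCoords(demo))  # True: all CMET columns are present
    print(entcoords(demo, scale=100))   # ~[array([20., 50., 30.])] up to float rounding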
| 28.646409
| 156
| 0.687175
|
3f245742fc6060d3e8660627c1601b7ce5077dd4
| 9,414
|
py
|
Python
|
tests/test_OFTI.py
|
sefffal/orbitize
|
e10f57e2a2a181ae3642e66a6aaadd0016579d3d
|
[
"BSD-3-Clause-Clear"
] | null | null | null |
tests/test_OFTI.py
|
sefffal/orbitize
|
e10f57e2a2a181ae3642e66a6aaadd0016579d3d
|
[
"BSD-3-Clause-Clear"
] | null | null | null |
tests/test_OFTI.py
|
sefffal/orbitize
|
e10f57e2a2a181ae3642e66a6aaadd0016579d3d
|
[
"BSD-3-Clause-Clear"
] | null | null | null |
"""
Test the orbitize.sampler OFTI class which performs OFTI on astrometric data
"""
import numpy as np
import os
import pytest
import matplotlib.pyplot as plt
import time
import orbitize
import orbitize.sampler as sampler
import orbitize.driver
import orbitize.priors as priors
import orbitize.system as system
from orbitize.lnlike import chi2_lnlike
from orbitize.kepler import calc_orbit
import orbitize.system
input_file = os.path.join(orbitize.DATADIR, 'GJ504.csv')
input_file_1epoch = os.path.join(orbitize.DATADIR, 'GJ504_1epoch.csv')
def test_scale_and_rotate():
# perform scale-and-rotate
myDriver = orbitize.driver.Driver(input_file, 'OFTI',
1, 1.22, 56.95, mass_err=0.08, plx_err=0.26)
s = myDriver.sampler
samples = s.prepare_samples(100)
sma, ecc, inc, argp, lan, tau, plx, mtot = [samp for samp in samples]
ra, dec, vc = orbitize.kepler.calc_orbit(s.epochs, sma, ecc, inc, argp, lan, tau, plx, mtot, tau_ref_epoch=0)
sep, pa = orbitize.system.radec2seppa(ra, dec)
sep_sar, pa_sar = np.median(sep[s.epoch_idx]), np.median(pa[s.epoch_idx])
# test to make sure sep and pa scaled to scale-and-rotate epoch
sar_epoch = s.system.data_table[s.epoch_idx]
assert sep_sar == pytest.approx(sar_epoch['quant1'], abs=sar_epoch['quant1_err'])
assert pa_sar == pytest.approx(sar_epoch['quant2'], abs=sar_epoch['quant2_err'])
# test scale-and-rotate for orbits run all the way through OFTI
s.run_sampler(100)
# test orbit plot generation
s.results.plot_orbits(start_mjd=s.epochs[0])
samples = s.results.post
sma = samples[:, 0]
ecc = samples[:, 1]
inc = samples[:, 2]
argp = samples[:, 3]
lan = samples[:, 4]
tau = samples[:, 5]
plx = samples[:, 6]
mtot = samples[:, 7]
ra, dec, vc = orbitize.kepler.calc_orbit(s.epochs, sma, ecc, inc, argp, lan, tau, plx, mtot, tau_ref_epoch=0)
assert np.max(lan) > np.pi
sep, pa = orbitize.system.radec2seppa(ra, dec)
sep_sar, pa_sar = np.median(sep[s.epoch_idx]), np.median(pa[s.epoch_idx])
# test to make sure sep and pa scaled to scale-and-rotate epoch
assert sep_sar == pytest.approx(sar_epoch['quant1'], abs=sar_epoch['quant1_err'])
assert pa_sar == pytest.approx(sar_epoch['quant2'], abs=sar_epoch['quant2_err'])
# test scale-and-rotate with restricted upper limits on PAN
myDriver = orbitize.driver.Driver(input_file, 'OFTI',
1, 1.22, 56.95, mass_err=0.08, plx_err=0.26, system_kwargs={'restrict_angle_ranges':True})
s = myDriver.sampler
samples = s.prepare_samples(100)
sma, ecc, inc, argp, lan, tau, plx, mtot = [samp for samp in samples]
assert np.max(lan) < np.pi
assert np.max(argp) > np.pi and np.max(argp) < 2 * np.pi
ra, dec, vc = orbitize.kepler.calc_orbit(s.epochs, sma, ecc, inc, argp, lan, tau, plx, mtot, tau_ref_epoch=0)
sep, pa = orbitize.system.radec2seppa(ra, dec)
sep_sar, pa_sar = np.median(sep[s.epoch_idx]), np.median(pa[s.epoch_idx])
sar_epoch = s.system.data_table[s.epoch_idx]
assert sep_sar == pytest.approx(sar_epoch['quant1'], abs=sar_epoch['quant1_err'])
assert pa_sar == pytest.approx(sar_epoch['quant2'], abs=sar_epoch['quant2_err'])
sma_seppa = 0
seppa_lnprob_compare = None
def test_run_sampler():
global sma_seppa, seppa_lnprob_compare # use for covariances test
# initialize sampler
myDriver = orbitize.driver.Driver(input_file, 'OFTI',
1, 1.22, 56.95, mass_err=0.08, plx_err=0.26)
s = myDriver.sampler
# change eccentricity prior
myDriver.system.sys_priors[1] = priors.LinearPrior(-2.18, 2.01)
# test num_samples=1
s.run_sampler(0, num_samples=1)
# test to make sure outputs are reasonable
start = time.time()
orbits = s.run_sampler(1000, num_cores=4)
end = time.time()
print()
print("Runtime: "+str(end-start) + " s")
print()
print(orbits[0])
# test that lnlikes being saved are correct
returned_lnlike_test = s.results.lnlike[0]
computed_lnlike_test = s._logl(orbits[0])
assert returned_lnlike_test == pytest.approx(computed_lnlike_test, abs=0.01)
seppa_lnprob_compare = (orbits[0], computed_lnlike_test) # one set of params and associated lnlike saved.
print()
idx = s.system.param_idx
sma = np.median([x[idx['sma1']] for x in orbits])
ecc = np.median([x[idx['ecc1']] for x in orbits])
inc = np.median([x[idx['inc1']] for x in orbits])
# expected values from Blunt et al. (2017)
sma_exp = 48.
ecc_exp = 0.19
inc_exp = np.radians(140)
# test to make sure OFTI values are within 20% of expectations
assert sma == pytest.approx(sma_exp, abs=0.2*sma_exp)
assert ecc == pytest.approx(ecc_exp, abs=0.2*ecc_exp)
assert inc == pytest.approx(inc_exp, abs=0.2*inc_exp)
    sma_seppa = sma  # use for covariances test
# test with only one core
orbits = s.run_sampler(100, num_cores=1)
# test with only one epoch
myDriver = orbitize.driver.Driver(input_file_1epoch, 'OFTI',
1, 1.22, 56.95, mass_err=0.08, plx_err=0.26)
s = myDriver.sampler
s.run_sampler(1)
print()
def test_fixed_sys_params_sampling():
# test in case of fixed mass and parallax
myDriver = orbitize.driver.Driver(input_file, 'OFTI',
1, 1.22, 56.95)
s = myDriver.sampler
samples = s.prepare_samples(100)
assert np.all(samples[-1] == s.priors[-1])
assert isinstance(samples[-3], np.ndarray)
def test_OFTI_multiplanet():
# initialize sampler
input_file = os.path.join(orbitize.DATADIR, "test_val_multi.csv")
myDriver = orbitize.driver.Driver(input_file, 'OFTI',
2, 1.52, 24.76, mass_err=0.15, plx_err=0.64)
s = myDriver.sampler
# change eccentricity prior for b
myDriver.system.sys_priors[1] = priors.UniformPrior(0.0, 0.1)
# change eccentricity prior for c
myDriver.system.sys_priors[7] = priors.UniformPrior(0.0, 0.1)
orbits = s.run_sampler(500)
idx = s.system.param_idx
sma1 = np.median(orbits[:,idx['sma1']])
sma2 = np.median(orbits[:,idx['sma2']])
sma1_exp = 66
sma2_exp = 40
print(sma1, sma2)
assert sma1 == pytest.approx(sma1_exp, abs=0.3*sma1_exp)
assert sma2 == pytest.approx(sma2_exp, abs=0.3*sma2_exp)
assert np.all(orbits[:, idx['ecc1']] < 0.1)
assert np.all(orbits[:, idx['ecc2']] < 0.1)
@pytest.hookimpl(trylast=True)
def test_OFTI_covariances():
"""
Test OFTI fits by turning sep/pa measurements to RA/Dec measurements with covariances
Needs to be run after test_run_sampler()!!
"""
# only run if these variables are set.
if sma_seppa == 0 or seppa_lnprob_compare is None:
print("Skipping OFTI covariances test because reference data not initalized. Please make sure test_run_sampler is run first.")
return
# read in seppa data table and turn into raddec data table
data_table = orbitize.read_input.read_file(input_file)
data_ra, data_dec = system.seppa2radec(data_table['quant1'], data_table['quant2'])
data_raerr, data_decerr, data_radeccorr = [], [], []
for row in data_table:
raerr, decerr, radec_corr = system.transform_errors(row['quant1'], row['quant2'],
row['quant1_err'], row['quant2_err'],
0, system.seppa2radec, nsamps=10000000)
data_raerr.append(raerr)
data_decerr.append(decerr)
data_radeccorr.append(radec_corr)
data_table['quant1'] = data_ra
data_table['quant2'] = data_dec
data_table['quant1_err'] = np.array(data_raerr)
data_table['quant2_err'] = np.array(data_decerr)
data_table['quant12_corr'] = np.array(data_radeccorr)
data_table['quant_type'] = np.array(['radec' for _ in data_table])
# initialize system
my_sys = system.System(1, data_table, 1.22, 56.95, mass_err=0.08, plx_err=0.26)
# initialize sampler
s = sampler.OFTI(my_sys)
# change eccentricity prior
my_sys.sys_priors[1] = priors.LinearPrior(-2.18, 2.01)
# test num_samples=1
s.run_sampler(0, num_samples=1)
# test to make sure outputs are reasonable
orbits = s.run_sampler(1000, num_cores=4)
# test that lnlikes being saved are correct
returned_lnlike_test = s.results.lnlike[0]
computed_lnlike_test = s._logl(orbits[0])
assert returned_lnlike_test == pytest.approx(computed_lnlike_test, abs=0.01)
# test that the lnlike is very similar to the values computed in seppa space
ref_params, ref_lnlike = seppa_lnprob_compare
computed_lnlike_ref = s._logl(ref_params)
    assert ref_lnlike == pytest.approx(computed_lnlike_ref, abs=0.05)  # a 5% difference in lnprob is allowable.
idx = s.system.param_idx
sma = np.median([x[idx['sma1']] for x in orbits])
ecc = np.median([x[idx['ecc1']] for x in orbits])
inc = np.median([x[idx['inc1']] for x in orbits])
# test against seppa fits to see they are similar
assert sma_seppa == pytest.approx(sma, abs=0.2 * sma_seppa)
if __name__ == "__main__":
#test_scale_and_rotate()
test_run_sampler()
test_OFTI_covariances()
# test_OFTI_multiplanet()
# print("Done!")
| 36.207692
| 134
| 0.663374
|
c6ac1f319d703198880f7390f516dc1564ae4f52
| 2,648
|
py
|
Python
|
citrix_hypervisor/tests/test_lab.py
|
tdimnet/integrations-core
|
a78133a3b71a1b8377fa214d121a98647031ab06
|
[
"BSD-3-Clause"
] | 663
|
2016-08-23T05:23:45.000Z
|
2022-03-29T00:37:23.000Z
|
citrix_hypervisor/tests/test_lab.py
|
tdimnet/integrations-core
|
a78133a3b71a1b8377fa214d121a98647031ab06
|
[
"BSD-3-Clause"
] | 6,642
|
2016-06-09T16:29:20.000Z
|
2022-03-31T22:24:09.000Z
|
citrix_hypervisor/tests/test_lab.py
|
tdimnet/integrations-core
|
a78133a3b71a1b8377fa214d121a98647031ab06
|
[
"BSD-3-Clause"
] | 1,222
|
2017-01-27T15:51:38.000Z
|
2022-03-31T18:17:51.000Z
|
# (C) Datadog, Inc. 2021-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import os
import pytest
from datadog_checks.base import is_affirmative
from datadog_checks.citrix_hypervisor import CitrixHypervisorCheck
from datadog_checks.dev.utils import get_metadata_metrics
METRICS = [
'host.cache_hits',
'host.cache_misses',
'host.cache_size',
'host.cache_hits',
'host.cache_misses',
'host.cache_size',
'host.cpu',
'host.memory.free_kib',
'host.memory.reclaimed',
'host.memory.reclaimed_max',
'host.memory.total_kib',
'host.pif.rx',
'host.pif.tx',
'host.pool.session_count',
'host.pool.task_count',
'host.xapi.allocation_kib',
'host.xapi.free_memory_kib',
'host.xapi.live_memory_kib',
'host.xapi.memory_usage_kib',
'host.xapi.open_fds',
'vm.cpu',
'vm.memory',
]
def test_lab(aggregator, dd_run_check):
"""
    This test is intended to be run manually to connect to a real Citrix Hypervisor instance
It's useful for:
- QA/testing the integration with real Citrix Hypervisor instances
- using a debugger to inspect values from a real Citrix Hypervisor instance
Example usage:
$ export TEST_CITRIX_USER='XXXXX' TEST_CITRIX_PASS='XXXXX'
$ TEST_CITRIX_RUN_LAB=true ddev test citrix_hypervisor:py38 -k test_lab
"""
if not is_affirmative(os.environ.get('TEST_CITRIX_RUN_LAB')):
pytest.skip(
"Skipped! Set TEST_CITRIX_RUN_LAB to run this test. "
"TEST_CITRIX_USER and TEST_CITRIX_PASS must also be set."
)
username = os.environ['TEST_CITRIX_USER']
password = os.environ['TEST_CITRIX_PASS']
instances = [
{
'url': 'http://aws.citrixhost/b',
'username': username,
'password': password,
},
{
'url': 'http://aws.citrixhost/c',
'username': username,
'password': password,
},
{
'url': 'http://aws.citrixhost/d',
'username': username,
'password': password,
},
]
for instance in instances:
check = CitrixHypervisorCheck('citrix_hypervisor', {}, [instance])
check._check_connection()
dd_run_check(check)
aggregator.assert_service_check('citrix_hypervisor.can_connect', CitrixHypervisorCheck.OK)
for m in METRICS:
aggregator.assert_metric('citrix_hypervisor.{}'.format(m), at_least=0)
aggregator.assert_all_metrics_covered()
aggregator.assert_metrics_using_metadata(get_metadata_metrics())
aggregator.reset()
| 29.422222
| 98
| 0.651057
|
f76bdcf0b7dd18d292bf8beee536c595663042ef
| 23,611
|
py
|
Python
|
Lib/traceback.py
|
cseci/python
|
82d847087557bcca866dbf6398f1c156e9ff5d3a
|
[
"0BSD"
] | 2,441
|
2020-07-31T06:45:53.000Z
|
2022-03-30T15:56:49.000Z
|
Lib/traceback.py
|
cseci/python
|
82d847087557bcca866dbf6398f1c156e9ff5d3a
|
[
"0BSD"
] | 238
|
2020-10-21T04:54:00.000Z
|
2022-03-31T21:49:03.000Z
|
Lib/traceback.py
|
cseci/python
|
82d847087557bcca866dbf6398f1c156e9ff5d3a
|
[
"0BSD"
] | 93
|
2020-08-09T12:00:17.000Z
|
2022-03-25T07:57:24.000Z
|
"""Extract, format and print information about Python stack traces."""
import collections
import itertools
import linecache
import sys
__all__ = ['extract_stack', 'extract_tb', 'format_exception',
'format_exception_only', 'format_list', 'format_stack',
'format_tb', 'print_exc', 'format_exc', 'print_exception',
'print_last', 'print_stack', 'print_tb', 'clear_frames',
'FrameSummary', 'StackSummary', 'TracebackException',
'walk_stack', 'walk_tb']
#
# Formatting and printing lists of traceback lines.
#
def print_list(extracted_list, file=None):
"""Print the list of tuples as returned by extract_tb() or
extract_stack() as a formatted stack trace to the given file."""
if file is None:
file = sys.stderr
for item in StackSummary.from_list(extracted_list).format():
print(item, file=file, end="")
def format_list(extracted_list):
"""Format a list of tuples or FrameSummary objects for printing.
Given a list of tuples or FrameSummary objects as returned by
extract_tb() or extract_stack(), return a list of strings ready
for printing.
Each string in the resulting list corresponds to the item with the
same index in the argument list. Each string ends in a newline;
the strings may contain internal newlines as well, for those items
whose source text line is not None.
"""
return StackSummary.from_list(extracted_list).format()
#
# Printing and Extracting Tracebacks.
#
def print_tb(tb, limit=None, file=None):
"""Print up to 'limit' stack trace entries from the traceback 'tb'.
If 'limit' is omitted or None, all entries are printed. If 'file'
is omitted or None, the output goes to sys.stderr; otherwise
'file' should be an open file or file-like object with a write()
method.
"""
print_list(extract_tb(tb, limit=limit), file=file)
def format_tb(tb, limit=None):
"""A shorthand for 'format_list(extract_tb(tb, limit))'."""
return extract_tb(tb, limit=limit).format()
def extract_tb(tb, limit=None):
"""
Return a StackSummary object representing a list of
pre-processed entries from traceback.
This is useful for alternate formatting of stack traces. If
'limit' is omitted or None, all entries are extracted. A
pre-processed stack trace entry is a FrameSummary object
containing attributes filename, lineno, name, and line
representing the information that is usually printed for a stack
trace. The line is a string with leading and trailing
whitespace stripped; if the source is not available it is None.
"""
return StackSummary.extract(walk_tb(tb), limit=limit)
#
# Exception formatting and output.
#
_cause_message = (
"\nThe above exception was the direct cause "
"of the following exception:\n\n")
_context_message = (
"\nDuring handling of the above exception, "
"another exception occurred:\n\n")
def print_exception(etype, value, tb, limit=None, file=None, chain=True):
"""Print exception up to 'limit' stack trace entries from 'tb' to 'file'.
This differs from print_tb() in the following ways: (1) if
traceback is not None, it prints a header "Traceback (most recent
call last):"; (2) it prints the exception type and value after the
stack trace; (3) if type is SyntaxError and value has the
appropriate format, it prints the line where the syntax error
occurred with a caret on the next line indicating the approximate
position of the error.
"""
# format_exception has ignored etype for some time, and code such as cgitb
# passes in bogus values as a result. For compatibility with such code we
# ignore it here (rather than in the new TracebackException API).
if file is None:
file = sys.stderr
for line in TracebackException(
type(value), value, tb, limit=limit).format(chain=chain):
print(line, file=file, end="")
def format_exception(etype, value, tb, limit=None, chain=True):
"""Format a stack trace and the exception information.
The arguments have the same meaning as the corresponding arguments
to print_exception(). The return value is a list of strings, each
ending in a newline and some containing internal newlines. When
these lines are concatenated and printed, exactly the same text is
printed as does print_exception().
"""
# format_exception has ignored etype for some time, and code such as cgitb
# passes in bogus values as a result. For compatibility with such code we
# ignore it here (rather than in the new TracebackException API).
return list(TracebackException(
type(value), value, tb, limit=limit).format(chain=chain))
def format_exception_only(etype, value):
"""Format the exception part of a traceback.
The arguments are the exception type and value such as given by
sys.last_type and sys.last_value. The return value is a list of
strings, each ending in a newline.
Normally, the list contains a single string; however, for
SyntaxError exceptions, it contains several lines that (when
printed) display detailed information about where the syntax
error occurred.
The message indicating which exception occurred is always the last
string in the list.
"""
return list(TracebackException(etype, value, None).format_exception_only())
# -- not official API but folk probably use these two functions.
def _format_final_exc_line(etype, value):
valuestr = _some_str(value)
if value is None or not valuestr:
line = "%s\n" % etype
else:
line = "%s: %s\n" % (etype, valuestr)
return line
def _some_str(value):
try:
return str(value)
except:
return '<unprintable %s object>' % type(value).__name__
# --
def print_exc(limit=None, file=None, chain=True):
"""Shorthand for 'print_exception(*sys.exc_info(), limit, file)'."""
print_exception(*sys.exc_info(), limit=limit, file=file, chain=chain)
def format_exc(limit=None, chain=True):
"""Like print_exc() but return a string."""
return "".join(format_exception(*sys.exc_info(), limit=limit, chain=chain))
def print_last(limit=None, file=None, chain=True):
"""This is a shorthand for 'print_exception(sys.last_type,
sys.last_value, sys.last_traceback, limit, file)'."""
if not hasattr(sys, "last_type"):
raise ValueError("no last exception")
print_exception(sys.last_type, sys.last_value, sys.last_traceback,
limit, file, chain)
#
# Printing and Extracting Stacks.
#
def print_stack(f=None, limit=None, file=None):
"""Print a stack trace from its invocation point.
The optional 'f' argument can be used to specify an alternate
stack frame at which to start. The optional 'limit' and 'file'
arguments have the same meaning as for print_exception().
"""
if f is None:
f = sys._getframe().f_back
print_list(extract_stack(f, limit=limit), file=file)
def format_stack(f=None, limit=None):
"""Shorthand for 'format_list(extract_stack(f, limit))'."""
if f is None:
f = sys._getframe().f_back
return format_list(extract_stack(f, limit=limit))
def extract_stack(f=None, limit=None):
"""Extract the raw traceback from the current stack frame.
The return value has the same format as for extract_tb(). The
optional 'f' and 'limit' arguments have the same meaning as for
print_stack(). Each item in the list is a quadruple (filename,
line number, function name, text), and the entries are in order
from oldest to newest stack frame.
"""
if f is None:
f = sys._getframe().f_back
stack = StackSummary.extract(walk_stack(f), limit=limit)
stack.reverse()
return stack
def clear_frames(tb):
"Clear all references to local variables in the frames of a traceback."
while tb is not None:
try:
tb.tb_frame.clear()
except RuntimeError:
# Ignore the exception raised if the frame is still executing.
pass
tb = tb.tb_next
class FrameSummary:
"""A single frame from a traceback.
- :attr:`filename` The filename for the frame.
- :attr:`lineno` The line within filename for the frame that was
active when the frame was captured.
- :attr:`name` The name of the function or method that was executing
when the frame was captured.
    - :attr:`line` The text from the linecache module for the line
      of code that was running when the frame was captured.
- :attr:`locals` Either None if locals were not supplied, or a dict
mapping the name to the repr() of the variable.
"""
__slots__ = ('filename', 'lineno', 'name', '_line', 'locals')
def __init__(self, filename, lineno, name, *, lookup_line=True,
locals=None, line=None):
"""Construct a FrameSummary.
:param lookup_line: If True, `linecache` is consulted for the source
code line. Otherwise, the line will be looked up when first needed.
:param locals: If supplied the frame locals, which will be captured as
object representations.
:param line: If provided, use this instead of looking up the line in
the linecache.
"""
self.filename = filename
self.lineno = lineno
self.name = name
self._line = line
if lookup_line:
self.line
self.locals = {k: repr(v) for k, v in locals.items()} if locals else None
def __eq__(self, other):
if isinstance(other, FrameSummary):
return (self.filename == other.filename and
self.lineno == other.lineno and
self.name == other.name and
self.locals == other.locals)
if isinstance(other, tuple):
return (self.filename, self.lineno, self.name, self.line) == other
return NotImplemented
def __getitem__(self, pos):
return (self.filename, self.lineno, self.name, self.line)[pos]
def __iter__(self):
return iter([self.filename, self.lineno, self.name, self.line])
def __repr__(self):
return "<FrameSummary file {filename}, line {lineno} in {name}>".format(
filename=self.filename, lineno=self.lineno, name=self.name)
def __len__(self):
return 4
@property
def line(self):
if self._line is None:
self._line = linecache.getline(self.filename, self.lineno).strip()
return self._line
def walk_stack(f):
"""Walk a stack yielding the frame and line number for each frame.
This will follow f.f_back from the given frame. If no frame is given, the
current stack is used. Usually used with StackSummary.extract.
"""
if f is None:
f = sys._getframe().f_back.f_back
while f is not None:
yield f, f.f_lineno
f = f.f_back
def walk_tb(tb):
"""Walk a traceback yielding the frame and line number for each frame.
This will follow tb.tb_next (and thus is in the opposite order to
walk_stack). Usually used with StackSummary.extract.
"""
while tb is not None:
yield tb.tb_frame, tb.tb_lineno
tb = tb.tb_next
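# --- Editorial sketch (not part of the stdlib module): shows how walk_tb and
# StackSummary.extract cooperate. walk_tb yields (frame, lineno) pairs from a
# traceback object and extract turns them into FrameSummary entries. The
# helper name is illustrative only.
def _demo_extract_frames():
    try:
        1 / 0
    except ZeroDivisionError:
        tb = sys.exc_info()[2]
    summary = StackSummary.extract(walk_tb(tb))
    return [(f.filename, f.lineno, f.name) for f in summary]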
_RECURSIVE_CUTOFF = 3 # Also hardcoded in traceback.c.
class StackSummary(list):
"""A stack of frames."""
@classmethod
def extract(klass, frame_gen, *, limit=None, lookup_lines=True,
capture_locals=False):
"""Create a StackSummary from a traceback or stack object.
:param frame_gen: A generator that yields (frame, lineno) tuples to
include in the stack.
:param limit: None to include all frames or the number of frames to
include.
:param lookup_lines: If True, lookup lines for each frame immediately,
otherwise lookup is deferred until the frame is rendered.
:param capture_locals: If True, the local variables from each frame will
be captured as object representations into the FrameSummary.
"""
if limit is None:
limit = getattr(sys, 'tracebacklimit', None)
if limit is not None and limit < 0:
limit = 0
if limit is not None:
if limit >= 0:
frame_gen = itertools.islice(frame_gen, limit)
else:
frame_gen = collections.deque(frame_gen, maxlen=-limit)
result = klass()
fnames = set()
for f, lineno in frame_gen:
co = f.f_code
filename = co.co_filename
name = co.co_name
fnames.add(filename)
linecache.lazycache(filename, f.f_globals)
# Must defer line lookups until we have called checkcache.
if capture_locals:
f_locals = f.f_locals
else:
f_locals = None
result.append(FrameSummary(
filename, lineno, name, lookup_line=False, locals=f_locals))
for filename in fnames:
linecache.checkcache(filename)
# If immediate lookup was desired, trigger lookups now.
if lookup_lines:
for f in result:
f.line
return result
@classmethod
def from_list(klass, a_list):
"""
Create a StackSummary object from a supplied list of
FrameSummary objects or old-style list of tuples.
"""
# While doing a fast-path check for isinstance(a_list, StackSummary) is
# appealing, idlelib.run.cleanup_traceback and other similar code may
# break this by making arbitrary frames plain tuples, so we need to
# check on a frame by frame basis.
result = StackSummary()
for frame in a_list:
if isinstance(frame, FrameSummary):
result.append(frame)
else:
filename, lineno, name, line = frame
result.append(FrameSummary(filename, lineno, name, line=line))
return result
def format(self):
"""Format the stack ready for printing.
Returns a list of strings ready for printing. Each string in the
resulting list corresponds to a single frame from the stack.
Each string ends in a newline; the strings may contain internal
newlines as well, for those items with source text lines.
For long sequences of the same frame and line, the first few
repetitions are shown, followed by a summary line stating the exact
number of further repetitions.
"""
result = []
last_file = None
last_line = None
last_name = None
count = 0
for frame in self:
if (last_file is None or last_file != frame.filename or
last_line is None or last_line != frame.lineno or
last_name is None or last_name != frame.name):
if count > _RECURSIVE_CUTOFF:
count -= _RECURSIVE_CUTOFF
result.append(
f' [Previous line repeated {count} more '
f'time{"s" if count > 1 else ""}]\n'
)
last_file = frame.filename
last_line = frame.lineno
last_name = frame.name
count = 0
count += 1
if count > _RECURSIVE_CUTOFF:
continue
row = []
row.append(' File "{}", line {}, in {}\n'.format(
frame.filename, frame.lineno, frame.name))
if frame.line:
row.append(' {}\n'.format(frame.line.strip()))
if frame.locals:
for name, value in sorted(frame.locals.items()):
row.append(' {name} = {value}\n'.format(name=name, value=value))
result.append(''.join(row))
if count > _RECURSIVE_CUTOFF:
count -= _RECURSIVE_CUTOFF
result.append(
f' [Previous line repeated {count} more '
f'time{"s" if count > 1 else ""}]\n'
)
return result
class TracebackException:
"""An exception ready for rendering.
The traceback module captures enough attributes from the original exception
to this intermediary form to ensure that no references are held, while
still being able to fully print or format it.
Use `from_exception` to create TracebackException instances from exception
objects, or the constructor to create TracebackException instances from
individual components.
- :attr:`__cause__` A TracebackException of the original *__cause__*.
- :attr:`__context__` A TracebackException of the original *__context__*.
- :attr:`__suppress_context__` The *__suppress_context__* value from the
original exception.
- :attr:`stack` A `StackSummary` representing the traceback.
    - :attr:`exc_type` The class of the original exception.
- :attr:`filename` For syntax errors - the filename where the error
occurred.
- :attr:`lineno` For syntax errors - the linenumber where the error
occurred.
- :attr:`text` For syntax errors - the text where the error
occurred.
- :attr:`offset` For syntax errors - the offset into the text where the
error occurred.
- :attr:`msg` For syntax errors - the compiler error message.
"""
def __init__(self, exc_type, exc_value, exc_traceback, *, limit=None,
lookup_lines=True, capture_locals=False, _seen=None):
        # NB: we need to accept exc_type, exc_value, exc_traceback to
# permit backwards compat with the existing API, otherwise we
# need stub thunk objects just to glue it together.
# Handle loops in __cause__ or __context__.
if _seen is None:
_seen = set()
_seen.add(id(exc_value))
# Gracefully handle (the way Python 2.4 and earlier did) the case of
# being called with no type or value (None, None, None).
if (exc_value and exc_value.__cause__ is not None
and id(exc_value.__cause__) not in _seen):
cause = TracebackException(
type(exc_value.__cause__),
exc_value.__cause__,
exc_value.__cause__.__traceback__,
limit=limit,
lookup_lines=False,
capture_locals=capture_locals,
_seen=_seen)
else:
cause = None
if (exc_value and exc_value.__context__ is not None
and id(exc_value.__context__) not in _seen):
context = TracebackException(
type(exc_value.__context__),
exc_value.__context__,
exc_value.__context__.__traceback__,
limit=limit,
lookup_lines=False,
capture_locals=capture_locals,
_seen=_seen)
else:
context = None
self.__cause__ = cause
self.__context__ = context
self.__suppress_context__ = \
exc_value.__suppress_context__ if exc_value else False
# TODO: locals.
self.stack = StackSummary.extract(
walk_tb(exc_traceback), limit=limit, lookup_lines=lookup_lines,
capture_locals=capture_locals)
self.exc_type = exc_type
# Capture now to permit freeing resources: only complication is in the
# unofficial API _format_final_exc_line
self._str = _some_str(exc_value)
if exc_type and issubclass(exc_type, SyntaxError):
# Handle SyntaxError's specially
self.filename = exc_value.filename
lno = exc_value.lineno
self.lineno = str(lno) if lno is not None else None
self.text = exc_value.text
self.offset = exc_value.offset
self.msg = exc_value.msg
if lookup_lines:
self._load_lines()
@classmethod
def from_exception(cls, exc, *args, **kwargs):
"""Create a TracebackException from an exception."""
return cls(type(exc), exc, exc.__traceback__, *args, **kwargs)
def _load_lines(self):
"""Private API. force all lines in the stack to be loaded."""
for frame in self.stack:
frame.line
if self.__context__:
self.__context__._load_lines()
if self.__cause__:
self.__cause__._load_lines()
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __str__(self):
return self._str
def format_exception_only(self):
"""Format the exception part of the traceback.
The return value is a generator of strings, each ending in a newline.
Normally, the generator emits a single string; however, for
SyntaxError exceptions, it emits several lines that (when
printed) display detailed information about where the syntax
error occurred.
The message indicating which exception occurred is always the last
string in the output.
"""
if self.exc_type is None:
yield _format_final_exc_line(None, self._str)
return
stype = self.exc_type.__qualname__
smod = self.exc_type.__module__
if smod not in ("__main__", "builtins"):
stype = smod + '.' + stype
if not issubclass(self.exc_type, SyntaxError):
yield _format_final_exc_line(stype, self._str)
return
# It was a syntax error; show exactly where the problem was found.
filename_suffix = ''
if self.lineno is not None:
yield ' File "{}", line {}\n'.format(
self.filename or "<string>", self.lineno)
elif self.filename is not None:
filename_suffix = ' ({})'.format(self.filename)
badline = self.text
offset = self.offset
if badline is not None:
yield ' {}\n'.format(badline.strip())
if offset is not None:
caretspace = badline.rstrip('\n')
offset = min(len(caretspace), offset) - 1
caretspace = caretspace[:offset].lstrip()
# non-space whitespace (likes tabs) must be kept for alignment
caretspace = ((c.isspace() and c or ' ') for c in caretspace)
yield ' {}^\n'.format(''.join(caretspace))
msg = self.msg or "<no detail available>"
yield "{}: {}{}\n".format(stype, msg, filename_suffix)
def format(self, *, chain=True):
"""Format the exception.
If chain is not *True*, *__cause__* and *__context__* will not be formatted.
The return value is a generator of strings, each ending in a newline and
some containing internal newlines. `print_exception` is a wrapper around
this method which just prints the lines to a file.
The message indicating which exception occurred is always the last
string in the output.
"""
if chain:
if self.__cause__ is not None:
yield from self.__cause__.format(chain=chain)
yield _cause_message
elif (self.__context__ is not None and
not self.__suppress_context__):
yield from self.__context__.format(chain=chain)
yield _context_message
if self.stack:
yield 'Traceback (most recent call last):\n'
yield from self.stack.format()
yield from self.format_exception_only()
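# --- Editorial sketch (not part of the stdlib module): the from_exception
# constructor mentioned in the class docstring above, rendered back to text
# with format(). The helper name is illustrative only.
def _demo_traceback_exception():
    try:
        int("not a number")
    except ValueError as exc:
        te = TracebackException.from_exception(exc)
    return "".join(te.format())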
| 38.205502
| 87
| 0.635805
|
c21126a1c5fa31fa64282d6973ddfcb26e5b7a91
| 755
|
py
|
Python
|
manage.py
|
SophieO1970/Pitch
|
9aaf52d2b3e8bcbcf96d332227b04ec0e5f37ac1
|
[
"MIT"
] | null | null | null |
manage.py
|
SophieO1970/Pitch
|
9aaf52d2b3e8bcbcf96d332227b04ec0e5f37ac1
|
[
"MIT"
] | null | null | null |
manage.py
|
SophieO1970/Pitch
|
9aaf52d2b3e8bcbcf96d332227b04ec0e5f37ac1
|
[
"MIT"
] | null | null | null |
from app import create_app, db
from flask_script import Manager, Server
from app.models import User, Pitch, Comment, UpVote, DownVote, PhotoProfile  # all models referenced in make_shell_context below (assumed to live in app.models)
from flask_migrate import Migrate, MigrateCommand
# Creating app instance
app = create_app('development')
manager = Manager(app)
migrate = Migrate(app, db)
manager.add_command('db', MigrateCommand)
manager.add_command('server', Server)
@manager.command
def test():
'''
Run the unit test
'''
import unittest
tests = unittest.TestLoader().discover('tests')
unittest.TextTestRunner(verbosity=2).run(tests)
@manager.shell
def make_shell_context():
return dict(app=app, db=db, User=User, Pitch=Pitch, Comment=Comment, UpVote=UpVote, DownVote=DownVote, PhotoProfile=PhotoProfile)
if __name__ == '__main__':
manager.run()
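# --- Editorial usage note (not part of the original file): with Flask-Script,
# the commands registered above are run from the shell, for example:
#   python manage.py server       # start the development server
#   python manage.py db migrate   # generate a migration via Flask-Migrate
#   python manage.py db upgrade   # apply pending migrations
#   python manage.py test         # run the unit tests defined above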
| 23.59375
| 133
| 0.741722
|
5d67ba40a1587647d63139eb8b0c789758676cb7
| 467
|
py
|
Python
|
pmcts/utilities/indexing.py
|
Weizhe-Chen/pareto-mcts
|
9154029a5668438762807743e452a9ea05cf00bd
|
[
"MIT"
] | 12
|
2021-02-12T21:26:20.000Z
|
2022-02-01T06:54:04.000Z
|
pmcts/utilities/indexing.py
|
Weizhe-Chen/pareto-mcts
|
9154029a5668438762807743e452a9ea05cf00bd
|
[
"MIT"
] | null | null | null |
pmcts/utilities/indexing.py
|
Weizhe-Chen/pareto-mcts
|
9154029a5668438762807743e452a9ea05cf00bd
|
[
"MIT"
] | 3
|
2020-11-11T01:13:24.000Z
|
2021-07-03T19:34:15.000Z
|
import numpy as np
def xy_to_ij(xy, extent, max_row, max_col):
    """Convert continuous (x, y) coordinates into integer (row, col) grid
    indices clipped to [0, max_row] x [0, max_col]. ``xy`` is an (N, 2) array
    and ``extent`` gives the workspace bounds (xmin, xmax, ymin, ymax)."""
    # x -> j
j = (xy[:, 0] - extent[0]) / (extent[1] - extent[0]) * max_col
j = np.round(j, decimals=6)
j[j < 0] = 0
j[j > max_col] = max_col
# y -> i
i = (xy[:, 1] - extent[2]) / (extent[3] - extent[2]) * max_row
i = np.round(i, decimals=6)
i[i < 0] = 0
i[i > max_row] = max_row
# stack
    ij = np.vstack([i.ravel(), j.ravel()]).T.astype(int)  # plain int: np.int was removed in NumPy 1.24
return ij
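# --- Editorial sketch (not part of the original module): maps two made-up
# metric points onto a 10x10 index grid whose extent (xmin, xmax, ymin, ymax)
# is (0, 10, 0, 10). The helper name is hypothetical.
def _demo_xy_to_ij():
    xy = np.array([[0.0, 0.0], [9.9, 5.0]])
    ij = xy_to_ij(xy, extent=(0.0, 10.0, 0.0, 10.0), max_row=9, max_col=9)
    return ij  # array([[0, 0], [4, 8]]): rows come from y, columns from x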
| 25.944444
| 66
| 0.511777
|
2957c75a638ff864ee06c885cde437a6e45b0675
| 1,185
|
py
|
Python
|
tests/unit/flow/test_flow_change_gateway.py
|
Karnak123/jina
|
9eba4feb3afa2e49c779b46f77c5022bdcb944aa
|
[
"Apache-2.0"
] | 4
|
2021-06-29T04:14:31.000Z
|
2021-08-01T07:01:08.000Z
|
tests/unit/flow/test_flow_change_gateway.py
|
Karnak123/jina
|
9eba4feb3afa2e49c779b46f77c5022bdcb944aa
|
[
"Apache-2.0"
] | 1
|
2021-12-25T09:06:13.000Z
|
2021-12-25T09:06:13.000Z
|
tests/unit/flow/test_flow_change_gateway.py
|
Karnak123/jina
|
9eba4feb3afa2e49c779b46f77c5022bdcb944aa
|
[
"Apache-2.0"
] | 1
|
2021-07-05T12:42:40.000Z
|
2021-07-05T12:42:40.000Z
|
import pytest
from jina import Flow
from jina.enums import GatewayProtocolType
from tests import random_docs
@pytest.mark.parametrize('protocol', ['http', 'websocket', 'grpc'])
@pytest.mark.parametrize('changeto_protocol', ['grpc', 'http', 'websocket'])
def test_change_gateway(protocol, changeto_protocol, mocker):
f = Flow(protocol=protocol).add().add().add(needs='pod1').needs_all()
with f:
mock = mocker.Mock()
f.post('/', random_docs(10), on_done=mock)
mock.assert_called()
mock = mocker.Mock()
f.protocol = changeto_protocol
f.post('/', random_docs(10), on_done=mock)
mock.assert_called()
@pytest.mark.parametrize('protocol', ['http', 'websocket', 'grpc'])
def test_get_set_client_gateway_in_flow(protocol):
f = Flow(protocol=protocol, port_expose=12345)
assert f.client_args.protocol == GatewayProtocolType.from_string(protocol)
assert f.gateway_args.protocol == GatewayProtocolType.from_string(protocol)
assert f.client_args.port_expose == 12345
assert f.gateway_args.port_expose == 12345
f.update_network_interface(port_expose=54321)
assert f.client_args.port_expose == 54321
| 34.852941
| 79
| 0.716456
|
5f5fbe58198361c86a353c101d62f4c61ef6c638
| 104
|
py
|
Python
|
apps/forum/apps.py
|
picsldev/pyerp
|
e998e3e99a4e45033d54a6b1df50697f7288f67f
|
[
"MIT"
] | null | null | null |
apps/forum/apps.py
|
picsldev/pyerp
|
e998e3e99a4e45033d54a6b1df50697f7288f67f
|
[
"MIT"
] | null | null | null |
apps/forum/apps.py
|
picsldev/pyerp
|
e998e3e99a4e45033d54a6b1df50697f7288f67f
|
[
"MIT"
] | null | null | null |
# Django libraries
from django.apps import AppConfig
class ForumConfig(AppConfig):
name = 'forum'
| 14.857143
| 33
| 0.75
|
082380bca1f9228a1a8e5bd83a95db06153587a8
| 17,013
|
py
|
Python
|
spearmint/launcher.py
|
pdeboer/Spearmint
|
54d615e0541d457f7947787aaec7260667a34ab8
|
[
"RSA-MD"
] | null | null | null |
spearmint/launcher.py
|
pdeboer/Spearmint
|
54d615e0541d457f7947787aaec7260667a34ab8
|
[
"RSA-MD"
] | null | null | null |
spearmint/launcher.py
|
pdeboer/Spearmint
|
54d615e0541d457f7947787aaec7260667a34ab8
|
[
"RSA-MD"
] | null | null | null |
# -*- coding: utf-8 -*-
# Spearmint
#
# Academic and Non-Commercial Research Use Software License and Terms
# of Use
#
# Spearmint is a software package to perform Bayesian optimization
# according to specific algorithms (the “Software”). The Software is
# designed to automatically run experiments (thus the code name
# 'spearmint') in a manner that iteratively adjusts a number of
# parameters so as to minimize some objective in as few runs as
# possible.
#
# The Software was developed by Ryan P. Adams, Michael Gelbart, and
# Jasper Snoek at Harvard University, Kevin Swersky at the
# University of Toronto (“Toronto”), and Hugo Larochelle at the
# Université de Sherbrooke (“Sherbrooke”), which assigned its rights
# in the Software to Socpra Sciences et Génie
# S.E.C. (“Socpra”). Pursuant to an inter-institutional agreement
# between the parties, it is distributed for free academic and
# non-commercial research use by the President and Fellows of Harvard
# College (“Harvard”).
#
# Using the Software indicates your agreement to be bound by the terms
# of this Software Use Agreement (“Agreement”). Absent your agreement
# to the terms below, you (the “End User”) have no rights to hold or
# use the Software whatsoever.
#
# Harvard agrees to grant hereunder the limited non-exclusive license
# to End User for the use of the Software in the performance of End
# User’s internal, non-commercial research and academic use at End
# User’s academic or not-for-profit research institution
# (“Institution”) on the following terms and conditions:
#
# 1. NO REDISTRIBUTION. The Software remains the property Harvard,
# Toronto and Socpra, and except as set forth in Section 4, End User
# shall not publish, distribute, or otherwise transfer or make
# available the Software to any other party.
#
# 2. NO COMMERCIAL USE. End User shall not use the Software for
# commercial purposes and any such use of the Software is expressly
# prohibited. This includes, but is not limited to, use of the
# Software in fee-for-service arrangements, core facilities or
# laboratories or to provide research services to (or in collaboration
# with) third parties for a fee, and in industry-sponsored
# collaborative research projects where any commercial rights are
# granted to the sponsor. If End User wishes to use the Software for
# commercial purposes or for any other restricted purpose, End User
# must execute a separate license agreement with Harvard.
#
# Requests for use of the Software for commercial purposes, please
# contact:
#
# Office of Technology Development
# Harvard University
# Smith Campus Center, Suite 727E
# 1350 Massachusetts Avenue
# Cambridge, MA 02138 USA
# Telephone: (617) 495-3067
# Facsimile: (617) 495-9568
# E-mail: otd@harvard.edu
#
# 3. OWNERSHIP AND COPYRIGHT NOTICE. Harvard, Toronto and Socpra own
# all intellectual property in the Software. End User shall gain no
# ownership to the Software. End User shall not remove or delete and
# shall retain in the Software, in any modifications to Software and
# in any Derivative Works, the copyright, trademark, or other notices
# pertaining to Software as provided with the Software.
#
# 4. DERIVATIVE WORKS. End User may create and use Derivative Works,
# as such term is defined under U.S. copyright laws, provided that any
# such Derivative Works shall be restricted to non-commercial,
# internal research and academic use at End User’s Institution. End
# User may distribute Derivative Works to other Institutions solely
# for the performance of non-commercial, internal research and
# academic use on terms substantially similar to this License and
# Terms of Use.
#
# 5. FEEDBACK. In order to improve the Software, comments from End
# Users may be useful. End User agrees to provide Harvard with
# feedback on the End User’s use of the Software (e.g., any bugs in
# the Software, the user experience, etc.). Harvard is permitted to
# use such information provided by End User in making changes and
# improvements to the Software without compensation or an accounting
# to End User.
#
# 6. NON ASSERT. End User acknowledges that Harvard, Toronto and/or
# Sherbrooke or Socpra may develop modifications to the Software that
# may be based on the feedback provided by End User under Section 5
# above. Harvard, Toronto and Sherbrooke/Socpra shall not be
# restricted in any way by End User regarding their use of such
# information. End User acknowledges the right of Harvard, Toronto
# and Sherbrooke/Socpra to prepare, publish, display, reproduce,
# transmit and or use modifications to the Software that may be
# substantially similar or functionally equivalent to End User’s
# modifications and/or improvements if any. In the event that End
# User obtains patent protection for any modification or improvement
# to Software, End User agrees not to allege or enjoin infringement of
# End User’s patent against Harvard, Toronto or Sherbrooke or Socpra,
# or any of the researchers, medical or research staff, officers,
# directors and employees of those institutions.
#
# 7. PUBLICATION & ATTRIBUTION. End User has the right to publish,
# present, or share results from the use of the Software. In
# accordance with customary academic practice, End User will
# acknowledge Harvard, Toronto and Sherbrooke/Socpra as the providers
# of the Software and may cite the relevant reference(s) from the
# following list of publications:
#
# Practical Bayesian Optimization of Machine Learning Algorithms
# Jasper Snoek, Hugo Larochelle and Ryan Prescott Adams
# Neural Information Processing Systems, 2012
#
# Multi-Task Bayesian Optimization
# Kevin Swersky, Jasper Snoek and Ryan Prescott Adams
# Advances in Neural Information Processing Systems, 2013
#
# Input Warping for Bayesian Optimization of Non-stationary Functions
# Jasper Snoek, Kevin Swersky, Richard Zemel and Ryan Prescott Adams
# Preprint, arXiv:1402.0929, http://arxiv.org/abs/1402.0929, 2013
#
# Bayesian Optimization and Semiparametric Models with Applications to
# Assistive Technology Jasper Snoek, PhD Thesis, University of
# Toronto, 2013
#
# 8. NO WARRANTIES. THE SOFTWARE IS PROVIDED "AS IS." TO THE FULLEST
# EXTENT PERMITTED BY LAW, HARVARD, TORONTO AND SHERBROOKE AND SOCPRA
# HEREBY DISCLAIM ALL WARRANTIES OF ANY KIND (EXPRESS, IMPLIED OR
# OTHERWISE) REGARDING THE SOFTWARE, INCLUDING BUT NOT LIMITED TO ANY
# IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
# PURPOSE, OWNERSHIP, AND NON-INFRINGEMENT. HARVARD, TORONTO AND
# SHERBROOKE AND SOCPRA MAKE NO WARRANTY ABOUT THE ACCURACY,
# RELIABILITY, COMPLETENESS, TIMELINESS, SUFFICIENCY OR QUALITY OF THE
# SOFTWARE. HARVARD, TORONTO AND SHERBROOKE AND SOCPRA DO NOT WARRANT
# THAT THE SOFTWARE WILL OPERATE WITHOUT ERROR OR INTERRUPTION.
#
# 9. LIMITATIONS OF LIABILITY AND REMEDIES. USE OF THE SOFTWARE IS AT
# END USER’S OWN RISK. IF END USER IS DISSATISFIED WITH THE SOFTWARE,
# ITS EXCLUSIVE REMEDY IS TO STOP USING IT. IN NO EVENT SHALL
# HARVARD, TORONTO OR SHERBROOKE OR SOCPRA BE LIABLE TO END USER OR
# ITS INSTITUTION, IN CONTRACT, TORT OR OTHERWISE, FOR ANY DIRECT,
# INDIRECT, SPECIAL, INCIDENTAL, CONSEQUENTIAL, PUNITIVE OR OTHER
# DAMAGES OF ANY KIND WHATSOEVER ARISING OUT OF OR IN CONNECTION WITH
# THE SOFTWARE, EVEN IF HARVARD, TORONTO OR SHERBROOKE OR SOCPRA IS
# NEGLIGENT OR OTHERWISE AT FAULT, AND REGARDLESS OF WHETHER HARVARD,
# TORONTO OR SHERBROOKE OR SOCPRA IS ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGES.
#
# 10. INDEMNIFICATION. To the extent permitted by law, End User shall
# indemnify, defend and hold harmless Harvard, Toronto and Sherbrooke
# and Socpra, their corporate affiliates, current or future directors,
# trustees, officers, faculty, medical and professional staff,
# employees, students and agents and their respective successors,
# heirs and assigns (the "Indemnitees"), against any liability,
# damage, loss or expense (including reasonable attorney's fees and
# expenses of litigation) incurred by or imposed upon the Indemnitees
# or any one of them in connection with any claims, suits, actions,
# demands or judgments arising from End User’s breach of this
# Agreement or its Institution’s use of the Software except to the
# extent caused by the gross negligence or willful misconduct of
# Harvard, Toronto or Sherbrooke or Socpra. This indemnification
# provision shall survive expiration or termination of this Agreement.
#
# 11. GOVERNING LAW. This Agreement shall be construed and governed by
# the laws of the Commonwealth of Massachusetts regardless of
# otherwise applicable choice of law standards.
#
# 12. NON-USE OF NAME. Nothing in this License and Terms of Use shall
# be construed as granting End Users or their Institutions any rights
# or licenses to use any trademarks, service marks or logos associated
# with the Software. You may not use the terms “Harvard” or
# “University of Toronto” or “Université de Sherbrooke” or “Socpra
# Sciences et Génie S.E.C.” (or a substantially similar term) in any
# way that is inconsistent with the permitted uses described
# herein. You agree not to use any name or emblem of Harvard, Toronto
# or Sherbrooke, or any of their subdivisions for any purpose, or to
# falsely suggest any relationship between End User (or its
# Institution) and Harvard, Toronto and/or Sherbrooke, or in any
# manner that would infringe or violate any of their rights.
#
# 13. End User represents and warrants that it has the legal authority
# to enter into this License and Terms of Use on behalf of itself and
# its Institution.
import os
import sys
import time
import optparse
import subprocess
import numpy as np
filepath = os.path.realpath(__file__)
sys.path.append(os.path.dirname(filepath))
sys.path.append(os.path.abspath( os.path.join(os.path.dirname(filepath), os.pardir) ))
print("using path %s" % sys.path)
from spearmint.utils.database.mongodb import MongoDB
def main():
parser = optparse.OptionParser(usage="usage: %prog [options]")
parser.add_option("--experiment-name", dest="experiment_name",
help="The name of the experiment in the database.",
type="string")
parser.add_option("--database-address", dest="db_address",
help="The address where the database is located.",
type="string")
parser.add_option("--job-id", dest="job_id",
help="The id number of the job to launch in the database.",
type="int")
(options, args) = parser.parse_args()
if not options.experiment_name:
parser.error('Experiment name must be given.')
if not options.db_address:
parser.error('Database address must be given.')
if not options.job_id:
parser.error('Job ID not given or an ID of 0 was used.')
launch(options.db_address, options.experiment_name, options.job_id)
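# --- Editorial usage note (not part of the original launcher): the scheduler
# would invoke this script roughly as
#   python launcher.py --experiment-name=<name> \
#       --database-address=<mongodb-host> --job-id=<id>
# using the options declared in main() above.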
def launch(db_address, experiment_name, job_id):
"""
    Launches a job from the database, given its id.
"""
db = MongoDB(database_address=db_address)
job = db.load(experiment_name, 'jobs', {'id': job_id})
start_time = time.time()
job['start time'] = start_time
db.save(job, experiment_name, 'jobs', {'id': job_id})
sys.stderr.write("Job launching after %0.2f seconds in submission.\n"
% (start_time - job['submit time']))
success = False
try:
if job['language'].lower() == 'matlab':
result = matlab_launcher(job)
elif job['language'].lower() == 'python':
result = python_launcher(job)
elif job['language'].lower() == 'shell':
result = shell_launcher(job)
elif job['language'].lower() == 'mcr':
result = mcr_launcher(job)
else:
raise Exception("That language has not been implemented.")
if not isinstance(result, dict):
# Returning just NaN means NaN on all tasks
if np.isnan(result):
                # The dict comprehension below reportedly fails on some older
                # Python installations:
                # result = {task_name: np.nan for task_name in job['tasks']}
                # so we fall back to the equivalent dict(zip(...)) form.
result = dict(zip(job['tasks'], [np.nan] * len(job['tasks'])))
elif len(job['tasks']) == 1: # Only one named job
result = {job['tasks'][0]: result}
else:
result = {'main': result}
if set(result.keys()) != set(job['tasks']):
raise Exception("Result task names %s did not match job task names %s." % (result.keys(), job['tasks']))
success = True
except:
import traceback
traceback.print_exc()
sys.stderr.write("Problem executing the function\n")
print(sys.exc_info())
end_time = time.time()
if success:
sys.stderr.write("Completed successfully in %0.2f seconds. [%s]\n"
% (end_time - start_time, result))
job['values'] = result
job['status'] = 'complete'
job['end time'] = end_time
else:
sys.stderr.write("Job failed in %0.2f seconds.\n" % (end_time - start_time))
# Update metadata.
job['status'] = 'broken'
job['end time'] = end_time
db.save(job, experiment_name, 'jobs', {'id': job_id})
def python_launcher(job):
# Run a Python function
sys.stderr.write("Running python job.\n")
# Add directory to the system path.
sys.path.append(os.path.realpath(job['expt_dir']))
# Change into the directory.
os.chdir(job['expt_dir'])
sys.stderr.write("Changed into dir %s\n" % (os.getcwd()))
# Convert the JSON object into useful parameters.
params = {}
    for name, param in job['params'].items():
vals = param['values']
if param['type'].lower() == 'float':
params[name] = np.array(vals)
elif param['type'].lower() == 'int':
params[name] = np.array(vals, dtype=int)
elif param['type'].lower() == 'enum':
params[name] = vals
else:
raise Exception("Unknown parameter type.")
# Load up this module and run
main_file = job['main-file']
if main_file[-3:] == '.py':
main_file = main_file[:-3]
sys.stderr.write('Importing %s.py\n' % main_file)
module = __import__(main_file)
sys.stderr.write('Running %s.main()\n' % main_file)
result = module.main(job['id'], params)
# Change back out.
os.chdir('..')
# TODO: add dict capability
sys.stderr.write("Got result %s\n" % (result))
return result
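# Illustrative job layout (an assumption inferred from the fields used above,
# not an official Spearmint schema): python_launcher expects something like
#   job = {'id': 1, 'language': 'python', 'expt_dir': '/path/to/experiment',
#          'main-file': 'objective.py', 'tasks': ['main'],
#          'params': {'x': {'type': 'float', 'values': [0.5]}}}
# where objective.py defines main(job_id, params) and returns a float or a
# dict keyed by task name.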
# BROKEN
def matlab_launcher(job):
# Run it as a Matlab function.
try:
import pymatlab
except:
raise Exception("Cannot import pymatlab. pymatlab is required for Matlab jobs. It is installable with pip.")
sys.stderr.write("Booting up Matlab...\n")
session = pymatlab.session_factory()
# Add directory to the Matlab path.
session.run("cd('%s')" % os.path.realpath(job['expt_dir']))
session.run('params = struct()')
    for name, param in job['params'].items():
vals = param['values']
# sys.stderr.write('%s = %s\n' % (param['name'], str(vals)))
# should have dtype=float explicitly, otherwise
# if they are ints it will automatically do int64, which
# matlab will receive, and will tend to break matlab scripts
# because in matlab things tend to always be double type
session.putvalue('params_%s' % name, np.array(vals, dtype=float))
session.run("params.%s = params_%s" % (name, name))
        # pymatlab cannot put the value directly into a struct, so instead we
        # use this workaround: put it in a plain variable first and then copy
        # it over into the struct.
# session.run('params_%s'%param['name'])
sys.stderr.write('Running function %s\n' % job['function-name'])
# Execute the function
session.run('result = %s(params)' % job['function-name'])
# Get the result
result = session.getvalue('result')
# TODO: this only works for single-task right now
result = float(result)
sys.stderr.write("Got result %s\n" % (result))
del session
return result
# BROKEN
def shell_launcher(job):
# Change into the directory.
os.chdir(job['expt_dir'])
    # NOTE: 'job_file' and 'result' are undefined in this function, so it
    # cannot run as written (hence the BROKEN marker above).
    cmd = './%s %s' % (job['function-name'], job_file)
sys.stderr.write("Executing command '%s'\n" % cmd)
subprocess.check_call(cmd, shell=True)
return result
# BROKEN
def mcr_launcher(job):
# Change into the directory.
os.chdir(job['expt_dir'])
    if 'MATLAB' in os.environ:
        mcr_loc = os.environ['MATLAB']
    else:
        raise Exception("Please set the MATLAB environment variable")
    # NOTE: 'job_file' and 'result' are undefined in this function, so it
    # cannot run as written (hence the BROKEN marker above).
    cmd = './run_%s.sh %s %s' % (job['function-name'], mcr_loc, job_file)
sys.stderr.write("Executing command '%s'\n" % (cmd))
subprocess.check_call(cmd, shell=True)
return result
if __name__ == '__main__':
main()
| 39.93662
| 116
| 0.703168
|
efaf5cfd2bc95a4f04fd69a586c14f3136956cc9
| 688
|
py
|
Python
|
setup.py
|
emhayusa/webinar
|
5fa05e17a0328f3f1fa2bed647830b85f4c17471
|
[
"MIT"
] | null | null | null |
setup.py
|
emhayusa/webinar
|
5fa05e17a0328f3f1fa2bed647830b85f4c17471
|
[
"MIT"
] | null | null | null |
setup.py
|
emhayusa/webinar
|
5fa05e17a0328f3f1fa2bed647830b85f4c17471
|
[
"MIT"
] | null | null | null |
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name='webinar',
version='0.2',
author="Muhammad Hasannudin Yusa",
author_email="emhayusa@gmail.com",
description="A webinar bot package",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/emhayusa/webinar",
packages=setuptools.find_packages(),
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
install_requires=[
'Click',
],
entry_points={
"console_scripts": ['webinar=webinar.main:hello']
},
)
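# Illustrative usage (not part of the original file): after `pip install .`,
# the console_scripts entry point above makes a `webinar` command available
# that calls webinar.main:hello.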
| 25.481481
| 57
| 0.696221
|
ae4bbba5a1c752236e5481d7249208a5c9f4d81f
| 4,553
|
py
|
Python
|
benchmark/startQiskit_noisy1610.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
benchmark/startQiskit_noisy1610.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
benchmark/startQiskit_noisy1610.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
# qubit number=5
# total number=60
import cirq
import qiskit
from qiskit.providers.aer import QasmSimulator
from qiskit.test.mock import FakeVigo
from qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister
from qiskit import BasicAer, execute, transpile
from pprint import pprint
from qiskit.test.mock import FakeVigo
from math import log2,floor, sqrt, pi
import numpy as np
import networkx as nx
def build_oracle(n: int, f) -> QuantumCircuit:
# implement the oracle O_f^\pm
# NOTE: use U1 gate (P gate) with \lambda = 180 ==> CZ gate
# or multi_control_Z_gate (issue #127)
controls = QuantumRegister(n, "ofc")
oracle = QuantumCircuit(controls, name="Zf")
for i in range(2 ** n):
rep = np.binary_repr(i, n)
if f(rep) == "1":
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
# oracle.h(controls[n])
if n >= 2:
oracle.mcu1(pi, controls[1:], controls[0])
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
# oracle.barrier()
return oracle
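# Illustrative usage (an assumption, not part of the original benchmark):
#   oracle = build_oracle(2, lambda rep: str(int(rep == "10")))
# builds a 2-qubit Zf gate that applies a phase flip only to the basis state
# whose bitstring equals the key "10".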
def make_circuit(n:int,f) -> QuantumCircuit:
# circuit begin
input_qubit = QuantumRegister(n,"qc")
classical = ClassicalRegister(n, "qm")
prog = QuantumCircuit(input_qubit, classical)
prog.h(input_qubit[0]) # number=3
prog.h(input_qubit[1]) # number=4
prog.h(input_qubit[2]) # number=5
prog.h(input_qubit[3]) # number=6
prog.h(input_qubit[0]) # number=38
prog.cz(input_qubit[1],input_qubit[0]) # number=39
prog.h(input_qubit[0]) # number=40
prog.h(input_qubit[0]) # number=51
prog.cz(input_qubit[1],input_qubit[0]) # number=52
prog.h(input_qubit[0]) # number=53
prog.h(input_qubit[0]) # number=57
prog.cz(input_qubit[1],input_qubit[0]) # number=58
prog.h(input_qubit[0]) # number=59
prog.z(input_qubit[1]) # number=55
prog.cx(input_qubit[1],input_qubit[0]) # number=56
prog.cx(input_qubit[1],input_qubit[0]) # number=50
prog.h(input_qubit[0]) # number=32
prog.cz(input_qubit[1],input_qubit[0]) # number=33
prog.h(input_qubit[0]) # number=34
prog.h(input_qubit[4]) # number=21
Zf = build_oracle(n, f)
repeat = floor(sqrt(2 ** n) * pi / 4)
for i in range(repeat):
prog.append(Zf.to_gate(), [input_qubit[i] for i in range(n)])
prog.h(input_qubit[0]) # number=1
prog.h(input_qubit[1]) # number=2
prog.h(input_qubit[2]) # number=7
prog.h(input_qubit[3]) # number=8
prog.cx(input_qubit[3],input_qubit[0]) # number=41
prog.z(input_qubit[3]) # number=42
prog.cx(input_qubit[3],input_qubit[0]) # number=43
prog.cx(input_qubit[1],input_qubit[3]) # number=44
prog.cx(input_qubit[3],input_qubit[2]) # number=45
prog.x(input_qubit[0]) # number=9
prog.x(input_qubit[1]) # number=10
prog.x(input_qubit[2]) # number=11
prog.cx(input_qubit[0],input_qubit[3]) # number=35
prog.x(input_qubit[3]) # number=36
prog.cx(input_qubit[0],input_qubit[3]) # number=37
if n>=2:
prog.mcu1(pi,input_qubit[1:],input_qubit[0])
prog.cx(input_qubit[1],input_qubit[0]) # number=24
prog.x(input_qubit[0]) # number=25
prog.cx(input_qubit[1],input_qubit[0]) # number=26
prog.x(input_qubit[1]) # number=14
prog.x(input_qubit[2]) # number=15
prog.x(input_qubit[3]) # number=16
prog.x(input_qubit[3]) # number=46
prog.y(input_qubit[1]) # number=47
prog.h(input_qubit[0]) # number=17
prog.h(input_qubit[1]) # number=18
prog.h(input_qubit[2]) # number=19
prog.h(input_qubit[3]) # number=20
prog.x(input_qubit[1]) # number=22
prog.x(input_qubit[1]) # number=23
# circuit end
for i in range(n):
prog.measure(input_qubit[i], classical[i])
return prog
if __name__ == '__main__':
key = "00000"
f = lambda rep: str(int(rep == key))
prog = make_circuit(5,f)
backend = FakeVigo()
sample_shot =7924
info = execute(prog, backend=backend, shots=sample_shot).result().get_counts()
backend = FakeVigo()
circuit1 = transpile(prog,backend,optimization_level=2)
writefile = open("../data/startQiskit_noisy1610.csv","w")
print(info,file=writefile)
print("results end", file=writefile)
print(circuit1.depth(),file=writefile)
print(circuit1,file=writefile)
writefile.close()
| 32.29078
| 82
| 0.620909
|
ba258ede85e9a3bab517ea3c0573461912de4f6f
| 26,279
|
py
|
Python
|
test/functional/test_runner.py
|
CodeIsTheKey/raptoreum
|
8a44d39f985c503f08969f91e0c946042c173496
|
[
"MIT"
] | 1
|
2021-12-18T04:44:10.000Z
|
2021-12-18T04:44:10.000Z
|
test/functional/test_runner.py
|
CodeIsTheKey/raptoreum
|
8a44d39f985c503f08969f91e0c946042c173496
|
[
"MIT"
] | null | null | null |
test/functional/test_runner.py
|
CodeIsTheKey/raptoreum
|
8a44d39f985c503f08969f91e0c946042c173496
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Run regression test suite.
This module calls down into individual test cases via subprocess. It will
forward all unrecognized arguments onto the individual test scripts.
Functional tests are disabled on Windows by default. Use --force to run them anyway.
For a description of arguments recognized by test scripts, see
`test/functional/test_framework/test_framework.py:BitcoinTestFramework.main`.
"""
import argparse
from collections import deque
import configparser
import datetime
import os
import time
import shutil
import signal
import sys
import subprocess
import tempfile
import re
import logging
# Formatting. Default colors to empty strings.
BOLD, BLUE, RED, GREY = ("", ""), ("", ""), ("", ""), ("", "")
try:
# Make sure python thinks it can write unicode to its stdout
"\u2713".encode("utf_8").decode(sys.stdout.encoding)
TICK = "✓ "
CROSS = "✖ "
CIRCLE = "○ "
except UnicodeDecodeError:
TICK = "P "
CROSS = "x "
CIRCLE = "o "
if os.name == 'posix':
# primitive formatting on supported
# terminal via ANSI escape sequences:
BOLD = ('\033[0m', '\033[1m')
BLUE = ('\033[0m', '\033[0;34m')
RED = ('\033[0m', '\033[0;31m')
GREY = ('\033[0m', '\033[1;30m')
TEST_EXIT_PASSED = 0
TEST_EXIT_SKIPPED = 77
# 30 minutes represented in seconds
TRAVIS_TIMEOUT_DURATION = 30 * 60
BASE_SCRIPTS= [
# Scripts that are run by the travis build process.
# Longest test should go first, to favor running tests in parallel
'feature_dip3_deterministicmns.py', # NOTE: needs dash_hash to pass
'feature_block_reward_reallocation.py',
'feature_llmq_data_recovery.py',
'wallet_hd.py',
'wallet_backup.py',
# vv Tests less than 5m vv
'feature_block.py', # NOTE: needs dash_hash to pass
'rpc_fundrawtransaction.py',
'rpc_fundrawtransaction_hd.py',
'wallet_multiwallet.py --usecli',
'p2p_quorum_data.py',
# vv Tests less than 2m vv
'p2p_instantsend.py',
'wallet_basic.py',
'wallet_labels.py',
'wallet_dump.py',
'wallet_listtransactions.py',
'feature_multikeysporks.py',
'feature_llmq_signing.py', # NOTE: needs dash_hash to pass
'feature_llmq_signing.py --spork21', # NOTE: needs dash_hash to pass
'feature_llmq_chainlocks.py', # NOTE: needs dash_hash to pass
'feature_llmq_connections.py', # NOTE: needs dash_hash to pass
'feature_llmq_simplepose.py', # NOTE: needs dash_hash to pass
'feature_llmq_is_cl_conflicts.py', # NOTE: needs dash_hash to pass
'feature_llmq_is_retroactive.py', # NOTE: needs dash_hash to pass
'feature_llmq_dkgerrors.py', # NOTE: needs dash_hash to pass
'feature_dip4_coinbasemerkleroots.py', # NOTE: needs dash_hash to pass
# vv Tests less than 60s vv
'p2p_sendheaders.py', # NOTE: needs dash_hash to pass
'wallet_zapwallettxes.py',
'wallet_importmulti.py',
'mempool_limit.py',
'rpc_txoutproof.py',
'wallet_listreceivedby.py',
'wallet_abandonconflict.py',
'feature_csv_activation.py',
'rpc_rawtransaction.py',
'feature_reindex.py',
# vv Tests less than 30s vv
'wallet_keypool_topup.py',
'interface_zmq_dash.py',
'interface_zmq.py',
'interface_bitcoin_cli.py',
'mempool_resurrect.py',
'wallet_txn_doublespend.py --mineblock',
'wallet_txn_clone.py',
'rpc_getchaintips.py',
'interface_rest.py',
'mempool_spend_coinbase.py',
'mempool_reorg.py',
'mempool_persist.py',
'wallet_multiwallet.py',
'interface_http.py',
'rpc_users.py',
'feature_proxy.py',
'rpc_signrawtransaction.py',
'p2p_disconnect_ban.py',
'feature_addressindex.py',
'feature_timestampindex.py',
'feature_spentindex.py',
'rpc_decodescript.py',
'rpc_blockchain.py',
'rpc_deprecated.py',
'wallet_disable.py',
'rpc_net.py',
'wallet_keypool.py',
'wallet_keypool_hd.py',
'p2p_mempool.py',
'mining_prioritisetransaction.py',
'p2p_invalid_block.py',
'p2p_invalid_tx.py',
'feature_versionbits_warning.py',
'rpc_preciousblock.py',
'wallet_importprunedfunds.py',
'rpc_zmq.py',
'rpc_signmessage.py',
'feature_nulldummy.py',
'wallet_import_rescan.py',
'rpc_bind.py --ipv4',
'rpc_bind.py --ipv6',
'rpc_bind.py --nonloopback',
'mining_basic.py',
'rpc_named_arguments.py',
'wallet_listsinceblock.py',
'p2p_leak.py',
'p2p_compactblocks.py',
'p2p_connect_to_devnet.py',
'feature_sporks.py',
'rpc_getblockstats.py',
'wallet_encryption.py',
'wallet_upgradetohd.py',
'feature_dersig.py',
'feature_cltv.py',
'feature_new_quorum_type_activation.py',
'feature_governance_objects.py',
'rpc_uptime.py',
'wallet_resendwallettransactions.py',
'feature_minchainwork.py',
'p2p_unrequested_blocks.py', # NOTE: needs dash_hash to pass
'feature_shutdown.py',
'rpc_coinjoin.py',
'rpc_masternode.py',
'rpc_mnauth.py',
'rpc_verifyislock.py',
'rpc_verifychainlock.py',
'p2p_fingerprint.py',
'rpc_platform_filter.py',
'feature_dip0020_activation.py',
'feature_uacomment.py',
'p2p_unrequested_blocks.py',
'feature_logging.py',
'p2p_node_network_limited.py',
'feature_blocksdir.py',
'feature_config_args.py',
'feature_help.py',
# Don't append tests at the end to avoid merge conflicts
# Put them in a random line within the section that fits their approximate run-time
]
EXTENDED_SCRIPTS = [
# These tests are not run by the travis build process.
# Longest test should go first, to favor running tests in parallel
'feature_pruning.py', # NOTE: Prune mode is incompatible with -txindex, should work with governance validation disabled though.
# vv Tests less than 20m vv
'feature_fee_estimation.py',
# vv Tests less than 5m vv
'feature_maxuploadtarget.py',
'mempool_packages.py',
'feature_dbcrash.py',
# vv Tests less than 2m vv
'feature_bip68_sequence.py',
'mining_getblocktemplate_longpoll.py', # FIXME: "socket.error: [Errno 54] Connection reset by peer" on my Mac, same as https://github.com/bitcoin/bitcoin/issues/6651
'p2p_timeouts.py',
# vv Tests less than 60s vv
# vv Tests less than 30s vv
'feature_assumevalid.py',
'example_test.py',
'wallet_txn_doublespend.py',
'wallet_txn_clone.py --mineblock',
'feature_txindex.py',
'feature_notifications.py',
'rpc_invalidateblock.py',
]
# Place EXTENDED_SCRIPTS first since it has the 3 longest running tests
ALL_SCRIPTS = EXTENDED_SCRIPTS + BASE_SCRIPTS
NON_SCRIPTS = [
# These are python files that live in the functional tests directory, but are not test scripts.
"combine_logs.py",
"create_cache.py",
"test_runner.py",
]
def main():
# Parse arguments and pass through unrecognised args
parser = argparse.ArgumentParser(add_help=False,
usage='%(prog)s [test_runner.py options] [script options] [scripts]',
description=__doc__,
epilog='''
Help text and arguments for individual test script:''',
formatter_class=argparse.RawTextHelpFormatter)
parser.add_argument('--combinedlogslen', '-c', type=int, default=0, help='print a combined log (of length n lines) from all test nodes and test framework to the console on failure.')
parser.add_argument('--coverage', action='store_true', help='generate a basic coverage report for the RPC interface')
parser.add_argument('--ci', action='store_true', help='Run checks and code that are usually only enabled in a continuous integration environment')
parser.add_argument('--exclude', '-x', help='specify a comma-separated-list of scripts to exclude.')
parser.add_argument('--extended', action='store_true', help='run the extended test suite in addition to the basic tests')
parser.add_argument('--force', '-f', action='store_true', help='run tests even on platforms where they are disabled by default (e.g. windows).')
parser.add_argument('--help', '-h', '-?', action='store_true', help='print help text and exit')
parser.add_argument('--jobs', '-j', type=int, default=4, help='how many test scripts to run in parallel. Default=4.')
parser.add_argument('--quiet', '-q', action='store_true', help='only print results summary and failure logs')
parser.add_argument('--keepcache', '-k', action='store_true', help='the default behavior is to flush the cache directory on startup. --keepcache retains the cache from the previous testrun.')
parser.add_argument('--tmpdirprefix', '-t', default=tempfile.gettempdir(), help="Root directory for datadirs")
parser.add_argument('--failfast', action='store_true', help='stop execution after the first test failure')
args, unknown_args = parser.parse_known_args()
    # args to be passed on always start with two dashes; tests are the remaining unknown args
tests = [arg for arg in unknown_args if arg[:2] != "--"]
passon_args = [arg for arg in unknown_args if arg[:2] == "--"]
# Read config generated by configure.
config = configparser.ConfigParser()
configfile = os.path.abspath(os.path.dirname(__file__)) + "/../config.ini"
config.read_file(open(configfile, encoding="utf8"))
passon_args.append("--configfile=%s" % configfile)
# Set up logging
logging_level = logging.INFO if args.quiet else logging.DEBUG
logging.basicConfig(format='%(message)s', level=logging_level)
# Create base test directory
tmpdir = "%s/raptoreum_test_runner_%s" % (args.tmpdirprefix, datetime.datetime.now().strftime("%Y%m%d_%H%M%S"))
os.makedirs(tmpdir)
logging.debug("Temporary test directory at %s" % tmpdir)
enable_wallet = config["components"].getboolean("ENABLE_WALLET")
enable_utils = config["components"].getboolean("ENABLE_UTILS")
enable_bitcoind = config["components"].getboolean("ENABLE_BITCOIND")
if config["environment"]["EXEEXT"] == ".exe" and not args.force:
# https://github.com/bitcoin/bitcoin/commit/d52802551752140cf41f0d9a225a43e84404d3e9
# https://github.com/bitcoin/bitcoin/pull/5677#issuecomment-136646964
print("Tests currently disabled on Windows by default. Use --force option to enable")
sys.exit(0)
if not (enable_wallet and enable_utils and enable_bitcoind):
print("No functional tests to run. Wallet, utils, and raptoreumd must all be enabled")
print("Rerun `configure` with -enable-wallet, -with-utils and -with-daemon and rerun make")
sys.exit(0)
# Build list of tests
test_list = []
if tests:
# Individual tests have been specified. Run specified tests that exist
# in the ALL_SCRIPTS list. Accept the name with or without .py extension.
        tests = [re.sub(r"\.py$", "", test) + ".py" for test in tests]
for test in tests:
if test in ALL_SCRIPTS:
test_list.append(test)
else:
print("{}WARNING!{} Test '{}' not found in full test list.".format(BOLD[1], BOLD[0], test))
elif args.extended:
# Include extended tests
test_list += ALL_SCRIPTS
else:
# Run base tests only
test_list += BASE_SCRIPTS
# Remove the test cases that the user has explicitly asked to exclude.
if args.exclude:
        exclude_tests = [re.sub(r"\.py$", "", test) + ".py" for test in args.exclude.split(',')]
for exclude_test in exclude_tests:
if exclude_test in test_list:
test_list.remove(exclude_test)
else:
print("{}WARNING!{} Test '{}' not found in current test list.".format(BOLD[1], BOLD[0], exclude_test))
if not test_list:
print("No valid test scripts specified. Check that your test is in one "
"of the test lists in test_runner.py, or run test_runner.py with no arguments to run all tests")
sys.exit(0)
if args.help:
# Print help for test_runner.py, then print help of the first script (with args removed) and exit.
parser.print_help()
subprocess.check_call([sys.executable, os.path.join(config["environment"]["SRCDIR"], 'test', 'functional', test_list[0].split()[0]), '-h'])
sys.exit(0)
check_script_list(src_dir=config["environment"]["SRCDIR"], fail_on_warn=args.ci)
check_script_prefixes()
if not args.keepcache:
shutil.rmtree("%s/test/cache" % config["environment"]["BUILDDIR"], ignore_errors=True)
run_tests(
test_list=test_list,
src_dir=config["environment"]["SRCDIR"],
build_dir=config["environment"]["BUILDDIR"],
tmpdir=tmpdir,
jobs=args.jobs,
enable_coverage=args.coverage,
args=passon_args,
failfast=args.failfast,
runs_ci=args.ci,
combined_logs_len=args.combinedlogslen,
)
def run_tests(*, test_list, src_dir, build_dir, tmpdir, jobs=1, enable_coverage=False, args=None, failfast=False, runs_ci, combined_logs_len=0):
args = args or []
# Warn if raptoreumd is already running (unix only)
try:
pidof_output = subprocess.check_output(["pidof", "raptoreumd"])
if not (pidof_output is None or pidof_output == b''):
print("%sWARNING!%s There is already a raptoreumd process running on this system. Tests may fail unexpectedly due to resource contention!" % (BOLD[1], BOLD[0]))
except (OSError, subprocess.SubprocessError):
pass
# Warn if there is a cache directory
cache_dir = "%s/test/cache" % build_dir
if os.path.isdir(cache_dir):
print("%sWARNING!%s There is a cache directory here: %s. If tests fail unexpectedly, try deleting the cache directory." % (BOLD[1], BOLD[0], cache_dir))
tests_dir = src_dir + '/test/functional/'
flags = ["--srcdir={}/src".format(build_dir)] + args
flags.append("--cachedir=%s" % cache_dir)
if enable_coverage:
coverage = RPCCoverage()
flags.append(coverage.flag)
logging.debug("Initializing coverage directory at %s" % coverage.dir)
else:
coverage = None
if len(test_list) > 1 and jobs > 1:
# Populate cache
try:
subprocess.check_output([sys.executable, tests_dir + 'create_cache.py'] + flags + ["--tmpdir=%s/cache" % tmpdir])
except subprocess.CalledProcessError as e:
sys.stdout.buffer.write(e.output)
raise
#Run Tests
job_queue = TestHandler(
num_tests_parallel=jobs,
tests_dir=tests_dir,
tmpdir=tmpdir,
test_list=test_list,
flags=flags,
timeout_duration=TRAVIS_TIMEOUT_DURATION if runs_ci else float('inf'), # in seconds
)
start_time = time.time()
test_results = []
max_len_name = len(max(test_list, key=len))
for _ in range(len(test_list)):
test_result, testdir, stdout, stderr = job_queue.get_next()
test_results.append(test_result)
if test_result.status == "Passed":
logging.debug("\n%s%s%s passed, Duration: %s s" % (BOLD[1], test_result.name, BOLD[0], test_result.time))
elif test_result.status == "Skipped":
logging.debug("\n%s%s%s skipped" % (BOLD[1], test_result.name, BOLD[0]))
else:
print("\n%s%s%s failed, Duration: %s s\n" % (BOLD[1], test_result.name, BOLD[0], test_result.time))
print(BOLD[1] + 'stdout:\n' + BOLD[0] + stdout + '\n')
print(BOLD[1] + 'stderr:\n' + BOLD[0] + stderr + '\n')
if combined_logs_len and os.path.isdir(testdir):
# Print the final `combinedlogslen` lines of the combined logs
print('{}Combine the logs and print the last {} lines ...{}'.format(BOLD[1], combined_logs_len, BOLD[0]))
print('\n============')
print('{}Combined log for {}:{}'.format(BOLD[1], testdir, BOLD[0]))
print('============\n')
combined_logs, _ = subprocess.Popen([sys.executable, os.path.join(tests_dir, 'combine_logs.py'), '-c', testdir], universal_newlines=True, stdout=subprocess.PIPE).communicate()
print("\n".join(deque(combined_logs.splitlines(), combined_logs_len)))
if failfast:
logging.debug("Early exiting after test failure")
break
print_results(test_results, max_len_name, (int(time.time() - start_time)))
if coverage:
coverage.report_rpc_coverage()
logging.debug("Cleaning up coverage data")
coverage.cleanup()
# Clear up the temp directory if all subdirectories are gone
if not os.listdir(tmpdir):
os.rmdir(tmpdir)
all_passed = all(map(lambda test_result: test_result.was_successful, test_results))
# This will be a no-op unless failfast is True in which case there may be dangling
# processes which need to be killed.
job_queue.kill_and_join()
sys.exit(not all_passed)
def print_results(test_results, max_len_name, runtime):
results = "\n" + BOLD[1] + "%s | %s | %s\n\n" % ("TEST".ljust(max_len_name), "STATUS ", "DURATION") + BOLD[0]
test_results.sort(key=TestResult.sort_key)
all_passed = True
time_sum = 0
for test_result in test_results:
all_passed = all_passed and test_result.was_successful
time_sum += test_result.time
test_result.padding = max_len_name
results += str(test_result)
status = TICK + "Passed" if all_passed else CROSS + "Failed"
if not all_passed:
results += RED[1]
results += BOLD[1] + "\n%s | %s | %s s (accumulated) \n" % ("ALL".ljust(max_len_name), status.ljust(9), time_sum) + BOLD[0]
if not all_passed:
results += RED[0]
results += "Runtime: %s s\n" % (runtime)
print(results)
class TestHandler:
"""
Trigger the test scripts passed in via the list.
"""
def __init__(self, *, num_tests_parallel, tests_dir, tmpdir, test_list, flags, timeout_duration):
assert num_tests_parallel >= 1
self.num_jobs = num_tests_parallel
self.tests_dir = tests_dir
self.tmpdir = tmpdir
self.timeout_duration = timeout_duration
self.test_list = test_list
self.flags = flags
self.num_running = 0
# In case there is a graveyard of zombie dashds, we can apply a
# pseudorandom offset to hopefully jump over them.
# (625 is PORT_RANGE/MAX_NODES)
self.portseed_offset = int(time.time() * 1000) % 625
self.jobs = []
def get_next(self):
while self.num_running < self.num_jobs and self.test_list:
# Add tests
self.num_running += 1
test = self.test_list.pop(0)
portseed = len(self.test_list) + self.portseed_offset
portseed_arg = ["--portseed={}".format(portseed)]
log_stdout = tempfile.SpooledTemporaryFile(max_size=2**16)
log_stderr = tempfile.SpooledTemporaryFile(max_size=2**16)
test_argv = test.split()
testdir = "{}/{}_{}".format(self.tmpdir, re.sub(".py$", "", test_argv[0]), portseed)
tmpdir_arg = ["--tmpdir={}".format(testdir)]
self.jobs.append((test,
time.time(),
subprocess.Popen([sys.executable, self.tests_dir + test_argv[0]] + test_argv[1:] + self.flags + portseed_arg + tmpdir_arg,
universal_newlines=True,
stdout=log_stdout,
stderr=log_stderr),
testdir,
log_stdout,
log_stderr))
if not self.jobs:
raise IndexError('pop from empty list')
while True:
# Return first proc that finishes
time.sleep(.5)
for job in self.jobs:
(name, start_time, proc, testdir, log_out, log_err) = job
if int(time.time() - start_time) > self.timeout_duration:
# In travis, timeout individual tests (to stop tests hanging and not providing useful output).
proc.send_signal(signal.SIGINT)
if proc.poll() is not None:
log_out.seek(0), log_err.seek(0)
[stdout, stderr] = [log_file.read().decode('utf-8') for log_file in (log_out, log_err)]
log_out.close(), log_err.close()
if proc.returncode == TEST_EXIT_PASSED and stderr == "":
status = "Passed"
elif proc.returncode == TEST_EXIT_SKIPPED:
status = "Skipped"
else:
status = "Failed"
self.num_running -= 1
self.jobs.remove(job)
return TestResult(name, status, int(time.time() - start_time)), testdir, stdout, stderr
print('.', end='', flush=True)
def kill_and_join(self):
"""Send SIGKILL to all jobs and block until all have ended."""
procs = [i[2] for i in self.jobs]
for proc in procs:
proc.kill()
for proc in procs:
proc.wait()
class TestResult():
def __init__(self, name, status, time):
self.name = name
self.status = status
self.time = time
self.padding = 0
def sort_key(self):
if self.status == "Passed":
return 0, self.name.lower()
elif self.status == "Failed":
return 2, self.name.lower()
elif self.status == "Skipped":
return 1, self.name.lower()
def __repr__(self):
if self.status == "Passed":
color = BLUE
glyph = TICK
elif self.status == "Failed":
color = RED
glyph = CROSS
elif self.status == "Skipped":
color = GREY
glyph = CIRCLE
return color[1] + "%s | %s%s | %s s\n" % (self.name.ljust(self.padding), glyph, self.status.ljust(7), self.time) + color[0]
@property
def was_successful(self):
return self.status != "Failed"
def check_script_prefixes():
"""Check that test scripts start with one of the allowed name prefixes."""
good_prefixes_re = re.compile("(example|feature|interface|mempool|mining|p2p|rpc|wallet)_")
bad_script_names = [script for script in ALL_SCRIPTS if good_prefixes_re.match(script) is None]
if bad_script_names:
print("%sERROR:%s %d tests not meeting naming conventions:" % (BOLD[1], BOLD[0], len(bad_script_names)))
print(" %s" % ("\n ".join(sorted(bad_script_names))))
raise AssertionError("Some tests are not following naming convention!")
def check_script_list(*, src_dir, fail_on_warn):
"""Check scripts directory.
Check that there are no scripts in the functional tests directory which are
not being run by pull-tester.py."""
script_dir = src_dir + '/test/functional/'
python_files = set([test_file for test_file in os.listdir(script_dir) if test_file.endswith(".py")])
missed_tests = list(python_files - set(map(lambda x: x.split()[0], ALL_SCRIPTS + NON_SCRIPTS)))
if len(missed_tests) != 0:
print("%sWARNING!%s The following scripts are not being run: %s. Check the test lists in test_runner.py." % (BOLD[1], BOLD[0], str(missed_tests)))
if fail_on_warn:
# On travis this warning is an error to prevent merging incomplete commits into master
sys.exit(1)
class RPCCoverage():
"""
Coverage reporting utilities for test_runner.
Coverage calculation works by having each test script subprocess write
coverage files into a particular directory. These files contain the RPC
commands invoked during testing, as well as a complete listing of RPC
commands per `raptoreum-cli help` (`rpc_interface.txt`).
After all tests complete, the commands run are combined and diff'd against
the complete list to calculate uncovered RPC commands.
See also: test/functional/test_framework/coverage.py
"""
def __init__(self):
self.dir = tempfile.mkdtemp(prefix="coverage")
self.flag = '--coveragedir=%s' % self.dir
def report_rpc_coverage(self):
"""
Print out RPC commands that were unexercised by tests.
"""
uncovered = self._get_uncovered_rpc_commands()
if uncovered:
print("Uncovered RPC commands:")
print("".join((" - %s\n" % command) for command in sorted(uncovered)))
else:
print("All RPC commands covered.")
def cleanup(self):
return shutil.rmtree(self.dir)
def _get_uncovered_rpc_commands(self):
"""
Return a set of currently untested RPC commands.
"""
# This is shared from `test/functional/test-framework/coverage.py`
reference_filename = 'rpc_interface.txt'
coverage_file_prefix = 'coverage.'
coverage_ref_filename = os.path.join(self.dir, reference_filename)
coverage_filenames = set()
all_cmds = set()
covered_cmds = set()
if not os.path.isfile(coverage_ref_filename):
raise RuntimeError("No coverage reference found")
with open(coverage_ref_filename, 'r', encoding="utf8") as coverage_ref_file:
all_cmds.update([line.strip() for line in coverage_ref_file.readlines()])
for root, dirs, files in os.walk(self.dir):
for filename in files:
if filename.startswith(coverage_file_prefix):
coverage_filenames.add(os.path.join(root, filename))
for filename in coverage_filenames:
with open(filename, 'r', encoding="utf8") as coverage_file:
covered_cmds.update([line.strip() for line in coverage_file.readlines()])
return all_cmds - covered_cmds
if __name__ == '__main__':
main()
| 40.243492
| 195
| 0.642719
|
0e154dbd21f4977e4e5baabee1ddf840bd6df148
| 201
|
py
|
Python
|
app.py
|
Prodops3pg/dockerdemo
|
c7bd967d568b4225c2cf022cfebf211b3b47dc58
|
[
"MIT"
] | null | null | null |
app.py
|
Prodops3pg/dockerdemo
|
c7bd967d568b4225c2cf022cfebf211b3b47dc58
|
[
"MIT"
] | null | null | null |
app.py
|
Prodops3pg/dockerdemo
|
c7bd967d568b4225c2cf022cfebf211b3b47dc58
|
[
"MIT"
] | 1
|
2018-08-11T06:02:55.000Z
|
2018-08-11T06:02:55.000Z
|
# app.py
from flask import Flask
app = Flask(__name__)
app.config.from_pyfile('config.py')
@app.route('/ping')
def ping():
return 'PONG'
if __name__ == '__main__':
app.run(host='0.0.0.0', port=80)
| 16.75
| 35
| 0.676617
|
a54f5ecd0b8da6555841a2c118a9a84bb95c9dfe
| 2,257
|
py
|
Python
|
addons14/document_page/wizard/document_page_create_menu.py
|
odoochain/addons_oca
|
55d456d798aebe16e49b4a6070765f206a8885ca
|
[
"MIT"
] | 1
|
2021-06-10T14:59:13.000Z
|
2021-06-10T14:59:13.000Z
|
addons14/document_page/wizard/document_page_create_menu.py
|
odoochain/addons_oca
|
55d456d798aebe16e49b4a6070765f206a8885ca
|
[
"MIT"
] | null | null | null |
addons14/document_page/wizard/document_page_create_menu.py
|
odoochain/addons_oca
|
55d456d798aebe16e49b4a6070765f206a8885ca
|
[
"MIT"
] | 1
|
2021-04-09T09:44:44.000Z
|
2021-04-09T09:44:44.000Z
|
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import api, fields, models
class DocumentPageCreateMenu(models.TransientModel):
"""Create Menu."""
_name = "document.page.create.menu"
_description = "Wizard Create Menu"
menu_name = fields.Char("Menu Name", required=True)
menu_parent_id = fields.Many2one("ir.ui.menu", "Parent Menu", required=True)
@api.model
def default_get(self, fields_list):
"""Get Page name of the menu."""
res = super().default_get(fields_list)
page_id = self.env.context.get("active_id")
obj_page = self.env["document.page"]
page = obj_page.browse(page_id)
res["menu_name"] = page.name
return res
def document_page_menu_create(self):
"""Menu creation."""
obj_page = self.env["document.page"]
obj_menu = self.env["ir.ui.menu"]
obj_action = self.env["ir.actions.act_window"]
obj_model_data = self.env["ir.model.data"]
page_id = self.env.context.get("active_id", False)
page = obj_page.browse(page_id)
data = self[0]
view_id = obj_model_data.sudo().get_object_reference(
"document_page", "view_wiki_menu_form"
)[1]
value = {
"name": "Document Page",
"view_mode": "form,tree",
"res_model": "document.page",
"view_id": view_id,
"type": "ir.actions.act_window",
"target": "current",
}
value["domain"] = "[('parent_id','=',%d)]" % page.id
value["res_id"] = page.id
# only the super user is allowed to create menu due to security rules
# on ir.values
# see.: http://goo.gl/Y99S7V
action_id = obj_action.sudo().create(value)
menu_id = obj_menu.sudo().create(
{
"name": data.menu_name,
"parent_id": data.menu_parent_id.id,
"action": "ir.actions.act_window," + str(action_id.id),
}
)
if page.menu_id:
page.menu_id.unlink()
page.write({"menu_id": menu_id.id})
return {"type": "ir.actions.client", "tag": "reload"}
| 34.19697
| 80
| 0.576429
|
0536ea8e065cf6a9712e6285f00ccc1fcc2e85d4
| 3,299
|
py
|
Python
|
cryspy/A_functions_base/function_1_gamma_nu.py
|
ikibalin/rhochi
|
1ca03f18dc72006322a101ed877cdbba33ed61e7
|
[
"MIT"
] | null | null | null |
cryspy/A_functions_base/function_1_gamma_nu.py
|
ikibalin/rhochi
|
1ca03f18dc72006322a101ed877cdbba33ed61e7
|
[
"MIT"
] | null | null | null |
cryspy/A_functions_base/function_1_gamma_nu.py
|
ikibalin/rhochi
|
1ca03f18dc72006322a101ed877cdbba33ed61e7
|
[
"MIT"
] | null | null | null |
"""
Functions:
- gammanu_to_tthphi
- tthphi_to_gammanu
- recal_int_to_tthphi_grid
- recal_int_to_gammanu_grid
- app_grid
- app_2d
"""
import math
def gammanu_to_tthphi(gamma, nu):
"""Transfer gamma-nu to ttheta-phi.
gamma
is the angle in the equatorial plane
nu
        is the angle between the equatorial plane and the scattered neutron
[-pi/2, pi/2]
return
ttheta is diffraction angle
phi is polar angle
"""
ttheta = math.acos(math.cos(gamma)*math.cos(nu))
phi = math.atan2(math.sin(nu), math.cos(nu)*math.sin(gamma))
return ttheta, phi
def tthphi_to_gammanu(tth, phi):
"""Transfer ttheta-phi to gamma-nu."""
gamma = math.atan2(math.cos(phi)*math.sin(tth), math.cos(tth))
nu = math.asin(math.sin(tth)*math.sin(phi))
return gamma, nu
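# Illustrative round-trip check (not part of the original module): the two
# transforms are mutual inverses, e.g.
#   tth, phi = gammanu_to_tthphi(0.3, 0.1)
#   gamma, nu = tthphi_to_gammanu(tth, phi)
# recovers gamma ~= 0.3 and nu ~= 0.1 up to floating-point error.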
def recal_int_to_tthphi_grid(l_gamma_grid, l_nu_grid, ll_int_grid,
l_ttheta_grid, l_phi_grid):
l_point = []
for phi in l_phi_grid:
for tth in l_ttheta_grid:
point = tthphi_to_gammanu(tth, phi)
l_point.append(point)
l_int_dang_grid = app_grid(ll_int_grid, l_gamma_grid, l_nu_grid, l_point)
n_tth = len(l_ttheta_grid)
lint_out = [[l_int_dang_grid[ind_tth+ind_phi*n_tth]
for ind_tth, tth in enumerate(l_ttheta_grid)]
for ind_phi, phi in enumerate(l_phi_grid)]
return lint_out
def recal_int_to_gammanu_grid(ltth_grid, l_phi_grid, ll_int_grid,
l_gamma_grid, l_nu_grid):
l_point = []
for nu in l_nu_grid:
for gamma in l_gamma_grid:
point = gammanu_to_tthphi(gamma, nu)
l_point.append(point)
l_int_dang_grid = app_grid(ll_int_grid, ltth_grid, l_phi_grid, l_point)
n_gamma = len(l_gamma_grid)
lint_out = [[l_int_dang_grid[ind_gamma+ind_nu*n_gamma]
for ind_gamma, gamma in enumerate(l_gamma_grid)]
for ind_nu, nu in enumerate(l_nu_grid)]
return lint_out
def app_grid(mat_xy, x_grid, y_grid, l_point):
l_res = []
min_x, max_x = float(x_grid[0]), float(x_grid[-1])
min_y, max_y = float(y_grid[0]), float(y_grid[-1])
n_x, n_y = len(x_grid), len(y_grid)
step_x = (max_x - min_x)/float(n_x-1)
step_y = (max_y - min_y)/float(n_y-1)
for ipoint, point in enumerate(l_point):
try:
val_x = point[0]
val_y = point[1]
except Exception:
return []
n_tot_x = (val_x - min_x)/step_x
n_tot_y = (val_y - min_y)/step_y
if all([(n_tot_x >= 0.), (n_tot_x < float(n_x-1)),
(n_tot_y >= 0.), (n_tot_y < float(n_y-1))]):
nx = n_tot_x % 1.
ny = n_tot_y % 1.
ind_x = int(n_tot_x//1.)
ind_y = int(n_tot_y//1.)
res = app_2d(mat_xy[ind_y][ind_x], mat_xy[ind_y][ind_x+1],
mat_xy[ind_y+1][ind_x], mat_xy[ind_y+1][ind_x+1],
nx, ny)
else:
res = None
l_res.append(res)
return l_res
def app_2d(f11, f12, f21, f22, nx, ny):
try:
res2 = f21 - nx*(f21-f22)
res1 = f11 - nx*(f11-f12)
res = res1 - ny*(res1-res2)
except Exception:
res = None
return res
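# Illustrative check (not part of the original module): bilinear interpolation
# of the corner values 0, 1, 2, 3 at the cell centre returns their mean, i.e.
# app_2d(0., 1., 2., 3., 0.5, 0.5) == 1.5.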
| 29.19469
| 77
| 0.588663
|
c3c4650a9519652a1b2ad75bd06cbc3a1eb1203e
| 1,959
|
py
|
Python
|
var/spack/repos/builtin/packages/gem5/package.py
|
weijianwen/spack
|
7ad58b47e4890b3c120e0e35ccf297f90e388e5d
|
[
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 1
|
2019-12-10T12:41:38.000Z
|
2019-12-10T12:41:38.000Z
|
var/spack/repos/builtin/packages/gem5/package.py
|
weijianwen/spack
|
7ad58b47e4890b3c120e0e35ccf297f90e388e5d
|
[
"ECL-2.0",
"Apache-2.0",
"MIT"
] | null | null | null |
var/spack/repos/builtin/packages/gem5/package.py
|
weijianwen/spack
|
7ad58b47e4890b3c120e0e35ccf297f90e388e5d
|
[
"ECL-2.0",
"Apache-2.0",
"MIT"
] | null | null | null |
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Gem5(SConsPackage):
"""The gem5 simulator is a modular platform
for computer-system architecture research,
encompassing system-level architecture
as well as processor microarchitecture."""
homepage = "http://www.gem5.org"
url = "https://github.com/gem5/gem5/archive/stable_2015_09_03.tar.gz"
version('2015_09_03', 'a7e926d1a64b302b38a10d6bf57bfb2d')
depends_on('m4', type='build')
depends_on('swig', type='build')
depends_on('python')
depends_on('zlib')
# def build_args(self, spec, prefix):
# # FIXME: Add arguments to pass to build.
# # FIXME: If not needed delete this function
# args = []
# return args
| 39.979592
| 78
| 0.668709
|
e830080c62ce8e1ec28f0fa56e4a72c3b8620102
| 56
|
py
|
Python
|
hordak/models/__init__.py
|
audience-platform/django-hordak
|
aa3a18438136a020794b1c0b10603dd78fa7aa76
|
[
"MIT"
] | 187
|
2016-12-12T10:58:11.000Z
|
2022-03-27T08:14:19.000Z
|
hordak/models/__init__.py
|
audience-platform/django-hordak
|
aa3a18438136a020794b1c0b10603dd78fa7aa76
|
[
"MIT"
] | 62
|
2016-12-10T00:12:47.000Z
|
2022-03-16T09:23:05.000Z
|
hordak/models/__init__.py
|
audience-platform/django-hordak
|
aa3a18438136a020794b1c0b10603dd78fa7aa76
|
[
"MIT"
] | 47
|
2016-12-12T11:07:31.000Z
|
2022-03-15T20:30:07.000Z
|
from .core import *
from .statement_csv_import import *
| 18.666667
| 35
| 0.785714
|
c1298aebb71f1922d86c3d1ea4007e5e8dbf31f0
| 6,223
|
py
|
Python
|
code/gaussian/plot_gaussian_results.py
|
davidwhogg/DiffractionMicroscopy
|
948849db85e5fc58cc1ef9e6bd57a6d77dacee86
|
[
"MIT"
] | 3
|
2015-12-28T17:06:25.000Z
|
2019-06-18T15:49:41.000Z
|
code/gaussian/plot_gaussian_results.py
|
davidwhogg/DiffractionMicroscopy
|
948849db85e5fc58cc1ef9e6bd57a6d77dacee86
|
[
"MIT"
] | 15
|
2015-12-28T17:14:32.000Z
|
2016-09-05T00:12:04.000Z
|
code/gaussian/plot_gaussian_results.py
|
davidwhogg/DiffractionMicroscopy
|
948849db85e5fc58cc1ef9e6bd57a6d77dacee86
|
[
"MIT"
] | null | null | null |
"""
This file is part of the DiffractionMicroscopy project.
Copyright 2015 David W. Hogg (NYU).
"""
import glob
import numpy as np
import pickle as cp
import pylab as plt
from gaussian import *
Truth = 1. / np.array([47., 13., 11.]) # MUST BE ALIGNED WITH gaussian.py
def hogg_savefig(fn, **kwargs):
print("writing file", fn)
return plt.savefig(fn, **kwargs)
def read_all_pickle_files(log2NK):
"""
Must be synchronized strictly with `gaussian.py`.
"""
Ns = []
Ks = []
ivars = []
models = []
iterations = []
for log2K in range(0, 9):
log2N = log2NK - log2K
template = "./??/model_{:02d}_{:02d}_??.pkl".format(log2N, log2K)
fns = glob.glob(template)
M = len(fns)
if M == 0:
continue
for i,fn in enumerate(fns):
iteration = int(fn[2:4])
print(fn, fn[2:4], iteration)
try:
model, x0, x1, x2, sixf = read_pickle_file(fn)
except: # DFM would be upset here
continue
iterations.append(iteration)
models.append(model)
N, K = model.N, model.K
Ns.append(N)
Ks.append(K)
ivars.append(np.exp(x2))
return models, np.array(Ns), np.array(Ks), np.array(ivars), np.array(iterations)
def plot_datum(model, n):
datum = model.get_datum(n)
plt.plot(datum[:,0], datum[:,1], "k.", alpha=1./(model.K ** 0.25))
return None
def plot_posterior_sampling(model, n):
# compute lnlikes as in gaussian.py
lnlikes = -0.5 * model.get_chisquareds(n) + 0.5 * model.K * model.get_lndets()
# importance sample
keep = np.random.uniform(size=len(lnlikes)) < np.exp(lnlikes - np.max(lnlikes))
ivarts = model.get_ivarts()[keep]
shortT, two, twoo = ivarts.shape
assert two == 2
assert twoo == 2
ivartsample = ivarts[np.random.randint(shortT, size=16),:,:]
for ivart in ivartsample:
a, b = np.linalg.eigh(ivart)
l1, l2 = 1. / np.sqrt(a) # sqrt because want deviation not variance
v1, v2 = b[:,0], b[:,1]
tiny = 0.001
thetas = np.arange(0., 2. * np.pi + tiny, tiny)
r = l1 * v1[None,:] * np.cos(thetas)[:,None] + l2 * v2[None,:] * np.sin(thetas)[:,None]
r = r * 2. # just because!
plt.plot(r[:,0], r[:,1], "r-", alpha=0.25)
return None
def plot_data(model, sampling=False):
log2N = (np.round(np.log2(model.N))+0.001).astype("int")
log2K = (np.round(np.log2(model.K))+0.001).astype("int")
prefix = "{:02d}_{:02d}".format(log2N, log2K)
if sampling:
prefix = "sampling_" + prefix
else:
prefix = "data_" + prefix
plt.figure(figsize=(12,6))
plt.clf()
plt.subplots_adjust(bottom=0.06, top=0.94, left=0.06, right=0.94,
wspace=0.25, hspace=0.25)
nex = np.min((18, model.N))
for n in range(nex):
plt.subplot(3, 6, n+1)
plt.xticks(rotation=45)
plt.yticks(rotation=45)
plot_datum(model, n)
if sampling:
plot_posterior_sampling(model, n)
if (n+1) != 13: # magic
plt.gca().get_xaxis().set_ticklabels([])
plt.gca().get_yaxis().set_ticklabels([])
plt.axis("equal")
plt.xlim(-20., 20)
plt.ylim(plt.xlim())
plt.title("image {}".format(n))
return hogg_savefig(prefix + ".png")
def divergence(iv1, iv2):
"""
Hard-coded to 3-d diagonals.
"""
return 0.5 * (np.sum(iv1 / iv2) + np.sum(iv2 / iv1) - 6)
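# Illustrative check (not part of the original script): identical diagonal
# inverse-variances give zero divergence, e.g. divergence(Truth, Truth) == 0.0.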
def _hoggmedian(foo):
if len(foo) == 0:
return np.nan
else:
return np.median(foo)
def plot_divergences(Ns, Ks, ivars):
divs = np.array([divergence(ivar, Truth) for ivar in ivars])
small = (Ns * Ks) < 300
med = ((Ns * Ks) > 3000) * ((Ns * Ks) < 5000)
big = (Ns * Ks) > 60000
Ksteps = 2. ** np.arange(0, 9)
mediansmalldivs = np.array([_hoggmedian((divs[small])[np.isclose(Ks[small], Kstep)]) for Kstep in Ksteps])
medianmeddivs = np.array([_hoggmedian((divs[med])[np.isclose(Ks[med], Kstep)]) for Kstep in Ksteps])
medianbigdivs = np.array([_hoggmedian((divs[big])[np.isclose(Ks[big], Kstep)]) for Kstep in Ksteps])
plt.clf()
plt.axhline(np.median(divs[small]), color="k", alpha=0.25)
plt.axhline(np.median(divs[med] ), color="k", alpha=0.25)
plt.axhline(np.median(divs[big] ), color="k", alpha=0.25)
plt.plot(Ks[small], divs[small], "k_", ms= 6, alpha=0.5)
plt.plot(Ks[med], divs[med], "k_", ms=12, alpha=0.5)
plt.plot(Ks[big], divs[big], "k_", ms=18, alpha=0.5)
good = np.isfinite(mediansmalldivs)
plt.plot(Ksteps[good], mediansmalldivs[good], "k_", ms= 6, mew=4)
plt.plot(Ksteps, medianmeddivs, "k_", ms=12, mew=4)
plt.plot(Ksteps, medianbigdivs, "k_", ms=18, mew=4)
plt.loglog()
plt.xlim(np.min(Ks) / 1.5, np.max(Ks) * 1.5)
plt.ylim(np.nanmedian(divs[big]) / 30., np.nanmedian(divs[small]) * 30.)
plt.xlabel("number of photons per image $K$")
plt.ylabel("divergence from the Truth")
hogg_savefig("divergences.png")
return None
if __name__ == "__main__":
np.random.seed(23)
# read data
models, Ns, Ks, ivars, iterations = read_all_pickle_files(12)
models2, Ns2, Ks2, ivars2, iterations2 = read_all_pickle_files(16)
models3, Ns3, Ks3, ivars3, iterations3 = read_all_pickle_files(8)
models = np.append(models, models2)
models = np.append(models, models3)
Ns = np.append(Ns, Ns2)
Ns = np.append(Ns, Ns3)
Ks = np.append(Ks, Ks2)
Ks = np.append(Ks, Ks3)
ivars = np.vstack((ivars, ivars2))
ivars = np.vstack((ivars, ivars3))
iterations = np.append(iterations, iterations2)
iterations = np.append(iterations, iterations3)
print(len(models), Ns.shape, Ks.shape, ivars.shape)
# make summary plots
plot_divergences(Ns, Ks, ivars)
if False:
# make data plots
for log2N, log2K in [(16, 0), (12, 4), (8, 8)]:
thismodel = (np.where((Ns == 2 ** log2N) * (Ks == 2 ** log2K) * (iterations == 0))[0])[0]
model = models[thismodel]
print(model.get_ivar(), ivars[thismodel])
plot_data(model)
plot_data(model, sampling=True)
| 35.357955
| 110
| 0.585891
|
c9aa94a3f6fcfe2b4ba6eca2337c02eaf30d1fdb
| 43,061
|
py
|
Python
|
test_gql/tests.py
|
SegFaulti4/lingvodoc
|
8b296b43453a46b814d3cd381f94382ebcb9c6a6
|
[
"Apache-2.0"
] | 5
|
2017-03-30T18:02:11.000Z
|
2021-07-20T16:02:34.000Z
|
test_gql/tests.py
|
SegFaulti4/lingvodoc
|
8b296b43453a46b814d3cd381f94382ebcb9c6a6
|
[
"Apache-2.0"
] | 15
|
2016-02-24T13:16:59.000Z
|
2021-09-03T11:47:15.000Z
|
test_gql/tests.py
|
Winking-maniac/lingvodoc
|
f037bf0e91ccdf020469037220a43e63849aa24a
|
[
"Apache-2.0"
] | 22
|
2015-09-25T07:13:40.000Z
|
2021-08-04T18:08:26.000Z
|
import pytest
import os
from subprocess import PIPE, Popen
from configparser import ConfigParser
import transaction
from pyramid.httpexceptions import HTTPForbidden
from pyramid.paster import get_appsettings
from lingvodoc.scripts.initializedb import data_init
#from lingvodoc.utils.creation import create_dbdictionary
#from lingvodoc.schema.gql_holders import ResponseError
from lingvodoc.utils.search import translation_gist_search
from lingvodoc.models import (
Dictionary,
DBSession,
Group,
BaseGroup,
Language,
)
from graphene.test import Client
#from lingvodoc.scheme import schema
from sqlalchemy import create_engine
import webtest.http
import webtest
from pyramid import paster
class TestConnection:
def setup_class(self):
self.alembic_ini_path = os.path.join(
os.path.dirname(__file__), 'testing.ini')
parser = ConfigParser()
parser.read(self.alembic_ini_path)
alembic_conf = dict()
for k, v in parser.items('alembic'):
alembic_conf[k] = v
dbname = alembic_conf['sqlalchemy.url']
my_env = os.environ
pathdir = os.path.dirname(os.path.realpath(__file__))
bashcommand = "alembic -c %s upgrade head" % self.alembic_ini_path
myapp = paster.get_app(self.alembic_ini_path)
self.app = webtest.TestApp(myapp)
args = bashcommand.split()
pathdir = "/".join(pathdir.split("/")[:-1])
proc = Popen(args, cwd=pathdir, env=my_env)
proc.communicate()
self.ws = webtest.http.StopableWSGIServer.create(myapp, port=6543, host="0.0.0.0") # todo: change to pserve
self.ws.wait()
self.set_server_is_up = True
accounts = get_appsettings(self.alembic_ini_path, 'accounts')
# Create dictionary for tests
#engine = create_engine(dbname)
#DBSession.configure(bind=engine)
#self.create_dbdictionary(id=[1, None], parent_id=[1, 13])
data_init(transaction.manager, accounts, dbname)
# Get admin's and user's headers
self.admin_header = self.return_header_for_admin(self)
self.user_header = self.return_header_of_new_user(self)
#with open(os.path.dirname(__file__) + '/test_file.txt', 'w', encoding='utf-8') as file:
# file.write("one")
a = 2+1
print(a)
def teardown_class(self):
bashcommand = "alembic -c %s downgrade base" % self.alembic_ini_path
args = bashcommand.split()
pathdir = os.path.dirname(os.path.realpath(__file__))
pathdir = "/".join(pathdir.split("/")[:-1])
my_env = os.environ
proc = Popen(args, cwd=pathdir, env=my_env)
#proc.communicate()
# def create_dbdictionary(id=None,
# parent_id=None,
# translation_gist_id=None,
# additional_metadata=None,
# domain=0,
# category=0):
# client_id, object_id = id
#
# if not parent_id:
# raise
# parent_client_id, parent_object_id = parent_id
# translation_gist_client_id, translation_gist_object_id = translation_gist_id if translation_gist_id else (
# None, None)
#
# duplicate_check = DBSession.query(Dictionary).filter_by(client_id=client_id, object_id=object_id).all()
# if duplicate_check:
# raise
# parent = DBSession.query(Language).filter_by(client_id=parent_client_id, object_id=parent_object_id).first()
# if not parent:
# raise
#
# resp = translation_gist_search("WiP")
# state_translation_gist_object_id, state_translation_gist_client_id = resp.object_id, resp.client_id
# dbdictionary_obj = Dictionary(client_id=client_id,
# object_id=object_id,
# state_translation_gist_object_id=state_translation_gist_object_id,
# state_translation_gist_client_id=state_translation_gist_client_id,
# parent=parent,
# translation_gist_client_id=translation_gist_client_id,
# translation_gist_object_id=translation_gist_object_id,
# additional_metadata=additional_metadata,
# domain=domain,
# category=category
# )
#
# client = DBSession.query(Client).filter_by(id=client_id).first()
# user = client.user
# for base in DBSession.query(BaseGroup).filter_by(dictionary_default=True):
# new_group = Group(parent=base,
# subject_object_id=dbdictionary_obj.object_id,
# subject_client_id=dbdictionary_obj.client_id)
# if user not in new_group.users:
# new_group.users.append(user)
# DBSession.add(new_group)
# DBSession.flush()
# return dbdictionary_obj
def graphql_request(self, query, variables="{}", header="", content_type="application/json"):
# if variables is None:
# params = query
# else:
#params = '{"variables":' + variables + ', "query": "' + query + '"}'
params = '{"variables":%s, "query": "%s"}' % (variables, query)
params = params.replace("\n", " ").replace("\t", ' ')
#params = params.replace("\\'", "")
response = self.app.post('/graphql',
params=params,
content_type=content_type,
headers={"Cookie": header})
return response
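    # Illustrative call (an assumption; the query string here is hypothetical):
    #   self.graphql_request('query { dictionaries { id } }',
    #                        header=self.admin_header)
    # posts the query to /graphql with the admin session cookie attached.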
def get_cookie_from_headers(self, headers):
string_headers = str(headers)
start_of_auth = string_headers.find("auth_tkt=", 0, 120)
finish_of_auth = string_headers.find("Max-Age=315360000", 120, 290)
auth_tkt = string_headers[start_of_auth:finish_of_auth]
start_of_client_id = string_headers.find("client_id", 900, 1000)
finish_of_client_id = string_headers.find("; Max-Age=315360000", start_of_client_id, start_of_client_id + 30)
client_id = string_headers[start_of_client_id:finish_of_client_id]
cookie = auth_tkt + "locale_id=2; " + client_id
return cookie
def return_header_for_admin(self):
response = self.app.post('/login', params={'login': "admin",
'password': 'password'}, headers={"Cookie": "locale_id=2"})
return self.get_cookie_from_headers(self, response.headers)
def return_header_of_new_user(self):
self.app.post('/signup', params='''{"login":"tester",
"name":"tester",
"email":"tester@test.com",
"password":"password",
"day":2,
"month":3,
"year":1980}''',
content_type="application/json",
headers = {"Cookie" : "locale_id=2"})
#response = self.app.post('/logout')
response = self.app.post('/login', params={'login': "tester",
'password': "password"}, headers={"Cookie" : "locale_id=2"})
return self.get_cookie_from_headers(self, response.headers)
# def test_hey(self, snapshot):
# response = self.app.post('/graphql',
# params='''query myQuery {
# dictionary(id: [2, 2]) {
# id
# translation
# parent_id
# marked_for_deletion
# translation_gist_id
# additional_metadata {
# blob_description
# }
# perspectives{id translation}
#
# }
# }''',
# content_type = "application/graphql",
# headers = {"Cookie" : self.admin_header})
# snapshot.assert_match(response.json_body)
#
#
# def test_test(self, snapshot):
# variables = '{"id": [%s, %s]}' % ('2', '2')
# query = '''query myQuery($id:LingvodocID){
# dictionary(id: $id) {
# id
# }}'''
# response = self.graphql_request(query, variables)
# snapshot.assert_match(response.json_body)
#
#################################################################
# Languages tests
#################################################################
def test_USE_BANHAMMER(self, snapshot):
query = '''mutation {
activate_deactivate_user(user_id: 3, is_active: false) { triumph } }
'''
response = self.graphql_request(query, header=self.admin_header)
snapshot.assert_match(response.json_body)
def test_give_roles_to_user(self, snapshot):
query = '''mutation Add_roles {
add_perspective_roles(id: [2, 3], user_id:3 , roles_users:[8, 12, 13, 15, 20, 21, 22, 23, 24, 26]){
triumph
}
}'''
response = self.graphql_request(query, header=self.admin_header)
snapshot.assert_match(response.json_body)
def test_get_language(self, snapshot):
variables = '{"id": [%s, %s]}' % ('1', '13') # English language
query = '''query Language ($id: LingvodocID){
language(id: $id) {
id
translation
parent_id
translation_gist_id
marked_for_deletion
dictionaries {
id
parent_id
translation
translation_gist_id
marked_for_deletion
state_translation_gist_id
category
domain
status
}
}
}'''
response = self.graphql_request(query, variables)
snapshot.assert_match(response.json_body)
def test_get_all_languages(self, snapshot):
query = '''query LanguagesList{
languages {
id
translation
parent_id
translation_gist_id
marked_for_deletion
dictionaries {
id
parent_id
translation
translation_gist_id
marked_for_deletion
state_translation_gist_id
category
domain
status
}
}
}'''
response = self.graphql_request(query)
snapshot.assert_match(response.json_body)
def test_create_language_with_old_gist_none_header(self, snapshot):
query = '''mutation create_language{
create_language(translation_gist_id: [1, 4],
parent_id: [1, 13])
{
language {
id
translation_gist_id
}
}
}'''
response = self.graphql_request(query)
snapshot.assert_match(response.json_body)
def test_create_language_with_old_gist_user_header(self, snapshot):
query = '''mutation create_language{
create_language(translation_gist_id: [1, 4],
parent_id: [1, 13])
{
language {
id
translation_gist_id
}
}
}'''
response = self.graphql_request(query, header=self.user_header)
snapshot.assert_match(response.json_body)
def test_create_language_with_old_gist_admin_header(self, snapshot):
query = '''mutation create_language{
create_language(translation_gist_id: [1, 4],
parent_id: [1, 13])
{
language {
id
translation_gist_id
}
}
}'''
response = self.graphql_request(query, header=self.admin_header)
snapshot.assert_match(response.json_body)
def test_create_language_with_new_gist_none_header(self, snapshot):
objectval = '[{"locale_id": 2, "content": "test content"}]'
variables = '{"atoms": %s}' % objectval
query = '''mutation create_language ($atoms: [ObjectVal]){
create_language (translation_atoms: $atoms)
{
language {
id
translation_gist_id
}
}
}'''
response = self.graphql_request(query, variables)
snapshot.assert_match(response.json_body)
def test_create_language_with_new_gist_user_header(self, snapshot):
objectval = '[{"locale_id": 2, "content": "test content"}]'
variables = '{"atoms": %s}' % objectval
query = '''mutation create_language ($atoms: [ObjectVal]){
create_language (translation_atoms: $atoms)
{
language {
id
translation_gist_id
}
}
}'''
response = self.graphql_request(query, variables, header=self.user_header)
snapshot.assert_match(response.json_body)
def test_create_language_with_new_gist_admin_header(self, snapshot):
objectval = '[{"locale_id": 2, "content": "test content 2"}]'
variables = '{"atoms": %s}' % objectval
query = '''mutation create_language ($atoms: [ObjectVal]){
create_language (translation_atoms: $atoms)
{
language {
id
translation_gist_id
}
}
}'''
response = self.graphql_request(query, variables, header=self.admin_header)
snapshot.assert_match(response.json_body)
def test_update_language_no_header(self, snapshot):
variables = '{"lang_id": [%s, %s], "gist_id": [%s, %s]}'%('1', '14', '1', '2') # Finnish language, Russian gist
query = '''mutation update_lang($lang_id: LingvodocID!, $gist_id: LingvodocID!) {
update_language(id: $lang_id, translation_gist_id: $gist_id) {
language {
id
translation_gist_id
}
}
}'''
response = self.graphql_request(query, variables)
snapshot.assert_match(response.json_body)
def test_update_language_user_header(self, snapshot):
variables = '{"lang_id": [%s, %s], "gist_id": [%s, %s]}'%('1', '14', '1', '2') # Finnish language, Russian gist
query = '''mutation update_lang($lang_id: LingvodocID!, $gist_id: LingvodocID!) {
update_language(id: $lang_id, translation_gist_id: $gist_id) {
language {
id
translation_gist_id
}
}
}'''
response = self.graphql_request(query, variables, header=self.user_header)
snapshot.assert_match(response.json_body)
def test_update_language_admin_header(self, snapshot):
variables = '{"lang_id": [%s, %s], "gist_id": [%s, %s]}'%('1', '14', '1', '2') # Finnish language, Russian gist
query = '''mutation update_lang($lang_id: LingvodocID!, $gist_id: LingvodocID!) {
update_language(id: $lang_id, translation_gist_id: $gist_id) {
language {
id
translation_gist_id
}
}
}'''
response = self.graphql_request(query, variables, header=self.admin_header)
snapshot.assert_match(response.json_body)
def test_delete_language_no_header(self, snapshot):
variables = '{"id": [%s, %s]}' % ('1', '14') # Finnish language
query = '''mutation del_language($id: LingvodocID!) {
delete_language(id: $id) {
triumph
language {
id
translation
marked_for_deletion
}
}
}'''
response = self.graphql_request(query, variables)
snapshot.assert_match(response.json_body)
def test_delete_language_user_header(self, snapshot):
variables = '{"id": [%s, %s]}' % ('1', '14') # Finnish language
query = '''mutation del_language($id: LingvodocID!) {
delete_language(id: $id) {
triumph
language {
id
translation
marked_for_deletion
}
}
}'''
response = self.graphql_request(query, variables, header=self.user_header)
snapshot.assert_match(response.json_body)
def test_delete_language_admin_header(self, snapshot):
variables = '{"id": [%s, %s]}' % ('1', '14') # Finnish language
query = '''mutation del_language($id: LingvodocID!) {
delete_language(id: $id) {
triumph
language {
id
translation
marked_for_deletion
}
}
}'''
response = self.graphql_request(query, variables, header=self.admin_header)
snapshot.assert_match(response.json_body)
#################################################################
# Entities tests
#################################################################
def test_get_entity(self, snapshot):
variables = '{"id": [%s, %s]}' % ('2', '22')
query = '''query getEntities ($id: LingvodocID!){
entity(id: $id) {
id
marked_for_deletion
parent_id
content
}
}'''
response = self.graphql_request(query, variables)
snapshot.assert_match(response.json_body)
def test_create_entity_no_header(self, snapshot):
variables = '{"parent_id": [%s, %s], "field_id": [%s, %s], "content": "%s"}' % (
'2', '18', '2', '5', 'test_content')
query = '''mutation createEntity ($parent_id: LingvodocID!, $field_id: LingvodocID!, $content: String){
create_entity(parent_id: $parent_id, field_id: $field_id, content: $content) {
entity {
id
parent_id
content
marked_for_deletion
}
triumph
}
}'''
response = self.graphql_request(query, variables)
snapshot.assert_match(response.json_body)
def test_create_entity_user_header(self, snapshot):
variables = '{"parent_id": [%s, %s], "field_id": [%s, %s], "content": "%s"}' % (
'2', '18', '2', '5', 'test_content')
query = '''mutation createEntity ($parent_id: LingvodocID!, $field_id: LingvodocID!, $content: String){
create_entity(parent_id: $parent_id, field_id: $field_id, content: $content) {
entity {
id
parent_id
content
marked_for_deletion
published
}
triumph
}
}'''
response = self.graphql_request(query, variables, header=self.user_header)
snapshot.assert_match(response.json_body)
def test_create_entity_admin_header(self, snapshot):
variables = '{"parent_id": [%s, %s], "field_id": [%s, %s], "content": "%s"}' % (
'2', '18', '2', '5', 'test_content')
query = '''mutation createEntity ($parent_id: LingvodocID!, $field_id: LingvodocID!, $content: String){
create_entity(parent_id: $parent_id, field_id: $field_id, content: $content) {
entity {
id
parent_id
content
marked_for_deletion
published
}
triumph
}
}'''
response = self.graphql_request(query, variables, header=self.admin_header)
snapshot.assert_match(response.json_body)
def test_update_entity_no_header(self, snapshot):
variables = '{"id": [%s, %s], "published": %s}' % ('2', '22', 'true')
query = '''mutation updateEntity ($id: LingvodocID!, $published: Boolean){
update_entity(id: $id, published: $published) {
entity {
id
parent_id
content
marked_for_deletion
published
}
triumph
}
}'''
response = self.graphql_request(query, variables)
snapshot.assert_match(response.json_body)
def test_update_entity_user_header(self, snapshot):
variables = '{"id": [%s, %s], "published": %s}' % ('2', '23', 'true')
query = '''mutation updateEntity ($id: LingvodocID!, $published: Boolean){
update_entity(id: $id, published: $published) {
entity {
id
parent_id
content
marked_for_deletion
published
}
triumph
}
}'''
response = self.graphql_request(query, variables, header=self.user_header)
snapshot.assert_match(response.json_body)
def test_update_entity_admin_header(self, snapshot):
variables = '{"id": [%s, %s], "published": %s}' % ('2', '22', 'true')
query = '''mutation updateEntity ($id: LingvodocID!, $published: Boolean){
update_entity(id: $id, published: $published) {
entity {
id
parent_id
content
marked_for_deletion
published
}
triumph
}
}'''
response = self.graphql_request(query, variables, header=self.admin_header)
snapshot.assert_match(response.json_body)
def test_delete_entity_no_header(self, snapshot):
variables = '{"id": [%s, %s]}' % ('2', '23')
query = '''mutation deleteEntity ($id: LingvodocID!){
delete_entity(id: $id) {
entity {
id
parent_id
content
marked_for_deletion
published
}
triumph
}
}'''
response = self.graphql_request(query, variables)
snapshot.assert_match(response.json_body)
def test_delete_entity_not_owner_header(self, snapshot):
variables = '{"id": [%s, %s]}' % ('2', '24')
query = '''mutation deleteEntity ($id: LingvodocID!){
delete_entity(id: $id) {
entity {
id
parent_id
content
marked_for_deletion
published
}
triumph
}
}'''
response = self.graphql_request(query, variables, header=self.user_header)
snapshot.assert_match(response.json_body)
def test_delete_entity_owner_header(self, snapshot):
#variables = '{"id": [%s, %s]}' % ('4', '3')
variables = '{"id": [%s, %s]}' % ('2', '22')
query = '''mutation deleteEntity ($id: LingvodocID!){
delete_entity(id: $id) {
entity {
id
parent_id
content
marked_for_deletion
published
}
triumph
}
}'''
response = self.graphql_request(query, variables, header=self.user_header)
snapshot.assert_match(response.json_body)
def test_delete_entity_admin_header(self, snapshot):
variables = '{"id": [%s, %s]}' % ('2', '25')
query = '''mutation deleteEntity ($id: LingvodocID!){
delete_entity(id: $id) {
entity {
id
parent_id
content
marked_for_deletion
published
}
triumph
}
}'''
response = self.graphql_request(query, variables, header=self.admin_header)
snapshot.assert_match(response.json_body)
#################################################################
# Gists tests
#################################################################
def test_get_gist(self, snapshot):
variables = '{"id": [%s, %s]}' % ('1', '193')
query = """query getTranslationGist($id:LingvodocID){
translationgist (id: $id) {
id
marked_for_deletion
type
translation
translationatoms {
id
parent_id
marked_for_deletion
content
}
}
}"""
response = self.graphql_request(query, variables)
snapshot.assert_match(response.json_body)
def test_create_gist_no_header(self, snapshot):
variables = '{"type": "%s"}' % ("Text")
query = """mutation create_trans_gist($type: String!){
create_translationgist(type: $type) {
translationgist {
id
type
marked_for_deletion
translation
translationatoms{
id
parent_id
content
}
}
triumph
}
}"""
response = self.graphql_request(query, variables)
snapshot.assert_match(response.json_body)
def test_create_gist_user_header(self, snapshot):
variables = '{"type": "%s"}' % ("Text")
query = """mutation create_trans_gist($type: String!){
create_translationgist(type: $type) {
translationgist {
id
type
marked_for_deletion
translation
translationatoms{
id
parent_id
content
}
}
triumph
}
}"""
response = self.graphql_request(query, variables, header=self.user_header)
snapshot.assert_match(response.json_body)
def test_create_gist_admin_header(self, snapshot):
variables = '{"type": "%s"}' % ("Text")
query = """mutation create_trans_gist($type: String!){
create_translationgist(type: $type) {
translationgist {
id
type
marked_for_deletion
translation
translationatoms{
id
parent_id
content
}
}
triumph
}
}"""
response = self.graphql_request(query, variables, header=self.admin_header)
snapshot.assert_match(response.json_body)
def test_delete_gist_no_header(self, snapshot):
variables = '{"id": [%s, %s]}' % ('4', '4')
query = """mutation delete_translationgist($id: LingvodocID!){
delete_translationgist(id: $id) {
translationgist {
id
type
marked_for_deletion
translation
translationatoms{
id
parent_id
content
}
}
triumph
}
}"""
response = self.graphql_request(query, variables)
snapshot.assert_match(response.json_body)
def test_delete_gist_not_owners_header(self, snapshot):
variables = '{"id": [%s, %s]}' % ('3', '4')
query = """mutation delete_translationgist($id: LingvodocID!){
delete_translationgist(id: $id) {
translationgist {
id
type
marked_for_deletion
translation
translationatoms{
id
parent_id
content
}
}
triumph
}
}"""
response = self.graphql_request(query, variables, header=self.user_header)
snapshot.assert_match(response.json_body)
def test_delete_gist_owner_header(self, snapshot):
variables = '{"id": [%s, %s]}' % ('4', '7')
query = """mutation delete_translationgist($id: LingvodocID!){
delete_translationgist(id: $id) {
translationgist {
id
type
marked_for_deletion
translation
translationatoms{
id
parent_id
content
}
}
triumph
}
}"""
response = self.graphql_request(query, variables, header=self.user_header)
snapshot.assert_match(response.json_body)
def test_delete_gist_admin_header(self, snapshot):
variables = '{"id": [%s, %s]}' % ('3', '7')
query = """mutation delete_translationgist($id: LingvodocID!){
delete_translationgist(id: $id) {
translationgist {
id
type
marked_for_deletion
translation
translationatoms{
id
parent_id
content
}
}
triumph
}
}"""
response = self.graphql_request(query, variables, header=self.admin_header)
snapshot.assert_match(response.json_body)
#################################################################
# Atoms tests
#################################################################
def test_get_atom(self, snapshot):
variables = '{"id": [%s, %s]}' % ('1', '8')
query = '''query getAtom ($id: LingvodocID!){
translationatom(id: $id) {
id
parent_id
marked_for_deletion
content
locale_id
}
}'''
response = self.graphql_request(query, variables)
snapshot.assert_match(response.json_body)
def test_create_atom_no_header(self, snapshot):
variables = '{"parent_id": [%s, %s], "locale_id": %s, "content": "%s"}' % ('1', '61', '7', "test atom content")
query = """mutation create_atom ($parent_id: LingvodocID!, $locale_id: Int!, $content: String!){
create_translationatom(parent_id: $parent_id, locale_id: $locale_id, content: $content) {
translationatom{
id
parent_id
content
marked_for_deletion
locale_id
}
triumph
}
}"""
response = self.graphql_request(query, variables)
snapshot.assert_match(response.json_body)
def test_create_atom_user_header(self, snapshot):
variables = '{"parent_id": [%s, %s], "locale_id": %s, "content": "%s"}' % ('1', '61', '5', "test atom content")
query = """mutation create_atom ($parent_id: LingvodocID!, $locale_id: Int!, $content: String!){
create_translationatom(parent_id: $parent_id, locale_id: $locale_id, content: $content) {
translationatom{
id
parent_id
content
marked_for_deletion
locale_id
}
triumph
}
}"""
response = self.graphql_request(query, variables, header=self.user_header)
snapshot.assert_match(response.json_body)
def test_create_atom_admin_header(self, snapshot):
variables = '{"parent_id": [%s, %s], "locale_id": %s, "content": "%s"}' % ('1', '61', '6', "test atom content")
query = """mutation create_atom ($parent_id: LingvodocID!, $locale_id: Int!, $content: String!){
create_translationatom(parent_id: $parent_id, locale_id: $locale_id, content: $content) {
translationatom{
id
parent_id
content
marked_for_deletion
locale_id
}
triumph
}
}"""
response = self.graphql_request(query, variables, header=self.admin_header)
snapshot.assert_match(response.json_body)
def test_update_atom_no_header(self, snapshot):
variables = '{"id": [%s, %s], "content": "%s", "locale_id": %s}' % ('4', '8', 'never happends', '20')
query = """mutation update_translationatom ($id: LingvodocID!, $content: String, $locale_id: Int){
update_translationatom (id: $id, content: $content, locale_id: $locale_id) {
translationatom{
id
parent_id
content
marked_for_deletion
locale_id
}
triumph
}
}"""
response = self.graphql_request(query, variables)
snapshot.assert_match(response.json_body)
def test_update_atom_not_owner_header(self, snapshot):
variables = '{"id": [%s, %s], "content": "%s", "locale_id": %s}' % ('3', '8', 'never happends', '20')
query = """mutation update_translationatom ($id: LingvodocID!, $content: String, $locale_id: Int){
update_translationatom (id: $id, content: $content, locale_id: $locale_id) {
translationatom{
id
parent_id
content
marked_for_deletion
locale_id
}
triumph
}
}"""
response = self.graphql_request(query, variables, header=self.user_header)
snapshot.assert_match(response.json_body)
def test_update_atom_owner_header(self, snapshot):
variables = '{"id": [%s, %s], "content": "%s", "locale_id": %s}' % ('4', '8', 'test updated content', '21')
query = """mutation update_translationatom ($id: LingvodocID!, $content: String, $locale_id: Int){
update_translationatom (id: $id, content: $content, locale_id: $locale_id) {
translationatom{
id
parent_id
content
marked_for_deletion
locale_id
}
triumph
}
}"""
response = self.graphql_request(query, variables, header=self.user_header)
snapshot.assert_match(response.json_body)
def test_update_atom_admin_header(self, snapshot):
variables = '{"id": [%s, %s], "content": "%s", "locale_id": %s}' % ('3', '8', 'test content updated by admin', '22')
query = """mutation update_translationatom ($id: LingvodocID!, $content: String, $locale_id: Int){
update_translationatom (id: $id, content: $content, locale_id: $locale_id) {
translationatom{
id
parent_id
content
marked_for_deletion
locale_id
}
triumph
}
}"""
response = self.graphql_request(query, variables, header=self.admin_header)
snapshot.assert_match(response.json_body)
#################################################################
# Dictionary tests
#################################################################
#
# def test_get_all_dictionaries_published_true(self, snapshot):
# query = '''query DictionaryList {
# dictionaries(published: true, mode:1) {
# id
# translation
# parent_id
# translation_gist_id
# state_translation_gist_id
# category
# domain
# }
# }'''
# response = self.graphql_request(query, header=self.user_header)
# snapshot.assert_match(response.json_body)
def test_get_perspective_list(self, snapshot):
variables = '{"id": [%s, %s], "mode": "%s"}' % ('2', '3', 'all')
query = """query PerspectiveList($id:LingvodocID, $mode:String){
perspective (id: $id) {
id
lexical_entries (mode: $mode){
id
marked_for_deletion
parent_id
entities {
id
marked_for_deletion
parent_id
content
locale_id
data_type
}
}
columns{
id
parent_id
position
}
marked_for_deletion
authors {
id
name
}
tree {
translation
translation_gist_id
parent_id
id
marked_for_deletion
}
}
}"""
response = self.graphql_request(query, variables, header=self.user_header)
snapshot.assert_match(response.json_body)
| 41.806796
| 124
| 0.446158
|
d46fcdda1c76b19be152bddc570e06ea77d48c73
| 493
|
py
|
Python
|
zdppy_requests/exceptions.py
|
zhangdapeng520/zdppy_requests
|
154af90a413172e3d291044a33718ae98eb051b7
|
[
"MIT"
] | null | null | null |
zdppy_requests/exceptions.py
|
zhangdapeng520/zdppy_requests
|
154af90a413172e3d291044a33718ae98eb051b7
|
[
"MIT"
] | null | null | null |
zdppy_requests/exceptions.py
|
zhangdapeng520/zdppy_requests
|
154af90a413172e3d291044a33718ae98eb051b7
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2022/2/22 15:34
# @Author : 张大鹏
# @Site :
# @File : exceptions.py
# @Software: PyCharm
class StatusCodeError(Exception):
def __init__(self, *args):
super(StatusCodeError, self).__init__(*args)
class ParamError(Exception):
def __init__(self, *args):
super(ParamError, self).__init__(*args)
class EmptyError(Exception):
def __init__(self, *args):
super(EmptyError, self).__init__(*args)
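# A minimal usage sketch (the zdppy_requests client code that raises these is
# assumed, not shown here): all three classes simply subclass Exception, so callers
# raise and handle them like any other exception, e.g.
#
#     try:
#         raise StatusCodeError("expected status 200, got 404")
#     except StatusCodeError as e:
#         print(e)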
| 22.409091
| 52
| 0.640974
|
f1e5b46111f56aa3264d5b8f226b776178703a99
| 124,104
|
py
|
Python
|
twisted/test/test_ftp.py
|
djmitche/Twisted
|
0b404060400d35d23e7f2ba2963b3d47540d8f81
|
[
"MIT",
"Unlicense"
] | null | null | null |
twisted/test/test_ftp.py
|
djmitche/Twisted
|
0b404060400d35d23e7f2ba2963b3d47540d8f81
|
[
"MIT",
"Unlicense"
] | null | null | null |
twisted/test/test_ftp.py
|
djmitche/Twisted
|
0b404060400d35d23e7f2ba2963b3d47540d8f81
|
[
"MIT",
"Unlicense"
] | null | null | null |
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
FTP tests.
"""
import os
import errno
from StringIO import StringIO
import getpass
from zope.interface import implements
from zope.interface.verify import verifyClass
from twisted.trial import unittest
from twisted.python.randbytes import insecureRandom
from twisted.cred.portal import IRealm
from twisted.protocols import basic
from twisted.internet import reactor, task, protocol, defer, error
from twisted.internet.interfaces import IConsumer
from twisted.cred.error import UnauthorizedLogin
from twisted.cred import portal, checkers, credentials
from twisted.python import failure, filepath, runtime
from twisted.test import proto_helpers
from twisted.protocols import ftp, loopback
if runtime.platform.isWindows():
nonPOSIXSkip = "Cannot run on Windows"
else:
nonPOSIXSkip = None
class Dummy(basic.LineReceiver):
logname = None
def __init__(self):
self.lines = []
self.rawData = []
def connectionMade(self):
self.f = self.factory # to save typing in pdb :-)
def lineReceived(self,line):
self.lines.append(line)
def rawDataReceived(self, data):
self.rawData.append(data)
def lineLengthExceeded(self, line):
pass
class _BufferingProtocol(protocol.Protocol):
def connectionMade(self):
self.buffer = ''
self.d = defer.Deferred()
def dataReceived(self, data):
self.buffer += data
def connectionLost(self, reason):
self.d.callback(self)
class FTPServerTestCase(unittest.TestCase):
"""
Simple tests for an FTP server with the default settings.
@ivar clientFactory: class used as ftp client.
"""
clientFactory = ftp.FTPClientBasic
userAnonymous = "anonymous"
def setUp(self):
# Create a directory
self.directory = self.mktemp()
os.mkdir(self.directory)
self.dirPath = filepath.FilePath(self.directory)
# Start the server
p = portal.Portal(ftp.FTPRealm(
anonymousRoot=self.directory,
userHome=self.directory,
))
p.registerChecker(checkers.AllowAnonymousAccess(),
credentials.IAnonymous)
users_checker = checkers.InMemoryUsernamePasswordDatabaseDontUse()
self.username = "test-user"
self.password = "test-password"
users_checker.addUser(self.username, self.password)
p.registerChecker(users_checker, credentials.IUsernamePassword)
self.factory = ftp.FTPFactory(portal=p,
userAnonymous=self.userAnonymous)
port = reactor.listenTCP(0, self.factory, interface="127.0.0.1")
self.addCleanup(port.stopListening)
# Hook the server's buildProtocol to make the protocol instance
# accessible to tests.
buildProtocol = self.factory.buildProtocol
d1 = defer.Deferred()
def _rememberProtocolInstance(addr):
# Done hooking this.
del self.factory.buildProtocol
protocol = buildProtocol(addr)
self.serverProtocol = protocol.wrappedProtocol
def cleanupServer():
if self.serverProtocol.transport is not None:
self.serverProtocol.transport.loseConnection()
self.addCleanup(cleanupServer)
d1.callback(None)
return protocol
self.factory.buildProtocol = _rememberProtocolInstance
# Connect a client to it
portNum = port.getHost().port
clientCreator = protocol.ClientCreator(reactor, self.clientFactory)
d2 = clientCreator.connectTCP("127.0.0.1", portNum)
def gotClient(client):
self.client = client
self.addCleanup(self.client.transport.loseConnection)
d2.addCallback(gotClient)
return defer.gatherResults([d1, d2])
def assertCommandResponse(self, command, expectedResponseLines,
chainDeferred=None):
"""Asserts that a sending an FTP command receives the expected
response.
Returns a Deferred. Optionally accepts a deferred to chain its actions
to.
"""
if chainDeferred is None:
chainDeferred = defer.succeed(None)
def queueCommand(ignored):
d = self.client.queueStringCommand(command)
def gotResponse(responseLines):
self.assertEqual(expectedResponseLines, responseLines)
return d.addCallback(gotResponse)
return chainDeferred.addCallback(queueCommand)
def assertCommandFailed(self, command, expectedResponse=None,
chainDeferred=None):
if chainDeferred is None:
chainDeferred = defer.succeed(None)
def queueCommand(ignored):
return self.client.queueStringCommand(command)
chainDeferred.addCallback(queueCommand)
self.assertFailure(chainDeferred, ftp.CommandFailed)
def failed(exception):
if expectedResponse is not None:
self.assertEqual(
expectedResponse, exception.args[0])
return chainDeferred.addCallback(failed)
def _anonymousLogin(self):
d = self.assertCommandResponse(
'USER anonymous',
['331 Guest login ok, type your email address as password.'])
return self.assertCommandResponse(
'PASS test@twistedmatrix.com',
['230 Anonymous login ok, access restrictions apply.'],
chainDeferred=d)
def _userLogin(self):
"""Authenticates the FTP client using the test account."""
d = self.assertCommandResponse(
'USER %s' % (self.username),
['331 Password required for %s.' % (self.username)])
return self.assertCommandResponse(
'PASS %s' % (self.password),
['230 User logged in, proceed'],
chainDeferred=d)
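    # Both login helpers follow the standard two-step FTP exchange: USER draws the
    # 331 "password required" intermediate reply and PASS completes it with a 230
    # "logged in" reply; the later tests chain their own commands onto the Deferred
    # these helpers return.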
class FTPAnonymousTestCase(FTPServerTestCase):
"""
    Simple tests for an FTP server with a different anonymous username.
    The anonymous username used in this test case is "guest".
"""
userAnonymous = "guest"
def test_anonymousLogin(self):
"""
        Tests whether changing the anonymous username works.
        The FTP server should not require a real password for the username
        'guest'; it should let it log in as anonymous, asking only for an email
        address as the password.
"""
d = self.assertCommandResponse(
'USER guest',
['331 Guest login ok, type your email address as password.'])
return self.assertCommandResponse(
'PASS test@twistedmatrix.com',
['230 Anonymous login ok, access restrictions apply.'],
chainDeferred=d)
class BasicFTPServerTestCase(FTPServerTestCase):
def testNotLoggedInReply(self):
"""
When not logged in, most commands other than USER and PASS should
get NOT_LOGGED_IN errors, but some can be called before USER and PASS.
"""
loginRequiredCommandList = ['CDUP', 'CWD', 'LIST', 'MODE', 'PASV',
'PWD', 'RETR', 'STRU', 'SYST', 'TYPE']
loginNotRequiredCommandList = ['FEAT']
# Issue commands, check responses
def checkFailResponse(exception, command):
failureResponseLines = exception.args[0]
self.failUnless(failureResponseLines[-1].startswith("530"),
"%s - Response didn't start with 530: %r"
% (command, failureResponseLines[-1],))
def checkPassResponse(result, command):
result = result[0]
self.failIf(result.startswith("530"),
"%s - Response start with 530: %r"
% (command, result,))
deferreds = []
for command in loginRequiredCommandList:
deferred = self.client.queueStringCommand(command)
self.assertFailure(deferred, ftp.CommandFailed)
deferred.addCallback(checkFailResponse, command)
deferreds.append(deferred)
for command in loginNotRequiredCommandList:
deferred = self.client.queueStringCommand(command)
deferred.addCallback(checkPassResponse, command)
deferreds.append(deferred)
return defer.DeferredList(deferreds, fireOnOneErrback=True)
def testPASSBeforeUSER(self):
"""
Issuing PASS before USER should give an error.
"""
return self.assertCommandFailed(
'PASS foo',
["503 Incorrect sequence of commands: "
"USER required before PASS"])
def testNoParamsForUSER(self):
"""
Issuing USER without a username is a syntax error.
"""
return self.assertCommandFailed(
'USER',
['500 Syntax error: USER requires an argument.'])
def testNoParamsForPASS(self):
"""
Issuing PASS without a password is a syntax error.
"""
d = self.client.queueStringCommand('USER foo')
return self.assertCommandFailed(
'PASS',
['500 Syntax error: PASS requires an argument.'],
chainDeferred=d)
def testAnonymousLogin(self):
return self._anonymousLogin()
def testQuit(self):
"""
Issuing QUIT should return a 221 message.
"""
d = self._anonymousLogin()
return self.assertCommandResponse(
'QUIT',
['221 Goodbye.'],
chainDeferred=d)
def testAnonymousLoginDenied(self):
# Reconfigure the server to disallow anonymous access, and to have an
# IUsernamePassword checker that always rejects.
self.factory.allowAnonymous = False
denyAlwaysChecker = checkers.InMemoryUsernamePasswordDatabaseDontUse()
self.factory.portal.registerChecker(denyAlwaysChecker,
credentials.IUsernamePassword)
# Same response code as allowAnonymous=True, but different text.
d = self.assertCommandResponse(
'USER anonymous',
['331 Password required for anonymous.'])
# It will be denied. No-one can login.
d = self.assertCommandFailed(
'PASS test@twistedmatrix.com',
['530 Sorry, Authentication failed.'],
chainDeferred=d)
# It's not just saying that. You aren't logged in.
d = self.assertCommandFailed(
'PWD',
['530 Please login with USER and PASS.'],
chainDeferred=d)
return d
def test_anonymousWriteDenied(self):
"""
When an anonymous user attempts to edit the server-side filesystem, they
will receive a 550 error with a descriptive message.
"""
d = self._anonymousLogin()
return self.assertCommandFailed(
'MKD newdir',
['550 Anonymous users are forbidden to change the filesystem'],
chainDeferred=d)
def testUnknownCommand(self):
d = self._anonymousLogin()
return self.assertCommandFailed(
'GIBBERISH',
["502 Command 'GIBBERISH' not implemented"],
chainDeferred=d)
def testRETRBeforePORT(self):
d = self._anonymousLogin()
return self.assertCommandFailed(
'RETR foo',
["503 Incorrect sequence of commands: "
"PORT or PASV required before RETR"],
chainDeferred=d)
def testSTORBeforePORT(self):
d = self._anonymousLogin()
return self.assertCommandFailed(
'STOR foo',
["503 Incorrect sequence of commands: "
"PORT or PASV required before STOR"],
chainDeferred=d)
def testBadCommandArgs(self):
d = self._anonymousLogin()
self.assertCommandFailed(
'MODE z',
["504 Not implemented for parameter 'z'."],
chainDeferred=d)
self.assertCommandFailed(
'STRU I',
["504 Not implemented for parameter 'I'."],
chainDeferred=d)
return d
def testDecodeHostPort(self):
self.assertEqual(ftp.decodeHostPort('25,234,129,22,100,23'),
('25.234.129.22', 25623))
nums = range(6)
for i in range(6):
badValue = list(nums)
badValue[i] = 256
s = ','.join(map(str, badValue))
self.assertRaises(ValueError, ftp.decodeHostPort, s)
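    # The six comma-separated numbers used by PORT/PASV encode an IPv4 address plus a
    # 16-bit port: the first four are the address octets and the port is
    # p1 * 256 + p2.  For the example above, 100 * 256 + 23 == 25623, which is why
    # '25,234,129,22,100,23' decodes to ('25.234.129.22', 25623); any value of 256 or
    # more is rejected with ValueError.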
def test_PASV(self):
"""
When the client sends the command C{PASV}, the server responds with a
host and port, and is listening on that port.
"""
# Login
d = self._anonymousLogin()
# Issue a PASV command
d.addCallback(lambda _: self.client.queueStringCommand('PASV'))
def cb(responseLines):
"""
            Extract the host and port from the response, and
            verify the server is listening on the port it claims to be.
"""
host, port = ftp.decodeHostPort(responseLines[-1][4:])
self.assertEqual(port, self.serverProtocol.dtpPort.getHost().port)
d.addCallback(cb)
# Semi-reasonable way to force cleanup
d.addCallback(lambda _: self.serverProtocol.transport.loseConnection())
return d
def test_SYST(self):
"""SYST command will always return UNIX Type: L8"""
d = self._anonymousLogin()
self.assertCommandResponse('SYST', ["215 UNIX Type: L8"],
chainDeferred=d)
return d
def test_RNFRandRNTO(self):
"""
Sending the RNFR command followed by RNTO, with valid filenames, will
perform a successful rename operation.
"""
# Create user home folder with a 'foo' file.
self.dirPath.child(self.username).createDirectory()
self.dirPath.child(self.username).child('foo').touch()
d = self._userLogin()
self.assertCommandResponse(
'RNFR foo',
["350 Requested file action pending further information."],
chainDeferred=d)
self.assertCommandResponse(
'RNTO bar',
["250 Requested File Action Completed OK"],
chainDeferred=d)
def check_rename(result):
self.assertTrue(
self.dirPath.child(self.username).child('bar').exists())
return result
d.addCallback(check_rename)
return d
def test_RNFRwithoutRNTO(self):
"""
Sending the RNFR command followed by any command other than RNTO
should return an error informing users that RNFR should be followed
by RNTO.
"""
d = self._anonymousLogin()
self.assertCommandResponse(
'RNFR foo',
["350 Requested file action pending further information."],
chainDeferred=d)
self.assertCommandFailed(
'OTHER don-tcare',
["503 Incorrect sequence of commands: RNTO required after RNFR"],
chainDeferred=d)
return d
def test_portRangeForwardError(self):
"""
Exceptions other than L{error.CannotListenError} which are raised by
C{listenFactory} should be raised to the caller of L{FTP.getDTPPort}.
"""
def listenFactory(portNumber, factory):
raise RuntimeError()
self.serverProtocol.listenFactory = listenFactory
self.assertRaises(RuntimeError, self.serverProtocol.getDTPPort,
protocol.Factory())
def test_portRange(self):
"""
L{FTP.passivePortRange} should determine the ports which
L{FTP.getDTPPort} attempts to bind. If no port from that iterator can
be bound, L{error.CannotListenError} should be raised, otherwise the
first successful result from L{FTP.listenFactory} should be returned.
"""
def listenFactory(portNumber, factory):
if portNumber in (22032, 22033, 22034):
raise error.CannotListenError('localhost', portNumber, 'error')
return portNumber
self.serverProtocol.listenFactory = listenFactory
port = self.serverProtocol.getDTPPort(protocol.Factory())
self.assertEqual(port, 0)
self.serverProtocol.passivePortRange = xrange(22032, 65536)
port = self.serverProtocol.getDTPPort(protocol.Factory())
self.assertEqual(port, 22035)
self.serverProtocol.passivePortRange = xrange(22032, 22035)
self.assertRaises(error.CannotListenError,
self.serverProtocol.getDTPPort,
protocol.Factory())
def test_portRangeInheritedFromFactory(self):
"""
The L{FTP} instances created by L{ftp.FTPFactory.buildProtocol} have
their C{passivePortRange} attribute set to the same object the
factory's C{passivePortRange} attribute is set to.
"""
portRange = xrange(2017, 2031)
self.factory.passivePortRange = portRange
protocol = self.factory.buildProtocol(None)
self.assertEqual(portRange, protocol.wrappedProtocol.passivePortRange)
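    # A minimal configuration sketch (assuming a portal set up as in setUp above):
    # restricting the server's passive data connections to, say, ports 60000-60099
    # only needs
    #
    #     factory = ftp.FTPFactory(portal=p)
    #     factory.passivePortRange = xrange(60000, 60100)
    #
    # and, as the test above verifies, every protocol instance the factory builds
    # inherits that range.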
def testFEAT(self):
"""
When the server receives 'FEAT', it should report the list of supported
features. (Additionally, ensure that the server reports various
particular features that are supported by all Twisted FTP servers.)
"""
d = self.client.queueStringCommand('FEAT')
def gotResponse(responseLines):
self.assertEqual('211-Features:', responseLines[0])
self.assertTrue(' MDTM' in responseLines)
self.assertTrue(' PASV' in responseLines)
self.assertTrue(' TYPE A;I' in responseLines)
self.assertTrue(' SIZE' in responseLines)
self.assertEqual('211 End', responseLines[-1])
return d.addCallback(gotResponse)
def testOPTS(self):
"""
When the server receives 'OPTS something', it should report
that the FTP server does not support the option called 'something'.
"""
d = self._anonymousLogin()
self.assertCommandFailed(
'OPTS something',
["502 Option 'something' not implemented."],
chainDeferred=d,
)
return d
def test_STORreturnsErrorFromOpen(self):
"""
Any FTP error raised inside STOR while opening the file is returned
to the client.
"""
# We create a folder inside user's home folder and then
# we try to write a file with the same name.
# This will trigger an FTPCmdError.
self.dirPath.child(self.username).createDirectory()
self.dirPath.child(self.username).child('folder').createDirectory()
d = self._userLogin()
def sendPASV(result):
"""
Send the PASV command required before port.
"""
return self.client.queueStringCommand('PASV')
def mockDTPInstance(result):
"""
Fake an incoming connection and create a mock DTPInstance so
that PORT command will start processing the request.
"""
self.serverProtocol.dtpFactory.deferred.callback(None)
self.serverProtocol.dtpInstance = object()
return result
d.addCallback(sendPASV)
d.addCallback(mockDTPInstance)
self.assertCommandFailed(
'STOR folder',
["550 folder: is a directory"],
chainDeferred=d,
)
return d
def test_STORunknownErrorBecomesFileNotFound(self):
"""
        Any non-FTP error raised inside STOR while opening the file is
        converted into a FileNotFound error and returned to the client together
with the path.
The unknown error is logged.
"""
d = self._userLogin()
def failingOpenForWriting(ignore):
return defer.fail(AssertionError())
def sendPASV(result):
"""
Send the PASV command required before port.
"""
return self.client.queueStringCommand('PASV')
def mockDTPInstance(result):
"""
Fake an incoming connection and create a mock DTPInstance so
that PORT command will start processing the request.
"""
self.serverProtocol.dtpFactory.deferred.callback(None)
self.serverProtocol.dtpInstance = object()
self.serverProtocol.shell.openForWriting = failingOpenForWriting
return result
def checkLogs(result):
"""
Check that unknown errors are logged.
"""
logs = self.flushLoggedErrors()
self.assertEqual(1, len(logs))
self.assertIsInstance(logs[0].value, AssertionError)
d.addCallback(sendPASV)
d.addCallback(mockDTPInstance)
self.assertCommandFailed(
'STOR something',
["550 something: No such file or directory."],
chainDeferred=d,
)
d.addCallback(checkLogs)
return d
class FTPServerTestCaseAdvancedClient(FTPServerTestCase):
"""
Test FTP server with the L{ftp.FTPClient} class.
"""
clientFactory = ftp.FTPClient
def test_anonymousSTOR(self):
"""
Try to make an STOR as anonymous, and check that we got a permission
denied error.
"""
def eb(res):
res.trap(ftp.CommandFailed)
self.assertEqual(res.value.args[0][0],
'550 foo: Permission denied.')
d1, d2 = self.client.storeFile('foo')
d2.addErrback(eb)
return defer.gatherResults([d1, d2])
def test_STORtransferErrorIsReturned(self):
"""
Any FTP error raised by STOR while transferring the file is returned
to the client.
"""
# Make a failing file writer.
class FailingFileWriter(ftp._FileWriter):
def receive(self):
return defer.fail(ftp.IsADirectoryError("failing_file"))
def failingSTOR(a, b):
return defer.succeed(FailingFileWriter(None))
# Monkey patch the shell so it returns a file writer that will
# fail during transfer.
self.patch(ftp.FTPAnonymousShell, 'openForWriting', failingSTOR)
def eb(res):
res.trap(ftp.CommandFailed)
logs = self.flushLoggedErrors()
self.assertEqual(1, len(logs))
self.assertIsInstance(logs[0].value, ftp.IsADirectoryError)
self.assertEqual(
res.value.args[0][0],
"550 failing_file: is a directory")
d1, d2 = self.client.storeFile('failing_file')
d2.addErrback(eb)
return defer.gatherResults([d1, d2])
def test_STORunknownTransferErrorBecomesAbort(self):
"""
        Any non-FTP error raised by STOR while transferring the file is
        converted into a critical error and the transfer is closed.
The unknown error is logged.
"""
class FailingFileWriter(ftp._FileWriter):
def receive(self):
return defer.fail(AssertionError())
def failingSTOR(a, b):
return defer.succeed(FailingFileWriter(None))
# Monkey patch the shell so it returns a file writer that will
# fail during transfer.
self.patch(ftp.FTPAnonymousShell, 'openForWriting', failingSTOR)
def eb(res):
res.trap(ftp.CommandFailed)
logs = self.flushLoggedErrors()
self.assertEqual(1, len(logs))
self.assertIsInstance(logs[0].value, AssertionError)
self.assertEqual(
res.value.args[0][0],
"426 Transfer aborted. Data connection closed.")
d1, d2 = self.client.storeFile('failing_file')
d2.addErrback(eb)
return defer.gatherResults([d1, d2])
def test_RETRreadError(self):
"""
Any errors during reading a file inside a RETR should be returned to
the client.
"""
# Make a failing file reading.
class FailingFileReader(ftp._FileReader):
def send(self, consumer):
return defer.fail(ftp.IsADirectoryError("blah"))
def failingRETR(a, b):
return defer.succeed(FailingFileReader(None))
# Monkey patch the shell so it returns a file reader that will
# fail.
self.patch(ftp.FTPAnonymousShell, 'openForReading', failingRETR)
def check_response(failure):
self.flushLoggedErrors()
failure.trap(ftp.CommandFailed)
self.assertEqual(
failure.value.args[0][0],
"125 Data connection already open, starting transfer")
self.assertEqual(
failure.value.args[0][1],
"550 blah: is a directory")
proto = _BufferingProtocol()
d = self.client.retrieveFile('failing_file', proto)
d.addErrback(check_response)
return d
class FTPServerPasvDataConnectionTestCase(FTPServerTestCase):
def _makeDataConnection(self, ignored=None):
# Establish a passive data connection (i.e. client connecting to
# server).
d = self.client.queueStringCommand('PASV')
def gotPASV(responseLines):
host, port = ftp.decodeHostPort(responseLines[-1][4:])
cc = protocol.ClientCreator(reactor, _BufferingProtocol)
return cc.connectTCP('127.0.0.1', port)
return d.addCallback(gotPASV)
def _download(self, command, chainDeferred=None):
if chainDeferred is None:
chainDeferred = defer.succeed(None)
chainDeferred.addCallback(self._makeDataConnection)
def queueCommand(downloader):
# wait for the command to return, and the download connection to be
# closed.
d1 = self.client.queueStringCommand(command)
d2 = downloader.d
return defer.gatherResults([d1, d2])
chainDeferred.addCallback(queueCommand)
def downloadDone((ignored, downloader)):
return downloader.buffer
return chainDeferred.addCallback(downloadDone)
def test_LISTEmpty(self):
"""
When listing empty folders, LIST returns an empty response.
"""
d = self._anonymousLogin()
# No files, so the file listing should be empty
self._download('LIST', chainDeferred=d)
def checkEmpty(result):
self.assertEqual('', result)
return d.addCallback(checkEmpty)
def test_LISTWithBinLsFlags(self):
"""
        LIST ignores requests for folders with names like '-al' and will list
        the content of the current folder.
"""
os.mkdir(os.path.join(self.directory, 'foo'))
os.mkdir(os.path.join(self.directory, 'bar'))
# Login
d = self._anonymousLogin()
self._download('LIST -aL', chainDeferred=d)
def checkDownload(download):
names = []
for line in download.splitlines():
names.append(line.split(' ')[-1])
self.assertEqual(2, len(names))
self.assertIn('foo', names)
self.assertIn('bar', names)
return d.addCallback(checkDownload)
def test_LISTWithContent(self):
"""
        LIST returns all of the folder's members, each member listed on a
        separate line with its name and other details.
"""
os.mkdir(os.path.join(self.directory, 'foo'))
os.mkdir(os.path.join(self.directory, 'bar'))
# Login
d = self._anonymousLogin()
# We expect 2 lines because there are two files.
self._download('LIST', chainDeferred=d)
def checkDownload(download):
self.assertEqual(2, len(download[:-2].split('\r\n')))
d.addCallback(checkDownload)
# Download a names-only listing.
self._download('NLST ', chainDeferred=d)
def checkDownload(download):
filenames = download[:-2].split('\r\n')
filenames.sort()
self.assertEqual(['bar', 'foo'], filenames)
d.addCallback(checkDownload)
# Download a listing of the 'foo' subdirectory. 'foo' has no files, so
# the file listing should be empty.
self._download('LIST foo', chainDeferred=d)
def checkDownload(download):
self.assertEqual('', download)
d.addCallback(checkDownload)
# Change the current working directory to 'foo'.
def chdir(ignored):
return self.client.queueStringCommand('CWD foo')
d.addCallback(chdir)
# Download a listing from within 'foo', and again it should be empty,
# because LIST uses the working directory by default.
self._download('LIST', chainDeferred=d)
def checkDownload(download):
self.assertEqual('', download)
return d.addCallback(checkDownload)
def _listTestHelper(self, command, listOutput, expectedOutput):
"""
Exercise handling by the implementation of I{LIST} or I{NLST} of certain
return values and types from an L{IFTPShell.list} implementation.
This will issue C{command} and assert that if the L{IFTPShell.list}
implementation includes C{listOutput} as one of the file entries then
        the result given to the client matches C{expectedOutput}.
@param command: Either C{b"LIST"} or C{b"NLST"}
@type command: L{bytes}
@param listOutput: A value suitable to be used as an element of the list
returned by L{IFTPShell.list}. Vary the values and types of the
contents to exercise different code paths in the server's handling
of this result.
@param expectedOutput: A line of output to expect as a result of
C{listOutput} being transformed into a response to the command
issued.
@type expectedOutput: L{bytes}
@return: A L{Deferred} which fires when the test is done, either with an
L{Failure} if the test failed or with a function object if it
succeeds. The function object is the function which implements
L{IFTPShell.list} (and is useful to make assertions about what
warnings might have been emitted).
@rtype: L{Deferred}
"""
# Login
d = self._anonymousLogin()
def patchedList(segments, keys=()):
return defer.succeed([listOutput])
def loggedIn(result):
self.serverProtocol.shell.list = patchedList
return result
d.addCallback(loggedIn)
self._download('%s something' % (command,), chainDeferred=d)
def checkDownload(download):
self.assertEqual(expectedOutput, download)
return patchedList
return d.addCallback(checkDownload)
def test_LISTUnicode(self):
"""
Unicode filenames returned from L{IFTPShell.list} are encoded using
UTF-8 before being sent with the response.
"""
return self._listTestHelper(
"LIST",
(u'my resum\xe9', (
0, 1, filepath.Permissions(0777), 0, 0, 'user', 'group')),
'drwxrwxrwx 0 user group '
'0 Jan 01 1970 my resum\xc3\xa9\r\n')
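    # u'\xe9' (e with acute accent, U+00E9) encodes to the two bytes '\xc3\xa9' under
    # UTF-8, which is exactly the byte sequence expected in the listing line above.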
def test_LISTNonASCIIBytes(self):
"""
        When LIST receives a filename as a byte string from L{IFTPShell.list}
        it will just pass the data to the lower level without any change.
"""
return self._listTestHelper(
"LIST",
('my resum\xc3\xa9', (
0, 1, filepath.Permissions(0777), 0, 0, 'user', 'group')),
'drwxrwxrwx 0 user group '
'0 Jan 01 1970 my resum\xc3\xa9\r\n')
def testManyLargeDownloads(self):
# Login
d = self._anonymousLogin()
# Download a range of different size files
for size in range(100000, 110000, 500):
fObj = file(os.path.join(self.directory, '%d.txt' % (size,)), 'wb')
fObj.write('x' * size)
fObj.close()
self._download('RETR %d.txt' % (size,), chainDeferred=d)
def checkDownload(download, size=size):
self.assertEqual(size, len(download))
d.addCallback(checkDownload)
return d
def test_downloadFolder(self):
"""
When RETR is called for a folder, it will fail complaining that
the path is a folder.
"""
# Make a directory in the current working directory
self.dirPath.child('foo').createDirectory()
# Login
d = self._anonymousLogin()
d.addCallback(self._makeDataConnection)
def retrFolder(downloader):
downloader.transport.loseConnection()
deferred = self.client.queueStringCommand('RETR foo')
return deferred
d.addCallback(retrFolder)
def failOnSuccess(result):
raise AssertionError('Downloading a folder should not succeed.')
d.addCallback(failOnSuccess)
def checkError(failure):
failure.trap(ftp.CommandFailed)
self.assertEqual(
['550 foo: is a directory'], failure.value.message)
current_errors = self.flushLoggedErrors()
self.assertEqual(
0, len(current_errors),
'No errors should be logged while downloading a folder.')
d.addErrback(checkError)
return d
def test_NLSTEmpty(self):
"""
NLST with no argument returns the directory listing for the current
working directory.
"""
# Login
d = self._anonymousLogin()
# Touch a file in the current working directory
self.dirPath.child('test.txt').touch()
# Make a directory in the current working directory
self.dirPath.child('foo').createDirectory()
self._download('NLST ', chainDeferred=d)
def checkDownload(download):
filenames = download[:-2].split('\r\n')
filenames.sort()
self.assertEqual(['foo', 'test.txt'], filenames)
return d.addCallback(checkDownload)
def test_NLSTNonexistent(self):
"""
NLST on a non-existent file/directory returns nothing.
"""
# Login
d = self._anonymousLogin()
self._download('NLST nonexistent.txt', chainDeferred=d)
def checkDownload(download):
self.assertEqual('', download)
return d.addCallback(checkDownload)
def test_NLSTUnicode(self):
"""
        NLST will receive Unicode filenames from IFTPShell.list, and will
encode them using UTF-8.
"""
return self._listTestHelper(
"NLST",
(u'my resum\xe9', (
0, 1, filepath.Permissions(0777), 0, 0, 'user', 'group')),
'my resum\xc3\xa9\r\n')
def test_NLSTNonASCIIBytes(self):
"""
        NLST will just pass non-Unicode data to the lower level.
"""
return self._listTestHelper(
"NLST",
('my resum\xc3\xa9', (
0, 1, filepath.Permissions(0777), 0, 0, 'user', 'group')),
'my resum\xc3\xa9\r\n')
def test_NLSTOnPathToFile(self):
"""
NLST on an existent file returns only the path to that file.
"""
# Login
d = self._anonymousLogin()
# Touch a file in the current working directory
self.dirPath.child('test.txt').touch()
self._download('NLST test.txt', chainDeferred=d)
def checkDownload(download):
filenames = download[:-2].split('\r\n')
self.assertEqual(['test.txt'], filenames)
return d.addCallback(checkDownload)
class FTPServerPortDataConnectionTestCase(FTPServerPasvDataConnectionTestCase):
def setUp(self):
self.dataPorts = []
return FTPServerPasvDataConnectionTestCase.setUp(self)
def _makeDataConnection(self, ignored=None):
# Establish an active data connection (i.e. server connecting to
# client).
deferred = defer.Deferred()
class DataFactory(protocol.ServerFactory):
protocol = _BufferingProtocol
def buildProtocol(self, addr):
p = protocol.ServerFactory.buildProtocol(self, addr)
reactor.callLater(0, deferred.callback, p)
return p
dataPort = reactor.listenTCP(0, DataFactory(), interface='127.0.0.1')
self.dataPorts.append(dataPort)
cmd = 'PORT ' + ftp.encodeHostPort('127.0.0.1', dataPort.getHost().port)
self.client.queueStringCommand(cmd)
return deferred
def tearDown(self):
l = [defer.maybeDeferred(port.stopListening) for port in self.dataPorts]
d = defer.maybeDeferred(
FTPServerPasvDataConnectionTestCase.tearDown, self)
l.append(d)
return defer.DeferredList(l, fireOnOneErrback=True)
def testPORTCannotConnect(self):
# Login
d = self._anonymousLogin()
# Listen on a port, and immediately stop listening as a way to find a
# port number that is definitely closed.
def loggedIn(ignored):
port = reactor.listenTCP(0, protocol.Factory(),
interface='127.0.0.1')
portNum = port.getHost().port
d = port.stopListening()
d.addCallback(lambda _: portNum)
return d
d.addCallback(loggedIn)
# Tell the server to connect to that port with a PORT command, and
# verify that it fails with the right error.
def gotPortNum(portNum):
return self.assertCommandFailed(
'PORT ' + ftp.encodeHostPort('127.0.0.1', portNum),
["425 Can't open data connection."])
return d.addCallback(gotPortNum)
def test_nlstGlobbing(self):
"""
        When Unix shell globbing is used with NLST, only files matching the
pattern will be returned.
"""
self.dirPath.child('test.txt').touch()
self.dirPath.child('ceva.txt').touch()
self.dirPath.child('no.match').touch()
d = self._anonymousLogin()
self._download('NLST *.txt', chainDeferred=d)
def checkDownload(download):
filenames = download[:-2].split('\r\n')
filenames.sort()
self.assertEqual(['ceva.txt', 'test.txt'], filenames)
return d.addCallback(checkDownload)
class DTPFactoryTests(unittest.TestCase):
"""
Tests for L{ftp.DTPFactory}.
"""
def setUp(self):
"""
Create a fake protocol interpreter and a L{ftp.DTPFactory} instance to
test.
"""
self.reactor = task.Clock()
class ProtocolInterpreter(object):
dtpInstance = None
self.protocolInterpreter = ProtocolInterpreter()
self.factory = ftp.DTPFactory(
self.protocolInterpreter, None, self.reactor)
def test_setTimeout(self):
"""
L{ftp.DTPFactory.setTimeout} uses the reactor passed to its initializer
to set up a timed event to time out the DTP setup after the specified
number of seconds.
"""
# Make sure the factory's deferred fails with the right exception, and
# make it so we can tell exactly when it fires.
finished = []
d = self.assertFailure(self.factory.deferred, ftp.PortConnectionError)
d.addCallback(finished.append)
self.factory.setTimeout(6)
# Advance the clock almost to the timeout
self.reactor.advance(5)
# Nothing should have happened yet.
self.assertFalse(finished)
# Advance it to the configured timeout.
self.reactor.advance(1)
        # Now the Deferred should have failed with PortConnectionError.
self.assertTrue(finished)
# There should also be no calls left in the reactor.
self.assertFalse(self.reactor.calls)
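    # task.Clock() stands in for the real reactor, so the timeout can be driven
    # deterministically: advancing the fake clock by 5 seconds and then 1 more walks
    # it past the 6-second deadline without any real waiting, at which point the
    # factory's deferred fails with PortConnectionError and no timed calls remain.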
def test_buildProtocolOnce(self):
"""
A L{ftp.DTPFactory} instance's C{buildProtocol} method can be used once
to create a L{ftp.DTP} instance.
"""
protocol = self.factory.buildProtocol(None)
self.assertIsInstance(protocol, ftp.DTP)
# A subsequent call returns None.
self.assertIdentical(self.factory.buildProtocol(None), None)
def test_timeoutAfterConnection(self):
"""
If a timeout has been set up using L{ftp.DTPFactory.setTimeout}, it is
cancelled by L{ftp.DTPFactory.buildProtocol}.
"""
self.factory.setTimeout(10)
self.factory.buildProtocol(None)
# Make sure the call is no longer active.
self.assertFalse(self.reactor.calls)
def test_connectionAfterTimeout(self):
"""
If L{ftp.DTPFactory.buildProtocol} is called after the timeout
specified by L{ftp.DTPFactory.setTimeout} has elapsed, C{None} is
returned.
"""
# Handle the error so it doesn't get logged.
d = self.assertFailure(self.factory.deferred, ftp.PortConnectionError)
# Set up the timeout and then cause it to elapse so the Deferred does
# fail.
self.factory.setTimeout(10)
self.reactor.advance(10)
# Try to get a protocol - we should not be able to.
self.assertIdentical(self.factory.buildProtocol(None), None)
# Make sure the Deferred is doing the right thing.
return d
def test_timeoutAfterConnectionFailed(self):
"""
L{ftp.DTPFactory.deferred} fails with L{PortConnectionError} when
L{ftp.DTPFactory.clientConnectionFailed} is called. If the timeout
specified with L{ftp.DTPFactory.setTimeout} expires after that, nothing
additional happens.
"""
finished = []
d = self.assertFailure(self.factory.deferred, ftp.PortConnectionError)
d.addCallback(finished.append)
self.factory.setTimeout(10)
self.assertFalse(finished)
self.factory.clientConnectionFailed(None, None)
self.assertTrue(finished)
self.reactor.advance(10)
return d
def test_connectionFailedAfterTimeout(self):
"""
If L{ftp.DTPFactory.clientConnectionFailed} is called after the timeout
specified by L{ftp.DTPFactory.setTimeout} has elapsed, nothing beyond
        the normal timeout behavior happens.
"""
# Handle the error so it doesn't get logged.
d = self.assertFailure(self.factory.deferred, ftp.PortConnectionError)
# Set up the timeout and then cause it to elapse so the Deferred does
# fail.
self.factory.setTimeout(10)
self.reactor.advance(10)
# Now fail the connection attempt. This should do nothing. In
# particular, it should not raise an exception.
self.factory.clientConnectionFailed(None, defer.TimeoutError("foo"))
# Give the Deferred to trial so it can make sure it did what we
# expected.
return d
class DTPTests(unittest.TestCase):
"""
Tests for L{ftp.DTP}.
The DTP instances in these tests are generated using
DTPFactory.buildProtocol()
"""
def setUp(self):
"""
Create a fake protocol interpreter, a L{ftp.DTPFactory} instance,
and dummy transport to help with tests.
"""
self.reactor = task.Clock()
class ProtocolInterpreter(object):
dtpInstance = None
self.protocolInterpreter = ProtocolInterpreter()
self.factory = ftp.DTPFactory(
self.protocolInterpreter, None, self.reactor)
self.transport = proto_helpers.StringTransportWithDisconnection()
def test_sendLineNewline(self):
"""
L{ftp.DTP.sendLine} writes the line passed to it plus a line delimiter
to its transport.
"""
dtpInstance = self.factory.buildProtocol(None)
dtpInstance.makeConnection(self.transport)
lineContent = 'line content'
dtpInstance.sendLine(lineContent)
dataSent = self.transport.value()
self.assertEqual(lineContent + '\r\n', dataSent)
# -- Client Tests -----------------------------------------------------------
class PrintLines(protocol.Protocol):
"""Helper class used by FTPFileListingTests."""
def __init__(self, lines):
self._lines = lines
def connectionMade(self):
for line in self._lines:
self.transport.write(line + "\r\n")
self.transport.loseConnection()
class MyFTPFileListProtocol(ftp.FTPFileListProtocol):
def __init__(self):
self.other = []
ftp.FTPFileListProtocol.__init__(self)
def unknownLine(self, line):
self.other.append(line)
class FTPFileListingTests(unittest.TestCase):
def getFilesForLines(self, lines):
fileList = MyFTPFileListProtocol()
d = loopback.loopbackAsync(PrintLines(lines), fileList)
d.addCallback(lambda _: (fileList.files, fileList.other))
return d
def testOneLine(self):
# This example line taken from the docstring for FTPFileListProtocol
line = '-rw-r--r-- 1 root other 531 Jan 29 03:26 README'
def check(((file,), other)):
            self.failIf(other, 'unexpected unparsable lines: %s' % repr(other))
self.failUnless(file['filetype'] == '-', 'misparsed fileitem')
self.failUnless(file['perms'] == 'rw-r--r--', 'misparsed perms')
self.failUnless(file['owner'] == 'root', 'misparsed fileitem')
self.failUnless(file['group'] == 'other', 'misparsed fileitem')
self.failUnless(file['size'] == 531, 'misparsed fileitem')
self.failUnless(file['date'] == 'Jan 29 03:26', 'misparsed fileitem')
self.failUnless(file['filename'] == 'README', 'misparsed fileitem')
self.failUnless(file['nlinks'] == 1, 'misparsed nlinks')
self.failIf(file['linktarget'], 'misparsed linktarget')
return self.getFilesForLines([line]).addCallback(check)
def testVariantLines(self):
line1 = 'drw-r--r-- 2 root other 531 Jan 9 2003 A'
line2 = 'lrw-r--r-- 1 root other 1 Jan 29 03:26 B -> A'
line3 = 'woohoo! '
def check(((file1, file2), (other,))):
self.failUnless(other == 'woohoo! \r', 'incorrect other line')
# file 1
self.failUnless(file1['filetype'] == 'd', 'misparsed fileitem')
self.failUnless(file1['perms'] == 'rw-r--r--', 'misparsed perms')
self.failUnless(file1['owner'] == 'root', 'misparsed owner')
self.failUnless(file1['group'] == 'other', 'misparsed group')
self.failUnless(file1['size'] == 531, 'misparsed size')
self.failUnless(file1['date'] == 'Jan 9 2003', 'misparsed date')
self.failUnless(file1['filename'] == 'A', 'misparsed filename')
self.failUnless(file1['nlinks'] == 2, 'misparsed nlinks')
self.failIf(file1['linktarget'], 'misparsed linktarget')
# file 2
self.failUnless(file2['filetype'] == 'l', 'misparsed fileitem')
self.failUnless(file2['perms'] == 'rw-r--r--', 'misparsed perms')
self.failUnless(file2['owner'] == 'root', 'misparsed owner')
self.failUnless(file2['group'] == 'other', 'misparsed group')
self.failUnless(file2['size'] == 1, 'misparsed size')
self.failUnless(file2['date'] == 'Jan 29 03:26', 'misparsed date')
self.failUnless(file2['filename'] == 'B', 'misparsed filename')
self.failUnless(file2['nlinks'] == 1, 'misparsed nlinks')
self.failUnless(file2['linktarget'] == 'A', 'misparsed linktarget')
return self.getFilesForLines([line1, line2, line3]).addCallback(check)
def testUnknownLine(self):
def check((files, others)):
self.failIf(files, 'unexpected file entries')
self.failUnless(others == ['ABC\r', 'not a file\r'],
'incorrect unparsable lines: %s' % repr(others))
return self.getFilesForLines(['ABC', 'not a file']).addCallback(check)
def test_filenameWithUnescapedSpace(self):
'''
Will parse filenames and linktargets containing unescaped
space characters.
'''
line1 = 'drw-r--r-- 2 root other 531 Jan 9 2003 A B'
line2 = (
'lrw-r--r-- 1 root other 1 Jan 29 03:26 '
'B A -> D C/A B'
)
def check((files, others)):
self.assertEqual([], others, 'unexpected others entries')
self.assertEqual(
'A B', files[0]['filename'], 'misparsed filename')
self.assertEqual(
'B A', files[1]['filename'], 'misparsed filename')
self.assertEqual(
'D C/A B', files[1]['linktarget'], 'misparsed linktarget')
return self.getFilesForLines([line1, line2]).addCallback(check)
def test_filenameWithEscapedSpace(self):
'''
Will parse filenames and linktargets containing escaped
space characters.
'''
line1 = 'drw-r--r-- 2 root other 531 Jan 9 2003 A\ B'
line2 = (
'lrw-r--r-- 1 root other 1 Jan 29 03:26 '
'B A -> D\ C/A B'
)
def check((files, others)):
self.assertEqual([], others, 'unexpected others entries')
self.assertEqual(
'A B', files[0]['filename'], 'misparsed filename')
self.assertEqual(
'B A', files[1]['filename'], 'misparsed filename')
self.assertEqual(
'D C/A B', files[1]['linktarget'], 'misparsed linktarget')
return self.getFilesForLines([line1, line2]).addCallback(check)
def testYear(self):
# This example derived from bug description in issue 514.
fileList = ftp.FTPFileListProtocol()
exampleLine = (
'-rw-r--r-- 1 root other 531 Jan 29 2003 README\n')
class PrintLine(protocol.Protocol):
def connectionMade(self):
self.transport.write(exampleLine)
self.transport.loseConnection()
def check(ignored):
file = fileList.files[0]
self.failUnless(file['size'] == 531, 'misparsed fileitem')
self.failUnless(file['date'] == 'Jan 29 2003', 'misparsed fileitem')
self.failUnless(file['filename'] == 'README', 'misparsed fileitem')
d = loopback.loopbackAsync(PrintLine(), fileList)
return d.addCallback(check)
class FTPClientTests(unittest.TestCase):
def testFailedRETR(self):
f = protocol.Factory()
f.noisy = 0
port = reactor.listenTCP(0, f, interface="127.0.0.1")
self.addCleanup(port.stopListening)
portNum = port.getHost().port
# This test data derived from a bug report by ranty on #twisted
responses = ['220 ready, dude (vsFTPd 1.0.0: beat me, break me)',
# USER anonymous
'331 Please specify the password.',
# PASS twisted@twistedmatrix.com
'230 Login successful. Have fun.',
# TYPE I
'200 Binary it is, then.',
# PASV
'227 Entering Passive Mode (127,0,0,1,%d,%d)' %
(portNum >> 8, portNum & 0xff),
# RETR /file/that/doesnt/exist
'550 Failed to open file.']
f.buildProtocol = lambda addr: PrintLines(responses)
cc = protocol.ClientCreator(reactor, ftp.FTPClient, passive=1)
d = cc.connectTCP('127.0.0.1', portNum)
def gotClient(client):
p = protocol.Protocol()
return client.retrieveFile('/file/that/doesnt/exist', p)
d.addCallback(gotClient)
return self.assertFailure(d, ftp.CommandFailed)
def test_errbacksUponDisconnect(self):
"""
Test the ftp command errbacks when a connection lost happens during
the operation.
"""
ftpClient = ftp.FTPClient()
tr = proto_helpers.StringTransportWithDisconnection()
ftpClient.makeConnection(tr)
tr.protocol = ftpClient
d = ftpClient.list('some path', Dummy())
m = []
def _eb(failure):
m.append(failure)
return None
d.addErrback(_eb)
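        # Simulate losing the control connection while the LIST command is
        # still outstanding; the pending Deferred should errback.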
from twisted.internet.main import CONNECTION_LOST
ftpClient.connectionLost(failure.Failure(CONNECTION_LOST))
self.failUnless(m, m)
return d
class FTPClientTestCase(unittest.TestCase):
"""
Test advanced FTP client commands.
"""
def setUp(self):
"""
Create a FTP client and connect it to fake transport.
"""
self.client = ftp.FTPClient()
self.transport = proto_helpers.StringTransportWithDisconnection()
self.client.makeConnection(self.transport)
self.transport.protocol = self.client
def tearDown(self):
"""
Deliver disconnection notification to the client so that it can
perform any cleanup which may be required.
"""
self.client.connectionLost(error.ConnectionLost())
def _testLogin(self):
"""
Test the login part.
"""
self.assertEqual(self.transport.value(), '')
self.client.lineReceived(
'331 Guest login ok, type your email address as password.')
self.assertEqual(self.transport.value(), 'USER anonymous\r\n')
self.transport.clear()
self.client.lineReceived(
'230 Anonymous login ok, access restrictions apply.')
self.assertEqual(self.transport.value(), 'TYPE I\r\n')
self.transport.clear()
self.client.lineReceived('200 Type set to I.')
def test_CDUP(self):
"""
Test the CDUP command.
L{ftp.FTPClient.cdup} should return a Deferred which fires with a
sequence of one element which is the string the server sent
indicating that the command was executed successfully.
(XXX - This is a bad API)
"""
def cbCdup(res):
self.assertEqual(res[0], '250 Requested File Action Completed OK')
self._testLogin()
d = self.client.cdup().addCallback(cbCdup)
self.assertEqual(self.transport.value(), 'CDUP\r\n')
self.transport.clear()
self.client.lineReceived('250 Requested File Action Completed OK')
return d
def test_failedCDUP(self):
"""
Test L{ftp.FTPClient.cdup}'s handling of a failed CDUP command.
When the CDUP command fails, the returned Deferred should errback
with L{ftp.CommandFailed}.
"""
self._testLogin()
d = self.client.cdup()
self.assertFailure(d, ftp.CommandFailed)
self.assertEqual(self.transport.value(), 'CDUP\r\n')
self.transport.clear()
self.client.lineReceived('550 ..: No such file or directory')
return d
def test_PWD(self):
"""
Test the PWD command.
L{ftp.FTPClient.pwd} should return a Deferred which fires with a
sequence of one element which is a string representing the current
working directory on the server.
(XXX - This is a bad API)
"""
def cbPwd(res):
self.assertEqual(ftp.parsePWDResponse(res[0]), "/bar/baz")
self._testLogin()
d = self.client.pwd().addCallback(cbPwd)
self.assertEqual(self.transport.value(), 'PWD\r\n')
self.client.lineReceived('257 "/bar/baz"')
return d
def test_failedPWD(self):
"""
Test a failure in PWD command.
When the PWD command fails, the returned Deferred should errback
with L{ftp.CommandFailed}.
"""
self._testLogin()
d = self.client.pwd()
self.assertFailure(d, ftp.CommandFailed)
self.assertEqual(self.transport.value(), 'PWD\r\n')
self.client.lineReceived('550 /bar/baz: No such file or directory')
return d
def test_CWD(self):
"""
Test the CWD command.
L{ftp.FTPClient.cwd} should return a Deferred which fires with a
sequence of one element which is the string the server sent
indicating that the command was executed successfully.
(XXX - This is a bad API)
"""
def cbCwd(res):
self.assertEqual(res[0], '250 Requested File Action Completed OK')
self._testLogin()
d = self.client.cwd("bar/foo").addCallback(cbCwd)
self.assertEqual(self.transport.value(), 'CWD bar/foo\r\n')
self.client.lineReceived('250 Requested File Action Completed OK')
return d
def test_failedCWD(self):
"""
Test a failure in CWD command.
When the PWD command fails, the returned Deferred should errback
with L{ftp.CommandFailed}.
"""
self._testLogin()
d = self.client.cwd("bar/foo")
self.assertFailure(d, ftp.CommandFailed)
self.assertEqual(self.transport.value(), 'CWD bar/foo\r\n')
self.client.lineReceived('550 bar/foo: No such file or directory')
return d
def test_passiveRETR(self):
"""
Test the RETR command in passive mode: get a file and verify its
content.
L{ftp.FTPClient.retrieveFile} should return a Deferred which fires
with the protocol instance passed to it after the download has
completed.
(XXX - This API should be based on producers and consumers)
"""
def cbRetr(res, proto):
self.assertEqual(proto.buffer, 'x' * 1000)
def cbConnect(host, port, factory):
self.assertEqual(host, '127.0.0.1')
self.assertEqual(port, 12345)
proto = factory.buildProtocol((host, port))
proto.makeConnection(proto_helpers.StringTransport())
self.client.lineReceived(
'150 File status okay; about to open data connection.')
proto.dataReceived("x" * 1000)
proto.connectionLost(failure.Failure(error.ConnectionDone("")))
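        # Replace the client's connectFactory hook so that, instead of making
        # a real TCP connection for the passive data channel, the test drives
        # the data protocol directly with the fake transport above.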
self.client.connectFactory = cbConnect
self._testLogin()
proto = _BufferingProtocol()
d = self.client.retrieveFile("spam", proto)
d.addCallback(cbRetr, proto)
self.assertEqual(self.transport.value(), 'PASV\r\n')
self.transport.clear()
self.client.lineReceived('227 Entering Passive Mode (%s).' %
(ftp.encodeHostPort('127.0.0.1', 12345),))
self.assertEqual(self.transport.value(), 'RETR spam\r\n')
self.transport.clear()
self.client.lineReceived('226 Transfer Complete.')
return d
def test_RETR(self):
"""
Test the RETR command in non-passive mode.
Like L{test_passiveRETR} but in the configuration where the server
establishes the data connection to the client, rather than the other
way around.
"""
self.client.passive = False
def generatePort(portCmd):
portCmd.text = 'PORT %s' % (ftp.encodeHostPort('127.0.0.1', 9876),)
portCmd.protocol.makeConnection(proto_helpers.StringTransport())
portCmd.protocol.dataReceived("x" * 1000)
portCmd.protocol.connectionLost(
failure.Failure(error.ConnectionDone("")))
def cbRetr(res, proto):
self.assertEqual(proto.buffer, 'x' * 1000)
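        # Hook PORT command generation so the test can play the part of the
        # server-initiated data connection and feed the file contents itself.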
self.client.generatePortCommand = generatePort
self._testLogin()
proto = _BufferingProtocol()
d = self.client.retrieveFile("spam", proto)
d.addCallback(cbRetr, proto)
self.assertEqual(self.transport.value(), 'PORT %s\r\n' %
(ftp.encodeHostPort('127.0.0.1', 9876),))
self.transport.clear()
self.client.lineReceived('200 PORT OK')
self.assertEqual(self.transport.value(), 'RETR spam\r\n')
self.transport.clear()
self.client.lineReceived('226 Transfer Complete.')
return d
def test_failedRETR(self):
"""
        Try to RETR a file that does not exist.
L{ftp.FTPClient.retrieveFile} should return a Deferred which
errbacks with L{ftp.CommandFailed} if the server indicates the file
cannot be transferred for some reason.
"""
def cbConnect(host, port, factory):
self.assertEqual(host, '127.0.0.1')
self.assertEqual(port, 12345)
proto = factory.buildProtocol((host, port))
proto.makeConnection(proto_helpers.StringTransport())
self.client.lineReceived(
'150 File status okay; about to open data connection.')
proto.connectionLost(failure.Failure(error.ConnectionDone("")))
self.client.connectFactory = cbConnect
self._testLogin()
proto = _BufferingProtocol()
d = self.client.retrieveFile("spam", proto)
self.assertFailure(d, ftp.CommandFailed)
self.assertEqual(self.transport.value(), 'PASV\r\n')
self.transport.clear()
self.client.lineReceived('227 Entering Passive Mode (%s).' %
(ftp.encodeHostPort('127.0.0.1', 12345),))
self.assertEqual(self.transport.value(), 'RETR spam\r\n')
self.transport.clear()
self.client.lineReceived('550 spam: No such file or directory')
return d
def test_lostRETR(self):
"""
Try a RETR, but disconnect during the transfer.
L{ftp.FTPClient.retrieveFile} should return a Deferred which
        errbacks with L{ftp.ConnectionLost}.
"""
self.client.passive = False
l = []
def generatePort(portCmd):
portCmd.text = 'PORT %s' % (ftp.encodeHostPort('127.0.0.1', 9876),)
tr = proto_helpers.StringTransportWithDisconnection()
portCmd.protocol.makeConnection(tr)
tr.protocol = portCmd.protocol
portCmd.protocol.dataReceived("x" * 500)
l.append(tr)
self.client.generatePortCommand = generatePort
self._testLogin()
proto = _BufferingProtocol()
d = self.client.retrieveFile("spam", proto)
self.assertEqual(self.transport.value(), 'PORT %s\r\n' %
(ftp.encodeHostPort('127.0.0.1', 9876),))
self.transport.clear()
self.client.lineReceived('200 PORT OK')
self.assertEqual(self.transport.value(), 'RETR spam\r\n')
self.assert_(l)
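        # Drop both the data and control connections before the transfer is
        # acknowledged; the RETR Deferred should errback with ConnectionLost.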
l[0].loseConnection()
self.transport.loseConnection()
self.assertFailure(d, ftp.ConnectionLost)
return d
def test_passiveSTOR(self):
"""
Test the STOR command: send a file and verify its content.
L{ftp.FTPClient.storeFile} should return a two-tuple of Deferreds.
The first of which should fire with a protocol instance when the
data connection has been established and is responsible for sending
the contents of the file. The second of which should fire when the
upload has completed, the data connection has been closed, and the
server has acknowledged receipt of the file.
(XXX - storeFile should take a producer as an argument, instead, and
only return a Deferred which fires when the upload has succeeded or
failed).
"""
tr = proto_helpers.StringTransport()
def cbStore(sender):
self.client.lineReceived(
'150 File status okay; about to open data connection.')
sender.transport.write("x" * 1000)
sender.finish()
sender.connectionLost(failure.Failure(error.ConnectionDone("")))
def cbFinish(ign):
self.assertEqual(tr.value(), "x" * 1000)
def cbConnect(host, port, factory):
self.assertEqual(host, '127.0.0.1')
self.assertEqual(port, 12345)
proto = factory.buildProtocol((host, port))
proto.makeConnection(tr)
self.client.connectFactory = cbConnect
self._testLogin()
d1, d2 = self.client.storeFile("spam")
d1.addCallback(cbStore)
d2.addCallback(cbFinish)
self.assertEqual(self.transport.value(), 'PASV\r\n')
self.transport.clear()
self.client.lineReceived('227 Entering Passive Mode (%s).' %
(ftp.encodeHostPort('127.0.0.1', 12345),))
self.assertEqual(self.transport.value(), 'STOR spam\r\n')
self.transport.clear()
self.client.lineReceived('226 Transfer Complete.')
return defer.gatherResults([d1, d2])
def test_failedSTOR(self):
"""
Test a failure in the STOR command.
If the server does not acknowledge successful receipt of the
uploaded file, the second Deferred returned by
L{ftp.FTPClient.storeFile} should errback with L{ftp.CommandFailed}.
"""
tr = proto_helpers.StringTransport()
def cbStore(sender):
self.client.lineReceived(
'150 File status okay; about to open data connection.')
sender.transport.write("x" * 1000)
sender.finish()
sender.connectionLost(failure.Failure(error.ConnectionDone("")))
def cbConnect(host, port, factory):
self.assertEqual(host, '127.0.0.1')
self.assertEqual(port, 12345)
proto = factory.buildProtocol((host, port))
proto.makeConnection(tr)
self.client.connectFactory = cbConnect
self._testLogin()
d1, d2 = self.client.storeFile("spam")
d1.addCallback(cbStore)
self.assertFailure(d2, ftp.CommandFailed)
self.assertEqual(self.transport.value(), 'PASV\r\n')
self.transport.clear()
self.client.lineReceived('227 Entering Passive Mode (%s).' %
(ftp.encodeHostPort('127.0.0.1', 12345),))
self.assertEqual(self.transport.value(), 'STOR spam\r\n')
self.transport.clear()
self.client.lineReceived(
'426 Transfer aborted. Data connection closed.')
return defer.gatherResults([d1, d2])
def test_STOR(self):
"""
Test the STOR command in non-passive mode.
Like L{test_passiveSTOR} but in the configuration where the server
establishes the data connection to the client, rather than the other
way around.
"""
tr = proto_helpers.StringTransport()
self.client.passive = False
def generatePort(portCmd):
portCmd.text = 'PORT %s' % ftp.encodeHostPort('127.0.0.1', 9876)
portCmd.protocol.makeConnection(tr)
def cbStore(sender):
self.assertEqual(self.transport.value(), 'PORT %s\r\n' %
(ftp.encodeHostPort('127.0.0.1', 9876),))
self.transport.clear()
self.client.lineReceived('200 PORT OK')
self.assertEqual(self.transport.value(), 'STOR spam\r\n')
self.transport.clear()
self.client.lineReceived(
'150 File status okay; about to open data connection.')
sender.transport.write("x" * 1000)
sender.finish()
sender.connectionLost(failure.Failure(error.ConnectionDone("")))
self.client.lineReceived('226 Transfer Complete.')
def cbFinish(ign):
self.assertEqual(tr.value(), "x" * 1000)
self.client.generatePortCommand = generatePort
self._testLogin()
d1, d2 = self.client.storeFile("spam")
d1.addCallback(cbStore)
d2.addCallback(cbFinish)
return defer.gatherResults([d1, d2])
def test_passiveLIST(self):
"""
Test the LIST command.
L{ftp.FTPClient.list} should return a Deferred which fires with a
protocol instance which was passed to list after the command has
succeeded.
(XXX - This is a very unfortunate API; if my understanding is
correct, the results are always at least line-oriented, so allowing
a per-line parser function to be specified would make this simpler,
but a default implementation should really be provided which knows
how to deal with all the formats used in real servers, so
application developers never have to care about this insanity. It
would also be nice to either get back a Deferred of a list of
filenames or to be able to consume the files as they are received
(which the current API does allow, but in a somewhat inconvenient
fashion) -exarkun)
"""
def cbList(res, fileList):
fls = [f["filename"] for f in fileList.files]
expected = ["foo", "bar", "baz"]
expected.sort()
fls.sort()
self.assertEqual(fls, expected)
def cbConnect(host, port, factory):
self.assertEqual(host, '127.0.0.1')
self.assertEqual(port, 12345)
proto = factory.buildProtocol((host, port))
proto.makeConnection(proto_helpers.StringTransport())
self.client.lineReceived(
'150 File status okay; about to open data connection.')
sending = [
'-rw-r--r-- 0 spam egg 100 Oct 10 2006 foo\r\n',
'-rw-r--r-- 3 spam egg 100 Oct 10 2006 bar\r\n',
'-rw-r--r-- 4 spam egg 100 Oct 10 2006 baz\r\n',
]
for i in sending:
proto.dataReceived(i)
proto.connectionLost(failure.Failure(error.ConnectionDone("")))
self.client.connectFactory = cbConnect
self._testLogin()
fileList = ftp.FTPFileListProtocol()
d = self.client.list('foo/bar', fileList).addCallback(cbList, fileList)
self.assertEqual(self.transport.value(), 'PASV\r\n')
self.transport.clear()
self.client.lineReceived('227 Entering Passive Mode (%s).' %
(ftp.encodeHostPort('127.0.0.1', 12345),))
self.assertEqual(self.transport.value(), 'LIST foo/bar\r\n')
self.client.lineReceived('226 Transfer Complete.')
return d
def test_LIST(self):
"""
Test the LIST command in non-passive mode.
Like L{test_passiveLIST} but in the configuration where the server
establishes the data connection to the client, rather than the other
way around.
"""
self.client.passive = False
def generatePort(portCmd):
portCmd.text = 'PORT %s' % (ftp.encodeHostPort('127.0.0.1', 9876),)
portCmd.protocol.makeConnection(proto_helpers.StringTransport())
self.client.lineReceived(
'150 File status okay; about to open data connection.')
sending = [
'-rw-r--r-- 0 spam egg 100 Oct 10 2006 foo\r\n',
'-rw-r--r-- 3 spam egg 100 Oct 10 2006 bar\r\n',
'-rw-r--r-- 4 spam egg 100 Oct 10 2006 baz\r\n',
]
for i in sending:
portCmd.protocol.dataReceived(i)
portCmd.protocol.connectionLost(
failure.Failure(error.ConnectionDone("")))
def cbList(res, fileList):
fls = [f["filename"] for f in fileList.files]
expected = ["foo", "bar", "baz"]
expected.sort()
fls.sort()
self.assertEqual(fls, expected)
self.client.generatePortCommand = generatePort
self._testLogin()
fileList = ftp.FTPFileListProtocol()
d = self.client.list('foo/bar', fileList).addCallback(cbList, fileList)
self.assertEqual(self.transport.value(), 'PORT %s\r\n' %
(ftp.encodeHostPort('127.0.0.1', 9876),))
self.transport.clear()
self.client.lineReceived('200 PORT OK')
self.assertEqual(self.transport.value(), 'LIST foo/bar\r\n')
self.transport.clear()
self.client.lineReceived('226 Transfer Complete.')
return d
def test_failedLIST(self):
"""
Test a failure in LIST command.
L{ftp.FTPClient.list} should return a Deferred which fails with
        L{ftp.CommandFailed} if the server indicates that the given path is
        invalid for some reason.
"""
def cbConnect(host, port, factory):
self.assertEqual(host, '127.0.0.1')
self.assertEqual(port, 12345)
proto = factory.buildProtocol((host, port))
proto.makeConnection(proto_helpers.StringTransport())
self.client.lineReceived(
'150 File status okay; about to open data connection.')
proto.connectionLost(failure.Failure(error.ConnectionDone("")))
self.client.connectFactory = cbConnect
self._testLogin()
fileList = ftp.FTPFileListProtocol()
d = self.client.list('foo/bar', fileList)
self.assertFailure(d, ftp.CommandFailed)
self.assertEqual(self.transport.value(), 'PASV\r\n')
self.transport.clear()
self.client.lineReceived('227 Entering Passive Mode (%s).' %
(ftp.encodeHostPort('127.0.0.1', 12345),))
self.assertEqual(self.transport.value(), 'LIST foo/bar\r\n')
self.client.lineReceived('550 foo/bar: No such file or directory')
return d
def test_NLST(self):
"""
Test the NLST command in non-passive mode.
L{ftp.FTPClient.nlst} should return a Deferred which fires with a
list of filenames when the list command has completed.
"""
self.client.passive = False
def generatePort(portCmd):
portCmd.text = 'PORT %s' % (ftp.encodeHostPort('127.0.0.1', 9876),)
portCmd.protocol.makeConnection(proto_helpers.StringTransport())
self.client.lineReceived(
'150 File status okay; about to open data connection.')
portCmd.protocol.dataReceived('foo\r\n')
portCmd.protocol.dataReceived('bar\r\n')
portCmd.protocol.dataReceived('baz\r\n')
portCmd.protocol.connectionLost(
failure.Failure(error.ConnectionDone("")))
def cbList(res, proto):
fls = proto.buffer.splitlines()
expected = ["foo", "bar", "baz"]
expected.sort()
fls.sort()
self.assertEqual(fls, expected)
self.client.generatePortCommand = generatePort
self._testLogin()
lstproto = _BufferingProtocol()
d = self.client.nlst('foo/bar', lstproto).addCallback(cbList, lstproto)
self.assertEqual(self.transport.value(), 'PORT %s\r\n' %
(ftp.encodeHostPort('127.0.0.1', 9876),))
self.transport.clear()
self.client.lineReceived('200 PORT OK')
self.assertEqual(self.transport.value(), 'NLST foo/bar\r\n')
self.client.lineReceived('226 Transfer Complete.')
return d
def test_passiveNLST(self):
"""
Test the NLST command.
        Like L{test_NLST} but in the configuration where the client
        establishes the data connection to the server, rather than the other
        way around.
"""
def cbList(res, proto):
fls = proto.buffer.splitlines()
expected = ["foo", "bar", "baz"]
expected.sort()
fls.sort()
self.assertEqual(fls, expected)
def cbConnect(host, port, factory):
self.assertEqual(host, '127.0.0.1')
self.assertEqual(port, 12345)
proto = factory.buildProtocol((host, port))
proto.makeConnection(proto_helpers.StringTransport())
self.client.lineReceived(
'150 File status okay; about to open data connection.')
proto.dataReceived('foo\r\n')
proto.dataReceived('bar\r\n')
proto.dataReceived('baz\r\n')
proto.connectionLost(failure.Failure(error.ConnectionDone("")))
self.client.connectFactory = cbConnect
self._testLogin()
lstproto = _BufferingProtocol()
d = self.client.nlst('foo/bar', lstproto).addCallback(cbList, lstproto)
self.assertEqual(self.transport.value(), 'PASV\r\n')
self.transport.clear()
self.client.lineReceived('227 Entering Passive Mode (%s).' %
(ftp.encodeHostPort('127.0.0.1', 12345),))
self.assertEqual(self.transport.value(), 'NLST foo/bar\r\n')
self.client.lineReceived('226 Transfer Complete.')
return d
def test_failedNLST(self):
"""
Test a failure in NLST command.
L{ftp.FTPClient.nlst} should return a Deferred which fails with
        L{ftp.CommandFailed} if the server indicates that the given path is
        invalid for some reason.
"""
tr = proto_helpers.StringTransport()
def cbConnect(host, port, factory):
self.assertEqual(host, '127.0.0.1')
self.assertEqual(port, 12345)
proto = factory.buildProtocol((host, port))
proto.makeConnection(tr)
self.client.lineReceived(
'150 File status okay; about to open data connection.')
proto.connectionLost(failure.Failure(error.ConnectionDone("")))
self.client.connectFactory = cbConnect
self._testLogin()
lstproto = _BufferingProtocol()
d = self.client.nlst('foo/bar', lstproto)
self.assertFailure(d, ftp.CommandFailed)
self.assertEqual(self.transport.value(), 'PASV\r\n')
self.transport.clear()
self.client.lineReceived('227 Entering Passive Mode (%s).' %
(ftp.encodeHostPort('127.0.0.1', 12345),))
self.assertEqual(self.transport.value(), 'NLST foo/bar\r\n')
self.client.lineReceived('550 foo/bar: No such file or directory')
return d
def test_renameFromTo(self):
"""
        L{ftp.FTPClient.rename} issues I{RNFR} and I{RNTO} commands and returns
a L{Deferred} which fires when a file has successfully been renamed.
"""
self._testLogin()
d = self.client.rename("/spam", "/ham")
self.assertEqual(self.transport.value(), 'RNFR /spam\r\n')
self.transport.clear()
fromResponse = (
'350 Requested file action pending further information.\r\n')
self.client.lineReceived(fromResponse)
self.assertEqual(self.transport.value(), 'RNTO /ham\r\n')
toResponse = (
'250 Requested File Action Completed OK')
self.client.lineReceived(toResponse)
d.addCallback(self.assertEqual, ([fromResponse], [toResponse]))
return d
def test_renameFromToEscapesPaths(self):
"""
        L{ftp.FTPClient.rename} issues I{RNFR} and I{RNTO} commands with paths
escaped according to U{http://cr.yp.to/ftp/filesystem.html}.
"""
self._testLogin()
fromFile = "/foo/ba\nr/baz"
toFile = "/qu\nux"
self.client.rename(fromFile, toFile)
self.client.lineReceived("350 ")
self.client.lineReceived("250 ")
self.assertEqual(
self.transport.value(),
"RNFR /foo/ba\x00r/baz\r\n"
"RNTO /qu\x00ux\r\n")
def test_renameFromToFailingOnFirstError(self):
"""
The L{Deferred} returned by L{ftp.FTPClient.rename} is errbacked with
L{CommandFailed} if the I{RNFR} command receives an error response code
(for example, because the file does not exist).
"""
self._testLogin()
d = self.client.rename("/spam", "/ham")
self.assertEqual(self.transport.value(), 'RNFR /spam\r\n')
self.transport.clear()
self.client.lineReceived('550 Requested file unavailable.\r\n')
# The RNTO should not execute since the RNFR failed.
self.assertEqual(self.transport.value(), '')
return self.assertFailure(d, ftp.CommandFailed)
def test_renameFromToFailingOnRenameTo(self):
"""
The L{Deferred} returned by L{ftp.FTPClient.rename} is errbacked with
L{CommandFailed} if the I{RNTO} command receives an error response code
(for example, because the destination directory does not exist).
"""
self._testLogin()
d = self.client.rename("/spam", "/ham")
self.assertEqual(self.transport.value(), 'RNFR /spam\r\n')
self.transport.clear()
self.client.lineReceived('350 Requested file action pending further information.\r\n')
self.assertEqual(self.transport.value(), 'RNTO /ham\r\n')
self.client.lineReceived('550 Requested file unavailable.\r\n')
return self.assertFailure(d, ftp.CommandFailed)
def test_makeDirectory(self):
"""
L{ftp.FTPClient.makeDirectory} issues a I{MKD} command and returns a
L{Deferred} which is called back with the server's response if the
directory is created.
"""
self._testLogin()
d = self.client.makeDirectory("/spam")
self.assertEqual(self.transport.value(), 'MKD /spam\r\n')
self.client.lineReceived('257 "/spam" created.')
return d.addCallback(self.assertEqual, ['257 "/spam" created.'])
def test_makeDirectoryPathEscape(self):
"""
L{ftp.FTPClient.makeDirectory} escapes the path name it sends according
to U{http://cr.yp.to/ftp/filesystem.html}.
"""
self._testLogin()
d = self.client.makeDirectory("/sp\nam")
self.assertEqual(self.transport.value(), 'MKD /sp\x00am\r\n')
# This is necessary to make the Deferred fire. The Deferred needs
# to fire so that tearDown doesn't cause it to errback and fail this
# or (more likely) a later test.
self.client.lineReceived('257 win')
return d
def test_failedMakeDirectory(self):
"""
L{ftp.FTPClient.makeDirectory} returns a L{Deferred} which is errbacked
with L{CommandFailed} if the server returns an error response code.
"""
self._testLogin()
d = self.client.makeDirectory("/spam")
self.assertEqual(self.transport.value(), 'MKD /spam\r\n')
self.client.lineReceived('550 PERMISSION DENIED')
return self.assertFailure(d, ftp.CommandFailed)
def test_getDirectory(self):
"""
Test the getDirectory method.
L{ftp.FTPClient.getDirectory} should return a Deferred which fires with
the current directory on the server. It wraps PWD command.
"""
def cbGet(res):
self.assertEqual(res, "/bar/baz")
self._testLogin()
d = self.client.getDirectory().addCallback(cbGet)
self.assertEqual(self.transport.value(), 'PWD\r\n')
self.client.lineReceived('257 "/bar/baz"')
return d
def test_failedGetDirectory(self):
"""
Test a failure in getDirectory method.
The behaviour should be the same as PWD.
"""
self._testLogin()
d = self.client.getDirectory()
self.assertFailure(d, ftp.CommandFailed)
self.assertEqual(self.transport.value(), 'PWD\r\n')
self.client.lineReceived('550 /bar/baz: No such file or directory')
return d
def test_anotherFailedGetDirectory(self):
"""
Test a different failure in getDirectory method.
        The response must be quoted for the directory to be parsed; an
        unquoted response therefore results in an error.
"""
self._testLogin()
d = self.client.getDirectory()
self.assertFailure(d, ftp.CommandFailed)
self.assertEqual(self.transport.value(), 'PWD\r\n')
self.client.lineReceived('257 /bar/baz')
return d
def test_removeFile(self):
"""
L{ftp.FTPClient.removeFile} sends a I{DELE} command to the server for
the indicated file and returns a Deferred which fires after the server
sends a 250 response code.
"""
self._testLogin()
d = self.client.removeFile("/tmp/test")
self.assertEqual(self.transport.value(), 'DELE /tmp/test\r\n')
response = '250 Requested file action okay, completed.'
self.client.lineReceived(response)
return d.addCallback(self.assertEqual, [response])
def test_failedRemoveFile(self):
"""
If the server returns a response code other than 250 in response to a
I{DELE} sent by L{ftp.FTPClient.removeFile}, the L{Deferred} returned
by C{removeFile} is errbacked with a L{Failure} wrapping a
L{CommandFailed}.
"""
self._testLogin()
d = self.client.removeFile("/tmp/test")
self.assertEqual(self.transport.value(), 'DELE /tmp/test\r\n')
response = '501 Syntax error in parameters or arguments.'
self.client.lineReceived(response)
d = self.assertFailure(d, ftp.CommandFailed)
d.addCallback(lambda exc: self.assertEqual(exc.args, ([response],)))
return d
def test_unparsableRemoveFileResponse(self):
"""
If the server returns a response line which cannot be parsed, the
L{Deferred} returned by L{ftp.FTPClient.removeFile} is errbacked with a
L{BadResponse} containing the response.
"""
self._testLogin()
d = self.client.removeFile("/tmp/test")
response = '765 blah blah blah'
self.client.lineReceived(response)
d = self.assertFailure(d, ftp.BadResponse)
d.addCallback(lambda exc: self.assertEqual(exc.args, ([response],)))
return d
def test_multilineRemoveFileResponse(self):
"""
If the server returns multiple response lines, the L{Deferred} returned
by L{ftp.FTPClient.removeFile} is still fired with a true value if the
ultimate response code is 250.
"""
self._testLogin()
d = self.client.removeFile("/tmp/test")
response = ['250-perhaps a progress report',
'250 okay']
map(self.client.lineReceived, response)
return d.addCallback(self.assertTrue)
def test_removeDirectory(self):
"""
L{ftp.FTPClient.removeDirectory} sends a I{RMD} command to the server
for the indicated directory and returns a Deferred which fires after
the server sends a 250 response code.
"""
self._testLogin()
d = self.client.removeDirectory('/tmp/test')
self.assertEqual(self.transport.value(), 'RMD /tmp/test\r\n')
response = '250 Requested file action okay, completed.'
self.client.lineReceived(response)
return d.addCallback(self.assertEqual, [response])
def test_failedRemoveDirectory(self):
"""
If the server returns a response code other than 250 in response to a
I{RMD} sent by L{ftp.FTPClient.removeDirectory}, the L{Deferred}
returned by C{removeDirectory} is errbacked with a L{Failure} wrapping
a L{CommandFailed}.
"""
self._testLogin()
d = self.client.removeDirectory("/tmp/test")
self.assertEqual(self.transport.value(), 'RMD /tmp/test\r\n')
response = '501 Syntax error in parameters or arguments.'
self.client.lineReceived(response)
d = self.assertFailure(d, ftp.CommandFailed)
d.addCallback(lambda exc: self.assertEqual(exc.args, ([response],)))
return d
def test_unparsableRemoveDirectoryResponse(self):
"""
If the server returns a response line which cannot be parsed, the
L{Deferred} returned by L{ftp.FTPClient.removeDirectory} is errbacked
with a L{BadResponse} containing the response.
"""
self._testLogin()
d = self.client.removeDirectory("/tmp/test")
response = '765 blah blah blah'
self.client.lineReceived(response)
d = self.assertFailure(d, ftp.BadResponse)
d.addCallback(lambda exc: self.assertEqual(exc.args, ([response],)))
return d
def test_multilineRemoveDirectoryResponse(self):
"""
If the server returns multiple response lines, the L{Deferred} returned
by L{ftp.FTPClient.removeDirectory} is still fired with a true value
if the ultimate response code is 250.
"""
self._testLogin()
d = self.client.removeDirectory("/tmp/test")
response = ['250-perhaps a progress report',
'250 okay']
map(self.client.lineReceived, response)
return d.addCallback(self.assertTrue)
class FTPClientBasicTests(unittest.TestCase):
def testGreeting(self):
# The first response is captured as a greeting.
ftpClient = ftp.FTPClientBasic()
ftpClient.lineReceived('220 Imaginary FTP.')
self.assertEqual(['220 Imaginary FTP.'], ftpClient.greeting)
def testResponseWithNoMessage(self):
# Responses with no message are still valid, i.e. three digits followed
        # by a space is a complete response.
ftpClient = ftp.FTPClientBasic()
ftpClient.lineReceived('220 ')
self.assertEqual(['220 '], ftpClient.greeting)
def testMultilineResponse(self):
ftpClient = ftp.FTPClientBasic()
ftpClient.transport = proto_helpers.StringTransport()
ftpClient.lineReceived('220 Imaginary FTP.')
# Queue (and send) a dummy command, and set up a callback to capture the
# result
deferred = ftpClient.queueStringCommand('BLAH')
result = []
deferred.addCallback(result.append)
deferred.addErrback(self.fail)
# Send the first line of a multiline response.
ftpClient.lineReceived('210-First line.')
self.assertEqual([], result)
# Send a second line, again prefixed with "nnn-".
ftpClient.lineReceived('123-Second line.')
self.assertEqual([], result)
# Send a plain line of text, no prefix.
ftpClient.lineReceived('Just some text.')
self.assertEqual([], result)
# Now send a short (less than 4 chars) line.
ftpClient.lineReceived('Hi')
self.assertEqual([], result)
# Now send an empty line.
ftpClient.lineReceived('')
self.assertEqual([], result)
# And a line with 3 digits in it, and nothing else.
ftpClient.lineReceived('321')
self.assertEqual([], result)
# Now finish it.
ftpClient.lineReceived('210 Done.')
self.assertEqual(
['210-First line.',
'123-Second line.',
'Just some text.',
'Hi',
'',
'321',
'210 Done.'], result[0])
def test_noPasswordGiven(self):
"""
Passing None as the password avoids sending the PASS command.
"""
# Create a client, and give it a greeting.
ftpClient = ftp.FTPClientBasic()
ftpClient.transport = proto_helpers.StringTransport()
ftpClient.lineReceived('220 Welcome to Imaginary FTP.')
# Queue a login with no password
ftpClient.queueLogin('bob', None)
self.assertEqual('USER bob\r\n', ftpClient.transport.value())
# Clear the test buffer, acknowledge the USER command.
ftpClient.transport.clear()
ftpClient.lineReceived('200 Hello bob.')
# The client shouldn't have sent anything more (i.e. it shouldn't have
# sent a PASS command).
self.assertEqual('', ftpClient.transport.value())
def test_noPasswordNeeded(self):
"""
Receiving a 230 response to USER prevents PASS from being sent.
"""
# Create a client, and give it a greeting.
ftpClient = ftp.FTPClientBasic()
ftpClient.transport = proto_helpers.StringTransport()
ftpClient.lineReceived('220 Welcome to Imaginary FTP.')
# Queue a login with no password
ftpClient.queueLogin('bob', 'secret')
self.assertEqual('USER bob\r\n', ftpClient.transport.value())
# Clear the test buffer, acknowledge the USER command with a 230
# response code.
ftpClient.transport.clear()
ftpClient.lineReceived('230 Hello bob. No password needed.')
# The client shouldn't have sent anything more (i.e. it shouldn't have
# sent a PASS command).
self.assertEqual('', ftpClient.transport.value())
class PathHandling(unittest.TestCase):
def testNormalizer(self):
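        # Simple absolute and relative paths, resolved against an empty
        # working directory.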
for inp, outp in [('a', ['a']),
('/a', ['a']),
('/', []),
('a/b/c', ['a', 'b', 'c']),
('/a/b/c', ['a', 'b', 'c']),
('/a/', ['a']),
('a/', ['a'])]:
self.assertEqual(ftp.toSegments([], inp), outp)
for inp, outp in [('b', ['a', 'b']),
('b/', ['a', 'b']),
('/b', ['b']),
('/b/', ['b']),
('b/c', ['a', 'b', 'c']),
('b/c/', ['a', 'b', 'c']),
('/b/c', ['b', 'c']),
('/b/c/', ['b', 'c'])]:
self.assertEqual(ftp.toSegments(['a'], inp), outp)
for inp, outp in [('//', []),
('//a', ['a']),
('a//', ['a']),
('a//b', ['a', 'b'])]:
self.assertEqual(ftp.toSegments([], inp), outp)
for inp, outp in [('//', []),
('//b', ['b']),
('b//c', ['a', 'b', 'c'])]:
self.assertEqual(ftp.toSegments(['a'], inp), outp)
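        # '..' segments remove the most recently accumulated segment.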
for inp, outp in [('..', []),
('../', []),
('a/..', ['x']),
('/a/..', []),
('/a/b/..', ['a']),
('/a/b/../', ['a']),
('/a/b/../c', ['a', 'c']),
('/a/b/../c/', ['a', 'c']),
('/a/b/../../c', ['c']),
('/a/b/../../c/', ['c']),
('/a/b/../../c/..', []),
('/a/b/../../c/../', [])]:
self.assertEqual(ftp.toSegments(['x'], inp), outp)
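        # Attempting to traverse above the root raises InvalidPath.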
for inp in ['..', '../', 'a/../..', 'a/../../',
'/..', '/../', '/a/../..', '/a/../../',
'/a/b/../../..']:
self.assertRaises(ftp.InvalidPath, ftp.toSegments, [], inp)
for inp in ['../..', '../../', '../a/../..']:
self.assertRaises(ftp.InvalidPath, ftp.toSegments, ['x'], inp)
class IsGlobbingExpressionTests(unittest.TestCase):
"""
Tests for _isGlobbingExpression utility function.
"""
def test_isGlobbingExpressionEmptySegments(self):
"""
_isGlobbingExpression will return False for None, or empty
segments.
"""
self.assertFalse(ftp._isGlobbingExpression())
self.assertFalse(ftp._isGlobbingExpression([]))
self.assertFalse(ftp._isGlobbingExpression(None))
def test_isGlobbingExpressionNoGlob(self):
"""
_isGlobbingExpression will return False for plain segments.
Also, it only checks the last segment part (filename) and will not
check the path name.
"""
self.assertFalse(ftp._isGlobbingExpression(['ignore', 'expr']))
self.assertFalse(ftp._isGlobbingExpression(['*.txt', 'expr']))
def test_isGlobbingExpressionGlob(self):
"""
_isGlobbingExpression will return True for segments which contains
globbing characters in the last segment part (filename).
"""
self.assertTrue(ftp._isGlobbingExpression(['ignore', '*.txt']))
self.assertTrue(ftp._isGlobbingExpression(['ignore', '[a-b].txt']))
self.assertTrue(ftp._isGlobbingExpression(['ignore', 'fil?.txt']))
class BaseFTPRealmTests(unittest.TestCase):
"""
Tests for L{ftp.BaseFTPRealm}, a base class to help define L{IFTPShell}
realms with different user home directory policies.
"""
def test_interface(self):
"""
L{ftp.BaseFTPRealm} implements L{IRealm}.
"""
self.assertTrue(verifyClass(IRealm, ftp.BaseFTPRealm))
def test_getHomeDirectory(self):
"""
L{ftp.BaseFTPRealm} calls its C{getHomeDirectory} method with the
avatarId being requested to determine the home directory for that
avatar.
"""
result = filepath.FilePath(self.mktemp())
avatars = []
class TestRealm(ftp.BaseFTPRealm):
def getHomeDirectory(self, avatarId):
avatars.append(avatarId)
return result
realm = TestRealm(self.mktemp())
iface, avatar, logout = realm.requestAvatar(
"alice@example.com", None, ftp.IFTPShell)
self.assertIsInstance(avatar, ftp.FTPShell)
self.assertEqual(avatar.filesystemRoot, result)
def test_anonymous(self):
"""
L{ftp.BaseFTPRealm} returns an L{ftp.FTPAnonymousShell} instance for
anonymous avatar requests.
"""
anonymous = self.mktemp()
realm = ftp.BaseFTPRealm(anonymous)
iface, avatar, logout = realm.requestAvatar(
checkers.ANONYMOUS, None, ftp.IFTPShell)
self.assertIsInstance(avatar, ftp.FTPAnonymousShell)
self.assertEqual(avatar.filesystemRoot, filepath.FilePath(anonymous))
def test_notImplemented(self):
"""
L{ftp.BaseFTPRealm.getHomeDirectory} should be overridden by a subclass
and raises L{NotImplementedError} if it is not.
"""
realm = ftp.BaseFTPRealm(self.mktemp())
self.assertRaises(NotImplementedError, realm.getHomeDirectory, object())
class FTPRealmTestCase(unittest.TestCase):
"""
Tests for L{ftp.FTPRealm}.
"""
def test_getHomeDirectory(self):
"""
L{ftp.FTPRealm} accepts an extra directory to its initializer and treats
the avatarId passed to L{ftp.FTPRealm.getHomeDirectory} as a single path
segment to construct a child of that directory.
"""
base = '/path/to/home'
realm = ftp.FTPRealm(self.mktemp(), base)
home = realm.getHomeDirectory('alice@example.com')
self.assertEqual(
filepath.FilePath(base).child('alice@example.com'), home)
def test_defaultHomeDirectory(self):
"""
If no extra directory is passed to L{ftp.FTPRealm}, it uses C{"/home"}
as the base directory containing all user home directories.
"""
realm = ftp.FTPRealm(self.mktemp())
home = realm.getHomeDirectory('alice@example.com')
self.assertEqual(filepath.FilePath('/home/alice@example.com'), home)
class SystemFTPRealmTests(unittest.TestCase):
"""
Tests for L{ftp.SystemFTPRealm}.
"""
skip = nonPOSIXSkip
def test_getHomeDirectory(self):
"""
L{ftp.SystemFTPRealm.getHomeDirectory} treats the avatarId passed to it
as a username in the underlying platform and returns that account's home
directory.
"""
# Try to pick a username that will have a home directory.
user = getpass.getuser()
# Try to find their home directory in a different way than used by the
# implementation. Maybe this is silly and can only introduce spurious
# failures due to system-specific configurations.
import pwd
expected = pwd.getpwnam(user).pw_dir
realm = ftp.SystemFTPRealm(self.mktemp())
home = realm.getHomeDirectory(user)
self.assertEqual(home, filepath.FilePath(expected))
def test_noSuchUser(self):
"""
L{ftp.SystemFTPRealm.getHomeDirectory} raises L{UnauthorizedLogin} when
passed a username which has no corresponding home directory in the
system's accounts database.
"""
user = insecureRandom(4).encode('hex')
realm = ftp.SystemFTPRealm(self.mktemp())
self.assertRaises(UnauthorizedLogin, realm.getHomeDirectory, user)
class ErrnoToFailureTestCase(unittest.TestCase):
"""
Tests for L{ftp.errnoToFailure} errno checking.
"""
def test_notFound(self):
"""
C{errno.ENOENT} should be translated to L{ftp.FileNotFoundError}.
"""
d = ftp.errnoToFailure(errno.ENOENT, "foo")
return self.assertFailure(d, ftp.FileNotFoundError)
def test_permissionDenied(self):
"""
C{errno.EPERM} should be translated to L{ftp.PermissionDeniedError}.
"""
d = ftp.errnoToFailure(errno.EPERM, "foo")
return self.assertFailure(d, ftp.PermissionDeniedError)
def test_accessDenied(self):
"""
C{errno.EACCES} should be translated to L{ftp.PermissionDeniedError}.
"""
d = ftp.errnoToFailure(errno.EACCES, "foo")
return self.assertFailure(d, ftp.PermissionDeniedError)
def test_notDirectory(self):
"""
C{errno.ENOTDIR} should be translated to L{ftp.IsNotADirectoryError}.
"""
d = ftp.errnoToFailure(errno.ENOTDIR, "foo")
return self.assertFailure(d, ftp.IsNotADirectoryError)
def test_fileExists(self):
"""
C{errno.EEXIST} should be translated to L{ftp.FileExistsError}.
"""
d = ftp.errnoToFailure(errno.EEXIST, "foo")
return self.assertFailure(d, ftp.FileExistsError)
def test_isDirectory(self):
"""
C{errno.EISDIR} should be translated to L{ftp.IsADirectoryError}.
"""
d = ftp.errnoToFailure(errno.EISDIR, "foo")
return self.assertFailure(d, ftp.IsADirectoryError)
def test_passThrough(self):
"""
If an unknown errno is passed to L{ftp.errnoToFailure}, it should let
the originating exception pass through.
"""
try:
raise RuntimeError("bar")
except:
d = ftp.errnoToFailure(-1, "foo")
return self.assertFailure(d, RuntimeError)
class AnonymousFTPShellTestCase(unittest.TestCase):
"""
Test anonymous shell properties.
"""
def test_anonymousWrite(self):
"""
Check that L{ftp.FTPAnonymousShell} returns an error when trying to
open it in write mode.
"""
shell = ftp.FTPAnonymousShell('')
d = shell.openForWriting(('foo',))
self.assertFailure(d, ftp.PermissionDeniedError)
return d
class IFTPShellTestsMixin:
"""
Generic tests for the C{IFTPShell} interface.
"""
def directoryExists(self, path):
"""
Test if the directory exists at C{path}.
@param path: the relative path to check.
@type path: C{str}.
@return: C{True} if C{path} exists and is a directory, C{False} if
it's not the case
@rtype: C{bool}
"""
raise NotImplementedError()
def createDirectory(self, path):
"""
Create a directory in C{path}.
@param path: the relative path of the directory to create, with one
segment.
@type path: C{str}
"""
raise NotImplementedError()
def fileExists(self, path):
"""
Test if the file exists at C{path}.
@param path: the relative path to check.
@type path: C{str}.
@return: C{True} if C{path} exists and is a file, C{False} if it's not
the case.
@rtype: C{bool}
"""
raise NotImplementedError()
def createFile(self, path, fileContent=''):
"""
Create a file named C{path} with some content.
@param path: the relative path of the file to create, without
directory.
@type path: C{str}
@param fileContent: the content of the file.
@type fileContent: C{str}
"""
raise NotImplementedError()
def test_createDirectory(self):
"""
C{directoryExists} should report correctly about directory existence,
and C{createDirectory} should create a directory detectable by
C{directoryExists}.
"""
self.assertFalse(self.directoryExists('bar'))
self.createDirectory('bar')
self.assertTrue(self.directoryExists('bar'))
def test_createFile(self):
"""
C{fileExists} should report correctly about file existence, and
C{createFile} should create a file detectable by C{fileExists}.
"""
self.assertFalse(self.fileExists('file.txt'))
self.createFile('file.txt')
self.assertTrue(self.fileExists('file.txt'))
def test_makeDirectory(self):
"""
Create a directory and check it ends in the filesystem.
"""
d = self.shell.makeDirectory(('foo',))
def cb(result):
self.assertTrue(self.directoryExists('foo'))
return d.addCallback(cb)
def test_makeDirectoryError(self):
"""
Creating a directory that already exists should fail with a
C{ftp.FileExistsError}.
"""
self.createDirectory('foo')
d = self.shell.makeDirectory(('foo',))
return self.assertFailure(d, ftp.FileExistsError)
def test_removeDirectory(self):
"""
Try to remove a directory and check it's removed from the filesystem.
"""
self.createDirectory('bar')
d = self.shell.removeDirectory(('bar',))
def cb(result):
self.assertFalse(self.directoryExists('bar'))
return d.addCallback(cb)
def test_removeDirectoryOnFile(self):
"""
        removeDirectory should not work on a file and should fail with a
C{ftp.IsNotADirectoryError}.
"""
self.createFile('file.txt')
d = self.shell.removeDirectory(('file.txt',))
return self.assertFailure(d, ftp.IsNotADirectoryError)
def test_removeNotExistingDirectory(self):
"""
        Removing a directory that doesn't exist should fail with a
C{ftp.FileNotFoundError}.
"""
d = self.shell.removeDirectory(('bar',))
return self.assertFailure(d, ftp.FileNotFoundError)
def test_removeFile(self):
"""
Try to remove a file and check it's removed from the filesystem.
"""
self.createFile('file.txt')
d = self.shell.removeFile(('file.txt',))
def cb(res):
self.assertFalse(self.fileExists('file.txt'))
d.addCallback(cb)
return d
def test_removeFileOnDirectory(self):
"""
        removeFile should not work on a directory and should fail with a
        C{ftp.IsADirectoryError}.
"""
self.createDirectory('ned')
d = self.shell.removeFile(('ned',))
return self.assertFailure(d, ftp.IsADirectoryError)
def test_removeNotExistingFile(self):
"""
        Try to remove a nonexistent file, and check it raises a
L{ftp.FileNotFoundError}.
"""
d = self.shell.removeFile(('foo',))
return self.assertFailure(d, ftp.FileNotFoundError)
def test_list(self):
"""
Check the output of the list method.
"""
self.createDirectory('ned')
self.createFile('file.txt')
d = self.shell.list(('.',))
def cb(l):
l.sort()
self.assertEqual(l,
[('file.txt', []), ('ned', [])])
return d.addCallback(cb)
def test_listWithStat(self):
"""
Check the output of list with asked stats.
"""
self.createDirectory('ned')
self.createFile('file.txt')
d = self.shell.list(('.',), ('size', 'permissions',))
def cb(l):
l.sort()
self.assertEqual(len(l), 2)
self.assertEqual(l[0][0], 'file.txt')
self.assertEqual(l[1][0], 'ned')
# Size and permissions are reported differently between platforms
# so just check they are present
self.assertEqual(len(l[0][1]), 2)
self.assertEqual(len(l[1][1]), 2)
return d.addCallback(cb)
def test_listWithInvalidStat(self):
"""
        Querying an invalid stat should result in an C{AttributeError}.
"""
self.createDirectory('ned')
d = self.shell.list(('.',), ('size', 'whateverstat',))
return self.assertFailure(d, AttributeError)
def test_listFile(self):
"""
Check the output of the list method on a file.
"""
self.createFile('file.txt')
d = self.shell.list(('file.txt',))
def cb(l):
l.sort()
self.assertEqual(l,
[('file.txt', [])])
return d.addCallback(cb)
def test_listNotExistingDirectory(self):
"""
list on a directory that doesn't exist should fail with a
L{ftp.FileNotFoundError}.
"""
d = self.shell.list(('foo',))
return self.assertFailure(d, ftp.FileNotFoundError)
def test_access(self):
"""
Try to access a resource.
"""
self.createDirectory('ned')
d = self.shell.access(('ned',))
return d
def test_accessNotFound(self):
"""
access should fail on a resource that doesn't exist.
"""
d = self.shell.access(('foo',))
return self.assertFailure(d, ftp.FileNotFoundError)
def test_openForReading(self):
"""
Check that openForReading returns an object providing C{ftp.IReadFile}.
"""
self.createFile('file.txt')
d = self.shell.openForReading(('file.txt',))
def cb(res):
self.assertTrue(ftp.IReadFile.providedBy(res))
d.addCallback(cb)
return d
def test_openForReadingNotFound(self):
"""
openForReading should fail with a C{ftp.FileNotFoundError} on a file
that doesn't exist.
"""
d = self.shell.openForReading(('ned',))
return self.assertFailure(d, ftp.FileNotFoundError)
def test_openForReadingOnDirectory(self):
"""
        openForReading should not work on a directory.
"""
self.createDirectory('ned')
d = self.shell.openForReading(('ned',))
return self.assertFailure(d, ftp.IsADirectoryError)
def test_openForWriting(self):
"""
Check that openForWriting returns an object providing C{ftp.IWriteFile}.
"""
d = self.shell.openForWriting(('foo',))
def cb1(res):
self.assertTrue(ftp.IWriteFile.providedBy(res))
return res.receive().addCallback(cb2)
def cb2(res):
self.assertTrue(IConsumer.providedBy(res))
d.addCallback(cb1)
return d
def test_openForWritingExistingDirectory(self):
"""
openForWriting should not be able to open a directory that already
exists.
"""
self.createDirectory('ned')
d = self.shell.openForWriting(('ned',))
return self.assertFailure(d, ftp.IsADirectoryError)
def test_openForWritingInNotExistingDirectory(self):
"""
        openForWriting should fail with a L{ftp.FileNotFoundError} if you specify
a file in a directory that doesn't exist.
"""
self.createDirectory('ned')
d = self.shell.openForWriting(('ned', 'idonotexist', 'foo'))
return self.assertFailure(d, ftp.FileNotFoundError)
def test_statFile(self):
"""
Check the output of the stat method on a file.
"""
fileContent = 'wobble\n'
self.createFile('file.txt', fileContent)
d = self.shell.stat(('file.txt',), ('size', 'directory'))
def cb(res):
self.assertEqual(res[0], len(fileContent))
self.assertFalse(res[1])
d.addCallback(cb)
return d
def test_statDirectory(self):
"""
Check the output of the stat method on a directory.
"""
self.createDirectory('ned')
d = self.shell.stat(('ned',), ('size', 'directory'))
def cb(res):
self.assertTrue(res[1])
d.addCallback(cb)
return d
def test_statOwnerGroup(self):
"""
Check the owner and groups stats.
"""
self.createDirectory('ned')
d = self.shell.stat(('ned',), ('owner', 'group'))
def cb(res):
self.assertEqual(len(res), 2)
d.addCallback(cb)
return d
def test_statHardlinksNotImplemented(self):
"""
If L{twisted.python.filepath.FilePath.getNumberOfHardLinks} is not
        implemented, the number returned is 0.
"""
pathFunc = self.shell._path
def raiseNotImplemented():
raise NotImplementedError
def notImplementedFilePath(path):
f = pathFunc(path)
f.getNumberOfHardLinks = raiseNotImplemented
return f
self.shell._path = notImplementedFilePath
self.createDirectory('ned')
d = self.shell.stat(('ned',), ('hardlinks',))
self.assertEqual(self.successResultOf(d), [0])
def test_statOwnerGroupNotImplemented(self):
"""
If L{twisted.python.filepath.FilePath.getUserID} or
L{twisted.python.filepath.FilePath.getGroupID} are not implemented,
the owner returned is "0" and the group is returned as "0"
"""
pathFunc = self.shell._path
def raiseNotImplemented():
raise NotImplementedError
def notImplementedFilePath(path):
f = pathFunc(path)
f.getUserID = raiseNotImplemented
f.getGroupID = raiseNotImplemented
return f
self.shell._path = notImplementedFilePath
self.createDirectory('ned')
d = self.shell.stat(('ned',), ('owner', 'group'))
self.assertEqual(self.successResultOf(d), ["0", '0'])
def test_statNotExisting(self):
"""
stat should fail with L{ftp.FileNotFoundError} on a file that doesn't
exist.
"""
d = self.shell.stat(('foo',), ('size', 'directory'))
return self.assertFailure(d, ftp.FileNotFoundError)
def test_invalidStat(self):
"""
        Querying an invalid stat should result in an C{AttributeError}.
"""
self.createDirectory('ned')
d = self.shell.stat(('ned',), ('size', 'whateverstat'))
return self.assertFailure(d, AttributeError)
def test_rename(self):
"""
Try to rename a directory.
"""
self.createDirectory('ned')
d = self.shell.rename(('ned',), ('foo',))
def cb(res):
self.assertTrue(self.directoryExists('foo'))
self.assertFalse(self.directoryExists('ned'))
return d.addCallback(cb)
def test_renameNotExisting(self):
"""
Renaming a directory that doesn't exist should fail with
L{ftp.FileNotFoundError}.
"""
d = self.shell.rename(('foo',), ('bar',))
return self.assertFailure(d, ftp.FileNotFoundError)
class FTPShellTestCase(unittest.TestCase, IFTPShellTestsMixin):
"""
Tests for the C{ftp.FTPShell} object.
"""
def setUp(self):
"""
Create a root directory and instantiate a shell.
"""
self.root = filepath.FilePath(self.mktemp())
self.root.createDirectory()
self.shell = ftp.FTPShell(self.root)
def directoryExists(self, path):
"""
Test if the directory exists at C{path}.
"""
return self.root.child(path).isdir()
def createDirectory(self, path):
"""
Create a directory in C{path}.
"""
return self.root.child(path).createDirectory()
def fileExists(self, path):
"""
Test if the file exists at C{path}.
"""
return self.root.child(path).isfile()
def createFile(self, path, fileContent=''):
"""
Create a file named C{path} with some content.
"""
return self.root.child(path).setContent(fileContent)
class TestConsumer(object):
"""
A simple consumer for tests. It only works with non-streaming producers.
@ivar producer: an object providing
L{twisted.internet.interfaces.IPullProducer}.
"""
implements(IConsumer)
producer = None
def registerProducer(self, producer, streaming):
"""
Simple register of producer, checks that no register has happened
before.
"""
assert self.producer is None
self.buffer = []
self.producer = producer
self.producer.resumeProducing()
def unregisterProducer(self):
"""
Unregister the producer, it should be done after a register.
"""
assert self.producer is not None
self.producer = None
def write(self, data):
"""
Save the data received.
"""
self.buffer.append(data)
self.producer.resumeProducing()
class TestProducer(object):
"""
A dumb producer.
"""
def __init__(self, toProduce, consumer):
"""
@param toProduce: data to write
@type toProduce: C{str}
@param consumer: the consumer of data.
@type consumer: C{IConsumer}
"""
self.toProduce = toProduce
self.consumer = consumer
def start(self):
"""
Send the data to consume.
"""
self.consumer.write(self.toProduce)
class IReadWriteTestsMixin:
"""
Generic tests for the C{IReadFile} and C{IWriteFile} interfaces.
"""
def getFileReader(self, content):
"""
Return an object providing C{IReadFile}, ready to send data C{content}.
"""
raise NotImplementedError()
def getFileWriter(self):
"""
Return an object providing C{IWriteFile}, ready to receive data.
"""
raise NotImplementedError()
def getFileContent(self):
"""
Return the content of the file used.
"""
raise NotImplementedError()
def test_read(self):
"""
Test L{ftp.IReadFile}: the implementation should have a send method
returning a C{Deferred} which fires when all the data has been sent
        to the consumer, and the data should be correctly sent to the consumer.
"""
content = 'wobble\n'
consumer = TestConsumer()
def cbGet(reader):
return reader.send(consumer).addCallback(cbSend)
def cbSend(res):
self.assertEqual("".join(consumer.buffer), content)
return self.getFileReader(content).addCallback(cbGet)
def test_write(self):
"""
Test L{ftp.IWriteFile}: the implementation should have a receive
method returning a C{Deferred} which fires with a consumer ready to
receive data to be written. It should also have a close() method that
returns a Deferred.
"""
content = 'elbbow\n'
def cbGet(writer):
return writer.receive().addCallback(cbReceive, writer)
def cbReceive(consumer, writer):
producer = TestProducer(content, consumer)
consumer.registerProducer(None, True)
producer.start()
consumer.unregisterProducer()
return writer.close().addCallback(cbClose)
def cbClose(ignored):
self.assertEqual(self.getFileContent(), content)
return self.getFileWriter().addCallback(cbGet)
class FTPReadWriteTestCase(unittest.TestCase, IReadWriteTestsMixin):
"""
Tests for C{ftp._FileReader} and C{ftp._FileWriter}, the objects returned
by the shell in C{openForReading}/C{openForWriting}.
"""
def setUp(self):
"""
Create a temporary file used later.
"""
self.root = filepath.FilePath(self.mktemp())
self.root.createDirectory()
self.shell = ftp.FTPShell(self.root)
self.filename = "file.txt"
def getFileReader(self, content):
"""
Return a C{ftp._FileReader} instance with a file opened for reading.
"""
self.root.child(self.filename).setContent(content)
return self.shell.openForReading((self.filename,))
def getFileWriter(self):
"""
Return a C{ftp._FileWriter} instance with a file opened for writing.
"""
return self.shell.openForWriting((self.filename,))
def getFileContent(self):
"""
Return the content of the temporary file.
"""
return self.root.child(self.filename).getContent()
class CloseTestWriter:
implements(ftp.IWriteFile)
closeStarted = False
def receive(self):
self.s = StringIO()
fc = ftp.FileConsumer(self.s)
return defer.succeed(fc)
def close(self):
self.closeStarted = True
return self.d
class CloseTestShell:
def openForWriting(self, segs):
return defer.succeed(self.writer)
class FTPCloseTest(unittest.TestCase):
"""Tests that the server invokes IWriteFile.close"""
def test_write(self):
"""Confirm that FTP uploads (i.e. ftp_STOR) correctly call and wait
upon the IWriteFile object's close() method"""
f = ftp.FTP()
f.workingDirectory = ["root"]
f.shell = CloseTestShell()
f.shell.writer = CloseTestWriter()
f.shell.writer.d = defer.Deferred()
f.factory = ftp.FTPFactory()
f.factory.timeOut = None
f.makeConnection(StringIO())
di = ftp.DTP()
di.factory = ftp.DTPFactory(f)
f.dtpInstance = di
di.makeConnection(None)#
stor_done = []
d = f.ftp_STOR("path")
d.addCallback(stor_done.append)
# the writer is still receiving data
self.assertFalse(f.shell.writer.closeStarted, "close() called early")
di.dataReceived("some data here")
self.assertFalse(f.shell.writer.closeStarted, "close() called early")
di.connectionLost("reason is ignored")
# now we should be waiting in close()
self.assertTrue(f.shell.writer.closeStarted, "close() not called")
self.assertFalse(stor_done)
f.shell.writer.d.callback("allow close() to finish")
self.assertTrue(stor_done)
return d # just in case an errback occurred
class FTPResponseCodeTests(unittest.TestCase):
"""
Tests relating directly to response codes.
"""
def test_unique(self):
"""
All of the response code globals (for example C{RESTART_MARKER_REPLY} or
C{USR_NAME_OK_NEED_PASS}) have unique values and are present in the
C{RESPONSE} dictionary.
"""
allValues = set(ftp.RESPONSE)
seenValues = set()
for key, value in vars(ftp).items():
if isinstance(value, str) and key.isupper():
self.assertIn(
value, allValues,
"Code %r with value %r missing from RESPONSE dict" % (
key, value))
self.assertNotIn(
value, seenValues,
"Duplicate code %r with value %r" % (key, value))
seenValues.add(value)
| 35.097285
| 94
| 0.600964
|
44fdba74831f16efae82caae5c8ccfdaad9ab1d1
| 3,509
|
py
|
Python
|
stories/migrations/0001_initial.py
|
luterien/madcyoa
|
1af9140717fd00c5c671ccbcd09d75df51dee263
|
[
"Apache-2.0"
] | null | null | null |
stories/migrations/0001_initial.py
|
luterien/madcyoa
|
1af9140717fd00c5c671ccbcd09d75df51dee263
|
[
"Apache-2.0"
] | null | null | null |
stories/migrations/0001_initial.py
|
luterien/madcyoa
|
1af9140717fd00c5c671ccbcd09d75df51dee263
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2015-12-14 08:48
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Chapter',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('slug', models.SlugField(max_length=100)),
('text', models.TextField()),
('sort_order', models.IntegerField(default=1)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Choice',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('slug', models.SlugField(max_length=100)),
('text', models.TextField()),
('sort_order', models.IntegerField(default=1)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Snippet',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('slug', models.SlugField(max_length=100)),
('text', models.TextField()),
('sort_order', models.IntegerField(default=1)),
('is_completed', models.BooleanField(default=False)),
('is_failed', models.BooleanField(default=False)),
('starting_point', models.BooleanField(default=False)),
('checkpoint', models.BooleanField(default=False)),
('chapter', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='related_snippets', to='stories.Chapter')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Story',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('slug', models.SlugField(max_length=100)),
('text', models.TextField()),
('sort_order', models.IntegerField(default=1)),
],
options={
'abstract': False,
},
),
migrations.AddField(
model_name='choice',
name='source',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='sources', to='stories.Snippet'),
),
migrations.AddField(
model_name='choice',
name='target',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='targets', to='stories.Snippet'),
),
migrations.AddField(
model_name='chapter',
name='story',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='related_chapters', to='stories.Story'),
),
]
| 38.988889
| 147
| 0.54346
|
13a28d9d54276c55c9677a549c7bb9a1b8c516da
| 12,348
|
py
|
Python
|
doc/source/conf.py
|
vighneshbirodkar/scikit-image
|
766745c0498a82852c3044fb205c9970ba1caee2
|
[
"BSD-3-Clause"
] | null | null | null |
doc/source/conf.py
|
vighneshbirodkar/scikit-image
|
766745c0498a82852c3044fb205c9970ba1caee2
|
[
"BSD-3-Clause"
] | 2
|
2016-01-08T18:30:49.000Z
|
2016-07-21T07:55:29.000Z
|
doc/source/conf.py
|
vighneshbirodkar/scikit-image
|
766745c0498a82852c3044fb205c9970ba1caee2
|
[
"BSD-3-Clause"
] | 2
|
2017-05-09T13:33:37.000Z
|
2018-12-23T10:57:18.000Z
|
# -*- coding: utf-8 -*-
#
# skimage documentation build configuration file, created by
# sphinx-quickstart on Sat Aug 22 13:00:30 2009.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import skimage
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.append(os.path.abspath('.'))
curpath = os.path.dirname(__file__)
sys.path.append(os.path.join(curpath, '..', 'ext'))
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.imgmath',
'numpydoc',
'sphinx.ext.autosummary',
'sphinx.ext.intersphinx',
'sphinx.ext.linkcode',
'sphinx_gallery.gen_gallery'
]
autosummary_generate = True
#------------------------------------------------------------------------
# Sphinx-gallery configuration
#------------------------------------------------------------------------
sphinx_gallery_conf = {
'doc_module' : 'skimage',
# path to your examples scripts
'examples_dirs' : '../examples',
# path where to save gallery generated examples
'gallery_dirs' : 'auto_examples',
'mod_example_dir': 'api',
'reference_url' : {
'skimage': None,
'matplotlib': 'http://matplotlib.org',
'numpy': 'http://docs.scipy.org/doc/numpy-1.6.0',
'scipy': 'http://docs.scipy.org/doc/scipy-0.11.0/reference',}
}
# Determine if matplotlib has a recent enough version of the
# plot_directive, otherwise use the local fork.
try:
from matplotlib.sphinxext import plot_directive
except ImportError:
use_matplotlib_plot_directive = False
else:
try:
use_matplotlib_plot_directive = (plot_directive.__version__ >= 2)
except AttributeError:
use_matplotlib_plot_directive = False
if use_matplotlib_plot_directive:
extensions.append('matplotlib.sphinxext.plot_directive')
else:
extensions.append('plot_directive')
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'skimage'
copyright = '2013, the scikit-image team'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
with open('../../skimage/__init__.py') as f:
setup_lines = f.readlines()
version = 'vUndefined'
for l in setup_lines:
if l.startswith('__version__'):
version = l.split("'")[1]
break
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'scikit-image'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['themes']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = 'skimage v%s docs' % version
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = '_static/favicon.ico'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
html_sidebars = {
'**': ['searchbox.html',
'navigation.html',
'localtoc.html',
'versions.html'],
}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'scikitimagedoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('contents', 'scikit-image.tex', u'The scikit-image Documentation',
u'scikit-image development team', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
latex_preamble = r'''
\usepackage{enumitem}
\setlistdepth{100}
\usepackage{amsmath}
\DeclareUnicodeCharacter{00A0}{\nobreakspace}
% In the parameters section, place a newline after the Parameters header
\usepackage{expdlist}
\let\latexdescription=\description
\def\description{\latexdescription{}{} \breaklabel}
% Make Examples/etc section headers smaller and more compact
\makeatletter
\titleformat{\paragraph}{\normalsize\py@HeaderFamily}%
{\py@TitleColor}{0em}{\py@TitleColor}{\py@NormalColor}
\titlespacing*{\paragraph}{0pt}{1ex}{0pt}
\makeatother
'''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
latex_use_modindex = False
# -----------------------------------------------------------------------------
# Numpy extensions
# -----------------------------------------------------------------------------
numpydoc_show_class_members = False
numpydoc_class_members_toctree = False
# -----------------------------------------------------------------------------
# Plots
# -----------------------------------------------------------------------------
plot_basedir = os.path.join(curpath, "plots")
plot_pre_code = """
import numpy as np
import matplotlib.pyplot as plt
np.random.seed(0)
import matplotlib
matplotlib.rcParams.update({
'font.size': 14,
'axes.titlesize': 12,
'axes.labelsize': 10,
'xtick.labelsize': 8,
'ytick.labelsize': 8,
'legend.fontsize': 10,
'figure.subplot.bottom': 0.2,
'figure.subplot.left': 0.2,
'figure.subplot.right': 0.9,
'figure.subplot.top': 0.85,
'figure.subplot.wspace': 0.4,
'text.usetex': False,
})
"""
plot_include_source = True
plot_formats = [('png', 100), ('pdf', 100)]
plot2rst_index_name = 'README'
plot2rst_rcparams = {'image.cmap' : 'gray',
'image.interpolation' : 'none'}
# -----------------------------------------------------------------------------
# intersphinx
# -----------------------------------------------------------------------------
_python_version_str = '{0.major}.{0.minor}'.format(sys.version_info)
_python_doc_base = 'http://docs.python.org/' + _python_version_str
intersphinx_mapping = {
'python': (_python_doc_base, None),
'numpy': ('http://docs.scipy.org/doc/numpy',
(None, './_intersphinx/numpy-objects.inv')),
'scipy': ('http://docs.scipy.org/doc/scipy/reference',
(None, './_intersphinx/scipy-objects.inv')),
'sklearn': ('http://scikit-learn.org/stable',
(None, './_intersphinx/sklearn-objects.inv')),
'matplotlib': ('http://matplotlib.org/',
(None, 'http://matplotlib.org/objects.inv'))
}
# ----------------------------------------------------------------------------
# Source code links
# ----------------------------------------------------------------------------
import inspect
from os.path import relpath, dirname
# Function courtesy of NumPy to return URLs containing line numbers
def linkcode_resolve(domain, info):
"""
Determine the URL corresponding to Python object
"""
if domain != 'py':
return None
modname = info['module']
fullname = info['fullname']
submod = sys.modules.get(modname)
if submod is None:
return None
obj = submod
for part in fullname.split('.'):
try:
obj = getattr(obj, part)
except:
return None
try:
fn = inspect.getsourcefile(obj)
except:
fn = None
if not fn:
return None
try:
source, lineno = inspect.findsource(obj)
except:
lineno = None
if lineno:
linespec = "#L%d" % (lineno + 1)
else:
linespec = ""
fn = relpath(fn, start=dirname(skimage.__file__))
if 'dev' in skimage.__version__:
return ("http://github.com/scikit-image/scikit-image/blob/"
"master/skimage/%s%s" % (fn, linespec))
else:
return ("http://github.com/scikit-image/scikit-image/blob/"
"v%s/skimage/%s%s" % (skimage.__version__, fn, linespec))
| 31.5
| 80
| 0.642776
|
316f66e4427a5da93f77be6cb560ad3744cd6985
| 1,978
|
py
|
Python
|
observations/api.py
|
dbca-wa/penguins
|
9e107d071cd38d9d05a673a52061753cae458d83
|
[
"Apache-2.0"
] | null | null | null |
observations/api.py
|
dbca-wa/penguins
|
9e107d071cd38d9d05a673a52061753cae458d83
|
[
"Apache-2.0"
] | 8
|
2021-03-31T20:09:47.000Z
|
2022-03-29T22:03:38.000Z
|
observations/api.py
|
dbca-wa/penguins
|
9e107d071cd38d9d05a673a52061753cae458d83
|
[
"Apache-2.0"
] | 3
|
2019-01-14T04:53:40.000Z
|
2019-01-22T01:46:31.000Z
|
from rest_framework import viewsets
from rest_framework import filters
from observations.models import PenguinCount, PenguinObservation, Video
from utils import RetrievePartialUpdateDestroyAPIView
import datetime
class PenguinCountViewSet(viewsets.ReadOnlyModelViewSet):
model = PenguinCount
class PenguinObservationViewSet(viewsets.ModelViewSet):
model = PenguinObservation
class VideoViewSet(viewsets.ModelViewSet):
model = Video
filter_backends = (filters.DjangoFilterBackend,)
filter_fields = ('camera', 'date')
def partial_update(self, request, pk=None):
if 'mark_complete' in request.DATA:
if request.DATA['mark_complete']:
self.get_object().completed_by.add(request.user)
pobs = PenguinObservation.objects.filter(
video=self.get_object(),
observer=request.user) # .update(validated=True)
for obs in pobs:
obs.validated = True
obs.save()
if pobs.count() == 0:
d = self.get_object().date
hour = self.get_object().end_time.hour
observation_date = datetime.datetime(
d.year,
d.month,
d.day,
hour,
0)
p = PenguinObservation(
video=self.get_object(),
observer=request.user,
site=self.get_object().camera.site,
seen=0,
comments="[default]No penguins reported",
validated=True,
date=observation_date)
p.save()
else:
self.get_object().completed_by.remove(request.user)
response = super(VideoViewSet, self).partial_update(request, pk)
return response
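
# Illustrative request sketch (editor's addition, not part of the original
# module). The endpoint path and primary key below are hypothetical; they only
# show the shape of the payload that partial_update() above reacts to:
#
#   PATCH /videos/42/
#   {"mark_complete": true}    -> adds request.user to completed_by and
#                                 validates (or creates) their observations
#   {"mark_complete": false}   -> removes request.user from completed_by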
| 35.963636
| 72
| 0.548534
|
a4e4a9fc69ec9f755596ceb2ceccbc3f45791696
| 2,169
|
py
|
Python
|
hw/ip/otbn/dv/rig/rig/gens/untaken_branch.py
|
GregAC/opentitan
|
40b607b776d7b10cfc2899cc0d724d00dc0c91a2
|
[
"Apache-2.0"
] | 1,375
|
2019-11-05T15:11:00.000Z
|
2022-03-28T17:50:43.000Z
|
hw/ip/otbn/dv/rig/rig/gens/untaken_branch.py
|
GregAC/opentitan
|
40b607b776d7b10cfc2899cc0d724d00dc0c91a2
|
[
"Apache-2.0"
] | 7,045
|
2019-11-05T16:05:45.000Z
|
2022-03-31T23:08:08.000Z
|
hw/ip/otbn/dv/rig/rig/gens/untaken_branch.py
|
GregAC/opentitan
|
40b607b776d7b10cfc2899cc0d724d00dc0c91a2
|
[
"Apache-2.0"
] | 428
|
2019-11-05T15:00:20.000Z
|
2022-03-28T15:34:57.000Z
|
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
import random
from typing import Optional, List, Tuple
from ..model import Model
from ..program import ProgInsn, Program
from .branch_gen import BranchGen
from shared.insn_yaml import Insn
from ..snippet_gen import GenCont, GenRet
class UntakenBranch(BranchGen):
'''A snippet generator for branches that are not taken.'''
def gen(self,
cont: GenCont,
model: Model,
program: Program) -> Optional[GenRet]:
# Return None if this is the last instruction in the current gap
# because we need to either jump or do an ECALL to avoid getting stuck.
if program.get_insn_space_at(model.pc) <= 1:
return None
return super().gen(cont, model, program)
def pick_offset(self,
min_addr: int,
max_addr: int,
model: Model,
program: Program) -> int:
        # The code below picks aligned and unaligned addresses randomly, each
        # with 50% probability.
tgt_addr = random.randrange(min_addr, max_addr + 1, 2)
return tgt_addr
def pick_second_op(self,
equals: List[int],
not_equals: List[int]) -> Optional[Tuple[int, Insn]]:
if not_equals:
chosen_insn = random.choices(self.insns, weights=self.weights)[0]
else:
chosen_insn = self.bne
bne_weight = self.weights[0]
if not bne_weight:
return None
# Generates instructions having registers with unequal values for beq
# and registers with equal values for bne.
grs2_choices = not_equals if chosen_insn.mnemonic == 'beq' else equals
assert grs2_choices
op_val_grs2 = random.choice(grs2_choices)
return (op_val_grs2, chosen_insn)
def update_for_insn(self,
model: Model,
prog_insn: ProgInsn) -> None:
model.update_for_insn(prog_insn)
model.pc += 4
| 32.863636
| 79
| 0.613186
|
9dca069f2390b06371f04d55ed8307421655900f
| 6,996
|
py
|
Python
|
sympy/unify/core.py
|
qcgm1978/sympy
|
cc46047f4449b525b7b0edd4c634bf93d6e7c83d
|
[
"BSD-3-Clause"
] | 1
|
2021-06-24T09:01:18.000Z
|
2021-06-24T09:01:18.000Z
|
sympy/unify/core.py
|
qcgm1978/sympy
|
cc46047f4449b525b7b0edd4c634bf93d6e7c83d
|
[
"BSD-3-Clause"
] | 3
|
2021-02-28T03:58:40.000Z
|
2021-03-07T06:12:47.000Z
|
sympy/unify/core.py
|
qcgm1978/sympy
|
cc46047f4449b525b7b0edd4c634bf93d6e7c83d
|
[
"BSD-3-Clause"
] | 1
|
2020-08-12T10:51:20.000Z
|
2020-08-12T10:51:20.000Z
|
""" Generic Unification algorithm for expression trees with lists of children
This implementation is a direct translation of
Artificial Intelligence: A Modern Approach by Stuart Russell and Peter Norvig
Second edition, section 9.2, page 276
It is modified in the following ways:
1. We allow associative and commutative Compound expressions. This results in
combinatorial blowup.
2. We explore the tree lazily.
3. We provide generic interfaces to symbolic algebra libraries in Python.
A more traditional version can be found here
http://aima.cs.berkeley.edu/python/logic.html
"""
from sympy.utilities.iterables import kbins
class Compound:
""" A little class to represent an interior node in the tree
This is analogous to SymPy.Basic for non-Atoms
"""
def __init__(self, op, args):
self.op = op
self.args = args
def __eq__(self, other):
return (type(self) == type(other) and self.op == other.op and
self.args == other.args)
def __hash__(self):
return hash((type(self), self.op, self.args))
def __str__(self):
return "%s[%s]" % (str(self.op), ', '.join(map(str, self.args)))
class Variable:
""" A Wild token """
def __init__(self, arg):
self.arg = arg
def __eq__(self, other):
return type(self) == type(other) and self.arg == other.arg
def __hash__(self):
return hash((type(self), self.arg))
def __str__(self):
return "Variable(%s)" % str(self.arg)
class CondVariable:
""" A wild token that matches conditionally
arg - a wild token
valid - an additional constraining function on a match
"""
def __init__(self, arg, valid):
self.arg = arg
self.valid = valid
def __eq__(self, other):
return (type(self) == type(other) and
self.arg == other.arg and
self.valid == other.valid)
def __hash__(self):
return hash((type(self), self.arg, self.valid))
def __str__(self):
return "CondVariable(%s)" % str(self.arg)
def unify(x, y, s=None, **fns):
""" Unify two expressions
Parameters
==========
x, y - expression trees containing leaves, Compounds and Variables
s - a mapping of variables to subtrees
Returns
=======
lazy sequence of mappings {Variable: subtree}
Examples
========
>>> from sympy.unify.core import unify, Compound, Variable
>>> expr = Compound("Add", ("x", "y"))
>>> pattern = Compound("Add", ("x", Variable("a")))
>>> next(unify(expr, pattern, {}))
{Variable(a): 'y'}
"""
s = s or {}
if x == y:
yield s
elif isinstance(x, (Variable, CondVariable)):
yield from unify_var(x, y, s, **fns)
elif isinstance(y, (Variable, CondVariable)):
yield from unify_var(y, x, s, **fns)
elif isinstance(x, Compound) and isinstance(y, Compound):
is_commutative = fns.get('is_commutative', lambda x: False)
is_associative = fns.get('is_associative', lambda x: False)
for sop in unify(x.op, y.op, s, **fns):
if is_associative(x) and is_associative(y):
a, b = (x, y) if len(x.args) < len(y.args) else (y, x)
if is_commutative(x) and is_commutative(y):
combs = allcombinations(a.args, b.args, 'commutative')
else:
combs = allcombinations(a.args, b.args, 'associative')
for aaargs, bbargs in combs:
aa = [unpack(Compound(a.op, arg)) for arg in aaargs]
bb = [unpack(Compound(b.op, arg)) for arg in bbargs]
yield from unify(aa, bb, sop, **fns)
elif len(x.args) == len(y.args):
yield from unify(x.args, y.args, sop, **fns)
elif is_args(x) and is_args(y) and len(x) == len(y):
if len(x) == 0:
yield s
else:
for shead in unify(x[0], y[0], s, **fns):
yield from unify(x[1:], y[1:], shead, **fns)
def unify_var(var, x, s, **fns):
if var in s:
yield from unify(s[var], x, s, **fns)
elif occur_check(var, x):
pass
elif isinstance(var, CondVariable) and var.valid(x):
yield assoc(s, var, x)
elif isinstance(var, Variable):
yield assoc(s, var, x)
def occur_check(var, x):
""" var occurs in subtree owned by x? """
if var == x:
return True
elif isinstance(x, Compound):
return occur_check(var, x.args)
elif is_args(x):
if any(occur_check(var, xi) for xi in x): return True
return False
def assoc(d, key, val):
""" Return copy of d with key associated to val """
d = d.copy()
d[key] = val
return d
def is_args(x):
""" Is x a traditional iterable? """
return type(x) in (tuple, list, set)
def unpack(x):
if isinstance(x, Compound) and len(x.args) == 1:
return x.args[0]
else:
return x
def allcombinations(A, B, ordered):
"""
Restructure A and B to have the same number of elements
ordered must be either 'commutative' or 'associative'
A and B can be rearranged so that the larger of the two lists is
reorganized into smaller sublists.
Examples
========
>>> from sympy.unify.core import allcombinations
>>> for x in allcombinations((1, 2, 3), (5, 6), 'associative'): print(x)
(((1,), (2, 3)), ((5,), (6,)))
(((1, 2), (3,)), ((5,), (6,)))
>>> for x in allcombinations((1, 2, 3), (5, 6), 'commutative'): print(x)
(((1,), (2, 3)), ((5,), (6,)))
(((1, 2), (3,)), ((5,), (6,)))
(((1,), (3, 2)), ((5,), (6,)))
(((1, 3), (2,)), ((5,), (6,)))
(((2,), (1, 3)), ((5,), (6,)))
(((2, 1), (3,)), ((5,), (6,)))
(((2,), (3, 1)), ((5,), (6,)))
(((2, 3), (1,)), ((5,), (6,)))
(((3,), (1, 2)), ((5,), (6,)))
(((3, 1), (2,)), ((5,), (6,)))
(((3,), (2, 1)), ((5,), (6,)))
(((3, 2), (1,)), ((5,), (6,)))
"""
if ordered == "commutative":
ordered = 11
if ordered == "associative":
ordered = None
sm, bg = (A, B) if len(A) < len(B) else (B, A)
for part in kbins(list(range(len(bg))), len(sm), ordered=ordered):
if bg == B:
yield tuple((a,) for a in A), partition(B, part)
else:
yield partition(A, part), tuple((b,) for b in B)
def partition(it, part):
""" Partition a tuple/list into pieces defined by indices
Examples
========
>>> from sympy.unify.core import partition
>>> partition((10, 20, 30, 40), [[0, 1, 2], [3]])
((10, 20, 30), (40,))
"""
return type(it)([index(it, ind) for ind in part])
def index(it, ind):
""" Fancy indexing into an indexable iterable (tuple, list)
Examples
========
>>> from sympy.unify.core import index
>>> index([10, 20, 30], (1, 2, 0))
[20, 30, 10]
"""
return type(it)([it[i] for i in ind])
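
# ---------------------------------------------------------------------------
# Illustrative usage sketch (editor's addition, not part of the module).
# The is_associative/is_commutative callables are assumptions for this
# example; real callers pass their own predicates through **fns.
#
#   expr = Compound("Add", ("x", "y", "z"))
#   pattern = Compound("Add", ("x", Variable("a"), Variable("b")))
#   for s in unify(expr, pattern, {},
#                  is_associative=lambda e: True,
#                  is_commutative=lambda e: True):
#       print(s)   # e.g. {Variable(a): 'y', Variable(b): 'z'} and the swap
# ---------------------------------------------------------------------------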
| 30.155172
| 78
| 0.546598
|
aec99cd978c75cf0662b6527d2bd0665796d11f6
| 2,268
|
py
|
Python
|
ML_model.py
|
MichelKu/Stress_Hospital_Staff
|
027a37ad7483f91b9a465b7dc201d0c91e8723ab
|
[
"CC0-1.0"
] | null | null | null |
ML_model.py
|
MichelKu/Stress_Hospital_Staff
|
027a37ad7483f91b9a465b7dc201d0c91e8723ab
|
[
"CC0-1.0"
] | null | null | null |
ML_model.py
|
MichelKu/Stress_Hospital_Staff
|
027a37ad7483f91b9a465b7dc201d0c91e8723ab
|
[
"CC0-1.0"
] | 1
|
2021-04-08T09:15:00.000Z
|
2021-04-08T09:15:00.000Z
|
# After timing the different models, the choice fell on Random Forest.
import numpy as np
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.ensemble import RandomForestClassifier
from sklearn.preprocessing import StandardScaler
import sqlite3
sc = StandardScaler()
classifier11 = RandomForestClassifier(n_estimators = 150, random_state = 0)
#classifier12 = RandomForestClassifier(n_estimators = 150, random_state = 0)
def train_col11():
con = sqlite3.connect("StressDatabase.db")
dataset = pd.read_sql_query("SELECT * FROM stress", con)
    X = dataset.iloc[:, [0,1,2,4,5,7,8,9]].values # change columns here to see their effect.
    y = dataset.iloc[:, 10].values # put the dependent variable here.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size= 0.2, random_state= 0)
X_train = sc.fit_transform(X_train)
classifier11.fit(X_train, y_train)
def predict_col11(age, gender, Specialization, workHours, patientPerDay, overtimeWorkInterest, overtimeWorkPaid, sector):
y_pred = classifier11.predict(sc.transform([[age, gender, sector, Specialization, workHours, patientPerDay, overtimeWorkInterest, overtimeWorkPaid]]))
#print('from inside ML_model col11: ', y_pred)
y_pred = int(y_pred)
return { "col11_predict": y_pred }
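
# Illustrative call sketch (editor's addition; the feature values are made up).
# predict_col11 expects the same eight features, in the order of its signature,
# that train_col11 selects from columns [0,1,2,4,5,7,8,9]:
#   train_col11()
#   predict_col11(age=35, gender=1, Specialization=2, workHours=40,
#                 patientPerDay=12, overtimeWorkInterest=1,
#                 overtimeWorkPaid=0, sector=1)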
# def train_col12():
# con = sqlite3.connect("StressDatabase.db")
# dataset = pd.read_sql_query("SELECT * FROM stress", con)
# X = dataset.iloc[:, [0,1,2,4,5,7,8,9]].values # change columns here to find out effect.
# y = dataset.iloc[:, 11].values # put the dependent varible here.
# X_train, X_test, y_train, y_test = train_test_split(X, y, test_size= 0.2, random_state= 0)
# X_train = sc.fit_transform(X_train)
# classifier12.fit(X_train, y_train)
# def predict_col12(age, gender, Specialization, workHours, patientPerDay, overtimeWorkInterest, overtimeWorkPaid, sector):
# y_pred = classifier12.predict(sc.transform([[age, gender, sector, Specialization, workHours, patientPerDay, overtimeWorkInterest, overtimeWorkPaid]]))
# #print('from inside ML_model col12: ', y_pred)
# y_pred = int(y_pred)
# return{"col12-predict": y_pred}
#train_col11()
#train_col12()
#predict_col11(2,1,6,1,1,1)
# predict_col12(2,2,2,2,2,1)
| 36
| 154
| 0.738977
|
19f702375a952a21c5e7987d48a0f3dff3b73ff9
| 11,195
|
py
|
Python
|
text/nltk/corpus/__init__.py
|
LisaDawn/TextBlob
|
6b76c3f568a3cea16389d6bdb1b3e5a9dc6043e9
|
[
"MIT"
] | 2
|
2015-03-10T14:33:16.000Z
|
2015-03-10T14:33:18.000Z
|
text/nltk/corpus/__init__.py
|
LisaDawn/TextBlob
|
6b76c3f568a3cea16389d6bdb1b3e5a9dc6043e9
|
[
"MIT"
] | null | null | null |
text/nltk/corpus/__init__.py
|
LisaDawn/TextBlob
|
6b76c3f568a3cea16389d6bdb1b3e5a9dc6043e9
|
[
"MIT"
] | null | null | null |
# Natural Language Toolkit: Corpus Readers
#
# Copyright (C) 2001-2013 NLTK Project
# Author: Edward Loper <edloper@gradient.cis.upenn.edu>
# URL: <http://www.nltk.org/>
# For license information, see LICENSE.TXT
# [xx] this docstring isn't up-to-date!
"""
NLTK corpus readers. The modules in this package provide functions
that can be used to read corpus files in a variety of formats. These
functions can be used to read both the corpus files that are
distributed in the NLTK corpus package, and corpus files that are part
of external corpora.
Available Corpora
=================
Please see http://nltk.googlecode.com/svn/trunk/nltk_data/index.xml
for a complete list. Install corpora using nltk.download().
Corpus Reader Functions
=======================
Each corpus module defines one or more "corpus reader functions",
which can be used to read documents from that corpus. These functions
take an argument, ``item``, which is used to indicate which document
should be read from the corpus:
- If ``item`` is one of the unique identifiers listed in the corpus
module's ``items`` variable, then the corresponding document will
be loaded from the NLTK corpus package.
- If ``item`` is a filename, then that file will be read.
Additionally, corpus reader functions can be given lists of item
names; in which case, they will return a concatenation of the
corresponding documents.
Corpus reader functions are named based on the type of information
they return. Some common examples, and their return types, are:
- words(): list of str
- sents(): list of (list of str)
- paras(): list of (list of (list of str))
- tagged_words(): list of (str,str) tuple
- tagged_sents(): list of (list of (str,str))
- tagged_paras(): list of (list of (list of (str,str)))
- chunked_sents(): list of (Tree w/ (str,str) leaves)
- parsed_sents(): list of (Tree with str leaves)
- parsed_paras(): list of (list of (Tree with str leaves))
- xml(): A single xml ElementTree
- raw(): unprocessed corpus contents
For example, to read a list of the words in the Brown Corpus, use
``nltk.corpus.brown.words()``:
>>> from nltk.corpus import brown
>>> print(", ".join(brown.words()))
The, Fulton, County, Grand, Jury, said, ...
"""
import re
from nltk.tokenize import RegexpTokenizer
from nltk.tag import simplify_brown_tag, simplify_wsj_tag,\
simplify_alpino_tag, simplify_indian_tag,\
simplify_tag
from .util import LazyCorpusLoader
from .reader import *
abc = LazyCorpusLoader(
'abc', PlaintextCorpusReader, r'(?!\.).*\.txt', encoding=[
('science', 'latin_1'),
('rural', 'utf8')])
alpino = LazyCorpusLoader(
'alpino', AlpinoCorpusReader, tag_mapping_function=simplify_alpino_tag)
brown = LazyCorpusLoader(
'brown', CategorizedTaggedCorpusReader, r'c[a-z]\d\d',
cat_file='cats.txt', tag_mapping_function=simplify_brown_tag,
encoding="ascii")
cess_cat = LazyCorpusLoader(
'cess_cat', BracketParseCorpusReader, r'(?!\.).*\.tbf',
tag_mapping_function=simplify_tag, encoding='ISO-8859-2')
cess_esp = LazyCorpusLoader(
'cess_esp', BracketParseCorpusReader, r'(?!\.).*\.tbf',
tag_mapping_function=simplify_tag, encoding='ISO-8859-2')
cmudict = LazyCorpusLoader(
'cmudict', CMUDictCorpusReader, ['cmudict'])
comtrans = LazyCorpusLoader(
'comtrans', AlignedCorpusReader, r'(?!\.).*\.txt')
conll2000 = LazyCorpusLoader(
'conll2000', ConllChunkCorpusReader,
['train.txt', 'test.txt'], ('NP','VP','PP'),
tag_mapping_function=simplify_wsj_tag, encoding='ascii')
conll2002 = LazyCorpusLoader(
'conll2002', ConllChunkCorpusReader, '.*\.(test|train).*',
('LOC', 'PER', 'ORG', 'MISC'), encoding='utf-8')
conll2007 = LazyCorpusLoader(
'conll2007', DependencyCorpusReader, '.*\.(test|train).*', encoding=[
('eus', 'ISO-8859-2'),
('esp', 'utf8')])
dependency_treebank = LazyCorpusLoader(
'dependency_treebank', DependencyCorpusReader, '.*\.dp',
encoding='ascii')
floresta = LazyCorpusLoader(
'floresta', BracketParseCorpusReader, r'(?!\.).*\.ptb', '#',
tag_mapping_function=simplify_tag, encoding='ISO-8859-15')
framenet = LazyCorpusLoader(
'framenet_v15', FramenetCorpusReader, ['frRelation.xml','frameIndex.xml','fulltextIndex.xml','luIndex.xml','semTypes.xml'])
gazetteers = LazyCorpusLoader(
'gazetteers', WordListCorpusReader, r'(?!LICENSE|\.).*\.txt',
encoding='ISO-8859-2')
genesis = LazyCorpusLoader(
'genesis', PlaintextCorpusReader, r'(?!\.).*\.txt', encoding=[
('finnish|french|german', 'latin_1'),
('swedish', 'cp865'),
('.*', 'utf_8')])
gutenberg = LazyCorpusLoader(
'gutenberg', PlaintextCorpusReader, r'(?!\.).*\.txt', encoding='latin1')
# corpus not available with NLTK; these lines caused help(nltk.corpus) to break
#hebrew_treebank = LazyCorpusLoader(
# 'hebrew_treebank', BracketParseCorpusReader, r'.*\.txt')
ieer = LazyCorpusLoader(
'ieer', IEERCorpusReader, r'(?!README|\.).*')
inaugural = LazyCorpusLoader(
'inaugural', PlaintextCorpusReader, r'(?!\.).*\.txt', encoding='latin1')
# [XX] This should probably just use TaggedCorpusReader:
indian = LazyCorpusLoader(
'indian', IndianCorpusReader, r'(?!\.).*\.pos',
tag_mapping_function=simplify_indian_tag,
encoding='utf8')
ipipan = LazyCorpusLoader(
'ipipan', IPIPANCorpusReader, r'(?!\.).*morph\.xml')
jeita = LazyCorpusLoader(
'jeita', ChasenCorpusReader, r'.*\.chasen', encoding='utf-8')
knbc = LazyCorpusLoader(
'knbc/corpus1', KNBCorpusReader, r'.*/KN.*', encoding='euc-jp')
lin_thesaurus = LazyCorpusLoader(
'lin_thesaurus', LinThesaurusCorpusReader, r'.*\.lsp')
mac_morpho = LazyCorpusLoader(
'mac_morpho', MacMorphoCorpusReader, r'(?!\.).*\.txt',
tag_mapping_function=simplify_tag, encoding='latin-1')
machado = LazyCorpusLoader(
'machado', PortugueseCategorizedPlaintextCorpusReader,
r'(?!\.).*\.txt', cat_pattern=r'([a-z]*)/.*', encoding='latin-1')
movie_reviews = LazyCorpusLoader(
'movie_reviews', CategorizedPlaintextCorpusReader,
r'(?!\.).*\.txt', cat_pattern=r'(neg|pos)/.*',
encoding='ascii')
names = LazyCorpusLoader(
'names', WordListCorpusReader, r'(?!\.).*\.txt', encoding='ascii')
nps_chat = LazyCorpusLoader(
'nps_chat', NPSChatCorpusReader, r'(?!README|\.).*\.xml',
tag_mapping_function=simplify_wsj_tag)
pl196x = LazyCorpusLoader(
'pl196x', Pl196xCorpusReader, r'[a-z]-.*\.xml',
cat_file='cats.txt', textid_file='textids.txt', encoding='utf8')
ppattach = LazyCorpusLoader(
'ppattach', PPAttachmentCorpusReader, ['training', 'test', 'devset'])
ptb = LazyCorpusLoader( # Penn Treebank v3: WSJ and Brown portions
'ptb', CategorizedBracketParseCorpusReader, r'(WSJ/\d\d/WSJ_\d\d|BROWN/C[A-Z]/C[A-Z])\d\d.MRG',
cat_file='allcats.txt', tag_mapping_function=simplify_wsj_tag)
qc = LazyCorpusLoader(
'qc', StringCategoryCorpusReader, ['train.txt', 'test.txt'], encoding='ISO-8859-2')
reuters = LazyCorpusLoader(
'reuters', CategorizedPlaintextCorpusReader, '(training|test).*',
cat_file='cats.txt', encoding='ISO-8859-2')
rte = LazyCorpusLoader(
'rte', RTECorpusReader, r'(?!\.).*\.xml')
semcor = LazyCorpusLoader(
'semcor', SemcorCorpusReader, r'brown./tagfiles/br-.*\.xml')
senseval = LazyCorpusLoader(
'senseval', SensevalCorpusReader, r'(?!\.).*\.pos')
shakespeare = LazyCorpusLoader(
'shakespeare', XMLCorpusReader, r'(?!\.).*\.xml')
sinica_treebank = LazyCorpusLoader(
'sinica_treebank', SinicaTreebankCorpusReader, ['parsed'],
tag_mapping_function=simplify_tag, encoding='utf-8')
state_union = LazyCorpusLoader(
'state_union', PlaintextCorpusReader, r'(?!\.).*\.txt',
encoding='ISO-8859-2')
stopwords = LazyCorpusLoader(
'stopwords', WordListCorpusReader, r'(?!README|\.).*', encoding='utf8')
swadesh = LazyCorpusLoader(
'swadesh', SwadeshCorpusReader, r'(?!README|\.).*', encoding='utf8')
switchboard = LazyCorpusLoader(
'switchboard', SwitchboardCorpusReader,
tag_mapping_function=simplify_wsj_tag)
timit = LazyCorpusLoader(
'timit', TimitCorpusReader)
timit_tagged = LazyCorpusLoader(
'timit', TimitTaggedCorpusReader, '.+\.tags',
tag_mapping_function=simplify_wsj_tag, encoding='ascii')
toolbox = LazyCorpusLoader(
'toolbox', ToolboxCorpusReader, r'(?!.*(README|\.)).*\.(dic|txt)')
treebank = LazyCorpusLoader(
'treebank/combined', BracketParseCorpusReader, r'wsj_.*\.mrg',
tag_mapping_function=simplify_wsj_tag, encoding='ascii')
treebank_chunk = LazyCorpusLoader(
'treebank/tagged', ChunkedCorpusReader, r'wsj_.*\.pos',
sent_tokenizer=RegexpTokenizer(r'(?<=/\.)\s*(?![^\[]*\])', gaps=True),
para_block_reader=tagged_treebank_para_block_reader, encoding='ascii')
treebank_raw = LazyCorpusLoader(
'treebank/raw', PlaintextCorpusReader, r'wsj_.*', encoding='ISO-8859-2')
udhr = LazyCorpusLoader(
'udhr', UdhrCorpusReader)
verbnet = LazyCorpusLoader(
'verbnet', VerbnetCorpusReader, r'(?!\.).*\.xml')
webtext = LazyCorpusLoader(
'webtext', PlaintextCorpusReader, r'(?!README|\.).*\.txt', encoding='ISO-8859-2')
wordnet = LazyCorpusLoader(
'wordnet', WordNetCorpusReader)
wordnet_ic = LazyCorpusLoader(
'wordnet_ic', WordNetICCorpusReader, '.*\.dat')
words = LazyCorpusLoader(
'words', WordListCorpusReader, r'(?!README|\.).*', encoding='ascii')
ycoe = LazyCorpusLoader(
'ycoe', YCOECorpusReader)
# defined after treebank
propbank = LazyCorpusLoader(
'propbank', PropbankCorpusReader,
'prop.txt', 'frames/.*\.xml', 'verbs.txt',
lambda filename: re.sub(r'^wsj/\d\d/', '', filename),
treebank) # Must be defined *after* treebank corpus.
nombank = LazyCorpusLoader(
'nombank.1.0', NombankCorpusReader,
'nombank.1.0', 'frames/.*\.xml', 'nombank.1.0.words',
lambda filename: re.sub(r'^wsj/\d\d/', '', filename),
treebank) # Must be defined *after* treebank corpus.
propbank_ptb = LazyCorpusLoader(
'propbank', PropbankCorpusReader,
'prop.txt', 'frames/.*\.xml', 'verbs.txt',
lambda filename: filename.upper(),
ptb) # Must be defined *after* ptb corpus.
nombank_ptb = LazyCorpusLoader(
'nombank.1.0', NombankCorpusReader,
'nombank.1.0', 'frames/.*\.xml', 'nombank.1.0.words',
lambda filename: filename.upper(),
ptb) # Must be defined *after* ptb corpus.
def demo():
# This is out-of-date:
abc.demo()
brown.demo()
# chat80.demo()
cmudict.demo()
conll2000.demo()
conll2002.demo()
genesis.demo()
gutenberg.demo()
ieer.demo()
inaugural.demo()
indian.demo()
names.demo()
ppattach.demo()
senseval.demo()
shakespeare.demo()
sinica_treebank.demo()
state_union.demo()
stopwords.demo()
timit.demo()
toolbox.demo()
treebank.demo()
udhr.demo()
webtext.demo()
words.demo()
# ycoe.demo()
if __name__ == '__main__':
#demo()
pass
# ** this is for nose **
# unload all corpus after tests
def teardown_module(module=None):
import nltk.corpus
for name in dir(nltk.corpus):
obj = getattr(nltk.corpus, name, None)
if isinstance(obj, CorpusReader) and hasattr(obj, '_unload'):
obj._unload()
| 39.839858
| 127
| 0.689951
|
d75e4ed83d7612ba172d4dc33ade1bf9b794d5c2
| 759
|
py
|
Python
|
scripts/forcefield/test_ff.py
|
jhwnkim/nanopores
|
98b3dbb5d36464fbdc03f59d224d38e4255324ce
|
[
"MIT"
] | 8
|
2016-09-07T01:59:31.000Z
|
2021-03-06T12:14:31.000Z
|
scripts/forcefield/test_ff.py
|
jhwnkim/nanopores
|
98b3dbb5d36464fbdc03f59d224d38e4255324ce
|
[
"MIT"
] | null | null | null |
scripts/forcefield/test_ff.py
|
jhwnkim/nanopores
|
98b3dbb5d36464fbdc03f59d224d38e4255324ce
|
[
"MIT"
] | 4
|
2017-12-06T17:43:01.000Z
|
2020-05-01T05:41:14.000Z
|
# (c) 2017 Gregor Mitscha-Baude
import numpy as np
from nanopores import user_params
import nanopores.models.pughpore as pugh
from folders import fields
ddata = {2: dict(name="Dpugh", Nmax=1e5, dim=2, r=0.11, h=1.0),
3: dict(name="Dpugh", Nmax=2e6, dim=3, r=0.11, h=2.0)}
physp = dict(
bV = -0.08,
Qmol = 5.,
bulkcon = 1000.,
dnaqsdamp = 0.5882,
)
default = {
2: dict(physp, dim=2, h=.75, Nmax=1e5, diffusivity_data=ddata[2]),
3: dict(physp, dim=3, h=1.25, Nmax=7e5, diffusivity_data=ddata[3],
stokesiter=True)}
dim = 3
params = user_params(default[dim])
ran = np.linspace(-30, 30, 10)
X = [[0.,0.,t] for t in ran]
result = pugh.F_explicit(X, name="pugh_test", **params)
print(result)
print(result["J"])
| 24.483871
| 70
| 0.631094
|
866ecd51fea15b427bb5e38717130887e781a264
| 2,208
|
py
|
Python
|
setup.py
|
basnijholt/pyfeast
|
b6d8832b3a101900ed8b50127c1884ef74b34750
|
[
"BSD-3-Clause"
] | null | null | null |
setup.py
|
basnijholt/pyfeast
|
b6d8832b3a101900ed8b50127c1884ef74b34750
|
[
"BSD-3-Clause"
] | null | null | null |
setup.py
|
basnijholt/pyfeast
|
b6d8832b3a101900ed8b50127c1884ef74b34750
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python3
import configparser
import sys
import os.path
import numpy
from distutils.core import setup
from distutils.extension import Extension
from Cython.Build import cythonize
from create_cython_files import create_feast_pxd, create_feast_pyx
def guess_libraries():
"""Return the configuration for FEAST if it is available in a known way.
This is known to work with the FEAST binaries in the conda-forge channel."""
import ctypes.util
common_libs = ['mkl_rt', 'gfortran', 'iomp5']
for lib in ['blas', 'openblas']:
if ctypes.util.find_library(lib):
return common_libs + [lib]
else:
print('Cannot find MKL or openBLAS!')
sys.exit(1)
def guess_libraries_dirs():
return [os.path.join(sys.exec_prefix, 'lib')]
def guess_include_dirs():
return [os.path.join(sys.exec_prefix, 'include')]
def guess(key):
if key == 'library_dirs':
return guess_libraries_dirs()
elif key == 'include_dirs':
return guess_include_dirs()
elif key == 'libraries':
return guess_libraries()
def get_config(config_file='build.conf'):
# Read build configuration file.
configs = configparser.ConfigParser()
try:
with open(config_file) as f:
configs.read_file(f)
config = dict(configs['feast'])
except IOError:
print('User-configured build config.')
config = {}
except KeyError:
print('User-configured build config, '
'but no `feast` section.')
config = {}
keys = ['include_dirs', 'library_dirs', 'libraries']
for k in keys:
if k in config:
config[k] = config[k].split()
else:
print('Auto configuring `{}` (best guess)'.format(k))
config[k] = guess(k)
config['include_dirs'].append(numpy.get_include())
return config
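
# Illustrative build.conf sketch (editor's addition). get_config() above reads
# an optional `[feast]` section with whitespace-separated values; the paths and
# library names here are hypothetical:
#
#   [feast]
#   include_dirs = /opt/feast/include
#   library_dirs = /opt/feast/lib
#   libraries = feast mkl_rt gfortran iomp5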
if __name__ == '__main__':
ext_params = get_config()
create_feast_pxd()
create_feast_pyx()
ext_modules=[
Extension("feast",
sources=["feast.pyx"],
**ext_params,
)
]
setup(
name="pyfeast",
ext_modules=cythonize(ext_modules),
)
| 25.090909
| 80
| 0.622736
|
9eef4507a1049f444dd8f98165d5ad407fe8b023
| 1,263
|
py
|
Python
|
project/project/urls.py
|
minaee/cd557
|
4db87e1412709d1dff70a5d10642814e35a251f1
|
[
"MIT"
] | null | null | null |
project/project/urls.py
|
minaee/cd557
|
4db87e1412709d1dff70a5d10642814e35a251f1
|
[
"MIT"
] | null | null | null |
project/project/urls.py
|
minaee/cd557
|
4db87e1412709d1dff70a5d10642814e35a251f1
|
[
"MIT"
] | null | null | null |
"""project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/4.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, re_path, include
from django.views.static import serve
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
path('admin/', admin.site.urls),
path('', include('pages.urls')),
path('users/', include('users.urls')),
path('university/', include('university.urls')),
re_path(r'^media/(?P<path>.*)$', serve,{'document_root': settings.MEDIA_ROOT}),
re_path(r'^static/(?P<path>.*)$', serve,{'document_root': settings.STATIC_ROOT}),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| 38.272727
| 85
| 0.700713
|
a8d84820ab9b497e8a89ca130036fc7a1dadbf32
| 18,418
|
py
|
Python
|
src/pgen/parser/tokenize.py
|
blockpy-edu/skulpt
|
dc70288aedcd7670605ef28f8525546440b39f93
|
[
"MIT"
] | 4
|
2020-01-19T01:42:06.000Z
|
2021-05-13T09:51:38.000Z
|
src/pgen/parser/tokenize.py
|
blockpy-edu/skulpt
|
dc70288aedcd7670605ef28f8525546440b39f93
|
[
"MIT"
] | null | null | null |
src/pgen/parser/tokenize.py
|
blockpy-edu/skulpt
|
dc70288aedcd7670605ef28f8525546440b39f93
|
[
"MIT"
] | 4
|
2019-10-16T21:50:53.000Z
|
2021-01-11T06:25:57.000Z
|
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006 Python Software Foundation.
# All rights reserved.
"""Tokenization help for Python programs.
generate_tokens(readline) is a generator that breaks a stream of
text into Python tokens. It accepts a readline-like method which is called
repeatedly to get the next line of input (or "" for EOF). It generates
5-tuples with these members:
the token type (see token.py)
the token (a string)
the starting (row, column) indices of the token (a 2-tuple of ints)
the ending (row, column) indices of the token (a 2-tuple of ints)
the original line (string)
It is designed to match the working of the Python tokenizer exactly, except
that it produces COMMENT tokens for comments and gives type OP for all
operators
Older entry points
tokenize_loop(readline, tokeneater)
tokenize(readline, tokeneater=printtoken)
are the same, except instead of generating tokens, tokeneater is a callback
function to which the 5 fields described above are passed as 5 arguments,
each time a new token is found."""
__author__ = 'Ka-Ping Yee <ping@lfw.org>'
__credits__ = \
'GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, Skip Montanaro'
import string, re
from codecs import BOM_UTF8, lookup
# Meredydd changed - why were we using a Python 2 token table from the
# filesystem that disagrees with our local token.py?
# from lib2to3.pgen2.token import *
# from import token
# __all__ = [x for x in dir(token) if x[0] != '_'] + ["tokenize",
# "generate_tokens", "untokenize"]
# del token
from token import *
def group(*choices): return '(' + '|'.join(choices) + ')'
def any(*choices): return group(*choices) + '*'
def maybe(*choices): return group(*choices) + '?'
Whitespace = r'[ \f\t]*'
Comment = r'#[^\r\n]*'
Ignore = Whitespace + any(r'\\\r?\n' + Whitespace) + maybe(Comment)
Name = r'[a-zA-Z_]\w*'
Binnumber = r'0[bB][01]*'
Hexnumber = r'0[xX][\da-fA-F]*[lL]?'
Octnumber = r'0[oO]?[0-7]*[lL]?'
Decnumber = r'[1-9]\d*[lL]?'
Intnumber = group(Binnumber, Hexnumber, Octnumber, Decnumber)
Exponent = r'[eE][-+]?\d+'
Pointfloat = group(r'\d+\.\d*', r'\.\d+') + maybe(Exponent)
Expfloat = r'\d+' + Exponent
Floatnumber = group(Pointfloat, Expfloat)
Imagnumber = group(r'\d+[jJ]', Floatnumber + r'[jJ]')
Number = group(Imagnumber, Floatnumber, Intnumber)
# Tail end of ' string.
Single = r"[^'\\]*(?:\\.[^'\\]*)*'"
# Tail end of " string.
Double = r'[^"\\]*(?:\\.[^"\\]*)*"'
# Tail end of ''' string.
Single3 = r"[^'\\]*(?:(?:\\.|'(?!''))[^'\\]*)*'''"
# Tail end of """ string.
Double3 = r'[^"\\]*(?:(?:\\.|"(?!""))[^"\\]*)*"""'
Triple = group("[ubUB]?[rR]?'''", '[ubUB]?[rR]?"""')
# Single-line ' or " string.
String = group(r"[uU]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*'",
r'[uU]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*"')
# Because of leftmost-then-longest match semantics, be sure to put the
# longest operators first (e.g., if = came before ==, == would get
# recognized as two instances of =).
Operator = group(r"\*\*=?", r">>=?", r"<<=?", r"<>", r"!=",
r"//=?", r"->",
r"[+\-*/%&|^=<>]=?",
r"~")
Bracket = '[][(){}]'
Special = group(r'\r?\n', r'[:;.,`@]')
Funny = group(Operator, Bracket, Special)
PlainToken = group(Number, Funny, String, Name)
Token = Ignore + PlainToken
# First (or only) line of ' or " string.
ContStr = group(r"[uUbB]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*" +
group("'", r'\\\r?\n'),
r'[uUbB]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*' +
group('"', r'\\\r?\n'))
PseudoExtras = group(r'\\\r?\n', Comment, Triple)
PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name)
tokenprog, pseudoprog, single3prog, double3prog = list(map(
re.compile, (Token, PseudoToken, Single3, Double3)))
endprogs = {"'": re.compile(Single), '"': re.compile(Double),
"'''": single3prog, '"""': double3prog,
"r'''": single3prog, 'r"""': double3prog,
"u'''": single3prog, 'u"""': double3prog,
"b'''": single3prog, 'b"""': double3prog,
"ur'''": single3prog, 'ur"""': double3prog,
"br'''": single3prog, 'br"""': double3prog,
"R'''": single3prog, 'R"""': double3prog,
"U'''": single3prog, 'U"""': double3prog,
"B'''": single3prog, 'B"""': double3prog,
"uR'''": single3prog, 'uR"""': double3prog,
"Ur'''": single3prog, 'Ur"""': double3prog,
"UR'''": single3prog, 'UR"""': double3prog,
"bR'''": single3prog, 'bR"""': double3prog,
"Br'''": single3prog, 'Br"""': double3prog,
"BR'''": single3prog, 'BR"""': double3prog,
'r': None, 'R': None,
'u': None, 'U': None,
'b': None, 'B': None}
triple_quoted = {}
for t in ("'''", '"""',
"r'''", 'r"""', "R'''", 'R"""',
"u'''", 'u"""', "U'''", 'U"""',
"b'''", 'b"""', "B'''", 'B"""',
"ur'''", 'ur"""', "Ur'''", 'Ur"""',
"uR'''", 'uR"""', "UR'''", 'UR"""',
"br'''", 'br"""', "Br'''", 'Br"""',
"bR'''", 'bR"""', "BR'''", 'BR"""',):
triple_quoted[t] = t
single_quoted = {}
for t in ("'", '"',
"r'", 'r"', "R'", 'R"',
"u'", 'u"', "U'", 'U"',
"b'", 'b"', "B'", 'B"',
"ur'", 'ur"', "Ur'", 'Ur"',
"uR'", 'uR"', "UR'", 'UR"',
"br'", 'br"', "Br'", 'Br"',
"bR'", 'bR"', "BR'", 'BR"',):
single_quoted[t] = t
tabsize = 8
class TokenError(Exception): pass
class StopTokenizing(Exception): pass
def printtoken(type, token, start, end, line): # for testing
    (srow, scol) = start
    (erow, ecol) = end
    print("%d,%d-%d,%d:\t%s\t%s" % \
        (srow, scol, erow, ecol, tok_name[type], repr(token)))
def tokenize(readline, tokeneater=printtoken):
"""
The tokenize() function accepts two parameters: one representing the
input stream, and one providing an output mechanism for tokenize().
The first parameter, readline, must be a callable object which provides
the same interface as the readline() method of built-in file objects.
Each call to the function should return one line of input as a string.
The second parameter, tokeneater, must also be a callable object. It is
called once for each token, with five arguments, corresponding to the
tuples generated by generate_tokens().
"""
try:
tokenize_loop(readline, tokeneater)
except StopTokenizing:
pass
# backwards compatible interface
def tokenize_loop(readline, tokeneater):
for token_info in generate_tokens(readline):
tokeneater(*token_info)
class Untokenizer:
def __init__(self):
self.tokens = []
self.prev_row = 1
self.prev_col = 0
def add_whitespace(self, start):
row, col = start
assert row <= self.prev_row
col_offset = col - self.prev_col
if col_offset:
self.tokens.append(" " * col_offset)
def untokenize(self, iterable):
for t in iterable:
if len(t) == 2:
self.compat(t, iterable)
break
tok_type, token, start, end, line = t
self.add_whitespace(start)
self.tokens.append(token)
self.prev_row, self.prev_col = end
if tok_type in (NEWLINE, NL):
self.prev_row += 1
self.prev_col = 0
return "".join(self.tokens)
def compat(self, token, iterable):
startline = False
indents = []
toks_append = self.tokens.append
toknum, tokval = token
if toknum in (NAME, NUMBER):
tokval += ' '
if toknum in (NEWLINE, NL):
startline = True
for tok in iterable:
toknum, tokval = tok[:2]
if toknum in (NAME, NUMBER):
tokval += ' '
if toknum == INDENT:
indents.append(tokval)
continue
elif toknum == DEDENT:
indents.pop()
continue
elif toknum in (NEWLINE, NL):
startline = True
elif startline and indents:
toks_append(indents[-1])
startline = False
toks_append(tokval)
cookie_re = re.compile("coding[:=]\s*([-\w.]+)")
def detect_encoding(readline):
"""
The detect_encoding() function is used to detect the encoding that should
    be used to decode a Python source file. It requires one argument, readline,
in the same way as the tokenize() generator.
It will call readline a maximum of twice, and return the encoding used
(as a string) and a list of any lines (left as bytes) it has read
in.
It detects the encoding from the presence of a utf-8 bom or an encoding
cookie as specified in pep-0263. If both a bom and a cookie are present,
but disagree, a SyntaxError will be raised. If the encoding cookie is an
invalid charset, raise a SyntaxError.
If no encoding is specified, then the default of 'utf-8' will be returned.
"""
bom_found = False
encoding = None
def read_or_stop():
try:
return readline()
except StopIteration:
return b''
def find_cookie(line):
try:
line_string = line.decode('ascii')
except UnicodeDecodeError:
return None
matches = cookie_re.findall(line_string)
if not matches:
return None
encoding = matches[0]
try:
codec = lookup(encoding)
except LookupError:
# This behaviour mimics the Python interpreter
raise SyntaxError("unknown encoding: " + encoding)
if bom_found and codec.name != 'utf-8':
# This behaviour mimics the Python interpreter
raise SyntaxError('encoding problem: utf-8')
return encoding
first = read_or_stop()
if first.startswith(BOM_UTF8):
bom_found = True
first = first[3:]
if not first:
return 'utf-8', []
encoding = find_cookie(first)
if encoding:
return encoding, [first]
second = read_or_stop()
if not second:
return 'utf-8', [first]
encoding = find_cookie(second)
if encoding:
return encoding, [first, second]
return 'utf-8', [first, second]
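# Example (illustrative sketch): detect_encoding() expects a *bytes* readline, e.g. from a
# file opened in binary mode ("some_module.py" is a placeholder path).
#
#     with open("some_module.py", "rb") as fp:
#         encoding, consumed_lines = detect_encoding(fp.readline)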
def untokenize(iterable):
"""Transform tokens back into Python source code.
Each element returned by the iterable must be a token sequence
with at least two elements, a token number and token value. If
only two tokens are passed, the resulting output is poor.
Round-trip invariant for full input:
Untokenized source will match input source exactly
    Round-trip invariant for limited input:
        # Output text will tokenize back to the input
t1 = [tok[:2] for tok in generate_tokens(f.readline)]
newcode = untokenize(t1)
readline = iter(newcode.splitlines(1)).next
        t2 = [tok[:2] for tok in generate_tokens(readline)]
assert t1 == t2
"""
ut = Untokenizer()
return ut.untokenize(iterable)
def generate_tokens(readline):
"""
    The generate_tokens() generator requires one argument, readline, which
must be a callable object which provides the same interface as the
readline() method of built-in file objects. Each call to the function
should return one line of input as a string. Alternately, readline
can be a callable function terminating with StopIteration:
readline = open(myfile).next # Example of alternate readline
The generator produces 5-tuples with these members: the token type; the
token string; a 2-tuple (srow, scol) of ints specifying the row and
column where the token begins in the source; a 2-tuple (erow, ecol) of
ints specifying the row and column where the token ends in the source;
and the line on which the token was found. The line passed is the
logical line; continuation lines are included.
"""
lnum = parenlev = continued = 0
namechars, numchars = string.ascii_letters + '_', '0123456789'
contstr, needcont = '', 0
contline = None
indents = [0]
while 1: # loop over lines in stream
try:
line = readline()
except StopIteration:
line = ''
lnum = lnum + 1
pos, max = 0, len(line)
if contstr: # continued string
if not line:
raise TokenError("EOF in multi-line string", strstart)
endmatch = endprog.match(line)
if endmatch:
pos = end = endmatch.end(0)
yield (STRING, contstr + line[:end],
strstart, (lnum, end), contline + line)
contstr, needcont = '', 0
contline = None
elif needcont and line[-2:] != '\\\n' and line[-3:] != '\\\r\n':
yield (ERRORTOKEN, contstr + line,
strstart, (lnum, len(line)), contline)
contstr = ''
contline = None
continue
else:
contstr = contstr + line
contline = contline + line
continue
elif parenlev == 0 and not continued: # new statement
if not line: break
column = 0
while pos < max: # measure leading whitespace
if line[pos] == ' ':
column = column + 1
elif line[pos] == '\t':
                    column = (column // tabsize + 1) * tabsize
elif line[pos] == '\f':
column = 0
else:
break
pos = pos + 1
if pos == max: break
if line[pos] in '#\r\n': # skip comments or blank lines
if line[pos] == '#':
comment_token = line[pos:].rstrip('\r\n')
nl_pos = pos + len(comment_token)
yield (COMMENT, comment_token,
(lnum, pos), (lnum, pos + len(comment_token)), line)
yield (NL, line[nl_pos:],
(lnum, nl_pos), (lnum, len(line)), line)
else:
yield ((NL, COMMENT)[line[pos] == '#'], line[pos:],
(lnum, pos), (lnum, len(line)), line)
continue
if column > indents[-1]: # count indents or dedents
indents.append(column)
yield (INDENT, line[:pos], (lnum, 0), (lnum, pos), line)
while column < indents[-1]:
if column not in indents:
raise IndentationError(
"unindent does not match any outer indentation level",
("<tokenize>", lnum, pos, line))
indents = indents[:-1]
yield (DEDENT, '', (lnum, pos), (lnum, pos), line)
else: # continued statement
if not line:
raise TokenError("EOF in multi-line statement", (lnum, 0))
continued = 0
while pos < max:
pseudomatch = pseudoprog.match(line, pos)
if pseudomatch: # scan for tokens
start, end = pseudomatch.span(1)
spos, epos, pos = (lnum, start), (lnum, end), end
token, initial = line[start:end], line[start]
if initial in numchars or \
(initial == '.' and token != '.'): # ordinary number
yield (NUMBER, token, spos, epos, line)
elif initial in '\r\n':
newline = NEWLINE
if parenlev > 0:
newline = NL
yield (newline, token, spos, epos, line)
elif initial == '#':
assert not token.endswith("\n")
yield (COMMENT, token, spos, epos, line)
elif token in triple_quoted:
endprog = endprogs[token]
endmatch = endprog.match(line, pos)
if endmatch: # all on one line
pos = endmatch.end(0)
token = line[start:pos]
yield (STRING, token, spos, (lnum, pos), line)
else:
strstart = (lnum, start) # multiple lines
contstr = line[start:]
contline = line
break
elif initial in single_quoted or \
token[:2] in single_quoted or \
token[:3] in single_quoted:
if token[-1] == '\n': # continued string
strstart = (lnum, start)
endprog = (endprogs[initial] or endprogs[token[1]] or
endprogs[token[2]])
contstr, needcont = line[start:], 1
contline = line
break
else: # ordinary string
yield (STRING, token, spos, epos, line)
elif initial in namechars: # ordinary name
yield (NAME, token, spos, epos, line)
elif initial == '\\': # continued stmt
# This yield is new; needed for better idempotency:
yield (NL, token, spos, (lnum, pos), line)
continued = 1
else:
if initial in '([{':
parenlev = parenlev + 1
elif initial in ')]}':
parenlev = parenlev - 1
yield (OP, token, spos, epos, line)
else:
yield (ERRORTOKEN, line[pos],
(lnum, pos), (lnum, pos + 1), line)
pos = pos + 1
for indent in indents[1:]: # pop remaining indent levels
yield (DEDENT, '', (lnum, 0), (lnum, 0), '')
yield (ENDMARKER, '', (lnum, 0), (lnum, 0), '')
if __name__ == '__main__': # testing
import sys
if len(sys.argv) > 1:
tokenize(open(sys.argv[1]).readline)
else:
tokenize(sys.stdin.readline)
| 36.255906
| 88
| 0.531654
|
073918726a49278ba5dcaaf69dfabc5622ce723b
| 2,177
|
py
|
Python
|
models/decoders/simple_decoder.py
|
thanhhau097/keras_ocr
|
bf453aada51c86c9166cc780982683726b6c091e
|
[
"Apache-2.0"
] | 1
|
2020-03-06T10:53:28.000Z
|
2020-03-06T10:53:28.000Z
|
models/decoders/simple_decoder.py
|
thanhhau097/ocr
|
bf453aada51c86c9166cc780982683726b6c091e
|
[
"Apache-2.0"
] | 2
|
2020-01-28T22:13:55.000Z
|
2020-09-25T22:21:05.000Z
|
models/decoders/simple_decoder.py
|
thanhhau097/ocr
|
bf453aada51c86c9166cc780982683726b6c091e
|
[
"Apache-2.0"
] | 3
|
2020-01-24T07:22:17.000Z
|
2020-05-03T17:32:16.000Z
|
from keras.layers import *
from keras.layers.merge import add, concatenate
from utils.gpu_utils import gru
class SimpleDecoder():
def __init__(self):
pass
def __call__(self, input_tensor, *args, **kwargs):
# RNN layer
# lstm_1 = LSTM(256, return_sequences=True, kernel_initializer='he_normal', name='lstm1')(
# input_tensor) # (None, 32, 512)
# print(lstm_1)
# lstm_1b = LSTM(256, return_sequences=True, go_backwards=True, kernel_initializer='he_normal', name='lstm1_b')(
# input_tensor)
# reversed_lstm_1b = Lambda(lambda inputTensor: K.reverse(inputTensor, axes=1))(lstm_1b)
#
# lstm1_merged = add([lstm_1, reversed_lstm_1b]) # (None, 32, 512)
# lstm1_merged = BatchNormalization()(lstm1_merged)
#
# lstm_2 = LSTM(256, return_sequences=True, kernel_initializer='he_normal', name='lstm2')(lstm1_merged)
# lstm_2b = LSTM(256, return_sequences=True, go_backwards=True, kernel_initializer='he_normal', name='lstm2_b')(
# lstm1_merged)
# reversed_lstm_2b = Lambda(lambda inputTensor: K.reverse(inputTensor, axes=1))(lstm_2b)
#
# lstm2_merged = concatenate([lstm_2, reversed_lstm_2b]) # (None, 32, 1024)
# lstm2_merged = BatchNormalization()(lstm2_merged)
# Two layers of bidirectional GRUs
# GRU seems to work as well, if not better than LSTM:
gru_1 = gru(256, return_sequences=True,
kernel_initializer='he_normal', name='gru1')(input_tensor)
gru_1b = gru(256, return_sequences=True,
go_backwards=True, kernel_initializer='he_normal',
name='gru1_b')(input_tensor)
gru1_merged = add([gru_1, gru_1b])
gru_2 = gru(256, return_sequences=True,
kernel_initializer='he_normal', name='gru2')(gru1_merged)
gru_2b = gru(256, return_sequences=True, go_backwards=True,
kernel_initializer='he_normal', name='gru2_b')(gru1_merged)
# transforms RNN output to character activations:
inner = concatenate([gru_2, gru_2b])
return inner
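# Example (illustrative sketch; the input shape is an assumption): the decoder expects a
# (batch, time, features) tensor from a CNN front-end and returns a (batch, time, 512)
# tensor, since the outputs of the two final 256-unit GRUs are concatenated.
#
#     feature_seq = Input(shape=(32, 512))        # hypothetical feature sequence
#     decoded = SimpleDecoder()(feature_seq)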
| 47.326087
| 120
| 0.641249
|
4f79b3b6e720361ec05da634f362da0b311b9cb6
| 2,321
|
py
|
Python
|
pdlog/accessor.py
|
DataProphet/pdlog
|
5af741d18734dfd9e935311ba990cf09d2e88be6
|
[
"MIT"
] | 3
|
2020-05-27T14:46:50.000Z
|
2021-05-27T08:15:39.000Z
|
pdlog/accessor.py
|
DataProphet/pdlog
|
5af741d18734dfd9e935311ba990cf09d2e88be6
|
[
"MIT"
] | 4
|
2020-05-31T06:45:48.000Z
|
2020-05-31T07:02:39.000Z
|
pdlog/accessor.py
|
DataProphet/pdlog
|
5af741d18734dfd9e935311ba990cf09d2e88be6
|
[
"MIT"
] | null | null | null |
from typing import Any
import pandas as pd
from . import logging
@pd.api.extensions.register_dataframe_accessor("log")
class LogAccessor:
def __init__(self, data: pd.DataFrame):
self._data = data
def dropna(self, *args: Any, **kwargs: Any) -> pd.DataFrame:
return logging.log_filter(self._data, "dropna", *args, **kwargs)
def drop_duplicates(self, *args: Any, **kwargs: Any) -> pd.DataFrame:
return logging.log_filter(self._data, "drop_duplicates", *args, **kwargs)
def query(self, *args: Any, **kwargs: Any) -> pd.DataFrame:
return logging.log_filter(self._data, "query", *args, **kwargs)
def head(self, *args: Any, **kwargs: Any) -> pd.DataFrame:
return logging.log_filter(self._data, "head", *args, **kwargs)
def tail(self, *args: Any, **kwargs: Any) -> pd.DataFrame:
return logging.log_filter(self._data, "tail", *args, **kwargs)
def sample(self, *args: Any, **kwargs: Any) -> pd.DataFrame:
return logging.log_filter(self._data, "sample", *args, **kwargs)
def drop(self, *args: Any, **kwargs: Any) -> pd.DataFrame:
return logging.log_filter(self._data, "drop", *args, **kwargs)
def set_index(self, *args: Any, **kwargs: Any) -> pd.DataFrame:
return logging.log_change_index(self._data, "set_index", *args, **kwargs)
def reset_index(self, *args: Any, **kwargs: Any) -> pd.DataFrame:
return logging.log_change_index(self._data, "reset_index", *args, **kwargs)
def rename(self, *args: Any, **kwargs: Any) -> pd.DataFrame:
return logging.log_rename(self._data, "rename", *args, **kwargs)
def pivot(self, *args: Any, **kwargs: Any) -> pd.DataFrame:
return logging.log_reshape(self._data, "pivot", *args, **kwargs)
def melt(self, *args: Any, **kwargs: Any) -> pd.DataFrame:
return logging.log_reshape(self._data, "melt", *args, **kwargs)
def fillna(self, *args: Any, **kwargs: Any) -> pd.DataFrame:
return logging.log_fillna(self._data, "fillna", *args, **kwargs)
def bfill(self, *args: Any, **kwargs: Any) -> pd.DataFrame:
return logging.log_fillna(self._data, "bfill", *args, **kwargs)
def ffill(self, *args: Any, **kwargs: Any) -> pd.DataFrame:
return logging.log_fillna(self._data, "ffill", *args, **kwargs)
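# Example (illustrative sketch): importing pdlog registers the ``log`` accessor on every
# DataFrame, so the wrapped methods behave like their pandas counterparts but emit a log line.
#
#     import pandas as pd
#     import pdlog  # noqa: F401  (registers the accessor)
#
#     df = pd.DataFrame({"a": [1, None, 3]})
#     df = df.log.dropna()   # logs the number of rows removed, returns the filtered frame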
| 40.719298
| 83
| 0.646273
|
f360cb0716dfc877fab8ad2b977b3a1b825bde04
| 4,679
|
py
|
Python
|
evgraf/crystal_reduction.py
|
pmla/evgraf
|
13cb56e3fe5db35a41fab61e7a7a642c2f8e295e
|
[
"MIT"
] | 8
|
2019-12-12T13:27:00.000Z
|
2022-01-29T02:30:36.000Z
|
evgraf/crystal_reduction.py
|
pmla/evgraf
|
13cb56e3fe5db35a41fab61e7a7a642c2f8e295e
|
[
"MIT"
] | null | null | null |
evgraf/crystal_reduction.py
|
pmla/evgraf
|
13cb56e3fe5db35a41fab61e7a7a642c2f8e295e
|
[
"MIT"
] | null | null | null |
import numpy as np
from collections import namedtuple
from ase import Atoms
from ase.geometry import find_mic
from ase.geometry.dimensionality.disjoint_set import DisjointSet
from .crystal_reducer import CrystalReducer
Reduced = namedtuple('ReducedCrystal', 'rmsd factor atoms components map')
def assign_atoms_to_clusters(num_atoms, permutations):
uf = DisjointSet(num_atoms)
for p in permutations:
for i, e in enumerate(p):
uf.merge(i, e)
return uf.get_components(relabel=True)
def reduction_basis(n, H, pbc):
dim = sum(pbc)
# Extend the subgroup basis to 3D (if not already)
R = np.diag([n, n, n])
indices = np.where(pbc)[0]
for i in range(dim):
for j in range(dim):
R[indices[i], indices[j]] = H[i, j]
return R / n
def reduced_layout(reducer, rmsd, group_index, R, permutations, atoms):
num_atoms = len(reducer.comparator.atoms)
components = assign_atoms_to_clusters(num_atoms, permutations)
if num_atoms // group_index != len(np.bincount(components)):
return None
if len(np.unique(np.bincount(components))) > 1:
return None
# Collect atoms in contracted unit cell
indices = np.argsort(components)
collected = reducer.comparator.atoms[indices]
collected.set_cell(R @ atoms.cell, scale_atoms=False)
collected.wrap(eps=0)
clusters = components[indices]
ps = collected.get_positions()
parents = clusters * group_index
vmin, _ = find_mic(ps - ps[parents], collected.cell, pbc=collected.pbc)
positions = ps[parents] + vmin
m = num_atoms // group_index
numbers = collected.numbers.reshape((m, group_index))[:, 0]
meanpos = np.mean(positions.reshape((m, group_index, 3)), axis=1)
deltas = positions - meanpos[clusters]
rmsd_check = np.sqrt(np.sum(deltas**2) / num_atoms)
if abs(rmsd - rmsd_check) > 1E-12:
return None
reduced = Atoms(positions=meanpos, numbers=numbers,
cell=collected.cell, pbc=collected.pbc)
reduced.wrap(eps=0)
return reduced, components
def find_crystal_reductions(atoms):
"""Finds reductions of a crystal using the root-mean-square (RMS) distance.
A crystal reduction is defined by a translational symmetry in the input
structure. Each translational symmetry has an associated cost, which is the
RMS distance from the input structure to its symmetrized (i.e. reduced)
structure. The atomic coordinates in the reduced crystal are given by the
    Euclidean average of those in the input structure (after being wrapped
into the reduced unit cell).
If the crystal structure is perfect, the reduced crystal is the textbook
primitive unit cell and has a RMS distance of zero. As the atomic
coordinates in the input structure deviate from perfect translational
symmetry the RMS distance increases correspondingly. Similarly, the RMS
distance cannot decrease as the reduction factor increases.
See the tutorial for an example with illustrations.
Parameters:
atoms: ASE atoms object
The system to reduce.
Returns:
reduced: list
        List of ReducedCrystal objects, one for each reduction found. A ReducedCrystal
is a namedtuple with the following field names:
rmsd: float
RMS distance from input structure to reduced structure
factor: integer
The reduction factor
atoms: Atoms object
The reduced structure
components: integer ndarray
Describes how atoms in the input structure are combined in the
reduced structure
map: ndarray
Map from input cell to reduced cell
"""
reducer = CrystalReducer(atoms)
reductions = reducer.find_consistent_reductions()
invzperm = np.argsort(reducer.comparator.zpermutation)
reduced = {}
for rmsd, group_index, H, permutations in reductions:
R = reduction_basis(reducer.n, H, atoms.pbc)
R = reducer.comparator.invop @ R @ reducer.comparator.op
result = reduced_layout(reducer, rmsd, group_index, R, permutations,
atoms)
if result is not None:
reduced_atoms, components = result
key = group_index
entry = Reduced(rmsd=rmsd, factor=group_index, atoms=reduced_atoms,
components=components[invzperm],
map=R)
if key not in reduced:
reduced[key] = entry
else:
reduced[key] = min(reduced[key], entry, key=lambda x: x.rmsd)
return sorted(reduced.values(), key=lambda x: x.factor)
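# Example (illustrative sketch, assuming an ASE Atoms object; the copper supercell below is
# only a placeholder input):
#
#     from ase.build import bulk
#     atoms = bulk("Cu", "fcc", a=3.6) * (2, 2, 2)
#     for reduction in find_crystal_reductions(atoms):
#         print(reduction.factor, reduction.rmsd)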
| 35.992308
| 79
| 0.673007
|
f5124a79217574a86438218182021646d58e5490
| 608
|
py
|
Python
|
app/deployments/migrations/0035_erddapdataset_greater_than_hourly.py
|
gulfofmaine/buoy_barn
|
f04047fe17a45dbb58a3aac7703edd16616a6dfd
|
[
"MIT"
] | 1
|
2021-08-06T19:26:06.000Z
|
2021-08-06T19:26:06.000Z
|
app/deployments/migrations/0035_erddapdataset_greater_than_hourly.py
|
gulfofmaine/buoy_barn
|
f04047fe17a45dbb58a3aac7703edd16616a6dfd
|
[
"MIT"
] | 152
|
2019-09-20T11:56:58.000Z
|
2022-03-25T10:01:12.000Z
|
app/deployments/migrations/0035_erddapdataset_greater_than_hourly.py
|
gulfofmaine/buoy_barn
|
f04047fe17a45dbb58a3aac7703edd16616a6dfd
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1.7 on 2021-03-16 13:55
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('deployments', '0034_erddapdataset_refresh_attempted'),
]
operations = [
migrations.AddField(
model_name='erddapdataset',
name='greater_than_hourly',
field=models.BooleanField(default=False, help_text='Select if this dataset should only be refreshed at intervals of longer than 1/hour between refreshes (say once per day). Ask Alex to setup refreshing at a different rate.'),
),
]
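# Example (illustrative sketch): a migration like this is normally applied with Django's
# management command for the owning app, e.g.
#
#     python manage.py migrate deployments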
| 32
| 237
| 0.682566
|
c09e10df0569fb64dd713aa3f09c9fded15688e2
| 3,622
|
py
|
Python
|
mmdetection/configs/_base_/models/faster_rcnn_r50_fpn_cocoOS_wAnchor.py
|
dimitymiller/openset_detection
|
1f020537f7089a588f08ec4cb534dbad439cf9ff
|
[
"BSD-3-Clause"
] | 4
|
2022-01-17T14:34:49.000Z
|
2022-03-24T05:33:56.000Z
|
mmdetection/configs/_base_/models/faster_rcnn_r50_fpn_cocoOS_wAnchor.py
|
MartinaProgrammer/openset_detection
|
e3dddbb70bbc204d7640b7b9a64936894b03a604
|
[
"BSD-3-Clause"
] | 1
|
2021-11-01T03:08:44.000Z
|
2021-11-05T01:39:33.000Z
|
mmdetection/configs/_base_/models/faster_rcnn_r50_fpn_cocoOS_wAnchor.py
|
MartinaProgrammer/openset_detection
|
e3dddbb70bbc204d7640b7b9a64936894b03a604
|
[
"BSD-3-Clause"
] | 1
|
2021-11-15T13:06:14.000Z
|
2021-11-15T13:06:14.000Z
|
# model settings
model = dict(
type='FasterRCNN',
pretrained='torchvision://resnet50',
backbone=dict(
type='ResNet',
depth=50,
num_stages=4,
out_indices=(0, 1, 2, 3),
frozen_stages=1,
norm_cfg=dict(type='BN', requires_grad=True),
norm_eval=True,
style='pytorch'),
neck=dict(
type='FPN',
in_channels=[256, 512, 1024, 2048],
out_channels=256,
num_outs=5),
rpn_head=dict(
type='RPNHead',
in_channels=256,
feat_channels=256,
anchor_generator=dict(
type='AnchorGenerator',
scales=[8],
ratios=[0.5, 1.0, 2.0],
strides=[4, 8, 16, 32, 64]),
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
target_means=[.0, .0, .0, .0],
target_stds=[1.0, 1.0, 1.0, 1.0]),
loss_cls=dict(
type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0),
loss_bbox=dict(type='L1Loss', loss_weight=1.0)),
roi_head=dict(
type='StandardRoIHead',
bbox_roi_extractor=dict(
type='SingleRoIExtractor',
roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0),
out_channels=256,
featmap_strides=[4, 8, 16, 32]),
bbox_head=dict(
type='Shared2FCBBoxHead',
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=50,
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
target_means=[0., 0., 0., 0.],
target_stds=[0.1, 0.1, 0.2, 0.2]),
reg_class_agnostic=False,
loss_cls=dict(
type='AnchorwCrossEntropyLoss', loss_weight=1.0, anchor_weight = 0.05, num_classes = 50),
loss_bbox=dict(type='L1Loss', loss_weight=1.0))),
# model training and testing settings
train_cfg=dict(
rpn=dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.7,
neg_iou_thr=0.3,
min_pos_iou=0.3,
match_low_quality=True,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=256,
pos_fraction=0.5,
neg_pos_ub=-1,
add_gt_as_proposals=False),
allowed_border=-1,
pos_weight=-1,
debug=False),
rpn_proposal=dict(
nms_pre=2000,
max_per_img=1000,
nms=dict(type='nms', iou_threshold=0.7),
min_bbox_size=0),
rcnn=dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.5,
neg_iou_thr=0.5,
min_pos_iou=0.5,
match_low_quality=False,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
add_gt_as_proposals=True),
pos_weight=-1,
debug=False)),
test_cfg=dict(
rpn=dict(
nms_pre=1000,
max_per_img=1000,
nms=dict(type='nms', iou_threshold=0.7),
min_bbox_size=0),
rcnn=dict(
score_thr=0.05,
nms=dict(type='nms', iou_threshold=0.5),
max_per_img=100)
# soft-nms is also supported for rcnn testing
# e.g., nms=dict(type='soft_nms', iou_threshold=0.5, min_score=0.05)
))
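# Example (illustrative sketch): this _base_ model file is meant to be pulled in through
# MMDetection's config inheritance from a top-level config; the dataset/schedule/runtime
# files named below are assumptions, not part of this listing.
#
#     _base_ = [
#         '../_base_/models/faster_rcnn_r50_fpn_cocoOS_wAnchor.py',
#         '../_base_/datasets/coco_detection.py',
#         '../_base_/schedules/schedule_1x.py',
#         '../_base_/default_runtime.py',
#     ]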
| 33.229358
| 105
| 0.505798
|
28be0b096b8a0983f95d60fe40bc0d8cf988e2c3
| 42,823
|
py
|
Python
|
tencentcloud/dcdb/v20180411/dcdb_client.py
|
snowxmas/tencentcloud-sdk-python
|
fb527dcfc6b52a210e79d581f85cb8cde1ea9c85
|
[
"Apache-2.0"
] | null | null | null |
tencentcloud/dcdb/v20180411/dcdb_client.py
|
snowxmas/tencentcloud-sdk-python
|
fb527dcfc6b52a210e79d581f85cb8cde1ea9c85
|
[
"Apache-2.0"
] | null | null | null |
tencentcloud/dcdb/v20180411/dcdb_client.py
|
snowxmas/tencentcloud-sdk-python
|
fb527dcfc6b52a210e79d581f85cb8cde1ea9c85
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf8 -*-
# Copyright (c) 2017-2018 THL A29 Limited, a Tencent company. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from tencentcloud.common.exception.tencent_cloud_sdk_exception import TencentCloudSDKException
from tencentcloud.common.abstract_client import AbstractClient
from tencentcloud.dcdb.v20180411 import models
class DcdbClient(AbstractClient):
_apiVersion = '2018-04-11'
_endpoint = 'dcdb.tencentcloudapi.com'
def CloneAccount(self, request):
"""本接口(CloneAccount)用于克隆实例账户。
:param request: Request instance for CloneAccount.
:type request: :class:`tencentcloud.dcdb.v20180411.models.CloneAccountRequest`
:rtype: :class:`tencentcloud.dcdb.v20180411.models.CloneAccountResponse`
"""
try:
params = request._serialize()
body = self.call("CloneAccount", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CloneAccountResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def CloseDBExtranetAccess(self, request):
"""本接口(CloseDBExtranetAccess)用于关闭云数据库实例的外网访问。关闭外网访问后,外网地址将不可访问,查询实例列表接口将不返回对应实例的外网域名和端口信息。
:param request: Request instance for CloseDBExtranetAccess.
:type request: :class:`tencentcloud.dcdb.v20180411.models.CloseDBExtranetAccessRequest`
:rtype: :class:`tencentcloud.dcdb.v20180411.models.CloseDBExtranetAccessResponse`
"""
try:
params = request._serialize()
body = self.call("CloseDBExtranetAccess", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CloseDBExtranetAccessResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def CopyAccountPrivileges(self, request):
"""本接口(CopyAccountPrivileges)用于复制云数据库账号的权限。
注意:相同用户名,不同Host是不同的账号,Readonly属性相同的账号之间才能复制权限。
:param request: Request instance for CopyAccountPrivileges.
:type request: :class:`tencentcloud.dcdb.v20180411.models.CopyAccountPrivilegesRequest`
:rtype: :class:`tencentcloud.dcdb.v20180411.models.CopyAccountPrivilegesResponse`
"""
try:
params = request._serialize()
body = self.call("CopyAccountPrivileges", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CopyAccountPrivilegesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def CreateAccount(self, request):
"""本接口(CreateAccount)用于创建云数据库账号。一个实例可以创建多个不同的账号,相同的用户名+不同的host是不同的账号。
:param request: Request instance for CreateAccount.
:type request: :class:`tencentcloud.dcdb.v20180411.models.CreateAccountRequest`
:rtype: :class:`tencentcloud.dcdb.v20180411.models.CreateAccountResponse`
"""
try:
params = request._serialize()
body = self.call("CreateAccount", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreateAccountResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def CreateDCDBInstance(self, request):
"""本接口(CreateDCDBInstance)用于创建包年包月的云数据库实例,可通过传入实例规格、数据库版本号、购买时长等信息创建云数据库实例。
:param request: Request instance for CreateDCDBInstance.
:type request: :class:`tencentcloud.dcdb.v20180411.models.CreateDCDBInstanceRequest`
:rtype: :class:`tencentcloud.dcdb.v20180411.models.CreateDCDBInstanceResponse`
"""
try:
params = request._serialize()
body = self.call("CreateDCDBInstance", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreateDCDBInstanceResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DeleteAccount(self, request):
"""本接口(DeleteAccount)用于删除云数据库账号。用户名+host唯一确定一个账号。
:param request: Request instance for DeleteAccount.
:type request: :class:`tencentcloud.dcdb.v20180411.models.DeleteAccountRequest`
:rtype: :class:`tencentcloud.dcdb.v20180411.models.DeleteAccountResponse`
"""
try:
params = request._serialize()
body = self.call("DeleteAccount", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeleteAccountResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeAccountPrivileges(self, request):
"""本接口(DescribeAccountPrivileges)用于查询云数据库账号权限。
注意:注意:相同用户名,不同Host是不同的账号。
:param request: Request instance for DescribeAccountPrivileges.
:type request: :class:`tencentcloud.dcdb.v20180411.models.DescribeAccountPrivilegesRequest`
:rtype: :class:`tencentcloud.dcdb.v20180411.models.DescribeAccountPrivilegesResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeAccountPrivileges", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeAccountPrivilegesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeAccounts(self, request):
"""本接口(DescribeAccounts)用于查询指定云数据库实例的账号列表。
:param request: Request instance for DescribeAccounts.
:type request: :class:`tencentcloud.dcdb.v20180411.models.DescribeAccountsRequest`
:rtype: :class:`tencentcloud.dcdb.v20180411.models.DescribeAccountsResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeAccounts", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeAccountsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeDBLogFiles(self, request):
"""本接口(DescribeDBLogFiles)用于获取数据库的各种日志列表,包括冷备、binlog、errlog和slowlog。
:param request: Request instance for DescribeDBLogFiles.
:type request: :class:`tencentcloud.dcdb.v20180411.models.DescribeDBLogFilesRequest`
:rtype: :class:`tencentcloud.dcdb.v20180411.models.DescribeDBLogFilesResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeDBLogFiles", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeDBLogFilesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeDBParameters(self, request):
"""本接口(DescribeDBParameters)用于获取数据库的当前参数设置。
:param request: Request instance for DescribeDBParameters.
:type request: :class:`tencentcloud.dcdb.v20180411.models.DescribeDBParametersRequest`
:rtype: :class:`tencentcloud.dcdb.v20180411.models.DescribeDBParametersResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeDBParameters", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeDBParametersResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeDBSyncMode(self, request):
"""本接口(DescribeDBSyncMode)用于查询云数据库实例的同步模式。
:param request: Request instance for DescribeDBSyncMode.
:type request: :class:`tencentcloud.dcdb.v20180411.models.DescribeDBSyncModeRequest`
:rtype: :class:`tencentcloud.dcdb.v20180411.models.DescribeDBSyncModeResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeDBSyncMode", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeDBSyncModeResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeDCDBInstances(self, request):
"""查询云数据库实例列表,支持通过项目ID、实例ID、内网地址、实例名称等来筛选实例。
如果不指定任何筛选条件,则默认返回10条实例记录,单次请求最多支持返回100条实例记录。
:param request: Request instance for DescribeDCDBInstances.
:type request: :class:`tencentcloud.dcdb.v20180411.models.DescribeDCDBInstancesRequest`
:rtype: :class:`tencentcloud.dcdb.v20180411.models.DescribeDCDBInstancesResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeDCDBInstances", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeDCDBInstancesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeDCDBPrice(self, request):
"""本接口(DescribeDCDBPrice)用于在购买实例前,查询实例的价格。
:param request: Request instance for DescribeDCDBPrice.
:type request: :class:`tencentcloud.dcdb.v20180411.models.DescribeDCDBPriceRequest`
:rtype: :class:`tencentcloud.dcdb.v20180411.models.DescribeDCDBPriceResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeDCDBPrice", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeDCDBPriceResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeDCDBRenewalPrice(self, request):
"""本接口(DescribeDCDBRenewalPrice)用于在续费分布式数据库实例时,查询续费的价格。
:param request: Request instance for DescribeDCDBRenewalPrice.
:type request: :class:`tencentcloud.dcdb.v20180411.models.DescribeDCDBRenewalPriceRequest`
:rtype: :class:`tencentcloud.dcdb.v20180411.models.DescribeDCDBRenewalPriceResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeDCDBRenewalPrice", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeDCDBRenewalPriceResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeDCDBSaleInfo(self, request):
"""本接口(DescribeDCDBSaleInfo)用于查询分布式数据库可售卖的地域和可用区信息。
:param request: Request instance for DescribeDCDBSaleInfo.
:type request: :class:`tencentcloud.dcdb.v20180411.models.DescribeDCDBSaleInfoRequest`
:rtype: :class:`tencentcloud.dcdb.v20180411.models.DescribeDCDBSaleInfoResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeDCDBSaleInfo", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeDCDBSaleInfoResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeDCDBShards(self, request):
"""本接口(DescribeDCDBShards)用于查询云数据库实例的分片信息。
:param request: Request instance for DescribeDCDBShards.
:type request: :class:`tencentcloud.dcdb.v20180411.models.DescribeDCDBShardsRequest`
:rtype: :class:`tencentcloud.dcdb.v20180411.models.DescribeDCDBShardsResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeDCDBShards", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeDCDBShardsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeDCDBUpgradePrice(self, request):
"""本接口(DescribeDCDBUpgradePrice)用于查询升级分布式数据库实例价格。
:param request: Request instance for DescribeDCDBUpgradePrice.
:type request: :class:`tencentcloud.dcdb.v20180411.models.DescribeDCDBUpgradePriceRequest`
:rtype: :class:`tencentcloud.dcdb.v20180411.models.DescribeDCDBUpgradePriceResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeDCDBUpgradePrice", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeDCDBUpgradePriceResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeDatabaseObjects(self, request):
"""本接口(DescribeDatabaseObjects)用于查询云数据库实例的数据库中的对象列表,包含表、存储过程、视图和函数。
:param request: Request instance for DescribeDatabaseObjects.
:type request: :class:`tencentcloud.dcdb.v20180411.models.DescribeDatabaseObjectsRequest`
:rtype: :class:`tencentcloud.dcdb.v20180411.models.DescribeDatabaseObjectsResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeDatabaseObjects", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeDatabaseObjectsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeDatabaseTable(self, request):
"""本接口(DescribeDatabaseTable)用于查询云数据库实例的表信息。
:param request: Request instance for DescribeDatabaseTable.
:type request: :class:`tencentcloud.dcdb.v20180411.models.DescribeDatabaseTableRequest`
:rtype: :class:`tencentcloud.dcdb.v20180411.models.DescribeDatabaseTableResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeDatabaseTable", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeDatabaseTableResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeDatabases(self, request):
"""本接口(DescribeDatabases)用于查询云数据库实例的数据库列表。
:param request: Request instance for DescribeDatabases.
:type request: :class:`tencentcloud.dcdb.v20180411.models.DescribeDatabasesRequest`
:rtype: :class:`tencentcloud.dcdb.v20180411.models.DescribeDatabasesResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeDatabases", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeDatabasesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeOrders(self, request):
"""本接口(DescribeOrders)用于查询分布式数据库订单信息。传入订单ID来查询订单关联的分布式数据库实例,和对应的任务流程ID。
:param request: Request instance for DescribeOrders.
:type request: :class:`tencentcloud.dcdb.v20180411.models.DescribeOrdersRequest`
:rtype: :class:`tencentcloud.dcdb.v20180411.models.DescribeOrdersResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeOrders", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeOrdersResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeProjects(self, request):
"""本接口(DescribeProjects)用于查询项目列表
:param request: Request instance for DescribeProjects.
:type request: :class:`tencentcloud.dcdb.v20180411.models.DescribeProjectsRequest`
:rtype: :class:`tencentcloud.dcdb.v20180411.models.DescribeProjectsResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeProjects", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeProjectsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeShardSpec(self, request):
"""查询可创建的分布式数据库可售卖的分片规格配置。
:param request: Request instance for DescribeShardSpec.
:type request: :class:`tencentcloud.dcdb.v20180411.models.DescribeShardSpecRequest`
:rtype: :class:`tencentcloud.dcdb.v20180411.models.DescribeShardSpecResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeShardSpec", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeShardSpecResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeSqlLogs(self, request):
"""本接口(DescribeSqlLogs)用于获取实例SQL日志。
:param request: Request instance for DescribeSqlLogs.
:type request: :class:`tencentcloud.dcdb.v20180411.models.DescribeSqlLogsRequest`
:rtype: :class:`tencentcloud.dcdb.v20180411.models.DescribeSqlLogsResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeSqlLogs", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeSqlLogsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def FlushBinlog(self, request):
"""相当于在所有分片的mysqld中执行flush logs,完成切分的binlog将展示在各个分片控制台binlog列表里。
:param request: Request instance for FlushBinlog.
:type request: :class:`tencentcloud.dcdb.v20180411.models.FlushBinlogRequest`
:rtype: :class:`tencentcloud.dcdb.v20180411.models.FlushBinlogResponse`
"""
try:
params = request._serialize()
body = self.call("FlushBinlog", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.FlushBinlogResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def GrantAccountPrivileges(self, request):
"""本接口(GrantAccountPrivileges)用于给云数据库账号赋权。
注意:相同用户名,不同Host是不同的账号。
:param request: Request instance for GrantAccountPrivileges.
:type request: :class:`tencentcloud.dcdb.v20180411.models.GrantAccountPrivilegesRequest`
:rtype: :class:`tencentcloud.dcdb.v20180411.models.GrantAccountPrivilegesResponse`
"""
try:
params = request._serialize()
body = self.call("GrantAccountPrivileges", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.GrantAccountPrivilegesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def InitDCDBInstances(self, request):
"""本接口(InitDCDBInstances)用于初始化云数据库实例,包括设置默认字符集、表名大小写敏感等。
:param request: Request instance for InitDCDBInstances.
:type request: :class:`tencentcloud.dcdb.v20180411.models.InitDCDBInstancesRequest`
:rtype: :class:`tencentcloud.dcdb.v20180411.models.InitDCDBInstancesResponse`
"""
try:
params = request._serialize()
body = self.call("InitDCDBInstances", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.InitDCDBInstancesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def ModifyAccountDescription(self, request):
"""本接口(ModifyAccountDescription)用于修改云数据库账号备注。
注意:相同用户名,不同Host是不同的账号。
:param request: Request instance for ModifyAccountDescription.
:type request: :class:`tencentcloud.dcdb.v20180411.models.ModifyAccountDescriptionRequest`
:rtype: :class:`tencentcloud.dcdb.v20180411.models.ModifyAccountDescriptionResponse`
"""
try:
params = request._serialize()
body = self.call("ModifyAccountDescription", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ModifyAccountDescriptionResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def ModifyDBInstancesProject(self, request):
"""本接口(ModifyDBInstancesProject)用于修改云数据库实例所属项目。
:param request: Request instance for ModifyDBInstancesProject.
:type request: :class:`tencentcloud.dcdb.v20180411.models.ModifyDBInstancesProjectRequest`
:rtype: :class:`tencentcloud.dcdb.v20180411.models.ModifyDBInstancesProjectResponse`
"""
try:
params = request._serialize()
body = self.call("ModifyDBInstancesProject", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ModifyDBInstancesProjectResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def ModifyDBParameters(self, request):
"""本接口(ModifyDBParameters)用于修改数据库参数。
:param request: Request instance for ModifyDBParameters.
:type request: :class:`tencentcloud.dcdb.v20180411.models.ModifyDBParametersRequest`
:rtype: :class:`tencentcloud.dcdb.v20180411.models.ModifyDBParametersResponse`
"""
try:
params = request._serialize()
body = self.call("ModifyDBParameters", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ModifyDBParametersResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def ModifyDBSyncMode(self, request):
"""本接口(ModifyDBSyncMode)用于修改云数据库实例的同步模式。
:param request: Request instance for ModifyDBSyncMode.
:type request: :class:`tencentcloud.dcdb.v20180411.models.ModifyDBSyncModeRequest`
:rtype: :class:`tencentcloud.dcdb.v20180411.models.ModifyDBSyncModeResponse`
"""
try:
params = request._serialize()
body = self.call("ModifyDBSyncMode", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ModifyDBSyncModeResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def OpenDBExtranetAccess(self, request):
"""本接口(OpenDBExtranetAccess)用于开通云数据库实例的外网访问。开通外网访问后,您可通过外网域名和端口访问实例,可使用查询实例列表接口获取外网域名和端口信息。
:param request: Request instance for OpenDBExtranetAccess.
:type request: :class:`tencentcloud.dcdb.v20180411.models.OpenDBExtranetAccessRequest`
:rtype: :class:`tencentcloud.dcdb.v20180411.models.OpenDBExtranetAccessResponse`
"""
try:
params = request._serialize()
body = self.call("OpenDBExtranetAccess", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.OpenDBExtranetAccessResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def RenewDCDBInstance(self, request):
"""本接口(RenewDCDBInstance)用于续费分布式数据库实例。
:param request: Request instance for RenewDCDBInstance.
:type request: :class:`tencentcloud.dcdb.v20180411.models.RenewDCDBInstanceRequest`
:rtype: :class:`tencentcloud.dcdb.v20180411.models.RenewDCDBInstanceResponse`
"""
try:
params = request._serialize()
body = self.call("RenewDCDBInstance", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.RenewDCDBInstanceResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def ResetAccountPassword(self, request):
"""本接口(ResetAccountPassword)用于重置云数据库账号的密码。
注意:相同用户名,不同Host是不同的账号。
:param request: Request instance for ResetAccountPassword.
:type request: :class:`tencentcloud.dcdb.v20180411.models.ResetAccountPasswordRequest`
:rtype: :class:`tencentcloud.dcdb.v20180411.models.ResetAccountPasswordResponse`
"""
try:
params = request._serialize()
body = self.call("ResetAccountPassword", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ResetAccountPasswordResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def UpgradeDCDBInstance(self, request):
"""本接口(UpgradeDCDBInstance)用于升级分布式数据库实例。本接口完成下单和支付两个动作,如果发生支付失败的错误,调用用户账户相关接口中的支付订单接口(PayDeals)重新支付即可。
:param request: Request instance for UpgradeDCDBInstance.
:type request: :class:`tencentcloud.dcdb.v20180411.models.UpgradeDCDBInstanceRequest`
:rtype: :class:`tencentcloud.dcdb.v20180411.models.UpgradeDCDBInstanceResponse`
"""
try:
params = request._serialize()
body = self.call("UpgradeDCDBInstance", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.UpgradeDCDBInstanceResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
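# Example (illustrative sketch; the credentials and region below are placeholders):
#
#     from tencentcloud.common import credential
#     from tencentcloud.dcdb.v20180411 import dcdb_client, models
#
#     cred = credential.Credential("SECRET_ID", "SECRET_KEY")
#     client = dcdb_client.DcdbClient(cred, "ap-guangzhou")
#     req = models.DescribeDCDBInstancesRequest()
#     resp = client.DescribeDCDBInstances(req)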
| 42.357072
| 110
| 0.60269
|
0cd2d5731a912ff53cc95da54d9b900c43d9df09
| 35,676
|
py
|
Python
|
lib/gui/utils.py
|
oveis/DeepVideoFaceSwap
|
e507f94d4f5d74c36e41c386c6fb14bb745a4885
|
[
"MIT"
] | 5
|
2019-05-17T11:54:04.000Z
|
2020-10-06T18:45:17.000Z
|
lib/gui/utils.py
|
oveis/DeepVideoFaceSwap
|
e507f94d4f5d74c36e41c386c6fb14bb745a4885
|
[
"MIT"
] | null | null | null |
lib/gui/utils.py
|
oveis/DeepVideoFaceSwap
|
e507f94d4f5d74c36e41c386c6fb14bb745a4885
|
[
"MIT"
] | 5
|
2019-06-05T00:20:24.000Z
|
2019-09-15T15:40:23.000Z
|
#!/usr/bin/env python3
""" Utility functions for the GUI """
import logging
import os
import platform
import sys
import tkinter as tk
from tkinter import filedialog, ttk
from PIL import Image, ImageTk
from lib.Serializer import JSONSerializer
from .tooltip import Tooltip
logger = logging.getLogger(__name__) # pylint: disable=invalid-name
_CONFIG = None
_IMAGES = None
def initialize_config(cli_opts, scaling_factor, pathcache, statusbar, session):
""" Initialize the config and add to global constant """
global _CONFIG # pylint: disable=global-statement
if _CONFIG is not None:
return
logger.debug("Initializing config: (cli_opts: %s, tk_vars: %s, pathcache: %s, statusbar: %s, "
"session: %s)", cli_opts, scaling_factor, pathcache, statusbar, session)
_CONFIG = Config(cli_opts, scaling_factor, pathcache, statusbar, session)
def get_config():
""" return the _CONFIG constant """
return _CONFIG
def initialize_images(pathcache=None):
""" Initialize the images and add to global constant """
global _IMAGES # pylint: disable=global-statement
if _IMAGES is not None:
return
logger.debug("Initializing images")
_IMAGES = Images(pathcache)
def get_images():
""" return the _CONFIG constant """
return _IMAGES
def set_slider_rounding(value, var, d_type, round_to, min_max):
""" Set the underlying variable to correct number based on slider rounding """
if d_type == float:
var.set(round(float(value), round_to))
else:
steps = range(min_max[0], min_max[1] + round_to, round_to)
value = min(steps, key=lambda x: abs(x - int(float(value))))
var.set(value)
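# Example (illustrative sketch): typically bound as the command of a ttk.Scale so the
# backing Tk variable snaps to the configured step ("parent" and "var" are placeholders).
#
#     scale = ttk.Scale(parent, from_=0, to=100,
#                       command=lambda val: set_slider_rounding(val, var, int, 5, (0, 100)))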
def adjust_wraplength(event):
""" dynamically adjust the wraplength of a label on event """
label = event.widget
label.configure(wraplength=event.width - 1)
class FileHandler():
""" Raise a filedialog box and capture input """
def __init__(self, handletype, filetype, command=None, action=None,
variable=None):
logger.debug("Initializing %s: (Handletype: '%s', filetype: '%s', command: '%s', action: "
"'%s', variable: %s)", self.__class__.__name__, handletype, filetype, command,
action, variable)
self.handletype = handletype
self.contexts = {
"effmpeg": {
"input": {"extract": "filename",
"gen-vid": "dir",
"get-fps": "filename",
"get-info": "filename",
"mux-audio": "filename",
"rescale": "filename",
"rotate": "filename",
"slice": "filename"},
"output": {"extract": "dir",
"gen-vid": "savefilename",
"get-fps": "nothing",
"get-info": "nothing",
"mux-audio": "savefilename",
"rescale": "savefilename",
"rotate": "savefilename",
"slice": "savefilename"}
}
}
self.defaults = self.set_defaults()
self.kwargs = self.set_kwargs(filetype, command, action, variable)
self.retfile = getattr(self, self.handletype.lower())()
logger.debug("Initialized %s", self.__class__.__name__)
@property
def filetypes(self):
""" Set the filetypes for opening/saving """
all_files = ("All files", "*.*")
filetypes = {"default": (all_files,),
"alignments": [("JSON", "*.json"),
("Pickle", "*.p"),
("YAML", "*.yaml" "*.yml"), # pylint: disable=W1403
all_files],
"config": [("Faceswap config files", "*.fsw"), all_files],
"csv": [("Comma separated values", "*.csv"), all_files],
"image": [("Bitmap", "*.bmp"),
("JPG", "*.jpeg" "*.jpg"), # pylint: disable=W1403
("PNG", "*.png"),
("TIFF", "*.tif" "*.tiff"), # pylint: disable=W1403
all_files],
"state": [("State files", "*.json"), all_files],
"log": [("Log files", "*.log"), all_files],
"video": [("Audio Video Interleave", "*.avi"),
("Flash Video", "*.flv"),
("Matroska", "*.mkv"),
("MOV", "*.mov"),
("MP4", "*.mp4"),
("MPEG", "*.mpeg"),
("WebM", "*.webm"),
all_files]}
# Add in multi-select options
for key, val in filetypes.items():
if len(val) < 3:
continue
multi = ["{} Files".format(key.title())]
multi.append(" ".join([ftype[1] for ftype in val if ftype[0] != "All files"]))
val.insert(0, tuple(multi))
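# For example (derived from the list above), the "video" entry gains a
# leading ("Video Files", "*.avi *.flv *.mkv *.mov *.mp4 *.mpeg *.webm")
# tuple so every known video extension can be selected at once.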
return filetypes
def set_defaults(self):
""" Set the default filetype to be first in list of filetypes,
or set a custom filetype if the first is not correct """
defaults = {key: val[0][1].replace("*", "")
for key, val in self.filetypes.items()}
defaults["default"] = None
defaults["video"] = ".mp4"
defaults["image"] = ".png"
logger.debug(defaults)
return defaults
def set_kwargs(self, filetype, command, action, variable=None):
""" Generate the required kwargs for the requested browser """
logger.debug("Setting Kwargs: (filetype: '%s', command: '%s': action: '%s', "
"variable: '%s')", filetype, command, action, variable)
kwargs = dict()
if self.handletype.lower() == "context":
self.set_context_handletype(command, action, variable)
if self.handletype.lower() in (
"open", "save", "filename", "filename_multi", "savefilename"):
kwargs["filetypes"] = self.filetypes[filetype]
if self.defaults.get(filetype, None):
kwargs['defaultextension'] = self.defaults[filetype]
if self.handletype.lower() == "save":
kwargs["mode"] = "w"
if self.handletype.lower() == "open":
kwargs["mode"] = "r"
logger.debug("Set Kwargs: %s", kwargs)
return kwargs
def set_context_handletype(self, command, action, variable):
""" Choose the correct file browser action based on context """
if self.contexts[command].get(variable, None) is not None:
handletype = self.contexts[command][variable][action]
else:
handletype = self.contexts[command][action]
logger.debug(handletype)
self.handletype = handletype
def open(self):
""" Open a file """
logger.debug("Popping Open browser")
return filedialog.askopenfile(**self.kwargs)
def save(self):
""" Save a file """
logger.debug("Popping Save browser")
return filedialog.asksaveasfile(**self.kwargs)
def dir(self):
""" Get a directory location """
logger.debug("Popping Dir browser")
return filedialog.askdirectory(**self.kwargs)
def savedir(self):
""" Get a save dir location """
logger.debug("Popping SaveDir browser")
return filedialog.askdirectory(**self.kwargs)
def filename(self):
""" Get an existing file location """
logger.debug("Popping Filename browser")
return filedialog.askopenfilename(**self.kwargs)
def filename_multi(self):
""" Get multiple existing file locations """
logger.debug("Popping Filename browser")
return filedialog.askopenfilenames(**self.kwargs)
def savefilename(self):
""" Get a save file location """
logger.debug("Popping SaveFilename browser")
return filedialog.asksaveasfilename(**self.kwargs)
@staticmethod
def nothing(): # pylint: disable=useless-return
""" Method that does nothing, used for disabling open/save pop up """
logger.debug("Popping Nothing browser")
return
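# Hedged usage sketch (values assumed for illustration):
#   filename = FileHandler("filename", "video").retfile
# pops an open-file dialog filtered to the video extensions above and
# returns the chosen path, or an empty string if the dialog is cancelled.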
class Images():
""" Holds locations of images and actual images
Don't call directly. Call get_images()
"""
def __init__(self, pathcache=None):
logger.debug("Initializing %s", self.__class__.__name__)
pathcache = get_config().pathcache if pathcache is None else pathcache
self.pathicons = os.path.join(pathcache, "icons")
self.pathpreview = os.path.join(pathcache, "preview")
self.pathoutput = None
self.previewoutput = None
self.previewtrain = dict()
self.errcount = 0
self.icons = dict()
self.icons["folder"] = ImageTk.PhotoImage(file=os.path.join(
self.pathicons, "open_folder.png"))
self.icons["load"] = ImageTk.PhotoImage(file=os.path.join(
self.pathicons, "open_file.png"))
self.icons["load_multi"] = ImageTk.PhotoImage(file=os.path.join(
self.pathicons, "open_file.png"))
self.icons["context"] = ImageTk.PhotoImage(file=os.path.join(
self.pathicons, "open_file.png"))
self.icons["save"] = ImageTk.PhotoImage(file=os.path.join(self.pathicons, "save.png"))
self.icons["reset"] = ImageTk.PhotoImage(file=os.path.join(self.pathicons, "reset.png"))
self.icons["clear"] = ImageTk.PhotoImage(file=os.path.join(self.pathicons, "clear.png"))
self.icons["graph"] = ImageTk.PhotoImage(file=os.path.join(self.pathicons, "graph.png"))
self.icons["zoom"] = ImageTk.PhotoImage(file=os.path.join(self.pathicons, "zoom.png"))
self.icons["move"] = ImageTk.PhotoImage(file=os.path.join(self.pathicons, "move.png"))
self.icons["favicon"] = ImageTk.PhotoImage(file=os.path.join(self.pathicons, "logo.png"))
logger.debug("Initialized %s: (icons: %s)", self.__class__.__name__, self.icons)
def delete_preview(self):
""" Delete the preview files """
logger.debug("Deleting previews")
for item in os.listdir(self.pathpreview):
if item.startswith(".gui_training_preview") and item.endswith(".jpg"):
fullitem = os.path.join(self.pathpreview, item)
logger.debug("Deleting: '%s'", fullitem)
os.remove(fullitem)
self.clear_image_cache()
def clear_image_cache(self):
""" Clear all cached images """
logger.debug("Clearing image cache")
self.pathoutput = None
self.previewoutput = None
self.previewtrain = dict()
@staticmethod
def get_images(imgpath):
""" Get the images stored within the given directory """
logger.trace("Getting images: '%s'", imgpath)
if not os.path.isdir(imgpath):
logger.debug("Folder does not exist")
return None
files = [os.path.join(imgpath, f)
for f in os.listdir(imgpath) if f.endswith((".png", ".jpg"))]
logger.trace("Image files: %s", files)
return files
def load_latest_preview(self):
""" Load the latest preview image for extract and convert """
logger.trace("Loading preview image")
imagefiles = self.get_images(self.pathoutput)
if not imagefiles or len(imagefiles) == 1:
logger.debug("No preview to display")
self.previewoutput = None
return
# Get penultimate file so we don't accidentally
# load a file that is being saved
show_file = sorted(imagefiles, key=os.path.getctime)[-2]
img = Image.open(show_file)
img.thumbnail((768, 432))
logger.trace("Displaying preview: '%s'", show_file)
self.previewoutput = (img, ImageTk.PhotoImage(img))
def load_training_preview(self):
""" Load the training preview images """
logger.trace("Loading Training preview images")
imagefiles = self.get_images(self.pathpreview)
modified = None
if not imagefiles:
logger.debug("No preview to display")
self.previewtrain = dict()
return
for img in imagefiles:
modified = os.path.getmtime(img) if modified is None else modified
name = os.path.basename(img)
name = os.path.splitext(name)[0]
name = name[name.rfind("_") + 1:].title()
try:
logger.trace("Displaying preview: '%s'", img)
size = self.get_current_size(name)
self.previewtrain[name] = [Image.open(img), None, modified]
self.resize_image(name, size)
self.errcount = 0
except ValueError:
# This is probably an error reading the file whilst it's
# being saved so ignore it for now and only pick up if
# there have been multiple consecutive fails
logger.warning("Unable to display preview: (image: '%s', attempt: %s)",
img, self.errcount)
if self.errcount < 10:
self.errcount += 1
else:
logger.error("Error reading the preview file for '%s'", img)
print("Error reading the preview file for {}".format(name))
self.previewtrain[name] = None
def get_current_size(self, name):
""" Return the size of the currently displayed image """
logger.trace("Getting size: '%s'", name)
if not self.previewtrain.get(name, None):
return None
img = self.previewtrain[name][1]
if not img:
return None
logger.trace("Got size: (name: '%s', width: '%s', height: '%s')",
name, img.width(), img.height())
return img.width(), img.height()
def resize_image(self, name, framesize):
""" Resize the training preview image
based on the passed in frame size """
logger.trace("Resizing image: (name: '%s', framesize: %s", name, framesize)
displayimg = self.previewtrain[name][0]
if framesize:
frameratio = float(framesize[0]) / float(framesize[1])
imgratio = float(displayimg.size[0]) / float(displayimg.size[1])
if frameratio <= imgratio:
scale = framesize[0] / float(displayimg.size[0])
size = (framesize[0], int(displayimg.size[1] * scale))
else:
scale = framesize[1] / float(displayimg.size[1])
size = (int(displayimg.size[0] * scale), framesize[1])
logger.trace("Scaling: (scale: %s, size: %s", scale, size)
# Hacky fix to force a reload if it happens to find corrupted
# data, probably due to reading the image whilst it is partially
# saved. If it continues to fail, then eventually raise.
for i in range(0, 1000):
try:
displayimg = displayimg.resize(size, Image.ANTIALIAS)
except OSError:
if i == 999:
raise
continue
break
self.previewtrain[name][1] = ImageTk.PhotoImage(displayimg)
class ContextMenu(tk.Menu): # pylint: disable=too-many-ancestors
""" Pop up menu """
def __init__(self, widget):
logger.debug("Initializing %s: (widget_class: '%s')",
self.__class__.__name__, widget.winfo_class())
super().__init__(tearoff=0)
self.widget = widget
self.standard_actions()
logger.debug("Initialized %s", self.__class__.__name__)
def standard_actions(self):
""" Standard menu actions """
self.add_command(label="Cut", command=lambda: self.widget.event_generate("<<Cut>>"))
self.add_command(label="Copy", command=lambda: self.widget.event_generate("<<Copy>>"))
self.add_command(label="Paste", command=lambda: self.widget.event_generate("<<Paste>>"))
self.add_separator()
self.add_command(label="Select all", command=self.select_all)
def cm_bind(self):
""" Bind the menu to the widget's Right Click event """
button = "<Button-2>" if platform.system() == "Darwin" else "<Button-3>"
logger.debug("Binding '%s' to '%s'", button, self.widget.winfo_class())
scaling_factor = get_config().scaling_factor if get_config() is not None else 1.0
x_offset = int(34 * scaling_factor)
self.widget.bind(button,
lambda event: self.tk_popup(event.x_root + x_offset, event.y_root, 0))
def select_all(self):
""" Select all for Text or Entry widgets """
logger.debug("Selecting all for '%s'", self.widget.winfo_class())
if self.widget.winfo_class() == "Text":
self.widget.focus_force()
self.widget.tag_add("sel", "1.0", "end")
else:
self.widget.focus_force()
self.widget.select_range(0, tk.END)
class ConsoleOut(ttk.Frame): # pylint: disable=too-many-ancestors
""" The Console out section of the GUI """
def __init__(self, parent, debug):
logger.debug("Initializing %s: (parent: %s, debug: %s)",
self.__class__.__name__, parent, debug)
ttk.Frame.__init__(self, parent)
self.pack(side=tk.TOP, anchor=tk.W, padx=10, pady=(2, 0),
fill=tk.BOTH, expand=True)
self.console = tk.Text(self)
rc_menu = ContextMenu(self.console)
rc_menu.cm_bind()
self.console_clear = get_config().tk_vars['consoleclear']
self.set_console_clear_var_trace()
self.debug = debug
self.build_console()
logger.debug("Initialized %s", self.__class__.__name__)
def set_console_clear_var_trace(self):
""" Set the trigger actions for the clear console var
when it has been triggered from elsewhere """
logger.debug("Set clear trace")
self.console_clear.trace("w", self.clear)
def build_console(self):
""" Build and place the console """
logger.debug("Build console")
self.console.config(width=100, height=6, bg="gray90", fg="black")
self.console.pack(side=tk.LEFT, anchor=tk.N, fill=tk.BOTH, expand=True)
scrollbar = ttk.Scrollbar(self, command=self.console.yview)
scrollbar.pack(side=tk.LEFT, fill="y")
self.console.configure(yscrollcommand=scrollbar.set)
self.redirect_console()
logger.debug("Built console")
def redirect_console(self):
""" Redirect stdout/stderr to console frame """
logger.debug("Redirect console")
if self.debug:
logger.info("Console debug activated. Outputting to main terminal")
else:
sys.stdout = SysOutRouter(console=self.console, out_type="stdout")
sys.stderr = SysOutRouter(console=self.console, out_type="stderr")
logger.debug("Redirected console")
def clear(self, *args): # pylint: disable=unused-argument
""" Clear the console output screen """
logger.debug("Clear console")
if not self.console_clear.get():
logger.debug("Console not set for clearing. Skipping")
return
self.console.delete(1.0, tk.END)
self.console_clear.set(False)
logger.debug("Cleared console")
class SysOutRouter():
""" Route stdout/stderr to the console window """
def __init__(self, console=None, out_type=None):
logger.debug("Initializing %s: (console: %s, out_type: '%s')",
self.__class__.__name__, console, out_type)
self.console = console
self.out_type = out_type
self.color = ("black" if out_type == "stdout" else "red")
logger.debug("Initialized %s", self.__class__.__name__)
def write(self, string):
""" Capture stdout/stderr """
self.console.insert(tk.END, string, self.out_type)
self.console.tag_config(self.out_type, foreground=self.color)
self.console.see(tk.END)
@staticmethod
def flush():
""" If flush is forced, send it to normal terminal """
sys.__stdout__.flush()
class Config():
""" Global configuration settings
Don't call directly. Call get_config()
"""
def __init__(self, cli_opts, scaling_factor, pathcache, statusbar, session):
logger.debug("Initializing %s: (cli_opts: %s, scaling_factor: %s, pathcache: %s, "
"statusbar: %s, session: %s)", self.__class__.__name__, cli_opts,
scaling_factor, pathcache, statusbar, session)
self.cli_opts = cli_opts
self.scaling_factor = scaling_factor
self.pathcache = pathcache
self.statusbar = statusbar
self.serializer = JSONSerializer
self.tk_vars = self.set_tk_vars()
self.command_notebook = None # set in command.py
self.session = session
logger.debug("Initialized %s", self.__class__.__name__)
@property
def command_tabs(self):
""" Return dict of command tab titles with their IDs """
return {self.command_notebook.tab(tab_id, "text").lower(): tab_id
for tab_id in range(0, self.command_notebook.index("end"))}
@staticmethod
def set_tk_vars():
""" TK Variables to be triggered by to indicate
what state various parts of the GUI should be in """
display = tk.StringVar()
display.set(None)
runningtask = tk.BooleanVar()
runningtask.set(False)
actioncommand = tk.StringVar()
actioncommand.set(None)
generatecommand = tk.StringVar()
generatecommand.set(None)
consoleclear = tk.BooleanVar()
consoleclear.set(False)
refreshgraph = tk.BooleanVar()
refreshgraph.set(False)
updatepreview = tk.BooleanVar()
updatepreview.set(False)
traintimeout = tk.IntVar()
traintimeout.set(120)
tk_vars = {"display": display,
"runningtask": runningtask,
"action": actioncommand,
"generate": generatecommand,
"consoleclear": consoleclear,
"refreshgraph": refreshgraph,
"updatepreview": updatepreview,
"traintimeout": traintimeout}
logger.debug(tk_vars)
return tk_vars
def load(self, command=None, filename=None):
""" Pop up load dialog for a saved config file """
logger.debug("Loading config: (command: '%s')", command)
if filename:
with open(filename, "r") as cfgfile:
cfg = self.serializer.unmarshal(cfgfile.read())
else:
cfgfile = FileHandler("open", "config").retfile
if not cfgfile:
return
cfg = self.serializer.unmarshal(cfgfile.read())
if not command and len(cfg.keys()) == 1:
command = list(cfg.keys())[0]
opts = self.get_command_options(cfg, command) if command else cfg
if not opts:
return
for cmd, opts in opts.items():
self.set_command_args(cmd, opts)
if command:
self.command_notebook.select(self.command_tabs[command])
self.add_to_recent(cfgfile.name, command)
logger.debug("Loaded config: (command: '%s', cfgfile: '%s')", command, cfgfile)
def get_command_options(self, cfg, command):
""" return the saved options for the requested
command, if not loading global options """
opts = cfg.get(command, None)
retval = {command: opts}
if not opts:
self.tk_vars["consoleclear"].set(True)
print("No {} section found in file".format(command))
logger.info("No %s section found in file", command)
retval = None
logger.debug(retval)
return retval
def set_command_args(self, command, options):
""" Pass the saved config items back to the CliOptions """
if not options:
return
for srcopt, srcval in options.items():
optvar = self.cli_opts.get_one_option_variable(command, srcopt)
if not optvar:
continue
optvar.set(srcval)
def save(self, command=None):
""" Save the current GUI state to a config file in json format """
logger.debug("Saving config: (command: '%s')", command)
cfgfile = FileHandler("save", "config").retfile
if not cfgfile:
return
cfg = self.cli_opts.get_option_values(command)
cfgfile.write(self.serializer.marshal(cfg))
cfgfile.close()
self.add_to_recent(cfgfile.name, command)
logger.debug("Saved config: (command: '%s', cfgfile: '%s')", command, cfgfile)
def add_to_recent(self, filename, command):
""" Add to recent files """
recent_filename = os.path.join(self.pathcache, ".recent.json")
logger.debug("Adding to recent files '%s': (%s, %s)", recent_filename, filename, command)
if not os.path.exists(recent_filename) or os.path.getsize(recent_filename) == 0:
recent_files = list()
else:
with open(recent_filename, "rb") as inp:
recent_files = self.serializer.unmarshal(inp.read().decode("utf-8"))
logger.debug("Initial recent files: %s", recent_files)
filenames = [recent[0] for recent in recent_files]
if filename in filenames:
idx = filenames.index(filename)
del recent_files[idx]
recent_files.insert(0, (filename, command))
recent_files = recent_files[:20]
logger.debug("Final recent files: %s", recent_files)
recent_json = self.serializer.marshal(recent_files)
with open(recent_filename, "wb") as out:
out.write(recent_json.encode("utf-8"))
class ControlBuilder():
# TODO Expand out for cli options
"""
Builds and returns a frame containing a tkinter control with label
Currently only setup for config items
Parameters
----------
parent: tkinter object
Parent tkinter object
title: str
Title of the control. Will be used for label text
dtype: datatype object
Datatype of the control
default: str
Default value for the control
selected_value: str, optional
Selected value for the control. If None, default will be used
choices: list or tuple, optional
Used for combo boxes and radio control option setting
is_radio: bool, optional
Specifies to use a Radio control instead of combobox if choices are passed
rounding: int or float, optional
For slider controls. Sets the stepping
min_max: int or float, optional
For slider controls. Sets the min and max values
helptext: str, optional
Sets the tooltip text
radio_columns: int, optional
Sets the number of columns to use for grouping radio buttons
label_width: int, optional
Sets the width of the control label. Defaults to 20
control_width: int, optional
Sets the width of the control. Default is to auto expand
"""
def __init__(self, parent, title, dtype, default,
selected_value=None, choices=None, is_radio=False, rounding=None,
min_max=None, helptext=None, radio_columns=3, label_width=20, control_width=None):
logger.debug("Initializing %s: (parent: %s, title: %s, dtype: %s, default: %s, "
"selected_value: %s, choices: %s, is_radio: %s, rounding: %s, min_max: %s, "
"helptext: %s, radio_columns: %s, label_width: %s, control_width: %s)",
self.__class__.__name__, parent, title, dtype, default, selected_value,
choices, is_radio, rounding, min_max, helptext, radio_columns, label_width,
control_width)
self.title = title
self.default = default
self.frame = self.control_frame(parent, helptext)
self.control = self.set_control(dtype, choices, is_radio)
self.tk_var = self.set_tk_var(dtype, selected_value)
self.build_control(choices,
dtype,
rounding,
min_max,
radio_columns,
label_width,
control_width)
logger.debug("Initialized: %s", self.__class__.__name__)
# Frame, control type and variable
def control_frame(self, parent, helptext):
""" Frame to hold control and it's label """
logger.debug("Build control frame")
frame = ttk.Frame(parent)
frame.pack(side=tk.TOP, fill=tk.X)
if helptext is not None:
helptext = self.format_helptext(helptext)
Tooltip(frame, text=helptext, wraplength=720)
logger.debug("Built control frame")
return frame
def format_helptext(self, helptext):
""" Format the help text for tooltips """
logger.debug("Format control help: '%s'", self.title)
helptext = helptext.replace("\n\t", "\n - ").replace("%%", "%")
helptext = self.title + " - " + helptext
logger.debug("Formatted control help: (title: '%s', help: '%s'", self.title, helptext)
return helptext
def set_control(self, dtype, choices, is_radio):
""" Set the correct control type based on the datatype or for this option """
if choices and is_radio:
control = ttk.Radiobutton
elif choices:
control = ttk.Combobox
elif dtype == bool:
control = ttk.Checkbutton
elif dtype in (int, float):
control = ttk.Scale
else:
control = ttk.Entry
logger.debug("Setting control '%s' to %s", self.title, control)
return control
def set_tk_var(self, dtype, selected_value):
""" Correct variable type for control """
logger.debug("Setting tk variable: (title: '%s', dtype: %s, selected_value: %s)",
self.title, dtype, selected_value)
if dtype == bool:
var = tk.BooleanVar
elif dtype == int:
var = tk.IntVar
elif dtype == float:
var = tk.DoubleVar
else:
var = tk.StringVar
var = var(self.frame)
val = self.default if selected_value is None else selected_value
var.set(val)
logger.debug("Set tk variable: (title: '%s', type: %s, value: '%s')",
self.title, type(var), val)
return var
# Build the full control
def build_control(self, choices, dtype, rounding, min_max, radio_columns,
label_width, control_width):
""" Build the correct control type for the option passed through """
logger.debug("Build confog option control")
self.build_control_label(label_width)
self.build_one_control(choices, dtype, rounding, min_max, radio_columns, control_width)
logger.debug("Built option control")
def build_control_label(self, label_width):
""" Label for control """
logger.debug("Build control label: (title: '%s', label_width: %s)",
self.title, label_width)
title = self.title.replace("_", " ").title()
lbl = ttk.Label(self.frame, text=title, width=label_width, anchor=tk.W)
lbl.pack(padx=5, pady=5, side=tk.LEFT, anchor=tk.N)
logger.debug("Built control label: '%s'", self.title)
def build_one_control(self, choices, dtype, rounding, min_max, radio_columns, control_width):
""" Build and place the option controls """
logger.debug("Build control: (title: '%s', control: %s, choices: %s, dtype: %s, "
"rounding: %s, min_max: %s: radio_columns: %s, control_width: %s)",
self.title, self.control, choices, dtype, rounding, min_max, radio_columns,
control_width)
if self.control == ttk.Scale:
ctl = self.slider_control(dtype, rounding, min_max)
elif self.control == ttk.Radiobutton:
ctl = self.radio_control(choices, radio_columns)
else:
ctl = self.control_to_optionsframe(choices)
self.set_control_width(ctl, control_width)
ctl.pack(padx=5, pady=5, fill=tk.X, expand=True)
logger.debug("Built control: '%s'", self.title)
@staticmethod
def set_control_width(ctl, control_width):
""" Set the control width if required """
if control_width is not None:
ctl.config(width=control_width)
def radio_control(self, choices, columns):
""" Create a group of radio buttons """
logger.debug("Adding radio group: %s", self.title)
ctl = ttk.Frame(self.frame)
frames = list()
for _ in range(columns):
frame = ttk.Frame(ctl)
frame.pack(padx=5, pady=5, fill=tk.X, expand=True, side=tk.LEFT, anchor=tk.N)
frames.append(frame)
for idx, choice in enumerate(choices):
frame_id = idx % columns
radio = ttk.Radiobutton(frames[frame_id],
text=choice.title(),
value=choice,
variable=self.tk_var)
radio.pack(anchor=tk.W)
logger.debug("Adding radio option %s to column %s", choice, frame_id)
logger.debug("Added radio group: '%s'", self.title)
return ctl
def slider_control(self, dtype, rounding, min_max):
""" A slider control with corresponding Entry box """
logger.debug("Add slider control to Options Frame: (title: '%s', dtype: %s, rounding: %s, "
"min_max: %s)", self.title, dtype, rounding, min_max)
tbox = ttk.Entry(self.frame, width=8, textvariable=self.tk_var, justify=tk.RIGHT)
tbox.pack(padx=(0, 5), side=tk.RIGHT)
ctl = self.control(
self.frame,
variable=self.tk_var,
command=lambda val, var=self.tk_var, dt=dtype, rn=rounding, mm=min_max:
set_slider_rounding(val, var, dt, rn, mm))
rc_menu = ContextMenu(tbox)
rc_menu.cm_bind()
ctl["from_"] = min_max[0]
ctl["to"] = min_max[1]
logger.debug("Added slider control to Options Frame: %s", self.title)
return ctl
def control_to_optionsframe(self, choices):
""" Standard non-check buttons sit in the main options frame """
logger.debug("Add control to Options Frame: (title: '%s', control: %s, choices: %s)",
self.title, self.control, choices)
if self.control == ttk.Checkbutton:
ctl = self.control(self.frame, variable=self.tk_var, text=None)
else:
ctl = self.control(self.frame, textvariable=self.tk_var)
rc_menu = ContextMenu(ctl)
rc_menu.cm_bind()
if choices:
logger.debug("Adding combo choices: %s", choices)
ctl["values"] = [choice for choice in choices]
logger.debug("Added control to Options Frame: %s", self.title)
return ctl
| 41.971765
| 99
| 0.584427
|
8a94b65c8e7b0376f015cf470ac530673656f01f
| 4,086
|
py
|
Python
|
large_image/cache_util/cachefactory.py
|
juliejsanchez/large_image
|
ae3cd1e605d9ab32a6401325d1618e7ee857acc6
|
[
"Apache-2.0"
] | null | null | null |
large_image/cache_util/cachefactory.py
|
juliejsanchez/large_image
|
ae3cd1e605d9ab32a6401325d1618e7ee857acc6
|
[
"Apache-2.0"
] | null | null | null |
large_image/cache_util/cachefactory.py
|
juliejsanchez/large_image
|
ae3cd1e605d9ab32a6401325d1618e7ee857acc6
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
#############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#############################################################################
import threading
import math
try:
import psutil
except ImportError:
psutil = None
from cachetools import LRUCache
from .. import config
try:
from .memcache import MemCache
except ImportError:
MemCache = None
def pickAvailableCache(sizeEach, portion=8, maxItems=None):
"""
Given an estimated size of an item, return how many of those items would
fit in a fixed portion of the available virtual memory.
:param sizeEach: the expected size of an item that could be cached.
:param portion: the inverse fraction of the memory which can be used.
:param maxItems: if specified, the number of items is never more than this
value.
:return: the number of items that should be cached. Always at least two,
unless maxItems is less.
"""
# Estimate usage based on (1 / portion) of the total virtual memory.
if psutil:
memory = psutil.virtual_memory().total
else:
memory = 1024 ** 3
numItems = max(int(math.floor(memory / portion / sizeEach)), 2)
if maxItems:
numItems = min(numItems, maxItems)
return numItems
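# Worked example (hedged, added for illustration): with 8 GiB of virtual
# memory, portion=8 and an item size of 256 * 256 * 4 * 2 bytes (the value
# CacheFactory passes below), pickAvailableCache returns
# floor(8 * 1024**3 / 8 / 524288) = 2048 cached items.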
class CacheFactory(object):
logged = False
def getCacheSize(self, numItems):
if numItems is None:
defaultPortion = 32
try:
portion = int(config.getConfig('cache_python_memory_portion', defaultPortion))
if portion < 3:
portion = 3
except ValueError:
portion = defaultPortion
numItems = pickAvailableCache(256**2 * 4 * 2, portion)
return numItems
def getCache(self, numItems=None):
# memcached is the fallback default, if available.
cacheBackend = config.getConfig('cache_backend', 'python')
if cacheBackend:
cacheBackend = str(cacheBackend).lower()
cache = None
if cacheBackend == 'memcached' and MemCache and numItems is None:
# lock needed because pylibmc (memcached client) is not thread-safe
cacheLock = threading.Lock()
# check if credentials and location exist, otherwise assume
# location is 127.0.0.1 (localhost) with no password
url = config.getConfig('cache_memcached_url')
if not url:
url = '127.0.0.1'
memcachedUsername = config.getConfig('cache_memcached_username')
if not memcachedUsername:
memcachedUsername = None
memcachedPassword = config.getConfig('cache_memcached_password')
if not memcachedPassword:
memcachedPassword = None
try:
cache = MemCache(url, memcachedUsername, memcachedPassword,
mustBeAvailable=True)
except Exception:
config.getConfig('logger').info('Cannot use memcached for caching.')
cache = None
if cache is None: # fallback backend
cacheBackend = 'python'
cache = LRUCache(self.getCacheSize(numItems))
cacheLock = threading.Lock()
if numItems is None and not CacheFactory.logged:
config.getConfig('logprint').info('Using %s for large_image caching' % cacheBackend)
CacheFactory.logged = True
return cache, cacheLock
| 37.833333
| 96
| 0.61723
|
ecefe43cec65d278a8d5dec872724c2164393136
| 4,970
|
py
|
Python
|
OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/GLES2/KHR/robustness.py
|
JE-Chen/je_old_repo
|
a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5
|
[
"MIT"
] | null | null | null |
OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/GLES2/KHR/robustness.py
|
JE-Chen/je_old_repo
|
a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5
|
[
"MIT"
] | null | null | null |
OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/GLES2/KHR/robustness.py
|
JE-Chen/je_old_repo
|
a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5
|
[
"MIT"
] | null | null | null |
'''OpenGL extension KHR.robustness
This module customises the behaviour of the
OpenGL.raw.GLES2.KHR.robustness to provide a more
Python-friendly API
Overview (from the spec)
Several recent trends in how OpenGL ES integrates into modern computer
systems have created new requirements for robustness and security for GL
rendering contexts.
Additionally GPU architectures now support hardware fault detection;
for example, video memory supporting ECC (error correcting codes)
and error detection. GL contexts should be capable of recovering
from hardware faults such as uncorrectable memory errors. Along with
recovery from such hardware faults, the recovery mechanism can
also allow recovery from video memory access exceptions and system
software failures. System software failures can be due to device
changes or driver failures.
GL queries that return (write) some number of bytes to a
buffer indicated by a pointer parameter introduce risk of buffer
overflows that might be exploitable by malware. To address this,
queries with return value sizes that are not expressed directly by
the parameters to the query itself are given additional API
functions with an additional parameter that specifies the number of
bytes in the buffer and never writing bytes beyond that limit. This
is particularly useful for multi-threaded usage of GL contexts
in a "share group" where one context can change objects in ways that
can cause buffer overflows for another context's GL queries.
The original ARB_vertex_buffer_object extension includes an issue
that explicitly states program termination is allowed when
out-of-bounds vertex buffer object fetches occur. Modern graphics
hardware is capable of well-defined behavior in the case of out-of-
bounds vertex buffer object fetches. Older hardware may require
extra checks to enforce well-defined (and termination free)
behavior, but this expense is warranted when processing potentially
untrusted content.
The intent of this extension is to address some specific robustness
goals:
* For all existing GL queries, provide additional "safe" APIs
that limit data written to user pointers to a buffer size in
bytes that is an explicit additional parameter of the query.
* Provide a mechanism for a GL application to learn about
graphics resets that affect the context. When a graphics reset
occurs, the GL context becomes unusable and the application
must create a new context to continue operation. Detecting a
graphics reset happens through an inexpensive query.
* Define behavior of OpenGL calls made after a graphics reset.
* Provide an enable to guarantee that out-of-bounds buffer object
accesses by the GPU will have deterministic behavior and preclude
application instability or termination due to an incorrect buffer
access. Such accesses include vertex buffer fetches of
attributes and indices, and indexed reads of uniforms or
parameters from buffers.
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/KHR/robustness.txt
'''
from OpenGL import platform, constant, arrays
from OpenGL import extensions, wrapper
import ctypes
from OpenGL.raw.GLES2 import _types, _glgets
from OpenGL.raw.GLES2.KHR.robustness import *
from OpenGL.raw.GLES2.KHR.robustness import _EXTENSION_NAME
def glInitRobustnessKHR():
'''Return boolean indicating whether this extension is available'''
from OpenGL import extensions
return extensions.hasGLExtension( _EXTENSION_NAME )
# INPUT glReadnPixels.data size not checked against bufSize
glReadnPixels=wrapper.wrapper(glReadnPixels).setInputArraySize(
'data', None
)
# INPUT glGetnUniformfv.params size not checked against bufSize
glGetnUniformfv=wrapper.wrapper(glGetnUniformfv).setInputArraySize(
'params', None
)
# INPUT glGetnUniformiv.params size not checked against bufSize
glGetnUniformiv=wrapper.wrapper(glGetnUniformiv).setInputArraySize(
'params', None
)
# INPUT glGetnUniformuiv.params size not checked against bufSize
glGetnUniformuiv=wrapper.wrapper(glGetnUniformuiv).setInputArraySize(
'params', None
)
# INPUT glReadnPixelsKHR.data size not checked against bufSize
glReadnPixelsKHR=wrapper.wrapper(glReadnPixelsKHR).setInputArraySize(
'data', None
)
# INPUT glGetnUniformfvKHR.params size not checked against bufSize
glGetnUniformfvKHR=wrapper.wrapper(glGetnUniformfvKHR).setInputArraySize(
'params', None
)
# INPUT glGetnUniformivKHR.params size not checked against bufSize
glGetnUniformivKHR=wrapper.wrapper(glGetnUniformivKHR).setInputArraySize(
'params', None
)
# INPUT glGetnUniformuivKHR.params size not checked against bufSize
glGetnUniformuivKHR=wrapper.wrapper(glGetnUniformuivKHR).setInputArraySize(
'params', None
)
### END AUTOGENERATED SECTION
| 44.774775
| 76
| 0.784105
|
3e894f9786ff0bfdbaba587bcccad2a36dfaf16b
| 7,672
|
py
|
Python
|
JumpscaleCore/servers/tmux/Tmux.py
|
gneumann333/jumpscaleX_core
|
777d249fa3668c6e802c2f765f4b82fb39c3e5fa
|
[
"Apache-2.0"
] | null | null | null |
JumpscaleCore/servers/tmux/Tmux.py
|
gneumann333/jumpscaleX_core
|
777d249fa3668c6e802c2f765f4b82fb39c3e5fa
|
[
"Apache-2.0"
] | null | null | null |
JumpscaleCore/servers/tmux/Tmux.py
|
gneumann333/jumpscaleX_core
|
777d249fa3668c6e802c2f765f4b82fb39c3e5fa
|
[
"Apache-2.0"
] | null | null | null |
from .Session import Session
from Jumpscale import j
import libtmux as tmuxp
import time
import psutil
JSBASE = j.baseclasses.object
skip = j.baseclasses.testtools._skip
class Tmux(j.baseclasses.object, j.baseclasses.testtools):
__jslocation__ = "j.servers.tmux"
def _init(self, **kwargs):
self._server = None
self._session = None
self._windows_active = {}
@property
def session(self):
"""
Always returns session `main`
:return:
"""
if self._session is None:
session = self.server.find_where({"session_name": "main"})
self._session = Session(session=session)
return self._session
def _find_procs_by_name(self, name, startswith_is_ok=True):
"Return a list of processes matching 'name'."
ls = []
for p in psutil.process_iter(attrs=["name"]):
# print(p.info['name'])
if p.info["name"] is None:
if p.status() == "zombie":
j.sal.process.kill(p.pid)
continue
if startswith_is_ok:
if p.info["name"].startswith(name):
ls.append(p)
else:
if p.info["name"] == name:
ls.append(p)
return ls
@property
def server(self):
def start():
cmd = "%s/bin/js_mux start" % j.core.myenv.config["DIR_BASE"]
j.sal.process.execute(cmd, die=True)
time.sleep(0.1)
if self._server is None:
self._server = tmuxp.Server()
if not self._server.has_session("main"):
start()
self._log_info("tmux server is running")
return self._server
def kill(self):
"""
kosmos 'j.servers.tmux.kill()'
"""
self.session.kill()
def pane_get(self, window="main", pane="main", reset=False):
w = self.window_get(window=window)
return w.pane_get(name=pane, killothers=False, reset=reset)
def window_get(self, window="main", reset=False):
s = self.session
return s.window_get(window, reset=reset)
def window_kill(self, window="main"):
s = self.session
w = s.window_get(window)
w.kill()
def execute(self, cmd, window="main", pane="main", reset=True):
"""
"""
p = self.pane_get(window=window, pane=pane, reset=reset)
p.execute(cmd)
return p
def cmd_get(
self,
name,
window_name=None,
pane_name="main",
cmd="",
path=None,
timeout=30,
env={},
ports=[],
cmd_stop=None,
process_strings=[],
):
"""
example
```
env={}
env["color"]="blue"
cmd = j.servers.tmux.cmd_get(name="test",pane="p21",cmd_start="ls /", env=env,stopcmd="killall...",process_strings=[])
cmd.stop()
cmd.start()
```
:param name: name of the command
:param window_name: window to use; if None, the command name is used as the window name
:param pane_name: pane in the window, make sure there is no overlap e.g. p11
:param cmd: command to execute in the pane
:param path: path where to execute
:param env: arguments which will become environment variables, useful to pass variables to the process
:param ports: array of ports this process will use
:param cmd_stop: specific command to use to stop the process, if any
:param process_strings: which strings to check if the process is running
:return:
"""
if not window_name:
window_name = name
if not self.session.window_exists(window_name):
window = self.window_get(window=window_name, reset=True)
else:
window = self.window_get(window=window_name)
pane = window.pane_get(pane_name)
startup_cmd = j.servers.startupcmd.get(
name=name,
cmd_start=cmd,
path=path,
timeout=timeout,
env=env,
ports=ports,
cmd_stop=cmd_stop,
process_strings=process_strings,
)
startup_cmd._pane_ = pane
return startup_cmd
def panes_2x2_get(self, window_name="multi", reset=True):
"""
:param window_name:
:param reset:
:return: (p11,p12,p21,p22) are 4 panes
"""
window = self.window_get(window_name, reset=reset)
if len(window.panes) == 4 and reset is False:
p11 = window.pane_get(name="p11")
p12 = window.pane_get(name="p12")
p21 = window.pane_get(name="p21")
p22 = window.pane_get(name="p22")
else:
# xy notation
p11 = window.pane_get(name="p11", killothers=True)
p12 = p11.splitVertical("p12")
p21 = p11.splitHorizontal("p21")
p22 = p12.splitHorizontal("p22")
return p11, p12, p21, p22
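# Hedged usage sketch (window name assumed):
#   p11, p12, p21, p22 = j.servers.tmux.panes_2x2_get("demo")
# returns the four quadrant panes of the "demo" window, creating the window
# and splitting it if needed.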
def window_digitalme_get(self, window_name="digitalme", reset=True):
window = self.window_get(window_name, reset=reset)
if len(window.panes) == 6 and reset is False:
return window
else:
# xy notation
p11 = window.pane_get(name="p11", killothers=True)
p13 = p11.splitVertical("p13")
p21 = p11.splitHorizontal("p21")
p22 = p13.splitHorizontal("p22")
p12 = p11.splitVertical("p12")
p14 = p13.splitVertical("p14")
return window
def window_multi_get(self, window_name="multi", reset=False):
"""
kosmos 'j.servers.tmux.window_multi_get()'
:param window_name:
:param reset:
:return:
"""
window = self.window_get(window_name, reset=reset)
if len(window.panes) == 13 and reset is False:
return window
p11, p13, p31, p33 = self.panes_2x2_get(window_name, reset=reset)
p13.name_set("p13")
p31.name_set("p31")
p33.name_set("p33")
p12 = p11.splitVertical("p12")
p14 = p13.splitVertical("p14")
p21 = p11.splitHorizontal("p21")
p22 = p12.splitHorizontal("p22")
p23 = p13.splitHorizontal("p23")
p24 = p14.splitHorizontal("p24")
p41 = p31.splitHorizontal("p41")
p32 = p31.splitVertical("p32")
p42 = p41.splitVertical("p42")
return window
def test(self, name=""):
"""
kosmos 'j.servers.tmux.test()'
:return:
"""
j.builders.system.package.ensure("htop")
self.panes_2x2_get()
window = self.window_get("multi")
for pane in window.panes:
pane.execute("clear;echo %s" % pane.name)
p = self.execute("ls /", "multi", "p22")
assert p.process_obj.name() == "bash"
time.sleep(2)
p.process_obj_child
assert p.process_obj_child is None
p = self.execute("htop", "multi", "p22")
assert p.process_obj.is_running()
# assert p.process_obj.name()=="htop"
assert len(p.process_obj_children) == 1
assert p.process_obj.name() == "bash"
time.sleep(2)
p.process_obj_child
assert p.process_obj_child.name() == "htop"
assert p.process_obj.name() == "bash"
assert p.process_obj_child.name() == "htop"
p = self.execute("find /tmp", "test", "test")
res = p.out_get()
p = self.pane_get("test2", "test2", reset=True)
self._log_info("tests ok for tmux")
self._tests_run(name=name, die=True)
| 28.309963
| 126
| 0.557742
|
ed47371da84d739de137bb4329526f151cd3a293
| 8,512
|
py
|
Python
|
docs/conf.py
|
FRidh/python-geometry
|
62cb6210bcad3b1e4c1a7e0516ca17138793c1b3
|
[
"BSD-3-Clause"
] | 8
|
2015-12-16T04:39:12.000Z
|
2021-04-08T15:49:23.000Z
|
docs/conf.py
|
FRidh/python-geometry
|
62cb6210bcad3b1e4c1a7e0516ca17138793c1b3
|
[
"BSD-3-Clause"
] | 1
|
2015-08-07T15:03:02.000Z
|
2015-08-07T15:03:02.000Z
|
docs/conf.py
|
FRidh/python-geometry
|
62cb6210bcad3b1e4c1a7e0516ca17138793c1b3
|
[
"BSD-3-Clause"
] | 2
|
2015-03-23T02:03:04.000Z
|
2020-01-09T05:01:50.000Z
|
# -*- coding: utf-8 -*-
#
# python-geometry documentation build configuration file, created by
# sphinx-quickstart on Sat Apr 19 11:25:20 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.coverage',
'sphinx.ext.mathjax',
'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'python-geometry'
copyright = u'2014, Frederik Rietdijk'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'nature'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'python-geometrydoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'python-geometry.tex', u'python-geometry Documentation',
u'Frederik Rietdijk', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'python-geometry', u'python-geometry Documentation',
[u'Frederik Rietdijk'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'python-geometry', u'python-geometry Documentation',
u'Frederik Rietdijk', 'python-geometry', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
| 31.643123
| 79
| 0.718985
|
4e8c537a3dfb7649c1aa6fe48cfeaff1d1532a6c
| 28,710
|
py
|
Python
|
samtranslator/model/sam_resources.py
|
sliedig/serverless-application-model
|
ee7e6cd84cea349d7698b2a8d6695f554b5aa302
|
[
"Apache-2.0"
] | 2
|
2020-06-07T13:00:43.000Z
|
2021-11-29T22:29:52.000Z
|
samtranslator/model/sam_resources.py
|
davfaulk/serverless-application-model
|
6c963549c279868b6241e2abed907df74cb3d2b2
|
[
"Apache-2.0"
] | null | null | null |
samtranslator/model/sam_resources.py
|
davfaulk/serverless-application-model
|
6c963549c279868b6241e2abed907df74cb3d2b2
|
[
"Apache-2.0"
] | null | null | null |
""" SAM macro definitions """
from six import string_types
import samtranslator.model.eventsources
import samtranslator.model.eventsources.pull
import samtranslator.model.eventsources.push
import samtranslator.model.eventsources.cloudwatchlogs
from .api.api_generator import ApiGenerator
from .s3_utils.uri_parser import parse_s3_uri
from .tags.resource_tagging import get_tag_list
from samtranslator.model import (PropertyType, SamResourceMacro,
ResourceTypeResolver)
from samtranslator.model.apigateway import ApiGatewayDeployment, ApiGatewayStage
from samtranslator.model.dynamodb import DynamoDBTable
from samtranslator.model.exceptions import (InvalidEventException,
InvalidResourceException)
from samtranslator.model.function_policies import FunctionPolicies, PolicyTypes
from samtranslator.model.iam import IAMRole, IAMRolePolicies
from samtranslator.model.lambda_ import LambdaFunction, LambdaVersion, LambdaAlias
from samtranslator.model.types import dict_of, is_str, is_type, list_of, one_of, any_type
from samtranslator.translator import logical_id_generator
from samtranslator.translator.arn_generator import ArnGenerator
class SamFunction(SamResourceMacro):
"""SAM function macro.
"""
# Constants for Tagging
_SAM_KEY = "lambda:createdBy"
_SAM_VALUE = "SAM"
resource_type = 'AWS::Serverless::Function'
property_types = {
'FunctionName': PropertyType(False, one_of(is_str(), is_type(dict))),
'Handler': PropertyType(True, is_str()),
'Runtime': PropertyType(True, is_str()),
'CodeUri': PropertyType(False, one_of(is_str(), is_type(dict))),
'InlineCode': PropertyType(False, one_of(is_str(), is_type(dict))),
'DeadLetterQueue': PropertyType(False, is_type(dict)),
'Description': PropertyType(False, is_str()),
'MemorySize': PropertyType(False, is_type(int)),
'Timeout': PropertyType(False, is_type(int)),
'VpcConfig': PropertyType(False, is_type(dict)),
'Role': PropertyType(False, is_str()),
'Policies': PropertyType(False, one_of(is_str(), list_of(one_of(is_str(), is_type(dict), is_type(dict))))),
'Environment': PropertyType(False, dict_of(is_str(), is_type(dict))),
'Events': PropertyType(False, dict_of(is_str(), is_type(dict))),
'Tags': PropertyType(False, is_type(dict)),
'Tracing': PropertyType(False, one_of(is_type(dict), is_str())),
'KmsKeyArn': PropertyType(False, one_of(is_type(dict), is_str())),
'DeploymentPreference': PropertyType(False, is_type(dict)),
'ReservedConcurrentExecutions': PropertyType(False, any_type()),
# Intrinsic functions in value of Alias property are not supported, yet
'AutoPublishAlias': PropertyType(False, one_of(is_str()))
}
event_resolver = ResourceTypeResolver(samtranslator.model.eventsources, samtranslator.model.eventsources.pull,
samtranslator.model.eventsources.push,
samtranslator.model.eventsources.cloudwatchlogs)
# DeadLetterQueue
dead_letter_queue_policy_actions = {'SQS': 'sqs:SendMessage', 'SNS': 'sns:Publish'}
# Customers can refer to the following properties of SAM function
referable_properties = {
"Alias": LambdaAlias.resource_type,
"Version": LambdaVersion.resource_type,
}
def resources_to_link(self, resources):
try:
return {
'event_resources': self._event_resources_to_link(resources)
}
except InvalidEventException as e:
raise InvalidResourceException(self.logical_id, e.message)
def to_cloudformation(self, **kwargs):
"""Returns the Lambda function, role, and event resources to which this SAM Function corresponds.
:param dict kwargs: already-converted resources that may need to be modified when converting this \
macro to pure CloudFormation
:returns: a list of vanilla CloudFormation Resources, to which this Function expands
:rtype: list
"""
resources = []
intrinsics_resolver = kwargs["intrinsics_resolver"]
if self.DeadLetterQueue:
self._validate_dlq()
lambda_function = self._construct_lambda_function()
resources.append(lambda_function)
lambda_alias = None
if self.AutoPublishAlias:
alias_name = self._get_resolved_alias_name("AutoPublishAlias", self.AutoPublishAlias, intrinsics_resolver)
lambda_version = self._construct_version(lambda_function, intrinsics_resolver=intrinsics_resolver)
lambda_alias = self._construct_alias(alias_name, lambda_function, lambda_version)
resources.append(lambda_version)
resources.append(lambda_alias)
if self.DeploymentPreference:
self._validate_deployment_preference_and_add_update_policy(kwargs.get('deployment_preference_collection',
None),
lambda_alias, intrinsics_resolver)
managed_policy_map = kwargs.get('managed_policy_map', {})
if not managed_policy_map:
raise Exception('Managed policy map is empty, but should not be.')
execution_role = None
if lambda_function.Role is None:
execution_role = self._construct_role(managed_policy_map)
lambda_function.Role = execution_role.get_runtime_attr('arn')
resources.append(execution_role)
try:
resources += self._generate_event_resources(lambda_function, execution_role, kwargs['event_resources'],
lambda_alias=lambda_alias)
except InvalidEventException as e:
raise InvalidResourceException(self.logical_id, e.message)
return resources
def _get_resolved_alias_name(self, property_name, original_alias_value, intrinsics_resolver):
"""
Alias names can be supplied as an intrinsic function. This method tries to extract alias name from a reference
to a parameter. If it cannot completely resolve (ie. if a complex intrinsic function was used), then this
method raises an exception. If alias name is just a plain string, it will return as is
:param dict or string original_alias_value: Value of Alias property as provided by the customer
:param samtranslator.intrinsics.resolver.IntrinsicsResolver intrinsics_resolver: Instance of the resolver that
knows how to resolve parameter references
:return string: Alias name
:raises InvalidResourceException: If the value is a complex intrinsic function that cannot be resolved
"""
# Try to resolve.
resolved_alias_name = intrinsics_resolver.resolve_parameter_refs(original_alias_value)
if not isinstance(resolved_alias_name, string_types):
# This is still a dictionary which means we are not able to completely resolve intrinsics
raise InvalidResourceException(self.logical_id,
"'{}' must be a string or a Ref to a template parameter"
.format(property_name))
return resolved_alias_name
def _construct_lambda_function(self):
"""Constructs and returns the Lambda function.
:returns: the generated Lambda function resource
:rtype: model.lambda_.LambdaFunction
"""
lambda_function = LambdaFunction(self.logical_id, depends_on=self.depends_on)
if self.FunctionName:
lambda_function.FunctionName = self.FunctionName
lambda_function.Handler = self.Handler
lambda_function.Runtime = self.Runtime
lambda_function.Description = self.Description
lambda_function.MemorySize = self.MemorySize
lambda_function.Timeout = self.Timeout
lambda_function.VpcConfig = self.VpcConfig
lambda_function.Role = self.Role
lambda_function.Environment = self.Environment
lambda_function.Code = self._construct_code_dict()
lambda_function.KmsKeyArn = self.KmsKeyArn
lambda_function.ReservedConcurrentExecutions = self.ReservedConcurrentExecutions
lambda_function.Tags = self._contruct_tag_list()
if self.Tracing:
lambda_function.TracingConfig = {"Mode": self.Tracing}
if self.DeadLetterQueue:
lambda_function.DeadLetterConfig = {"TargetArn": self.DeadLetterQueue['TargetArn']}
return lambda_function
def _contruct_tag_list(self):
if not bool(self.Tags):
self.Tags = {}
if self._SAM_KEY in self.Tags:
raise InvalidResourceException(self.logical_id, self._SAM_KEY + " is a reserved Tag key name and "
"cannot be set on your function. "
"Please change they tag key in the input.")
sam_tag = {self._SAM_KEY: self._SAM_VALUE}
        # To maintain backwards compatibility with the previous implementation, we *must* append the SAM tag to the start
        # of the tags list. Changing this ordering will trigger an update on the Lambda Function resource. Even though
        # this does not change the actual content of the tags, we don't want to trigger an update of a resource without
        # the customer's knowledge.
return get_tag_list(sam_tag) + get_tag_list(self.Tags)
def _construct_role(self, managed_policy_map):
"""Constructs a Lambda execution role based on this SAM function's Policies property.
:returns: the generated IAM Role
:rtype: model.iam.IAMRole
"""
execution_role = IAMRole(self.logical_id + 'Role')
execution_role.AssumeRolePolicyDocument = IAMRolePolicies.lambda_assume_role_policy()
managed_policy_arns = [ArnGenerator.generate_aws_managed_policy_arn('service-role/AWSLambdaBasicExecutionRole')]
if self.Tracing:
managed_policy_arns.append(ArnGenerator.generate_aws_managed_policy_arn('AWSXrayWriteOnlyAccess'))
function_policies = FunctionPolicies({"Policies": self.Policies},
# No support for policy templates in the "core"
policy_template_processor=None)
policy_documents = []
if self.DeadLetterQueue:
policy_documents.append(IAMRolePolicies.dead_letter_queue_policy(
self.dead_letter_queue_policy_actions[self.DeadLetterQueue['Type']],
self.DeadLetterQueue['TargetArn']))
for index, policy_entry in enumerate(function_policies.get()):
if policy_entry.type is PolicyTypes.POLICY_STATEMENT:
policy_documents.append({
'PolicyName': execution_role.logical_id + 'Policy' + str(index),
'PolicyDocument': policy_entry.data
})
elif policy_entry.type is PolicyTypes.MANAGED_POLICY:
# There are three options:
# Managed Policy Name (string): Try to convert to Managed Policy ARN
# Managed Policy Arn (string): Insert it directly into the list
# Intrinsic Function (dict): Insert it directly into the list
#
# When you insert into managed_policy_arns list, de-dupe to prevent same ARN from showing up twice
#
policy_arn = policy_entry.data
if isinstance(policy_entry.data, string_types) and policy_entry.data in managed_policy_map:
policy_arn = managed_policy_map[policy_entry.data]
# De-Duplicate managed policy arns before inserting. Mainly useful
# when customer specifies a managed policy which is already inserted
# by SAM, such as AWSLambdaBasicExecutionRole
if policy_arn not in managed_policy_arns:
managed_policy_arns.append(policy_arn)
else:
# Policy Templates are not supported here in the "core"
raise InvalidResourceException(
self.logical_id,
"Policy at index {} in the 'Policies' property is not valid".format(index))
execution_role.ManagedPolicyArns = list(managed_policy_arns)
execution_role.Policies = policy_documents or None
return execution_role
def _validate_dlq(self):
"""Validates whether the DeadLetterQueue LogicalId is validation
:raise: InvalidResourceException
"""
# Validate required logical ids
valid_dlq_types = str(list(self.dead_letter_queue_policy_actions.keys()))
if not self.DeadLetterQueue.get('Type') or not self.DeadLetterQueue.get('TargetArn'):
            raise InvalidResourceException(self.logical_id,
                                           "'DeadLetterQueue' requires Type and TargetArn properties to be specified")
# Validate required Types
if not self.DeadLetterQueue['Type'] in self.dead_letter_queue_policy_actions:
raise InvalidResourceException(self.logical_id,
"'DeadLetterQueue' requires Type of {}".format(valid_dlq_types))
def _event_resources_to_link(self, resources):
event_resources = {}
if self.Events:
for logical_id, event_dict in self.Events.items():
event_source = self.event_resolver.resolve_resource_type(event_dict).from_dict(
self.logical_id + logical_id, event_dict, logical_id)
event_resources[logical_id] = event_source.resources_to_link(resources)
return event_resources
def _generate_event_resources(self, lambda_function, execution_role, event_resources, lambda_alias=None):
"""Generates and returns the resources associated with this function's events.
:param model.lambda_.LambdaFunction lambda_function: generated Lambda function
:param iam.IAMRole execution_role: generated Lambda execution role
:param implicit_api: Global Implicit API resource where the implicit APIs get attached to, if necessary
:param implicit_api_stage: Global implicit API stage resource where implicit APIs get attached to, if necessary
:param event_resources: All the event sources associated with this Lambda function
:param model.lambda_.LambdaAlias lambda_alias: Optional Lambda Alias resource if we want to connect the
event sources to this alias
:returns: a list containing the function's event resources
:rtype: list
"""
resources = []
if self.Events:
for logical_id, event_dict in self.Events.items():
eventsource = self.event_resolver.resolve_resource_type(event_dict).from_dict(
lambda_function.logical_id + logical_id, event_dict, logical_id)
kwargs = {
# When Alias is provided, connect all event sources to the alias and *not* the function
'function': lambda_alias or lambda_function,
'role': execution_role,
}
for name, resource in event_resources[logical_id].items():
kwargs[name] = resource
resources += eventsource.to_cloudformation(**kwargs)
return resources
def _construct_code_dict(self):
if self.InlineCode:
return {
"ZipFile": self.InlineCode
}
elif self.CodeUri:
return self._construct_code_dict_code_uri()
else:
raise InvalidResourceException(self.logical_id, "Either 'InlineCode' or 'CodeUri' must be set")
def _construct_code_dict_code_uri(self):
"""Constructs the Lambda function's `Code property`_, from the SAM function's CodeUri property.
.. _Code property: \
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lambda-function-code.html
:returns: a Code dict, containing the S3 Bucket, Key, and Version of the Lambda function code
:rtype: dict
"""
if isinstance(self.CodeUri, dict):
if not self.CodeUri.get("Bucket", None) or not self.CodeUri.get("Key", None):
# CodeUri is a dictionary but does not contain Bucket or Key property
raise InvalidResourceException(self.logical_id,
"'CodeUri' requires Bucket and Key properties to be specified")
s3_pointer = self.CodeUri
else:
# CodeUri is NOT a dictionary. Parse it as a string
s3_pointer = parse_s3_uri(self.CodeUri)
if s3_pointer is None:
raise InvalidResourceException(self.logical_id,
'\'CodeUri\' is not a valid S3 Uri of the form '
'"s3://bucket/key" with optional versionId query parameter.')
code = {
'S3Bucket': s3_pointer['Bucket'],
'S3Key': s3_pointer['Key']
}
if 'Version' in s3_pointer:
code['S3ObjectVersion'] = s3_pointer['Version']
return code
def _construct_version(self, function, intrinsics_resolver):
"""Constructs a Lambda Version resource that will be auto-published when CodeUri of the function changes.
Old versions will not be deleted without a direct reference from the CloudFormation template.
:param model.lambda_.LambdaFunction function: Lambda function object that is being connected to a version
:param model.intrinsics.resolver.IntrinsicsResolver intrinsics_resolver: Class that can help resolve
references to parameters present in CodeUri. It is a common usecase to set S3Key of Code to be a
template parameter. Need to resolve the values otherwise we will never detect a change in Code dict
:return: Lambda function Version resource
"""
code_dict = function.Code
if not code_dict:
raise ValueError("Lambda function code must be a valid non-empty dictionary")
if not intrinsics_resolver:
raise ValueError("intrinsics_resolver is required for versions creation")
# Resolve references to template parameters before creating hash. This will *not* resolve all intrinsics
# because we cannot resolve runtime values like Arn of a resource. For purposes of detecting changes, this
# is good enough. Here is why:
#
        # When using intrinsic functions there are two cases when the hash must change:
# - Value of the template parameter changes
# - (or) LogicalId of a referenced resource changes ie. !GetAtt NewResource.Arn
#
        # The latter case will already change the hash because some value in the Code dictionary changes. We handle the
# first case by resolving references to template parameters. It is okay even if these references are
# present inside another intrinsic such as !Join. The resolver will replace the reference with the parameter's
# value and keep all other parts of !Join identical. This will still trigger a change in the hash.
code_dict = intrinsics_resolver.resolve_parameter_refs(code_dict)
# Construct the LogicalID of Lambda version by appending 10 characters of SHA of CodeUri. This is necessary
# to trigger creation of a new version every time code location changes. Since logicalId changes, CloudFormation
# will drop the old version and create a new one for us. We set a DeletionPolicy on the version resource to
# prevent CloudFormation from actually deleting the underlying version resource
#
        # SHA Collisions: For purposes of triggering a new update, we are concerned only with the difference between the
        # previous and next hashes. The chances that two subsequent hashes collide are fairly low.
prefix = "{id}Version".format(id=self.logical_id)
logical_id = logical_id_generator.LogicalIdGenerator(prefix, code_dict).gen()
retain_old_versions = {
"DeletionPolicy": "Retain"
}
lambda_version = LambdaVersion(logical_id=logical_id, attributes=retain_old_versions)
lambda_version.FunctionName = function.get_runtime_attr('name')
return lambda_version
def _construct_alias(self, name, function, version):
"""Constructs a Lambda Alias for the given function and pointing to the given version
:param string name: Name of the alias
:param model.lambda_.LambdaFunction function: Lambda function object to associate the alias with
:param model.lambda_.LambdaVersion version: Lambda version object to associate the alias with
:return: Lambda alias object
:rtype model.lambda_.LambdaAlias
"""
if not name:
raise ValueError("Alias name is required to create an alias")
logical_id = "{id}Alias{suffix}".format(id=function.logical_id, suffix=name)
alias = LambdaAlias(logical_id=logical_id)
alias.Name = name
alias.FunctionName = function.get_runtime_attr('name')
alias.FunctionVersion = version.get_runtime_attr("version")
return alias
def _validate_deployment_preference_and_add_update_policy(self, deployment_preference_collection, lambda_alias,
intrinsics_resolver):
if 'Enabled' in self.DeploymentPreference:
self.DeploymentPreference['Enabled'] = intrinsics_resolver.resolve_parameter_refs(
self.DeploymentPreference['Enabled'])
if isinstance(self.DeploymentPreference['Enabled'], dict):
raise InvalidResourceException(self.logical_id, "'Enabled' must be a boolean value")
if deployment_preference_collection is None:
raise ValueError('deployment_preference_collection required for parsing the deployment preference')
deployment_preference_collection.add(self.logical_id, self.DeploymentPreference)
if deployment_preference_collection.get(self.logical_id).enabled:
if self.AutoPublishAlias is None:
raise InvalidResourceException(
self.logical_id,
"'DeploymentPreference' requires AutoPublishAlias property to be specified")
if lambda_alias is None:
raise ValueError('lambda_alias expected for updating it with the appropriate update policy')
lambda_alias.set_resource_attribute("UpdatePolicy",
deployment_preference_collection.update_policy(
self.logical_id).to_dict())
class SamApi(SamResourceMacro):
"""SAM rest API macro.
"""
resource_type = 'AWS::Serverless::Api'
property_types = {
# Internal property set only by Implicit API plugin. If set to True, the API Event Source code will inject
# Lambda Integration URI to the Swagger. To preserve backwards compatibility, this must be set only for
        # Implicit APIs. For Explicit APIs, customers are expected to set the integration URI themselves.
# In the future, we might rename and expose this property to customers so they can have SAM manage Explicit APIs
# Swagger.
'__MANAGE_SWAGGER': PropertyType(False, is_type(bool)),
'Name': PropertyType(False, one_of(is_str(), is_type(dict))),
'StageName': PropertyType(True, one_of(is_str(), is_type(dict))),
'DefinitionBody': PropertyType(False, is_type(dict)),
'DefinitionUri': PropertyType(False, one_of(is_str(), is_type(dict))),
'CacheClusterEnabled': PropertyType(False, is_type(bool)),
'CacheClusterSize': PropertyType(False, is_str()),
'Variables': PropertyType(False, is_type(dict)),
'EndpointConfiguration': PropertyType(False, is_str()),
'MethodSettings': PropertyType(False, is_type(list)),
'BinaryMediaTypes': PropertyType(False, is_type(list)),
'Cors': PropertyType(False, one_of(is_str(), is_type(dict))),
'Auth': PropertyType(False, is_type(dict))
}
referable_properties = {
"Stage": ApiGatewayStage.resource_type,
"Deployment": ApiGatewayDeployment.resource_type,
}
def to_cloudformation(self, **kwargs):
"""Returns the API Gateway RestApi, Deployment, and Stage to which this SAM Api corresponds.
:param dict kwargs: already-converted resources that may need to be modified when converting this \
macro to pure CloudFormation
:returns: a list of vanilla CloudFormation Resources, to which this Function expands
:rtype: list
"""
resources = []
api_generator = ApiGenerator(self.logical_id,
self.CacheClusterEnabled,
self.CacheClusterSize,
self.Variables,
self.depends_on,
self.DefinitionBody,
self.DefinitionUri,
self.Name,
self.StageName,
endpoint_configuration=self.EndpointConfiguration,
method_settings=self.MethodSettings,
binary_media=self.BinaryMediaTypes,
cors=self.Cors,
auth=self.Auth)
rest_api, deployment, stage, permissions = api_generator.to_cloudformation()
resources.extend([rest_api, deployment, stage])
resources.extend(permissions)
return resources
class SamSimpleTable(SamResourceMacro):
"""SAM simple table macro.
"""
resource_type = 'AWS::Serverless::SimpleTable'
property_types = {
'PrimaryKey': PropertyType(False, dict_of(is_str(), is_str())),
'ProvisionedThroughput': PropertyType(False, dict_of(is_str(), one_of(is_type(int), is_type(dict)))),
'TableName': PropertyType(False, one_of(is_str(), is_type(dict))),
'Tags': PropertyType(False, is_type(dict)),
'SSESpecification': PropertyType(False, is_type(dict))
}
attribute_type_conversions = {
'String': 'S',
'Number': 'N',
'Binary': 'B'
}
def to_cloudformation(self, **kwargs):
dynamodb_resources = self._construct_dynamodb_table()
return [dynamodb_resources]
def _construct_dynamodb_table(self):
dynamodb_table = DynamoDBTable(self.logical_id, depends_on=self.depends_on)
if self.PrimaryKey:
primary_key = {
'AttributeName': self.PrimaryKey['Name'],
'AttributeType': self._convert_attribute_type(self.PrimaryKey['Type'])
}
else:
primary_key = {'AttributeName': 'id', 'AttributeType': 'S'}
dynamodb_table.AttributeDefinitions = [primary_key]
dynamodb_table.KeySchema = [{
'AttributeName': primary_key['AttributeName'],
'KeyType': 'HASH'
}]
if self.ProvisionedThroughput:
provisioned_throughput = self.ProvisionedThroughput
else:
provisioned_throughput = {'ReadCapacityUnits': 5, 'WriteCapacityUnits': 5}
dynamodb_table.ProvisionedThroughput = provisioned_throughput
if self.SSESpecification:
dynamodb_table.SSESpecification = self.SSESpecification
if self.TableName:
dynamodb_table.TableName = self.TableName
if bool(self.Tags):
dynamodb_table.Tags = get_tag_list(self.Tags)
return dynamodb_table
def _convert_attribute_type(self, attribute_type):
if attribute_type in self.attribute_type_conversions:
return self.attribute_type_conversions[attribute_type]
raise InvalidResourceException(self.logical_id, 'Invalid \'Type\' "{actual}".'.format(actual=attribute_type))
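# --- Editor's note: illustrative sketch, not part of the original module. ---
# The comments in SamFunction._construct_version explain why parameter references
# are resolved before hashing the Code dict: only then does changing a template
# parameter (for example the S3 key) yield a new Version logical id. The helper
# below is a rough, hypothetical stand-in for LogicalIdGenerator, shown purely
# for illustration of that scheme; it is not the translator's actual hashing code.
def _illustrative_version_logical_id(function_logical_id, resolved_code_dict):
    import hashlib
    import json
    # Hash the *resolved* Code dict and append 10 hex characters, mirroring the
    # "{id}Version" + hash-suffix naming described in _construct_version.
    digest = hashlib.sha1(
        json.dumps(resolved_code_dict, sort_keys=True).encode("utf-8")
    ).hexdigest()
    return "{id}Version{suffix}".format(id=function_logical_id, suffix=digest[:10])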
| 49.160959
| 120
| 0.653953
|
8a7203652f253930fe6f9204807a355b929a050b
| 616
|
py
|
Python
|
app.py
|
reveil/varnishtest
|
51c203300c0ba5a8d61b726b9dba5a9a0ff5f5aa
|
[
"MIT"
] | null | null | null |
app.py
|
reveil/varnishtest
|
51c203300c0ba5a8d61b726b9dba5a9a0ff5f5aa
|
[
"MIT"
] | null | null | null |
app.py
|
reveil/varnishtest
|
51c203300c0ba5a8d61b726b9dba5a9a0ff5f5aa
|
[
"MIT"
] | null | null | null |
from time import sleep
from flask import Flask, request
app = Flask('varnishtest')
def row(key, value):
return '<tr><td>' + str(key) + '</td><td>' + str(value) + '</td></tr>'
@app.route('/', defaults={'path': ''})
@app.route('/<path:path>')
def view(path):
result = '<!DOCTYPE html><html lang="en"><head><title>headers</title></head><body><table>'
result += row('URL', '/' + path)
for header, value in request.headers.items():
result += row(header, value)
result += '</table></body></html>'
if 'sleep' in request.args:
sleep(int(request.args['sleep']))
return result
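# --- Editor's note: illustrative addition, not part of the original app. ---
# One way to run this test server locally; the host and port are assumptions.
if __name__ == '__main__':
    app.run(host='127.0.0.1', port=8080)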
| 28
| 94
| 0.592532
|
bfe863505d48e41d6577f847801e3f0fcc158abd
| 587
|
py
|
Python
|
busy_beaver/apps/slack_integration/api/slash_command.py
|
BinSquare/busy-beaver
|
b8063a7e434eb47e638697719896880781f9783f
|
[
"MIT"
] | 55
|
2019-05-05T01:20:58.000Z
|
2022-01-10T18:03:05.000Z
|
busy_beaver/apps/slack_integration/api/slash_command.py
|
BinSquare/busy-beaver
|
b8063a7e434eb47e638697719896880781f9783f
|
[
"MIT"
] | 222
|
2019-05-03T16:31:26.000Z
|
2021-08-28T23:49:03.000Z
|
busy_beaver/apps/slack_integration/api/slash_command.py
|
BinSquare/busy-beaver
|
b8063a7e434eb47e638697719896880781f9783f
|
[
"MIT"
] | 19
|
2019-04-27T19:49:32.000Z
|
2020-06-30T19:52:09.000Z
|
import logging
from flask import jsonify, request
from flask.views import MethodView
from .decorators import slack_verification_required
from busy_beaver.apps.slack_integration.slash_command import process_slash_command
logger = logging.getLogger(__name__)
class SlackSlashCommandDispatchResource(MethodView):
"""Dealing with slash commands"""
decorators = [slack_verification_required]
def post(self):
data = dict(request.form)
logger.info("Received Slack slash command", extra={"form_data": data})
return jsonify(process_slash_command(data))
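# --- Editor's note: illustrative sketch, not part of the original module. ---
# A Flask MethodView like the one above is typically attached to an app or
# blueprint via as_view(). The URL rule and endpoint name below are assumptions,
# not taken from the busy-beaver codebase.
def _example_register(app):
    app.add_url_rule(
        "/slack/slash-command",
        view_func=SlackSlashCommandDispatchResource.as_view("slash_command_dispatch"),
    )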
| 27.952381
| 82
| 0.775128
|
47fe7ef22a5d34b271d786e4dd6c60943dfc734a
| 295
|
py
|
Python
|
Space-bound.py
|
VaultHack/Codename-GAME-
|
86faefb872298dc71494110bdc22ebb3f31a0350
|
[
"Apache-2.0"
] | 3
|
2018-03-21T18:27:40.000Z
|
2018-03-29T06:25:50.000Z
|
Space-bound.py
|
VaultHack/Codename-GAME-
|
86faefb872298dc71494110bdc22ebb3f31a0350
|
[
"Apache-2.0"
] | null | null | null |
Space-bound.py
|
VaultHack/Codename-GAME-
|
86faefb872298dc71494110bdc22ebb3f31a0350
|
[
"Apache-2.0"
] | 2
|
2018-03-21T18:27:55.000Z
|
2018-03-29T06:25:37.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Authors: Bogdan Pahomov(github: @VaultHack), Ostap Bodnar (@TheDevilsSon), Lana (@undergrave), Vitaliy Farbitnyk, Oleksandra Gorjievska (@SandraGorzhi)
# @Date: 2018-03-21
# @Email: shtormless@gmail.com Github: @VaultHack
#Team: GAMEDEV
| 42.142857
| 155
| 0.698305
|
bbae087e9d10e329a0516c6d8184c6d818a5f341
| 113
|
py
|
Python
|
transport/transport_common.py
|
osedoxin/LWZ303-RS232
|
505d9bba10e70e5b8cc845285110eb3f675127c2
|
[
"MIT"
] | null | null | null |
transport/transport_common.py
|
osedoxin/LWZ303-RS232
|
505d9bba10e70e5b8cc845285110eb3f675127c2
|
[
"MIT"
] | null | null | null |
transport/transport_common.py
|
osedoxin/LWZ303-RS232
|
505d9bba10e70e5b8cc845285110eb3f675127c2
|
[
"MIT"
] | 1
|
2022-01-16T12:36:26.000Z
|
2022-01-16T12:36:26.000Z
|
FLAG_CLOSE = b"CLOSE"
FLAG_CONNECT = b"CONNECT"
FLAG_HELLO = b"HELLO"
FLAG_READ = b"READ"
FLAG_RESET = b"RESET"
| 18.833333
| 25
| 0.725664
|
81cdf00f53e91747133d369cc2c70a561b8edad2
| 10,742
|
py
|
Python
|
topaz/modules/marshal.py
|
mswart/topaz
|
4bc02d6f4bf29c20f045223ecb6ae8a5cc9df2ae
|
[
"BSD-3-Clause"
] | 241
|
2015-01-02T18:49:09.000Z
|
2022-03-15T15:08:45.000Z
|
topaz/modules/marshal.py
|
mswart/topaz
|
4bc02d6f4bf29c20f045223ecb6ae8a5cc9df2ae
|
[
"BSD-3-Clause"
] | 16
|
2015-05-04T21:31:08.000Z
|
2020-06-04T22:49:36.000Z
|
topaz/modules/marshal.py
|
mswart/topaz
|
4bc02d6f4bf29c20f045223ecb6ae8a5cc9df2ae
|
[
"BSD-3-Clause"
] | 24
|
2015-02-15T05:35:11.000Z
|
2022-03-22T13:29:04.000Z
|
from __future__ import absolute_import
from topaz.module import ModuleDef
from topaz.objects.arrayobject import W_ArrayObject
from topaz.objects.boolobject import W_TrueObject, W_FalseObject
from topaz.objects.intobject import W_FixnumObject
from topaz.objects.hashobject import W_HashObject
from topaz.objects.nilobject import W_NilObject
from topaz.objects.stringobject import W_StringObject
from topaz.objects.symbolobject import W_SymbolObject
from topaz.objects.ioobject import W_IOObject
from topaz.objects.floatobject import W_FloatObject
import os
import math
class Marshal(object):
moduledef = ModuleDef("Marshal")
MAJOR_VERSION = 4
MINOR_VERSION = 8
NIL = 0x30
TRUE = 0x54
FALSE = 0x46
FIXNUM = 0x69
ARRAY = 0x5b
SYMBOL = 0x3a
IVAR = 0x49
STRING = 0x22
HASH = 0x7b
FLOAT = 0x66
@moduledef.setup_module
def setup_module(space, w_mod):
space.set_const(w_mod, "MAJOR_VERSION", space.newint(Marshal.MAJOR_VERSION))
space.set_const(w_mod, "MINOR_VERSION", space.newint(Marshal.MINOR_VERSION))
@staticmethod
def dump(space, w_obj):
bytes = []
if isinstance(w_obj, W_NilObject):
bytes.append(Marshal.NIL)
elif isinstance(w_obj, W_TrueObject):
bytes.append(Marshal.TRUE)
elif isinstance(w_obj, W_FalseObject):
bytes.append(Marshal.FALSE)
elif isinstance(w_obj, W_FixnumObject):
bytes.append(Marshal.FIXNUM)
bytes += Marshal.integer2bytes(space.int_w(w_obj))
elif isinstance(w_obj, W_FloatObject):
bytes.append(Marshal.FLOAT)
raw_value = space.float_w(w_obj)
int_value = int(raw_value)
float_value = raw_value
string = "" # None
if raw_value == int_value:
float_value = int_value
# repr would be more accurate here, but it weirdly does not translate
if str(raw_value) == "-0.0": # slowing things down
string = "-0"
else:
string = str(float_value)
else: # encode float
string = str(float_value)
length = len(string)
bytes += Marshal.integer2bytes(length)
for c in string:
bytes.append(ord(c))
elif isinstance(w_obj, W_ArrayObject):
array = space.listview(w_obj)
bytes.append(Marshal.ARRAY)
bytes += Marshal.integer2bytes(len(array))
for item in array:
bytes += Marshal.dump(space, item)
elif isinstance(w_obj, W_SymbolObject):
bytes.append(Marshal.SYMBOL)
symbol = space.symbol_w(w_obj)
bytes += Marshal.integer2bytes(len(symbol))
for char in symbol:
bytes.append(ord(char))
elif isinstance(w_obj, W_StringObject):
string = space.str_w(w_obj)
bytes.append(Marshal.IVAR)
bytes.append(Marshal.STRING)
bytes += Marshal.integer2bytes(len(string))
for char in string:
bytes.append(ord(char))
bytes.append(0x06)
# TODO: respect encoding
bytes += Marshal.dump(space, space.newsymbol("E"))
bytes += Marshal.dump(space, space.w_true)
elif isinstance(w_obj, W_HashObject):
bytes.append(Marshal.HASH)
hash_len = w_obj.strategy.len(w_obj.dict_storage)
hash_keys_w = w_obj.strategy.keys(w_obj.dict_storage)
bytes += Marshal.integer2bytes(hash_len)
for w_key in hash_keys_w:
bytes += Marshal.dump(space, w_key)
w_value = w_obj.strategy.getitem(w_obj.dict_storage, w_key)
bytes += Marshal.dump(space, w_value)
else:
raise NotImplementedError(type(w_obj))
return bytes
@staticmethod
def load(space, bytes, offset=0):
byte = bytes[offset]
if byte == Marshal.NIL:
return space.w_nil, 1
elif byte == Marshal.TRUE:
return space.w_true, 1
elif byte == Marshal.FALSE:
return space.w_false, 1
elif byte == Marshal.FIXNUM:
value, length = Marshal.bytes2integer(bytes, offset + 1)
return space.newint(value), length
elif byte == Marshal.FLOAT:
count, length = Marshal.bytes2integer(bytes, offset + 1)
chars = []
for i in range(length, length + count):
chars.append(chr(bytes[offset + i]))
return space.newfloat(float("".join(chars))), length
elif byte == Marshal.ARRAY:
count, skip = Marshal.bytes2integer(bytes, offset + 1)
array = []
for i in range(0, count):
assert skip > 0
element, l = Marshal.load(space, bytes, offset + skip)
skip += l
array.append(element)
return space.newarray(array), skip
elif byte == Marshal.SYMBOL:
count, length = Marshal.bytes2integer(bytes, offset + 1)
chars = []
for i in range(length, length + count):
chars.append(chr(bytes[offset + i]))
return space.newsymbol("".join(chars)), length + count
elif byte == Marshal.IVAR:
# TODO: fully interpret IVARS
if bytes[offset + 1] == Marshal.STRING:
count, length = Marshal.bytes2integer(bytes, offset + 2)
encoding = 6
chars = []
# TODO: take encoding into consideration
for i in range(length + 1, length + count + 1):
chars.append(chr(bytes[offset + i]))
return space.newstr_fromstr("".join(chars)), count + length + encoding
else:
raise NotImplementedError(bytes[offset + 1])
elif byte == Marshal.HASH:
count, skip = Marshal.bytes2integer(bytes, offset + 1)
w_hash = space.newhash()
for i in range(0, count):
assert skip > 0
k, s = Marshal.load(space, bytes, offset + skip)
skip += s
assert skip > 0
v, s = Marshal.load(space, bytes, offset + skip)
skip += s
w_hash.method_subscript_assign(space, k, v)
return w_hash, skip
else:
raise NotImplementedError(byte)
@moduledef.function("dump")
def method_dump(self, space, w_obj, w_io=None):
bytes = [4, 8]
bytes += Marshal.dump(space, w_obj)
string = "".join([chr(byte) for byte in bytes])
if w_io is not None:
assert isinstance(w_io, W_IOObject)
w_io.ensure_not_closed(space)
os.write(w_io.fd, string)
return w_io
else:
return space.newstr_fromstr(string)
@moduledef.function("load")
@moduledef.function("restore")
def method_load(self, space, w_obj):
string = ""
if isinstance(w_obj, W_IOObject):
w_obj.ensure_not_closed(space)
string = os.read(w_obj.fd, int(os.fstat(w_obj.fd).st_size))
elif isinstance(w_obj, W_StringObject):
string = space.str_w(w_obj)
else:
raise space.error(space.w_TypeError, "instance of IO needed")
if len(string) < 2:
raise space.error(space.w_ArgumentError, "marshal data too short")
bytes = [ord(string[i]) for i in range(0, len(string))]
if int(bytes[0]) != Marshal.MAJOR_VERSION or int(bytes[1]) != Marshal.MINOR_VERSION:
raise space.error(
space.w_TypeError,
"incompatible marshal file format (can't be read)\n"
"format version %s.%s required; %s.%s given"
% (Marshal.MAJOR_VERSION, Marshal.MINOR_VERSION, bytes[0], bytes[1])
)
return Marshal.load(space, bytes, 2)[0]
# extract integer from marshalled byte array
# least significant byte first
@staticmethod
def bytes2integer(bytes, offset):
if bytes[offset] >= 252:
value = 256 - bytes[offset + 1]
for i in range(2, 256 - bytes[offset] + 1):
value += (255 - bytes[offset + i]) * int(math.pow(256, i - 1))
return -value, 256 - bytes[offset] + 2
elif bytes[offset] > 0 and bytes[offset] < 6:
value = bytes[offset + 1]
for i in range(2, bytes[offset] + 1):
value += bytes[offset + i] * int(math.pow(256, i - 1))
return value, bytes[offset] + 2
else:
value = bytes[offset]
if value == 0:
return 0, 2
elif value > 127:
return value - 251, 2
else:
return value - 5, 2
# least significant byte first
@staticmethod
def integer2bytes(value):
bytes = []
if value > 2 ** 30 - 1:
raise NotImplementedError("Bignum")
if value > 2 ** 24 - 1:
bytes.append(4)
bytes.append(value % 256)
bytes.append((value >> 8) % 256)
bytes.append((value >> 16) % 256)
bytes.append((value >> 24) % 256)
elif value > 2 ** 16 - 1:
bytes.append(3)
bytes.append(value % 256)
bytes.append((value >> 8) % 256)
bytes.append((value >> 16) % 256)
elif value > 255:
bytes.append(2)
bytes.append(value % 256)
bytes.append((value >> 8) % 256)
elif value > 122:
bytes.append(1)
bytes.append(value)
elif value > 0:
bytes.append(value + 5)
elif value == 0:
bytes.append(0)
elif value > -124:
bytes.append(251 + value)
elif value > -257:
bytes.append(0xff)
bytes.append(256 + value)
elif value > -(2 ** 16 + 1):
bytes.append(0xfe)
bytes.append(value % 256)
bytes.append((value >> 8) % 256)
elif value > -(2 ** 24 + 1):
bytes.append(0xfd)
bytes.append(value % 256)
bytes.append((value >> 8) % 256)
bytes.append((value >> 16) % 256)
elif value > -(2 ** 30 + 1):
bytes.append(0xfc)
bytes.append(value % 256)
bytes.append((value >> 8) % 256)
bytes.append((value >> 16) % 256)
bytes.append((value >> 24) % 256)
else:
raise NotImplementedError("number too small")
return bytes
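# --- Editor's note: illustrative sketch, not part of the original module. ---
# A few concrete cases of the Ruby Marshal small-integer encoding implemented by
# integer2bytes/bytes2integer above: 0 is a single zero byte, values 1..122 are
# stored as value + 5, and larger values get a byte-count prefix followed by the
# value in little-endian order.
def _marshal_integer_encoding_examples():
    assert Marshal.integer2bytes(3) == [8]           # 3 + 5
    assert Marshal.integer2bytes(300) == [2, 44, 1]  # 2 bytes: 44 + 1 * 256 == 300
    # bytes2integer returns (value, bytes consumed including the type byte)
    assert Marshal.bytes2integer([8], 0) == (3, 2)
    assert Marshal.bytes2integer([2, 44, 1], 0) == (300, 4)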
| 36.787671
| 92
| 0.551946
|
c266cde923eedfce2e6acc354a4f395465a50e4d
| 11,030
|
py
|
Python
|
servo/api.py
|
DanielHHowell/servox
|
789f6b1fe6afaf41b754a866d0f8bbbe079eab4f
|
[
"Apache-2.0"
] | null | null | null |
servo/api.py
|
DanielHHowell/servox
|
789f6b1fe6afaf41b754a866d0f8bbbe079eab4f
|
[
"Apache-2.0"
] | null | null | null |
servo/api.py
|
DanielHHowell/servox
|
789f6b1fe6afaf41b754a866d0f8bbbe079eab4f
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import annotations
import abc
import datetime
import enum
from typing import Any, Dict, List, Optional, Tuple, Union
import backoff
import devtools
import httpx
import pydantic
import servo.types
import servo.utilities
USER_AGENT = "github.com/opsani/servox"
class OptimizerStatuses(str, enum.Enum):
"""An enumeration of status types sent by the optimizer."""
ok = "ok"
invalid = "invalid"
unexpected_event = "unexpected-event"
cancelled = "cancel"
class ServoStatuses(str, enum.Enum):
"""An enumeration of status types sent from the servo."""
ok = "ok"
failed = "failed"
rejected = "rejected"
aborted = "aborted"
Statuses = Union[OptimizerStatuses, ServoStatuses]
class Reasons(str, enum.Enum):
success = "success"
unknown = "unknown"
unstable = "unstable"
class Events(str, enum.Enum):
hello = "HELLO"
whats_next = "WHATS_NEXT"
describe = "DESCRIPTION"
measure = "MEASUREMENT"
adjust = "ADJUSTMENT"
goodbye = "GOODBYE"
class Commands(str, enum.Enum):
describe = "DESCRIBE"
measure = "MEASURE"
adjust = "ADJUST"
sleep = "SLEEP"
@property
def response_event(self) -> Events:
if self == Commands.describe:
return Events.describe
elif self == Commands.measure:
return Events.measure
elif self == Commands.adjust:
return Events.adjust
else:
raise ValueError(f"unknoen command: {self}")
class Request(pydantic.BaseModel):
event: Union[Events, str] # TODO: Needs to be rethought -- used adhoc in some cases
param: Optional[Dict[str, Any]] # TODO: Switch to a union of supported types
class Config:
json_encoders = {
Events: lambda v: str(v),
}
class Status(pydantic.BaseModel):
status: Statuses
message: Optional[str] = None
reason: Optional[str] = None
state: Optional[Dict[str, Any]] = None
descriptor: Optional[Dict[str, Any]] = None
@classmethod
def ok(cls, message: Optional[str] = None, reason: str = Reasons.success, **kwargs) -> "Status":
"""Return a success (status="ok") status object."""
return cls(status=ServoStatuses.ok, message=message, reason=reason, **kwargs)
@classmethod
def from_error(cls, error: servo.errors.BaseError) -> "Status":
"""Return a status object representation from the given error."""
if isinstance(error, servo.errors.AdjustmentRejectedError):
status = ServoStatuses.rejected
else:
status = ServoStatuses.failed
return cls(status=status, message=str(error), reason=error.reason)
def dict(
self,
*,
exclude_unset: bool = True,
**kwargs,
) -> pydantic.DictStrAny:
return super().dict(exclude_unset=exclude_unset, **kwargs)
class SleepResponse(pydantic.BaseModel):
pass
# SleepResponse '{"cmd": "SLEEP", "param": {"duration": 60, "data": {"reason": "no active optimization pipeline"}}}'
# Instructions from servo on what to measure
class MeasureParams(pydantic.BaseModel):
metrics: List[str]
control: servo.types.Control
@pydantic.validator("metrics", always=True, pre=True)
@classmethod
def coerce_metrics(cls, value) -> List[str]:
if isinstance(value, dict):
return list(value.keys())
return value
@pydantic.validator('metrics', each_item=True, pre=True)
def _map_metrics(cls, v) -> str:
if isinstance(v, servo.Metric):
return v.name
return v
class CommandResponse(pydantic.BaseModel):
command: Commands = pydantic.Field(alias="cmd")
param: Optional[
Union[MeasureParams, Dict[str, Any]]
] # TODO: Switch to a union of supported types
class Config:
json_encoders = {
Commands: lambda v: str(v),
}
class Mixin(abc.ABC):
"""Provides functionality for interacting with the Opsani API via httpx.
The mixin requires the implementation of the `api_client_options` method
which is responsible for providing details around base URL, HTTP headers,
timeouts, proxies, SSL configuration, etc. for initializing
`httpx.AsyncClient` and `httpx.Client` instances.
"""
@property
@abc.abstractmethod
def api_client_options(self) -> Dict[str, Any]:
"""Return a dict of options for initializing httpx API client objects.
An implementation must be provided in subclasses derived from the mixin
and is responsible for appropriately configuring the base URL, HTTP
headers, timeouts, proxies, SSL configuration, transport flags, etc.
The dict returned is passed directly to the initializer of
`httpx.AsyncClient` and `httpx.Client` objects constructed by the
`api_client` and `api_client_sync` methods.
"""
...
def api_client(self, **kwargs) -> httpx.AsyncClient:
"""Return an asynchronous client for interacting with the Opsani API."""
return httpx.AsyncClient(**{**self.api_client_options, **kwargs})
def api_client_sync(self, **kwargs) -> httpx.Client:
"""Return a synchronous client for interacting with the Opsani API."""
return httpx.Client(**{**self.api_client_options, **kwargs})
async def report_progress(self, **kwargs) -> None:
"""Post a progress report to the Opsani API."""
request = self.progress_request(**kwargs)
status = await self._post_event(*request)
if status.status == OptimizerStatuses.ok:
pass
elif status.status == OptimizerStatuses.unexpected_event:
# We have lost sync with the backend, raise an exception to halt broken execution
raise servo.errors.UnexpectedEventError(status.reason)
elif status.status == OptimizerStatuses.cancelled:
# Optimizer wants to cancel the operation
raise servo.errors.EventCancelledError(status.reason)
elif status.status == OptimizerStatuses.invalid:
servo.logger.warning(f"progress report was rejected as invalid")
else:
raise ValueError(f"unknown error status: \"{status.status}\"")
def progress_request(
self,
operation: str,
progress: servo.types.Numeric,
started_at: datetime,
message: Optional[str],
*,
connector: Optional[str] = None,
event_context: Optional["servo.events.EventContext"] = None,
time_remaining: Optional[
Union[servo.types.Numeric, servo.types.Duration]
] = None,
logs: Optional[List[str]] = None,
) -> Tuple[str, Dict[str, Any]]:
def set_if(d: Dict, k: str, v: Any):
if v is not None:
d[k] = v
# Normalize progress to positive percentage
if progress < 1.0:
progress = progress * 100
# Calculate runtime
runtime = servo.types.Duration(datetime.datetime.now() - started_at)
# Produce human readable and remaining time in seconds values (if given)
if time_remaining:
if isinstance(time_remaining, (int, float)):
time_remaining_in_seconds = time_remaining
time_remaining = servo.types.Duration(time_remaining_in_seconds)
elif isinstance(time_remaining, datetime.timedelta):
time_remaining_in_seconds = time_remaining.total_seconds()
else:
raise ValueError(
f"Unknown value of type '{time_remaining.__class__.__name__}' for parameter 'time_remaining'"
)
else:
time_remaining_in_seconds = None
params = dict(
progress=float(progress),
runtime=float(runtime.total_seconds()),
)
set_if(params, "message", message)
return (operation, params)
def _is_fatal_status_code(error: Exception) -> bool:
if isinstance(error, httpx.HTTPStatusError):
if error.response.status_code < 500:
servo.logger.warning(f"Giving up on non-retryable HTTP status code {error.response.status_code} while requesting {error.request.url!r}.")
servo.logger.debug(f"HTTP request content: {devtools.pformat(error.request.read())}, response content: {devtools.pformat(error.response.content)}")
return True
return False
@backoff.on_exception(
backoff.expo,
httpx.HTTPError,
max_time=lambda: servo.current_servo() and servo.current_servo().config.settings.backoff.max_time(),
max_tries=lambda: servo.current_servo() and servo.current_servo().config.settings.backoff.max_tries(),
giveup=_is_fatal_status_code
)
async def _post_event(self, event: Events, param) -> Union[CommandResponse, Status]:
async with self.api_client() as client:
event_request = Request(event=event, param=param)
self.logger.trace(f"POST event request: {devtools.pformat(event_request)}")
try:
response = await client.post("servo", data=event_request.json())
response.raise_for_status()
response_json = response.json()
self.logger.trace(
f"POST event response ({response.status_code} {response.reason_phrase}): {devtools.pformat(response_json)}"
)
return pydantic.parse_obj_as(
Union[CommandResponse, Status], response_json
)
except (httpx.RequestError, httpx.HTTPError) as error:
self.logger.error(f"HTTP error \"{error.__class__.__name__}\" encountered while posting \"{event}\" event: {error}")
self.logger.trace(devtools.pformat(event_request))
raise
def descriptor_to_adjustments(descriptor: dict) -> List[servo.types.Adjustment]:
"""Return a list of adjustment objects from an Opsani API app descriptor."""
adjustments = []
for component_name, component in descriptor["application"]["components"].items():
for setting_name, attrs in component["settings"].items():
adjustment = servo.types.Adjustment(
component_name=component_name,
setting_name=setting_name,
value=attrs["value"],
)
adjustments.append(adjustment)
return adjustments
def adjustments_to_descriptor(adjustments: List[servo.types.Adjustment]) -> Dict[str, Any]:
components = {}
descriptor = { "state": { "application": { "components": components }}}
for adjustment in adjustments:
if not adjustment.component_name in components:
components[adjustment.component_name] = { "settings": {} }
components[adjustment.component_name]["settings"][adjustment.setting_name] = { "value": adjustment.value }
return descriptor
def user_agent() -> str:
return f"{USER_AGENT} v{servo.__version__}"
| 35.466238
| 163
| 0.644424
|
bb9f0b683834d69b2130af92411c80b5453b184f
| 5,982
|
py
|
Python
|
sdk/python/pulumi_azure_nextgen/keyvault/v20161001/secret.py
|
pulumi/pulumi-azure-nextgen
|
452736b0a1cf584c2d4c04666e017af6e9b2c15c
|
[
"Apache-2.0"
] | 31
|
2020-09-21T09:41:01.000Z
|
2021-02-26T13:21:59.000Z
|
sdk/python/pulumi_azure_nextgen/keyvault/v20161001/secret.py
|
pulumi/pulumi-azure-nextgen
|
452736b0a1cf584c2d4c04666e017af6e9b2c15c
|
[
"Apache-2.0"
] | 231
|
2020-09-21T09:38:45.000Z
|
2021-03-01T11:16:03.000Z
|
sdk/python/pulumi_azure_nextgen/keyvault/v20161001/secret.py
|
pulumi/pulumi-azure-nextgen
|
452736b0a1cf584c2d4c04666e017af6e9b2c15c
|
[
"Apache-2.0"
] | 4
|
2020-09-29T14:14:59.000Z
|
2021-02-10T20:38:16.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
from ._inputs import *
__all__ = ['Secret']
class Secret(pulumi.CustomResource):
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
properties: Optional[pulumi.Input[pulumi.InputType['SecretPropertiesArgs']]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
secret_name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
vault_name: Optional[pulumi.Input[str]] = None,
__props__=None,
__name__=None,
__opts__=None):
"""
Resource information with extended details.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[pulumi.InputType['SecretPropertiesArgs']] properties: Properties of the secret
:param pulumi.Input[str] resource_group_name: The name of the Resource Group to which the vault belongs.
:param pulumi.Input[str] secret_name: Name of the secret
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: The tags that will be assigned to the secret.
:param pulumi.Input[str] vault_name: Name of the vault
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
if properties is None and not opts.urn:
raise TypeError("Missing required property 'properties'")
__props__['properties'] = properties
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__['resource_group_name'] = resource_group_name
__props__['secret_name'] = secret_name
__props__['tags'] = tags
if vault_name is None and not opts.urn:
raise TypeError("Missing required property 'vault_name'")
__props__['vault_name'] = vault_name
__props__['location'] = None
__props__['name'] = None
__props__['type'] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:keyvault:Secret"), pulumi.Alias(type_="azure-nextgen:keyvault/latest:Secret"), pulumi.Alias(type_="azure-nextgen:keyvault/v20180214:Secret"), pulumi.Alias(type_="azure-nextgen:keyvault/v20180214preview:Secret"), pulumi.Alias(type_="azure-nextgen:keyvault/v20190901:Secret"), pulumi.Alias(type_="azure-nextgen:keyvault/v20200401preview:Secret")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(Secret, __self__).__init__(
'azure-nextgen:keyvault/v20161001:Secret',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'Secret':
"""
Get an existing Secret resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
return Secret(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def location(self) -> pulumi.Output[str]:
"""
The supported Azure location where the key vault should be created.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name of the key vault.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def properties(self) -> pulumi.Output['outputs.SecretPropertiesResponse']:
"""
Properties of the secret
"""
return pulumi.get(self, "properties")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
The tags that will be assigned to the key vault.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
The resource type of the key vault.
"""
return pulumi.get(self, "type")
def translate_output_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| 42.126761
| 432
| 0.641591
|
cb2709fc7a46e230dfe4f7aa9ae68694c5ab46d8
| 258
|
py
|
Python
|
pysnmp/proto/secmod/rfc3414/__init__.py
|
RKinsey/pysnmp
|
96b5cf31e2f5d19f34d0dd1075014c488f6a5789
|
[
"BSD-2-Clause"
] | 492
|
2016-03-13T11:03:13.000Z
|
2022-03-21T02:52:57.000Z
|
pysnmp/proto/secmod/rfc3414/__init__.py
|
bartomo/pysnmp
|
becd15c79c9a6b5696928ecd50bf5cca8b1770a1
|
[
"BSD-2-Clause"
] | 372
|
2016-03-29T22:42:05.000Z
|
2022-03-26T10:28:25.000Z
|
pysnmp/proto/secmod/rfc3414/__init__.py
|
bartomo/pysnmp
|
becd15c79c9a6b5696928ecd50bf5cca8b1770a1
|
[
"BSD-2-Clause"
] | 197
|
2016-03-13T11:01:54.000Z
|
2022-03-07T19:52:15.000Z
|
#
# This file is part of pysnmp software.
#
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
# License: http://snmplabs.com/pysnmp/license.html
#
from pysnmp.proto.secmod.rfc3414 import service
SnmpUSMSecurityModel = service.SnmpUSMSecurityModel
| 25.8
| 59
| 0.77907
|
27bbe658d4d59d74ab3c4c12178e6e399d27181d
| 2,908
|
py
|
Python
|
tests/scripts/thread-cert/Cert_5_8_03_KeyIncrementRollOver.py
|
MarekPorwisz/openthread-zep
|
acd72411235a0630a4efaeac8969419d15fecdaa
|
[
"BSD-3-Clause"
] | 1
|
2022-03-18T11:20:13.000Z
|
2022-03-18T11:20:13.000Z
|
tests/scripts/thread-cert/Cert_5_8_03_KeyIncrementRollOver.py
|
MarekPorwisz/openthread-zep
|
acd72411235a0630a4efaeac8969419d15fecdaa
|
[
"BSD-3-Clause"
] | 3
|
2017-03-30T22:36:13.000Z
|
2020-05-29T15:04:28.000Z
|
tests/scripts/thread-cert/Cert_5_8_03_KeyIncrementRollOver.py
|
MarekPorwisz/openthread-zep
|
acd72411235a0630a4efaeac8969419d15fecdaa
|
[
"BSD-3-Clause"
] | 1
|
2016-07-05T14:44:21.000Z
|
2016-07-05T14:44:21.000Z
|
#!/usr/bin/env python3
#
# Copyright (c) 2016, The OpenThread Authors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import unittest
import thread_cert
LEADER = 1
ROUTER = 2
class Cert_5_8_3_KeyIncrementRollOver(thread_cert.TestCase):
TOPOLOGY = {
LEADER: {
'key_sequence_counter': 127,
'key_switch_guardtime': 0,
'mode': 'rsdn',
'panid': 0xface,
'whitelist': [ROUTER]
},
ROUTER: {
'key_switch_guardtime': 0,
'mode': 'rsdn',
'panid': 0xface,
'router_selection_jitter': 1,
'whitelist': [LEADER]
},
}
def test(self):
self.nodes[LEADER].start()
self.simulator.go(4)
self.assertEqual(self.nodes[LEADER].get_state(), 'leader')
self.nodes[ROUTER].start()
self.simulator.go(5)
self.assertEqual(self.nodes[ROUTER].get_state(), 'router')
addrs = self.nodes[ROUTER].get_addrs()
for addr in addrs:
self.assertTrue(self.nodes[LEADER].ping(addr))
key_sequence_counter = self.nodes[LEADER].get_key_sequence_counter()
self.nodes[LEADER].set_key_sequence_counter(key_sequence_counter + 1)
addrs = self.nodes[ROUTER].get_addrs()
for addr in addrs:
self.assertTrue(self.nodes[LEADER].ping(addr))
if __name__ == '__main__':
unittest.main()
| 36.810127
| 78
| 0.686382
|
00538d30b30a0a51c16c73ab7671168eb768b019
| 639
|
py
|
Python
|
controls/chromecats/chromecats_main.py
|
daviddbarrero/pandora
|
3c28147e9abb1c7e9180f510506f0ae8677b7ed7
|
[
"MIT"
] | 1
|
2019-08-08T22:54:43.000Z
|
2019-08-08T22:54:43.000Z
|
controls/chromecats/chromecats_main.py
|
neura-8/pandora
|
3c28147e9abb1c7e9180f510506f0ae8677b7ed7
|
[
"MIT"
] | null | null | null |
controls/chromecats/chromecats_main.py
|
neura-8/pandora
|
3c28147e9abb1c7e9180f510506f0ae8677b7ed7
|
[
"MIT"
] | 1
|
2019-09-06T17:58:39.000Z
|
2019-09-06T17:58:39.000Z
|
from __future__ import print_function
import time
import pychromecast
# Your Chromecast device Friendly Name
device_friendly_name = "Andy"
chromecasts = pychromecast.get_chromecasts()
# select Chromecast device
print(chromecasts)
for cc in chromecasts:
    print(cc.device.friendly_name)
cast = next(cc for cc in chromecasts if cc.device.friendly_name == device_friendly_name)
# wait for the device
cast.wait()
# get media controller
mc = cast.media_controller
# set online video url (see the illustrative play_media sketch at the end of this script)
mc.block_until_active()
print(mc.status)
def play():
mc.play()
def pause():
mc.pause()
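# --- Editor's note: illustrative sketch, not part of the original script. ---
# The "set online video url" step above never actually loads any media. With
# pychromecast the usual call is media_controller.play_media(url, content_type);
# the URL below is a placeholder, not a real asset.
def cast_video(url="http://example.com/video.mp4", content_type="video/mp4"):
    mc.play_media(url, content_type)
    mc.block_until_active()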
| 15.585366
| 88
| 0.766823
|
80fc17b6a2c86289d6210d1aa985ffab19bfa6ae
| 2,234
|
py
|
Python
|
prescient/simulator/data_manager.py
|
iSoron/Prescient
|
a3c1d7c5840893ff43dca48c40dc90f083292d26
|
[
"BSD-3-Clause"
] | 1
|
2021-10-14T20:39:50.000Z
|
2021-10-14T20:39:50.000Z
|
prescient/simulator/data_manager.py
|
iSoron/Prescient
|
a3c1d7c5840893ff43dca48c40dc90f083292d26
|
[
"BSD-3-Clause"
] | null | null | null |
prescient/simulator/data_manager.py
|
iSoron/Prescient
|
a3c1d7c5840893ff43dca48c40dc90f083292d26
|
[
"BSD-3-Clause"
] | null | null | null |
# ___________________________________________________________________________
#
# Prescient
# Copyright 2020 National Technology & Engineering Solutions of Sandia, LLC
# (NTESS). Under the terms of Contract DE-NA0003525 with NTESS, the U.S.
# Government retains certain rights in this software.
# This software is distributed under the Revised BSD License.
# ___________________________________________________________________________
from __future__ import annotations
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from typing import Dict, Optional
import os.path
from typing import NamedTuple
from prescient.data.simulation_state import MutableSimulationState
from .manager import _Manager
class RucMarket(NamedTuple):
day_ahead_prices: Dict
day_ahead_reserve_prices: Dict
thermal_gen_cleared_DA: Dict
thermal_reserve_cleared_DA: Dict
renewable_gen_cleared_DA: Dict
class RucPlan(NamedTuple):
simulation_actuals: RucModel
deterministic_ruc_instance: RucModel
ruc_market: Optional[RucMarket]
class DataManager(_Manager):
def initialize(self, engine, options):
self.ruc_market_active = None
self.ruc_market_pending = None
self._state = MutableSimulationState()
self._extensions = {}
self.prior_sced_instance = None
@property
def current_state(self):
return self._state
def update_time(self, time):
self._current_time = time
def apply_sced(self, options, sced):
self._state.apply_sced(options, sced)
self.prior_sced_instance = sced
def set_pending_ruc_plan(self, options:Options, current_ruc_plan: RucPlan):
self.ruc_market_pending = current_ruc_plan.ruc_market
self._state.apply_ruc(options, current_ruc_plan.deterministic_ruc_instance)
self._state.apply_actuals(options, current_ruc_plan.simulation_actuals)
def activate_pending_ruc(self, options: Options):
self.ruc_market_active = self.ruc_market_pending
self.ruc_market_pending = None
##########
# Properties
##########
@property
def current_time(self):
return self._current_time
@property
def extensions(self):
return self._extensions
| 30.60274
| 83
| 0.748433
|
6bd4baf4fa9d421ebe2933f0e15d959166a43c78
| 4,886
|
py
|
Python
|
sdk/python/tests/unit/test_data_sources.py
|
benjamintanweihao/feast
|
b550e591f815aff53accdd064589ef06b6607d97
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/tests/unit/test_data_sources.py
|
benjamintanweihao/feast
|
b550e591f815aff53accdd064589ef06b6607d97
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/tests/unit/test_data_sources.py
|
benjamintanweihao/feast
|
b550e591f815aff53accdd064589ef06b6607d97
|
[
"Apache-2.0"
] | null | null | null |
import pytest
from feast import ValueType
from feast.data_format import ProtoFormat
from feast.data_source import (
KafkaSource,
KinesisSource,
PushSource,
RequestDataSource,
RequestSource,
)
from feast.field import Field
from feast.infra.offline_stores.bigquery_source import BigQuerySource
from feast.types import Bool, Float32, Int64
def test_push_with_batch():
push_source = PushSource(
name="test", batch_source=BigQuerySource(table="test.test"),
)
push_source_proto = push_source.to_proto()
assert push_source_proto.HasField("batch_source")
push_source_unproto = PushSource.from_proto(push_source_proto)
assert push_source.name == push_source_unproto.name
assert push_source.batch_source.name == push_source_unproto.batch_source.name
def test_request_data_source_deprecation():
with pytest.warns(DeprecationWarning):
request_data_source = RequestDataSource(
name="vals_to_add",
schema={"val_to_add": ValueType.INT64, "val_to_add_2": ValueType.INT64},
)
request_data_source_proto = request_data_source.to_proto()
returned_request_source = RequestSource.from_proto(request_data_source_proto)
assert returned_request_source == request_data_source
def test_request_source_primitive_type_to_proto():
schema = [
Field(name="f1", dtype=Float32),
Field(name="f2", dtype=Bool),
]
request_source = RequestSource(
name="source", schema=schema, description="desc", tags={}, owner="feast",
)
request_proto = request_source.to_proto()
deserialized_request_source = RequestSource.from_proto(request_proto)
assert deserialized_request_source == request_source
def test_hash():
push_source_1 = PushSource(
name="test", batch_source=BigQuerySource(table="test.test"),
)
push_source_2 = PushSource(
name="test", batch_source=BigQuerySource(table="test.test"),
)
push_source_3 = PushSource(
name="test", batch_source=BigQuerySource(table="test.test2"),
)
push_source_4 = PushSource(
name="test",
batch_source=BigQuerySource(table="test.test2"),
description="test",
)
s1 = {push_source_1, push_source_2}
assert len(s1) == 1
s2 = {push_source_1, push_source_3}
assert len(s2) == 2
s3 = {push_source_3, push_source_4}
assert len(s3) == 2
s4 = {push_source_1, push_source_2, push_source_3, push_source_4}
assert len(s4) == 3
# TODO(kevjumba): Remove this test in feast 0.23 when positional arguments are removed.
def test_default_data_source_kw_arg_warning():
# source_class = request.param
with pytest.warns(DeprecationWarning):
source = KafkaSource(
"name", "column", "bootstrap_servers", ProtoFormat("class_path"), "topic"
)
assert source.name == "name"
assert source.timestamp_field == "column"
assert source.kafka_options.bootstrap_servers == "bootstrap_servers"
assert source.kafka_options.topic == "topic"
with pytest.raises(ValueError):
KafkaSource("name", "column", "bootstrap_servers", topic="topic")
with pytest.warns(DeprecationWarning):
source = KinesisSource(
"name",
"column",
"c_column",
ProtoFormat("class_path"),
"region",
"stream_name",
)
assert source.name == "name"
assert source.timestamp_field == "column"
assert source.created_timestamp_column == "c_column"
assert source.kinesis_options.region == "region"
assert source.kinesis_options.stream_name == "stream_name"
with pytest.raises(ValueError):
KinesisSource(
"name", "column", "c_column", region="region", stream_name="stream_name"
)
with pytest.warns(DeprecationWarning):
source = RequestSource(
"name", [Field(name="val_to_add", dtype=Int64)], description="description"
)
assert source.name == "name"
assert source.description == "description"
with pytest.raises(ValueError):
RequestSource("name")
with pytest.warns(DeprecationWarning):
source = PushSource(
"name",
BigQuerySource(name="bigquery_source", table="table"),
description="description",
)
assert source.name == "name"
assert source.description == "description"
assert source.batch_source.name == "bigquery_source"
with pytest.raises(ValueError):
PushSource("name")
# No name warning for DataSource
with pytest.warns(UserWarning):
source = KafkaSource(
event_timestamp_column="column",
bootstrap_servers="bootstrap_servers",
message_format=ProtoFormat("class_path"),
topic="topic",
)
| 33.013514
| 87
| 0.66844
|
6e2d77d0237b9e04479e37de676b4eea87734206
| 13,978
|
py
|
Python
|
src/lib/models/networks/msra_resnet_attention.py
|
leobean/CenterNet_simple
|
13e2eab2c049563afde5defdf90434a310a32d02
|
[
"MIT"
] | null | null | null |
src/lib/models/networks/msra_resnet_attention.py
|
leobean/CenterNet_simple
|
13e2eab2c049563afde5defdf90434a310a32d02
|
[
"MIT"
] | null | null | null |
src/lib/models/networks/msra_resnet_attention.py
|
leobean/CenterNet_simple
|
13e2eab2c049563afde5defdf90434a310a32d02
|
[
"MIT"
] | null | null | null |
# ------------------------------------------------------------------------------
# Copyright (c) Microsoft
# Licensed under the MIT License.
# Written by Bin Xiao (Bin.Xiao@microsoft.com)
# Modified by Xingyi Zhou
# ------------------------------------------------------------------------------
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.utils.model_zoo as model_zoo
BN_MOMENTUM = 0.1
model_urls = {
'resnet18': 'https://download.pytorch.org/models/resnet18-5c106cde.pth',
'resnet34': 'https://download.pytorch.org/models/resnet34-333f7ec4.pth',
'resnet50': 'https://download.pytorch.org/models/resnet50-19c8e357.pth',
'resnet101': 'https://download.pytorch.org/models/resnet101-5d3b4d8f.pth',
'resnet152': 'https://download.pytorch.org/models/resnet152-b121ed2d.pth',
}
def conv3x3(in_planes, out_planes, stride=1):
"""3x3 convolution with padding"""
return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
padding=1, bias=False)
class BasicBlock(nn.Module):
expansion = 1
def __init__(self, inplanes, planes, stride=1, downsample=None):
super(BasicBlock, self).__init__()
self.conv1 = conv3x3(inplanes, planes, stride)
self.bn1 = nn.BatchNorm2d(planes, momentum=BN_MOMENTUM)
self.relu = nn.ReLU(inplace=True)
self.conv2 = conv3x3(planes, planes)
self.bn2 = nn.BatchNorm2d(planes, momentum=BN_MOMENTUM)
self.downsample = downsample
self.stride = stride
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
if self.downsample is not None:
residual = self.downsample(x)
out += residual
out = self.relu(out)
return out
class Bottleneck(nn.Module):
expansion = 4
def __init__(self, inplanes, planes, stride=1, downsample=None):
super(Bottleneck, self).__init__()
self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
self.bn1 = nn.BatchNorm2d(planes, momentum=BN_MOMENTUM)
self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride,
padding=1, bias=False)
self.bn2 = nn.BatchNorm2d(planes, momentum=BN_MOMENTUM)
self.conv3 = nn.Conv2d(planes, planes * self.expansion, kernel_size=1,
bias=False)
self.bn3 = nn.BatchNorm2d(planes * self.expansion,
momentum=BN_MOMENTUM)
self.relu = nn.ReLU(inplace=True)
self.downsample = downsample
self.stride = stride
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
out = self.relu(out)
out = self.conv3(out)
out = self.bn3(out)
if self.downsample is not None:
residual = self.downsample(x)
out += residual
out = self.relu(out)
return out
class PoseResNet(nn.Module):
def __init__(self, block, layers, heads, head_conv, **kwargs):
self.inplanes = 64
self.deconv_with_bias = False
self.heads = heads
super(PoseResNet, self).__init__()
self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3,
bias=False)
self.bn1 = nn.BatchNorm2d(64, momentum=BN_MOMENTUM)
self.relu = nn.ReLU(inplace=True)
self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
self.layer1 = self._make_layer(block, 64, layers[0])
self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
self.layer3 = self._make_layer(block, 256, layers[2], stride=2)
self.layer4 = self._make_layer(block, 512, layers[3], stride=2)
self.attention = SELayer(512)
# used for deconv layers
self.deconv_layers = self._make_deconv_layer(
3,
[256, 256, 256],
[4, 4, 4],
)
# self.final_layer = []
for head in sorted(self.heads):
num_output = self.heads[head]
if head_conv > 0:
fc = nn.Sequential(
nn.Conv2d(256, head_conv,
kernel_size=3, padding=1, bias=True),
nn.ReLU(inplace=True),
nn.Conv2d(head_conv, num_output,
kernel_size=1, stride=1, padding=0))
else:
fc = nn.Conv2d(
in_channels=256,
out_channels=num_output,
kernel_size=1,
stride=1,
padding=0
)
self.__setattr__(head, fc)
# self.final_layer = nn.ModuleList(self.final_layer)
def _make_layer(self, block, planes, blocks, stride=1):
downsample = None
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential(
nn.Conv2d(self.inplanes, planes * block.expansion,
kernel_size=1, stride=stride, bias=False),
nn.BatchNorm2d(planes * block.expansion, momentum=BN_MOMENTUM),
)
layers = []
layers.append(block(self.inplanes, planes, stride, downsample))
self.inplanes = planes * block.expansion
for i in range(1, blocks):
layers.append(block(self.inplanes, planes))
return nn.Sequential(*layers)
def _get_deconv_cfg(self, deconv_kernel, index):
if deconv_kernel == 4:
padding = 1
output_padding = 0
elif deconv_kernel == 3:
padding = 1
output_padding = 1
elif deconv_kernel == 2:
padding = 0
output_padding = 0
return deconv_kernel, padding, output_padding
def _make_deconv_layer(self, num_layers, num_filters, num_kernels):
assert num_layers == len(num_filters), \
            'ERROR: num_deconv_layers is different from len(num_deconv_filters)'
assert num_layers == len(num_kernels), \
            'ERROR: num_deconv_layers is different from len(num_deconv_kernels)'
layers = []
for i in range(num_layers):
kernel, padding, output_padding = \
self._get_deconv_cfg(num_kernels[i], i)
planes = num_filters[i]
layers.append(
nn.ConvTranspose2d(
in_channels=self.inplanes,
out_channels=planes,
kernel_size=kernel,
stride=2,
padding=padding,
output_padding=output_padding,
bias=self.deconv_with_bias))
layers.append(nn.BatchNorm2d(planes, momentum=BN_MOMENTUM))
layers.append(nn.ReLU(inplace=True))
self.inplanes = planes
return nn.Sequential(*layers)
def forward(self, x):
x = self.conv1(x)
x = self.bn1(x)
x = self.relu(x)
x = self.maxpool(x)
x = self.layer1(x)
x = self.layer2(x)
x = self.layer3(x)
x = self.layer4(x)
x = self.attention(x)
x = self.deconv_layers(x)
ret = {}
for head in self.heads:
ret[head] = self.__getattr__(head)(x)
return [ret]
def init_weights(self, num_layers, pretrained=True):
if pretrained:
# print('=> init resnet deconv weights from normal distribution')
for _, m in self.deconv_layers.named_modules():
if isinstance(m, nn.ConvTranspose2d):
# print('=> init {}.weight as normal(0, 0.001)'.format(name))
# print('=> init {}.bias as 0'.format(name))
nn.init.normal_(m.weight, std=0.001)
if self.deconv_with_bias:
nn.init.constant_(m.bias, 0)
elif isinstance(m, nn.BatchNorm2d):
# print('=> init {}.weight as 1'.format(name))
# print('=> init {}.bias as 0'.format(name))
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 0)
# print('=> init final conv weights from normal distribution')
for head in self.heads:
final_layer = self.__getattr__(head)
for i, m in enumerate(final_layer.modules()):
if isinstance(m, nn.Conv2d):
# nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
# print('=> init {}.weight as normal(0, 0.001)'.format(name))
# print('=> init {}.bias as 0'.format(name))
if m.weight.shape[0] == self.heads[head]:
if 'hm' in head:
nn.init.constant_(m.bias, -2.19)
else:
nn.init.normal_(m.weight, std=0.001)
nn.init.constant_(m.bias, 0)
#pretrained_state_dict = torch.load(pretrained)
url = model_urls['resnet{}'.format(num_layers)]
pretrained_state_dict = model_zoo.load_url(url)
print('=> loading pretrained model {}'.format(url))
self.load_state_dict(pretrained_state_dict, strict=False)
else:
            print('=> imagenet pretrained model does not exist')
print('=> please download it first')
raise ValueError('imagenet pretrained model does not exist')
resnet_spec = {18: (BasicBlock, [2, 2, 2, 2]),
34: (BasicBlock, [3, 4, 6, 3]),
50: (Bottleneck, [3, 4, 6, 3]),
101: (Bottleneck, [3, 4, 23, 3]),
152: (Bottleneck, [3, 8, 36, 3])}
def get_pose_net(num_layers, heads, head_conv):
block_class, layers = resnet_spec[num_layers]
model = PoseResNet(block_class, layers, heads, head_conv=head_conv)
model.init_weights(num_layers, pretrained=True)
return model
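# Minimal usage sketch (the heads dict and input size below are illustrative only;
# only the 'hm' head name is referenced in init_weights above):
#   model = get_pose_net(num_layers=18, heads={'hm': 80, 'wh': 2, 'reg': 2}, head_conv=64)
#   outputs = model(torch.randn(1, 3, 512, 512))  # list with one dict of per-head tensors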
class R2CNNattetion(nn.Module):
def __init__(self):
super(R2CNNattetion, self).__init__()
self.pool1 = nn.MaxPool2d(kernel_size=1)
self.pool2 = nn.MaxPool2d(kernel_size=2)
self.pool3 = nn.MaxPool2d(kernel_size=4)
self.deconv2 = nn.ConvTranspose2d(512, 512, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))
self.deconv3 = nn.ConvTranspose2d(512, 512, kernel_size=(6, 6), stride=(4, 4), padding=(1, 1))
def forward(self, x):
x1 = self.pool1(x)
x2 = self.pool2(x)
x3 = self.pool3(x)
x2 = self.deconv2(x2)
x3 = self.deconv3(x3)
x = x1 + x2 + x3
return x
class ChannelAttention(nn.Module):
def __init__(self, C):
super(ChannelAttention, self).__init__()
self.relu = nn.ReLU()
self.sigmoid = nn.Sigmoid()
self.fc1 = nn.Linear(C, int(C / 4))
self.fc2 = nn.Linear(int(C / 4), C)
def forward(self, x):
avg_pool = F.avg_pool2d(x, kernel_size=x.size()[-1])
avg_pool = avg_pool.permute(0, 2, 3, 1)
fc = self.fc1(avg_pool)
relu = self.relu(fc)
fc = self.fc2(relu).permute(0, 3, 1, 2)
atten = self.sigmoid(fc)
output = atten * x
return output
class SpatialAttention(nn.Module):
def __init__(self, kernel_size):
super(SpatialAttention, self).__init__()
self.kernel_size = kernel_size
assert kernel_size % 2 == 1, "Odd kernel size required"
        self.conv = nn.Conv2d(in_channels=2, out_channels=1, kernel_size=kernel_size, padding=int((kernel_size - 1) / 2))
def forward(self, x):
max_pool = self.agg_channel(x, "max")
avg_pool = self.agg_channel(x, "avg")
        pool = torch.cat([max_pool, avg_pool], dim=1)
        conv = self.conv(pool)
        conv = conv.repeat(1, x.size()[1], 1, 1)
att = torch.sigmoid(conv)
output = att * x
return output
def agg_channel(self, x, pool = "max"):
b,c,h,w = x.size()
x = x.view(b, c, h*w)
x = x.permute(0,2,1)
if pool == "max":
x = F.max_pool1d(x,c)
elif pool == "avg":
x = F.avg_pool1d(x,c)
x = x.permute(0,2,1)
x = x.view(b,1,h,w)
return x
class MixedAttettion(nn.Module):
def __init__(self, C, kernel_size):
super(MixedAttettion, self).__init__()
self.spatial_att = SpatialAttention(kernel_size)
self.channel_att = ChannelAttention(C)
def forward(self, x):
f1 = self.channel_att(x)
f2 = self.spatial_att(x)
out = f1 + f2 + x
return out
class SELayer(nn.Module):
def __init__(self, channel, reduction=16):
super(SELayer, self).__init__()
        # adaptive average pooling returns a 1x1 feature map, keeping the channel count unchanged
self.avg_pool = nn.AdaptiveAvgPool2d(1)
self.fc = nn.Sequential(
nn.Linear(channel, channel // reduction, bias=False),
nn.ReLU(inplace=True),
nn.Linear(channel // reduction, channel, bias=False),
nn.Sigmoid()
)
def forward(self, x):
b, c, _, _ = x.size()
        # global average pooling; batch and channel dimensions stay the same as the input
y = self.avg_pool(x).view(b, c)
        # fully connected excitation layers, reshaped back to (b, c, 1, 1)
y = self.fc(y).view(b, c, 1, 1)
        # multiply the attention weights with the original feature map
return x * y.expand_as(x)
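# Minimal usage sketch for SELayer (shapes are illustrative only):
#   se = SELayer(channel=512, reduction=16)
#   feats = torch.randn(2, 512, 16, 16)
#   out = se(feats)  # same shape as feats, channels re-weighted by the learned attention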
| 35.387342
| 125
| 0.544498
|
d59414354231a90712ac1e9223842b54fbb4e452
| 2,696
|
py
|
Python
|
scripts/require-ppx-coda.py
|
jackpaparian/coda
|
6da61361bdfda7f0de0ce525230b35bac965cb17
|
[
"Apache-2.0"
] | 1
|
2020-05-05T07:31:26.000Z
|
2020-05-05T07:31:26.000Z
|
scripts/require-ppx-coda.py
|
kunxian-xia/coda
|
93295e8b4db5f4f2d192a26f90146bf322808ef8
|
[
"Apache-2.0"
] | null | null | null |
scripts/require-ppx-coda.py
|
kunxian-xia/coda
|
93295e8b4db5f4f2d192a26f90146bf322808ef8
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
# In dune files, require preprocessing by ppx_coda, so that the version syntax linter is always run
import subprocess
import string
import sexpdata
dune_string = subprocess.check_output(['find','src','-name','dune'])
dune_paths_raw = string.split (dune_string,'\n')
# filter out dune paths where we don't require linting
def dune_paths_ok (dune) :
path = dune.split ('/')
path_prefix2 = path[1:2]
path_prefix3 = path[1:3]
return (not (path_prefix2 == ['_build'] or path_prefix2 == ['external'] or
path_prefix3 == ['lib', 'snarky'] or path_prefix3 == ['lib', 'ppx_coda']))
dune_paths = list(filter(lambda s : len(s) > 0 and dune_paths_ok (s),dune_paths_raw))
library = sexpdata.loads ('library')
preprocess = sexpdata.loads ('preprocess')
pps = sexpdata.loads ('pps')
no_preprocessing = sexpdata.loads ('no_preprocessing')
ppx_lint = sexpdata.loads ('ppx_coda')
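# A dune "library" stanza that satisfies this check looks roughly like (illustrative):
#   (library (name foo) (preprocess (pps ppx_coda ...)))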
exit_code = 0
def missing_ppx_error (dune,ppx) :
print ("In dune file " + dune + ", the preprocessing clause is missing; there should be one containing " + (sexpdata.dumps (ppx)))
global exit_code
exit_code = 1
def no_ppx_error (dune,ppx) :
print ("In dune file " + dune + ", the preprocessing clause indicates no preprocessing, but it should include " + (sexpdata.dumps (ppx)))
global exit_code
exit_code = 1
def get_ppx_ndx (dune,ppxs,ppx) :
try :
ppxs.index (ppx)
except :
print ("In dune file " + dune + ", the preprocessing clause does not contain " + (sexpdata.dumps (ppx)))
global exit_code
exit_code = 1
for dune in dune_paths :
with open (dune) as fp :
# wrap in parens to get list of top-level clauses
sexps = sexpdata.loads ('(' + fp.read () + ')')
for sexp in sexps :
if isinstance (sexp,list) and len (sexp) > 0 and sexpdata.car (sexp) == library :
clauses = sexpdata.cdr (sexp)
found_preprocess = False
for clause in clauses :
if sexpdata.car (clause) == preprocess :
found_preprocess = True
subclause = sexpdata.car (sexpdata.cdr (clause))
if subclause == no_preprocessing :
# error if no preprocessing explicitly
no_ppx_error (dune,ppx_lint)
elif sexpdata.car (subclause) == pps :
ppxs = sexpdata.cdr (subclause)
lint_ppx_ndx = get_ppx_ndx (dune,ppxs,ppx_lint)
if found_preprocess == False :
# error if no preprocessing implicitly
missing_ppx_error (dune,ppx_lint)
exit (exit_code)
| 36.931507
| 141
| 0.619436
|
1104bf92ac8d58ec08bc2a9d2724d8d9dbf925dd
| 1,047
|
py
|
Python
|
tools/help-center-exporter/print-articles.py
|
mpillar/zendesk-tools
|
0ebaaac2d1481f4459fef60be352b223e75099e3
|
[
"MIT"
] | 2
|
2015-03-04T16:42:52.000Z
|
2016-03-24T04:00:03.000Z
|
tools/help-center-exporter/print-articles.py
|
mpillar/zendesk-tools
|
0ebaaac2d1481f4459fef60be352b223e75099e3
|
[
"MIT"
] | null | null | null |
tools/help-center-exporter/print-articles.py
|
mpillar/zendesk-tools
|
0ebaaac2d1481f4459fef60be352b223e75099e3
|
[
"MIT"
] | null | null | null |
"""
Python script to print all zendesk domain articles as a single entity. Useful for checking global
formatting properties of your articles.
N.B. this python app currently does not have a wrapper script.
"""
import sys
from zendesk.api import DomainConfiguration
from zendesk.api import HelpCenter
from zendesk.formatter import format_tags_local
def main(sub_domain):
config = DomainConfiguration(sub_domain)
hc = HelpCenter(config)
for category in hc.get_categories():
for section in category.get_sections():
for article in section.get_articles():
# XXX This could probably be improved to be prettier.
print('<a name="%i"></a><h2>%s</h2>' % (article.get_id(), article.get_name()))
print(format_tags_local(config, article.get_body()))
print('<p style="page-break-after:always;"></p>')
if __name__ == '__main__':
if len(sys.argv) != 2:
print('Usage: python %s <zendesk_sub_domain>' % sys.argv[0])
else:
main(sys.argv[1])
| 33.774194
| 97
| 0.667622
|
88da6156f6b5febfcc0d456db9c14838c57d9ed1
| 577
|
py
|
Python
|
WEEKS/CD_Sata-Structures/_RESOURCES/python-prac/Overflow/_Learning/problems/problem_09_getters_and_setters.py
|
webdevhub42/Lambda
|
b04b84fb5b82fe7c8b12680149e25ae0d27a0960
|
[
"MIT"
] | 5
|
2021-06-02T23:44:25.000Z
|
2021-12-27T16:21:57.000Z
|
WEEKS/CD_Sata-Structures/_RESOURCES/python-prac/Overflow/_Learning/problems/problem_09_getters_and_setters.py
|
webdevhub42/Lambda
|
b04b84fb5b82fe7c8b12680149e25ae0d27a0960
|
[
"MIT"
] | 22
|
2021-05-31T01:33:25.000Z
|
2021-10-18T18:32:39.000Z
|
WEEKS/CD_Sata-Structures/_RESOURCES/python-prac/Overflow/_Learning/problems/problem_09_getters_and_setters.py
|
webdevhub42/Lambda
|
b04b84fb5b82fe7c8b12680149e25ae0d27a0960
|
[
"MIT"
] | 3
|
2021-06-19T03:37:47.000Z
|
2021-08-31T00:49:51.000Z
|
# GETTERS AND SETTERS
#
# Create a class named "ValueHolder". On the class, create:
# * A constructor that takes self and one argument and sets the value of the
# value holder to the value passed in
# * A getter property named "value" that returns the value of the object
# * A setter property named "value" that will only set the value of the object
# if the provided value is not None
#
# Test data is below.
# WRITE YOUR CODE HERE
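# One possible solution sketch implementing the spec described above:
class ValueHolder:
    def __init__(self, value):
        self._value = value
    @property
    def value(self):
        return self._value
    @value.setter
    def value(self, new_value):
        # Only overwrite the stored value when the provided value is not None
        if new_value is not None:
            self._value = new_value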
# Test data
o = ValueHolder("boop")
print(o.value) # > "boop"
o.value = 2
print(o.value) # > 2
o.value = None
print(o.value) # > 2
| 24.041667
| 78
| 0.696707
|
02e215a2ca318eed658fdd07bbe781dfa23a64d8
| 2,643
|
py
|
Python
|
plugins/action/mnt_session_profiler_count_info.py
|
steinzi/ansible-ise
|
0add9c8858ed8e0e5e7219fbaf0c936b6d7cc6c0
|
[
"MIT"
] | null | null | null |
plugins/action/mnt_session_profiler_count_info.py
|
steinzi/ansible-ise
|
0add9c8858ed8e0e5e7219fbaf0c936b6d7cc6c0
|
[
"MIT"
] | null | null | null |
plugins/action/mnt_session_profiler_count_info.py
|
steinzi/ansible-ise
|
0add9c8858ed8e0e5e7219fbaf0c936b6d7cc6c0
|
[
"MIT"
] | null | null | null |
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.action import ActionBase
try:
from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
AnsibleArgSpecValidator,
)
except ImportError:
ANSIBLE_UTILS_IS_INSTALLED = False
else:
ANSIBLE_UTILS_IS_INSTALLED = True
from ansible.errors import AnsibleActionFail
from ansible_collections.cisco.ise.plugins.module_utils.ise import (
ISESDK,
ise_argument_spec,
)
# Get common arguments specification
argument_spec = ise_argument_spec()
# Add arguments specific for this module
argument_spec.update(dict(
))
required_if = []
required_one_of = []
mutually_exclusive = []
required_together = []
class ActionModule(ActionBase):
def __init__(self, *args, **kwargs):
if not ANSIBLE_UTILS_IS_INSTALLED:
raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
super(ActionModule, self).__init__(*args, **kwargs)
self._supports_async = True
self._result = None
# Checks the supplied parameters against the argument spec for this module
def _check_argspec(self):
aav = AnsibleArgSpecValidator(
data=self._task.args,
schema=dict(argument_spec=argument_spec),
schema_format="argspec",
schema_conditionals=dict(
required_if=required_if,
required_one_of=required_one_of,
mutually_exclusive=mutually_exclusive,
required_together=required_together,
),
name=self._task.action,
)
valid, errors, self._task.args = aav.validate()
if not valid:
raise AnsibleActionFail(errors)
def get_object(self, params):
new_object = dict(
)
return new_object
def run(self, tmp=None, task_vars=None):
self._task.diff = False
self._result = super(ActionModule, self).run(tmp, task_vars)
self._result["changed"] = False
self._check_argspec()
ise = ISESDK(params=self._task.args)
id = self._task.args.get("id")
name = self._task.args.get("name")
if not name and not id:
response = ise.exec(
family="misc",
function='get_profiler_count',
params=self.get_object(self._task.args),
).response['count']
self._result.update(dict(ise_response=response))
self._result.update(ise.exit_json())
return self._result
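# A hedged playbook usage sketch (module name taken from this file's path; the
# connection parameters come from ise_argument_spec() and are illustrative only):
#   - name: Get profiler session count
#     cisco.ise.mnt_session_profiler_count_info:
#       ise_hostname: "{{ ise_hostname }}"
#       ise_username: "{{ ise_username }}"
#       ise_password: "{{ ise_password }}"
#     register: profiler_count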
| 33.0375
| 128
| 0.659478
|
c458c5f6b93e5e0c543a5db7924ec3eee9d2bf1e
| 5,548
|
py
|
Python
|
avatar_models/utils/create_ade20k_vqa_dataset.py
|
rafiberlin/sose21-pm-language-and-vision-g1
|
c7f9e8912abc25191fb823a1f3ee2e0a374a1bc6
|
[
"MIT"
] | null | null | null |
avatar_models/utils/create_ade20k_vqa_dataset.py
|
rafiberlin/sose21-pm-language-and-vision-g1
|
c7f9e8912abc25191fb823a1f3ee2e0a374a1bc6
|
[
"MIT"
] | 46
|
2021-04-30T15:28:40.000Z
|
2021-08-21T15:26:34.000Z
|
avatar_models/utils/create_ade20k_vqa_dataset.py
|
rafiberlin/sose21-pm-language-and-vision-g1
|
c7f9e8912abc25191fb823a1f3ee2e0a374a1bc6
|
[
"MIT"
] | 1
|
2021-08-07T19:41:12.000Z
|
2021-08-07T19:41:12.000Z
|
from avatar_models.utils.util import get_config
import os
import json
from tqdm import tqdm
import pandas as pd
import random
import jsonlines
def read_ade20k_object_annotations():
"""
    Returns the ADE20K annotations.
    :return: object_annotations, image_annotations, rel_annotations
"""
conf = get_config()
ADE20K = conf["ade20k_dir"]
# as found on the jarvis server under data/ImageCorpora/ADE20K_2016_07_26/preprocessed_dfs
# with unzipped files (3 json files as a result)
ADE20K_OBJECT_ANNOTATIONS = os.path.join(ADE20K, "preprocessed_dfs", "obj_df.json")
with open(ADE20K_OBJECT_ANNOTATIONS, "r") as read_file:
object_annotations = json.load(read_file)
ADE20K_IMAGE_ANNOTATIONS = os.path.join(ADE20K, "preprocessed_dfs", "image_df.json")
with open(ADE20K_IMAGE_ANNOTATIONS, "r") as read_file:
image_annotations = json.load(read_file)
ADE20K_RELATION_ANNOTATIONS = os.path.join(ADE20K, "preprocessed_dfs", "relations_df.json")
with open(ADE20K_RELATION_ANNOTATIONS, "r") as read_file:
rel_annotations = json.load(read_file)
return object_annotations, image_annotations, rel_annotations
def extract_ade20k_classes(object_annotations):
"""
    Extracts the label, synset and attribute classes from the object annotations:
    attr holds the attributes of each label, synset all synset synonyms of each label.
:param object_annotations:
:return: label_set, synset_set, attr_set
"""
cols = object_annotations["columns"]
data = object_annotations["data"]
label_col_id = cols.index("label")
synset_col_id = cols.index("synset")
attr_col_id = cols.index("attr")
label_set = set()
synset_set = set()
attr_set = set()
for i, row in tqdm(enumerate(data)):
label_set.add(row[label_col_id])
shards = row[synset_col_id].split(",")
if len(shards) == 1:
synset_set.add(shards[0])
else:
synset_set.update(shards)
shards = row[attr_col_id].split(",")
if len(shards) == 1:
attr_set.add(shards[0])
else:
attr_set.update(shards)
return label_set, synset_set, attr_set
def create_ADE20K_dataset(min_labels=3):
"""
    To create questions, a minimum number of objects / labels must be available in the picture.
    The picture is skipped if the minimum number is not reached.
:param min_labels:
:return:
"""
conf = get_config()
ADE20K = conf["ade20k_dir"]
VQA_FILE_NAME = conf["ade20k_vqa_file"]
object_annotations, image_annotations, rel_annotations = read_ade20k_object_annotations()
label_set, synset_set, attr_set = extract_ade20k_classes(object_annotations)
label_list = list(label_set)
obj_df = pd.DataFrame(object_annotations["data"], columns=object_annotations["columns"])
obj_df['image_id'] = obj_df['image_id'].astype('str')
image_df = pd.DataFrame(image_annotations["data"], columns=image_annotations["columns"])
image_df['image_id'] = image_df['image_id'].astype('str')
# retrieves each filepath
merged = obj_df.merge(image_df[['image_id', 'filename', "split"]], how="left", on=["image_id", "split"])
merged["synset"] = merged["synset"].copy().apply(lambda x: x.split(","))
merged["attr"] = merged["attr"].copy().apply(lambda x: x.split(","))
image_list = {f: set() for f in list(set(merged["filename"]))}
for i, row in tqdm(merged.iterrows()):
filename, label = row["filename"], row["label"]
image_list[filename].add(label)
# make results reproducible
random.seed(0)
question_templates = ["Is there a {} ?", "Can you see a {} ?", "Is it a {} or a {} ?", "What is it?", "What is missing {} or {} ?"]
jsonline_path = os.path.join(ADE20K, VQA_FILE_NAME)
with jsonlines.open(jsonline_path, 'w') as f_out:
for key in tqdm(image_list.keys()):
val = list(image_list[key])
if len(val) >= min_labels:
positive_examples = random.sample(val, k=min_labels)
negative_examples = random.sample([s for s in label_list if s not in val], k=min_labels)
questions = []
answers = []
for p in positive_examples:
questions.append(question_templates[0].format(p))
answers.append("yes")
questions.append(question_templates[1].format(p))
answers.append("yes")
n = random.choice(negative_examples)
questions.append(question_templates[2].format(p, n))
answers.append(p)
questions.append(question_templates[3])
answers.append(p)
for neg in negative_examples:
questions.append(question_templates[0].format(neg))
answers.append("no")
questions.append(question_templates[1].format(neg))
answers.append("no")
n = random.sample(negative_examples, k=2)
questions.append(question_templates[2].format(n[0], n[1]))
answers.append("none")
pos = random.sample(positive_examples, k=1)
questions.append(question_templates[4].format(pos[0], neg))
answers.append(neg)
for q, a, in zip(questions, answers):
f_out.write({"image_path": key, "question": q, "answer": a})
if __name__ == "__main__":
create_ADE20K_dataset()
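# Each written line is a JSON object matching the write call above, e.g.
# {"image_path": "ADE_train_00000001.jpg", "question": "Is there a wall ?", "answer": "yes"}
# (the file name shown is illustrative only).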
| 39.347518
| 135
| 0.633742
|
31946ea5d4fe4f6cad6352870f2bebb218705087
| 236
|
py
|
Python
|
src/masonite/commands/UpCommand.py
|
cercos/masonite
|
f7f220efa7fae833683e9f07ce13c3795a87d3b8
|
[
"MIT"
] | 1,816
|
2018-02-14T01:59:51.000Z
|
2022-03-31T17:09:20.000Z
|
src/masonite/commands/UpCommand.py
|
cercos/masonite
|
f7f220efa7fae833683e9f07ce13c3795a87d3b8
|
[
"MIT"
] | 340
|
2018-02-11T00:27:26.000Z
|
2022-03-21T12:00:24.000Z
|
src/masonite/commands/UpCommand.py
|
cercos/masonite
|
f7f220efa7fae833683e9f07ce13c3795a87d3b8
|
[
"MIT"
] | 144
|
2018-03-18T00:08:16.000Z
|
2022-02-26T01:51:58.000Z
|
import os
from .Command import Command
class UpCommand(Command):
"""
Brings the server out of maintenance state.
up
"""
def handle(self):
os.remove(".down")
self.info("Server is online again !")
| 14.75
| 47
| 0.605932
|
c53800a23a509fda29312b818dc32c87657d746e
| 7,198
|
py
|
Python
|
tensorflow_model_analysis/api/tfma_unit_test.py
|
kennysong/model-analysis
|
ac0a1ca2cd9d79ebb19c5cb47b10a54129ec7dc4
|
[
"Apache-2.0"
] | null | null | null |
tensorflow_model_analysis/api/tfma_unit_test.py
|
kennysong/model-analysis
|
ac0a1ca2cd9d79ebb19c5cb47b10a54129ec7dc4
|
[
"Apache-2.0"
] | null | null | null |
tensorflow_model_analysis/api/tfma_unit_test.py
|
kennysong/model-analysis
|
ac0a1ca2cd9d79ebb19c5cb47b10a54129ec7dc4
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test for using the tfma_unit library."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import apache_beam as beam
import tensorflow as tf
from tensorflow_model_analysis.api import tfma_unit
from tensorflow_model_analysis.eval_saved_model.example_trainers import fixed_prediction_estimator
from tensorflow_model_analysis.eval_saved_model.example_trainers import fixed_prediction_estimator_extra_fields
from tensorflow_model_analysis.post_export_metrics import metric_keys
from tensorflow_model_analysis.post_export_metrics import post_export_metrics
from tensorflow_model_analysis.slicer import slicer
class TFMAUnitTest(tfma_unit.TestCase):
def _getEvalExportDir(self):
return os.path.join(self._getTempDir(), 'eval_export_dir')
def testAssertMetricsComputedWithoutBeamAre(self):
temp_eval_export_dir = self._getEvalExportDir()
_, eval_export_dir = (
fixed_prediction_estimator.simple_fixed_prediction_estimator(
None, temp_eval_export_dir))
examples = [
self.makeExample(prediction=0.0, label=1.0),
self.makeExample(prediction=0.7, label=0.0),
self.makeExample(prediction=0.8, label=1.0),
self.makeExample(prediction=1.0, label=1.0)
]
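    # The expected average_loss below is the mean of the squared errors of the four
    # (prediction, label) pairs: ((0-1)^2 + (0.7-0)^2 + (0.8-1)^2 + (1-1)^2) / 4.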
self.assertMetricsComputedWithoutBeamAre(
eval_saved_model_path=eval_export_dir,
serialized_examples=examples,
expected_metrics={'average_loss': (1.0 + 0.49 + 0.04 + 0.00) / 4.0})
def testBoundedValueChecks(self):
temp_eval_export_dir = self._getEvalExportDir()
_, eval_export_dir = (
fixed_prediction_estimator.simple_fixed_prediction_estimator(
None, temp_eval_export_dir))
examples = [
self.makeExample(prediction=0.8, label=1.0),
]
self.assertMetricsComputedWithBeamAre(
eval_saved_model_path=eval_export_dir,
serialized_examples=examples,
expected_metrics={'average_loss': 0.04})
self.assertMetricsComputedWithoutBeamAre(
eval_saved_model_path=eval_export_dir,
serialized_examples=examples,
expected_metrics={
'average_loss':
tfma_unit.BoundedValue(lower_bound=0.03, upper_bound=0.05)
})
with self.assertRaisesRegexp(
AssertionError, 'expecting key average_loss to have value between'):
self.assertMetricsComputedWithoutBeamAre(
eval_saved_model_path=eval_export_dir,
serialized_examples=examples,
expected_metrics={
'average_loss': tfma_unit.BoundedValue(upper_bound=0.01)
})
with self.assertRaisesRegexp(
AssertionError, 'expecting key average_loss to have value between'):
self.assertMetricsComputedWithoutBeamAre(
eval_saved_model_path=eval_export_dir,
serialized_examples=examples,
expected_metrics={
'average_loss': tfma_unit.BoundedValue(lower_bound=0.10)
})
def testAssertMetricsComputedWithBeamAre(self):
temp_eval_export_dir = self._getEvalExportDir()
_, eval_export_dir = (
fixed_prediction_estimator.simple_fixed_prediction_estimator(
None, temp_eval_export_dir))
examples = [
self.makeExample(prediction=0.0, label=1.0),
self.makeExample(prediction=0.7, label=0.0),
self.makeExample(prediction=0.8, label=1.0),
self.makeExample(prediction=1.0, label=1.0)
]
self.assertMetricsComputedWithBeamAre(
eval_saved_model_path=eval_export_dir,
serialized_examples=examples,
expected_metrics={'average_loss': (1.0 + 0.49 + 0.04 + 0.00) / 4.0})
def testAssertGeneralMetricsComputedWithBeamAre(self):
temp_eval_export_dir = self._getEvalExportDir()
_, eval_export_dir = (
fixed_prediction_estimator_extra_fields
.simple_fixed_prediction_estimator_extra_fields(None,
temp_eval_export_dir))
examples = [
self.makeExample(
prediction=0.0,
label=0.0,
fixed_string='negative_slice',
fixed_float=0.0,
fixed_int=0),
self.makeExample(
prediction=0.2,
label=0.0,
fixed_string='negative_slice',
fixed_float=0.0,
fixed_int=0),
self.makeExample(
prediction=0.4,
label=0.0,
fixed_string='negative_slice',
fixed_float=0.0,
fixed_int=0),
self.makeExample(
prediction=0.8,
label=1.0,
fixed_string='positive_slice',
fixed_float=0.0,
fixed_int=0),
self.makeExample(
prediction=0.9,
label=1.0,
fixed_string='positive_slice',
fixed_float=0.0,
fixed_int=0),
self.makeExample(
prediction=1.0,
label=1.0,
fixed_string='positive_slice',
fixed_float=0.0,
fixed_int=0),
]
expected_slice_metrics = {}
expected_slice_metrics[()] = {
'average_loss': (0.00 + 0.04 + 0.16 + 0.04 + 0.01 + 0.00) / 6.0,
'mae':
0.15,
# Note that we don't check the exact value because of numerical errors.
metric_keys.AUC:
tfma_unit.BoundedValue(0.98, 1.00),
}
# We don't check AUC for the positive / negative only slices because
# it's not clear what the value should be.
expected_slice_metrics[(('fixed_string', b'negative_slice'),)] = {
'average_loss': (0.00 + 0.04 + 0.16) / 3.0,
'mae': 0.2,
}
expected_slice_metrics[(('fixed_string', b'positive_slice'),)] = {
'average_loss': (0.04 + 0.01 + 0.00) / 3.0,
'mae': 0.1,
}
def add_metrics(features, predictions, labels):
del features
metric_ops = {
'mae': tf.metrics.mean_absolute_error(labels, predictions),
}
return metric_ops
with beam.Pipeline() as pipeline:
examples_pcollection = pipeline | 'Create' >> beam.Create(examples)
self.assertGeneralMetricsComputedWithBeamAre(
eval_saved_model_path=eval_export_dir,
examples_pcollection=examples_pcollection,
slice_spec=[
slicer.SingleSliceSpec(),
slicer.SingleSliceSpec(columns=['fixed_string'])
],
add_metrics_callbacks=[add_metrics,
post_export_metrics.auc()],
expected_slice_metrics=expected_slice_metrics)
if __name__ == '__main__':
tf.test.main()
| 36.353535
| 111
| 0.661851
|
dc4f3de97f57496825767d92d56c351ea17dab48
| 1,718
|
py
|
Python
|
twitoff/predict.py
|
max-moore/twitoff
|
0ffe3cd0cdfde6960d8cb6203b790fe909f6dc0b
|
[
"MIT"
] | null | null | null |
twitoff/predict.py
|
max-moore/twitoff
|
0ffe3cd0cdfde6960d8cb6203b790fe909f6dc0b
|
[
"MIT"
] | null | null | null |
twitoff/predict.py
|
max-moore/twitoff
|
0ffe3cd0cdfde6960d8cb6203b790fe909f6dc0b
|
[
"MIT"
] | null | null | null |
"""Prediction of User based on tweet embeddings."""
# imports
import numpy as np
from sklearn.linear_model import LogisticRegression
from .models import User
from .twitter import BASILICA
def predict_user(user0_name, user1_name, tweet_text): # tweet text is a hypothetical tweet that the user passes in
"""
Determine and return which user is more likely to say a given Tweet.
Example run: predict_user('jackblack', 'elonmusk', 'Tesla, woohoo!')
Returns 0 (user0_name) or 1 (user1_name)
"""
# querying our database for user0
user0 = User.query.filter(User.name == user0_name).one()
# querying our database for user1
user1 = User.query.filter(User.name == user1_name).one()
#grabbing embeddings from our tweet database (reference models.py) to run LR on
user0_embeddings = np.array([tweet.embedding for tweet in user0.tweets])
user1_embeddings = np.array([tweet.embedding for tweet in user1.tweets])
#stacking vertically to create one embeddings matrix (tweets by features)
embeddings = np.vstack([user0_embeddings, user1_embeddings])
#creating labels associated with the embeddings that correspond to either user0 (0) or user1 (1)
labels = np.concatenate([np.zeros(len(user0.tweets)), np.ones(len(user1.tweets))])
#creating and training LR model based off labels and embeddings
log_reg = LogisticRegression().fit(embeddings, labels)
#grabbing embeddings from BASILICA for our hypothetical tweet_text paramater passed in
tweet_embedding = BASILICA.embed_sentence(tweet_text, model='twitter')
#returns a prediction for the hypothetical tweet, returning either 0 or 1
return log_reg.predict(np.array(tweet_embedding).reshape(1,-1))
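# Note: LogisticRegression.predict returns a one-element NumPy array, so this call
# yields array([0.]) for user0 or array([1.]) for user1 (matching the labels above).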
| 44.051282
| 114
| 0.750291
|
d2096d3197c8b3456d0ffcda00bd910207c02fe0
| 164
|
py
|
Python
|
frequencia/core/templatetags/simpletags.py
|
andersonqueiroz/frequencia
|
7dae9bb6115759edb8e8297565d0dd1b638ac94a
|
[
"MIT"
] | 1
|
2021-11-22T17:17:03.000Z
|
2021-11-22T17:17:03.000Z
|
frequencia/core/templatetags/simpletags.py
|
andersonqueiroz/frequencia
|
7dae9bb6115759edb8e8297565d0dd1b638ac94a
|
[
"MIT"
] | 11
|
2019-06-18T11:19:23.000Z
|
2021-08-23T12:04:54.000Z
|
frequencia/core/templatetags/simpletags.py
|
andersonqueiroz/frequencia
|
7dae9bb6115759edb8e8297565d0dd1b638ac94a
|
[
"MIT"
] | 2
|
2019-04-09T16:23:22.000Z
|
2022-01-27T19:13:19.000Z
|
from django import template
from django.conf import settings
register = template.Library()
@register.simple_tag
def current_version():
return settings.VERSION
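# Template usage sketch (the tag library name comes from this file name, "simpletags"):
#   {% load simpletags %}
#   {% current_version %}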
| 20.5
| 32
| 0.804878
|
e53fe9cd85b89d436383bd4825c3f22add18f049
| 701
|
py
|
Python
|
setup.py
|
danklotz/RRMPG
|
6f3e4d694d97f3f92df5380b5ca88bcc123199e9
|
[
"MIT"
] | null | null | null |
setup.py
|
danklotz/RRMPG
|
6f3e4d694d97f3f92df5380b5ca88bcc123199e9
|
[
"MIT"
] | null | null | null |
setup.py
|
danklotz/RRMPG
|
6f3e4d694d97f3f92df5380b5ca88bcc123199e9
|
[
"MIT"
] | 1
|
2018-12-18T21:09:04.000Z
|
2018-12-18T21:09:04.000Z
|
from distutils.core import setup
import rrmpg
setup(
name="rrmpg",
version=rrmpg.__reversion__,
author=rrmpg.__author__,
author_email="f.kratzert[at]gmail.com",
description=("Rainfall-Runoff-Model-PlayGround: a Python library for"
"hydrological modeling."),
    url="https://www.github.com/kratzert/RRMPG",
packages=["rrmpg", "rrmpg.models", "rrmpg.tools", "rrmpg.utils"],
license="MIT-License",
keywords="hydrology rainfall-runoff modeling",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Education",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License"]
)
| 31.863636
| 73
| 0.653352
|
b568bd80858bc9d3b97d230a77818e35da70ea3a
| 383
|
py
|
Python
|
wbb/utils/botinfo.py
|
sylphiette/WilliamButcherBot
|
1f715f2664ecd10fe6770ce609d799ea2c17b716
|
[
"MIT"
] | null | null | null |
wbb/utils/botinfo.py
|
sylphiette/WilliamButcherBot
|
1f715f2664ecd10fe6770ce609d799ea2c17b716
|
[
"MIT"
] | null | null | null |
wbb/utils/botinfo.py
|
sylphiette/WilliamButcherBot
|
1f715f2664ecd10fe6770ce609d799ea2c17b716
|
[
"MIT"
] | null | null | null |
BOT_ID = 0
BOT_NAME = ""
BOT_USERNAME = ""
BOT_DC_ID = 0
async def get_info(app):
global BOT_ID, BOT_NAME, BOT_USERNAME, BOT_DC_ID
getme = await app.get_me()
BOT_ID = getme.id
if getme.last_name:
BOT_NAME = getme.first_name + " " + getme.last_name
else:
BOT_NAME = getme.first_name
BOT_USERNAME = getme.username
BOT_DC_ID = getme.dc_id
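# Usage sketch: call "await get_info(app)" once at startup so that BOT_ID, BOT_NAME,
# BOT_USERNAME and BOT_DC_ID are populated before other handlers read them.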
| 21.277778
| 59
| 0.660574
|
47e744c07ec7901b2d0f5056dd65201f4516cec6
| 579
|
py
|
Python
|
src/sensationdriver/profiler.py
|
sebastianludwig/SensationDriver
|
8787afa8fb55a43af69d83192a4f09a4279b5c00
|
[
"MIT"
] | null | null | null |
src/sensationdriver/profiler.py
|
sebastianludwig/SensationDriver
|
8787afa8fb55a43af69d83192a4f09a4279b5c00
|
[
"MIT"
] | null | null | null |
src/sensationdriver/profiler.py
|
sebastianludwig/SensationDriver
|
8787afa8fb55a43af69d83192a4f09a4279b5c00
|
[
"MIT"
] | null | null | null |
import time
import itertools
class Profiler(object):
def __init__(self):
self.entries = []
def save_data(self, path):
def flatten(entry):
return itertools.chain(entry[:2], entry[2])
def convert(entry):
return ';'.join(str(element).replace("\n", ';') for element in entry)
with open(path, 'w') as f:
entries = map(convert, map(flatten, self.entries))
f.write("\n".join(entries))
def log(self, action, *text):
self.entries.append((action, time.time() * 1000, text))
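# Usage sketch (action names and output path are illustrative only):
#   p = Profiler()
#   p.log("frame", "start")
#   p.log("frame", "end")
#   p.save_data("/tmp/profile.csv")  # one semicolon-separated line per logged entry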
| 25.173913
| 81
| 0.566494
|
3c5dc469faa4afa171fc90646f78bb1da692742a
| 25,477
|
py
|
Python
|
nuitka/tree/ReformulationFunctionStatements.py
|
em3ndez/Nuitka
|
a5a036a94c1842d1cd72f27c0c67461798fdf977
|
[
"Apache-2.0"
] | 1
|
2019-09-09T19:27:43.000Z
|
2019-09-09T19:27:43.000Z
|
nuitka/tree/ReformulationFunctionStatements.py
|
em3ndez/Nuitka
|
a5a036a94c1842d1cd72f27c0c67461798fdf977
|
[
"Apache-2.0"
] | 1
|
2019-02-21T13:05:17.000Z
|
2019-02-21T13:05:17.000Z
|
nuitka/tree/ReformulationFunctionStatements.py
|
em3ndez/Nuitka
|
a5a036a94c1842d1cd72f27c0c67461798fdf977
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020, Kay Hayen, mailto:kay.hayen@gmail.com
#
# Part of "Nuitka", an optimizing Python compiler that is compatible and
# integrates with CPython, but also works on its own.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" Reformulation of function statements.
Consult the developer manual for information. TODO: Add ability to sync
source code comments with developer manual sections.
"""
from nuitka.nodes.AssignNodes import (
StatementAssignmentVariable,
StatementAssignmentVariableName,
StatementReleaseVariable,
)
from nuitka.nodes.AsyncgenNodes import (
ExpressionAsyncgenObjectBody,
ExpressionMakeAsyncgenObject,
)
from nuitka.nodes.BuiltinIteratorNodes import (
ExpressionBuiltinIter1,
StatementSpecialUnpackCheck,
)
from nuitka.nodes.BuiltinNextNodes import ExpressionSpecialUnpack
from nuitka.nodes.BuiltinRefNodes import makeExpressionBuiltinTypeRef
from nuitka.nodes.CodeObjectSpecs import CodeObjectSpec
from nuitka.nodes.CoroutineNodes import (
ExpressionCoroutineObjectBody,
ExpressionMakeCoroutineObject,
)
from nuitka.nodes.FunctionNodes import (
ExpressionFunctionBody,
ExpressionFunctionCreation,
ExpressionFunctionRef,
)
from nuitka.nodes.GeneratorNodes import (
ExpressionGeneratorObjectBody,
ExpressionMakeGeneratorObject,
StatementGeneratorReturnNone,
)
from nuitka.nodes.LocalsDictNodes import StatementSetLocalsDictionary
from nuitka.nodes.OutlineNodes import ExpressionOutlineFunction
from nuitka.nodes.ReturnNodes import StatementReturn, StatementReturnNone
from nuitka.nodes.VariableRefNodes import (
ExpressionTempVariableRef,
ExpressionVariableNameRef,
ExpressionVariableRef,
)
from nuitka.PythonVersions import python_version
from nuitka.specs.ParameterSpecs import ParameterSpec
from .ReformulationTryFinallyStatements import makeTryFinallyStatement
from .SyntaxErrors import raiseSyntaxError
from .TreeHelpers import (
buildAnnotationNode,
buildFrameNode,
buildNode,
buildNodeList,
detectFunctionBodyKind,
extractDocFromBody,
getKind,
makeCallNode,
makeDictCreationOrConstant2,
makeStatementsSequenceFromStatement,
mangleName,
)
def _insertFinalReturnStatement(function_statements_body, return_statement):
if function_statements_body is None:
function_statements_body = makeStatementsSequenceFromStatement(
statement=return_statement
)
elif not function_statements_body.isStatementAborting():
function_statements_body.setStatements(
function_statements_body.getStatements() + (return_statement,)
)
return function_statements_body
def _insertInitialSetLocalsDictStatement(function_body, function_statements_body):
locals_statement = StatementSetLocalsDictionary(
locals_scope=function_body.getLocalsScope(), source_ref=function_body.source_ref
)
if function_statements_body is None:
function_statements_body = makeStatementsSequenceFromStatement(
statement=locals_statement
)
else:
function_statements_body.setStatements(
(locals_statement,) + function_statements_body.getStatements()
)
return function_statements_body
def _injectDecorator(decorators, inject, acceptable, source_ref):
assert type(inject) is str
assert type(acceptable) is tuple
for decorator in decorators:
if (
decorator.isExpressionVariableNameRef()
and decorator.getVariableName() in acceptable
):
break
else:
decorators.append(
makeExpressionBuiltinTypeRef(builtin_name=inject, source_ref=source_ref)
)
def buildFunctionNode(provider, node, source_ref):
# Functions have way too many details, pylint: disable=too-many-locals
assert getKind(node) == "FunctionDef"
function_statement_nodes, function_doc = extractDocFromBody(node)
function_kind, flags = detectFunctionBodyKind(nodes=function_statement_nodes)
function_body, code_body, code_object = buildFunctionWithParsing(
provider=provider,
function_kind=function_kind,
name=node.name,
function_doc=function_doc,
flags=flags,
node=node,
source_ref=source_ref,
)
if function_kind in ("Generator", "Coroutine"):
if function_kind == "Coroutine":
code_body = ExpressionCoroutineObjectBody(
provider=function_body,
name=node.name,
code_object=code_object,
flags=flags,
auto_release=None,
source_ref=source_ref,
)
maker_class = ExpressionMakeCoroutineObject
else:
code_body = ExpressionGeneratorObjectBody(
provider=function_body,
name=node.name,
code_object=code_object,
flags=flags,
auto_release=None,
source_ref=source_ref,
)
maker_class = ExpressionMakeGeneratorObject
code_body.qualname_provider = provider
for variable in function_body.getProvidedVariables():
code_body.getVariableForReference(variable.getName())
function_body.setBody(
makeStatementsSequenceFromStatement(
statement=StatementReturn(
expression=maker_class(
ExpressionFunctionRef(
function_body=code_body, source_ref=source_ref
),
source_ref=source_ref,
),
source_ref=source_ref,
)
)
)
decorators = buildNodeList(
provider=provider, nodes=reversed(node.decorator_list), source_ref=source_ref
)
defaults = buildNodeList(
provider=provider, nodes=node.args.defaults, source_ref=source_ref
)
kw_defaults = buildParameterKwDefaults(
provider=provider, node=node, function_body=function_body, source_ref=source_ref
)
function_statements_body = buildFrameNode(
provider=code_body,
nodes=function_statement_nodes,
code_object=code_object,
source_ref=source_ref,
)
if function_kind == "Function":
# TODO: Generators might have to raise GeneratorExit instead.
function_statements_body = _insertFinalReturnStatement(
function_statements_body=function_statements_body,
return_statement=StatementReturnNone(source_ref=source_ref),
)
if "has_exec" in flags:
function_statements_body = _insertInitialSetLocalsDictStatement(
function_body=code_body, function_statements_body=function_statements_body
)
if function_statements_body.isStatementsFrame():
function_statements_body = makeStatementsSequenceFromStatement(
statement=function_statements_body
)
code_body.setBody(function_statements_body)
annotations = buildParameterAnnotations(provider, node, source_ref)
function_creation = ExpressionFunctionCreation(
function_ref=ExpressionFunctionRef(
function_body=function_body, source_ref=source_ref
),
defaults=defaults,
kw_defaults=kw_defaults,
annotations=annotations,
source_ref=source_ref,
)
# Add the "staticmethod" decorator to __new__ methods if not provided.
# CPython made these optional, but secretly applies them when it does
# "class __new__". We add them earlier, so our optimization will see it.
if (
python_version < 300
and node.name == "__new__"
and provider.isExpressionClassBody()
):
_injectDecorator(
decorators, "staticmethod", ("staticmethod", "classmethod"), source_ref
)
# Add the "classmethod" decorator to __init_subclass__ methods if not provided.
if (
python_version >= 360
and node.name == "__init_subclass__"
and provider.isExpressionClassBody()
):
_injectDecorator(decorators, "classmethod", ("classmethod",), source_ref)
if (
python_version >= 370
and node.name == "__class_getitem__"
and provider.isExpressionClassBody()
):
_injectDecorator(decorators, "classmethod", ("classmethod",), source_ref)
decorated_function = function_creation
for decorator in decorators:
decorated_function = makeCallNode(
decorator, decorated_function, decorator.getSourceReference()
)
result = StatementAssignmentVariableName(
provider=provider,
variable_name=mangleName(node.name, provider),
source=decorated_function,
source_ref=source_ref,
)
if python_version >= 340:
function_body.qualname_setup = result.getVariableName()
return result
def buildAsyncFunctionNode(provider, node, source_ref):
# We are creating a function here that creates coroutine objects, with
# many details each, pylint: disable=too-many-locals
assert getKind(node) == "AsyncFunctionDef"
function_statement_nodes, function_doc = extractDocFromBody(node)
function_kind, flags = detectFunctionBodyKind(
nodes=function_statement_nodes, start_value="Coroutine"
)
creator_function_body, _, code_object = buildFunctionWithParsing(
provider=provider,
function_kind=function_kind,
name=node.name,
function_doc=function_doc,
flags=(),
node=node,
source_ref=source_ref,
)
if function_kind == "Coroutine":
function_body = ExpressionCoroutineObjectBody(
provider=creator_function_body,
name=node.name,
code_object=code_object,
flags=flags,
auto_release=None,
source_ref=source_ref,
)
else:
function_body = ExpressionAsyncgenObjectBody(
provider=creator_function_body,
name=node.name,
code_object=code_object,
flags=flags,
auto_release=None,
source_ref=source_ref,
)
function_body.qualname_provider = provider
for variable in creator_function_body.getProvidedVariables():
function_body.getVariableForReference(variable.getName())
decorators = buildNodeList(
provider=provider, nodes=reversed(node.decorator_list), source_ref=source_ref
)
defaults = buildNodeList(
provider=provider, nodes=node.args.defaults, source_ref=source_ref
)
function_statements_body = buildFrameNode(
provider=function_body,
nodes=function_statement_nodes,
code_object=code_object,
source_ref=source_ref,
)
function_statements_body = _insertFinalReturnStatement(
function_statements_body=function_statements_body,
return_statement=StatementGeneratorReturnNone(source_ref=source_ref),
)
if function_statements_body.isStatementsFrame():
function_statements_body = makeStatementsSequenceFromStatement(
statement=function_statements_body
)
function_body.setBody(function_statements_body)
annotations = buildParameterAnnotations(provider, node, source_ref)
kw_defaults = buildParameterKwDefaults(
provider=provider,
node=node,
function_body=creator_function_body,
source_ref=source_ref,
)
if function_kind == "Coroutine":
creation_node = ExpressionMakeCoroutineObject(
coroutine_ref=ExpressionFunctionRef(
function_body=function_body, source_ref=source_ref
),
source_ref=source_ref,
)
else:
creation_node = ExpressionMakeAsyncgenObject(
asyncgen_ref=ExpressionFunctionRef(
function_body=function_body, source_ref=source_ref
),
source_ref=source_ref,
)
creator_function_body.setBody(
makeStatementsSequenceFromStatement(
statement=StatementReturn(expression=creation_node, source_ref=source_ref)
)
)
function_creation = ExpressionFunctionCreation(
function_ref=ExpressionFunctionRef(
function_body=creator_function_body, source_ref=source_ref
),
defaults=defaults,
kw_defaults=kw_defaults,
annotations=annotations,
source_ref=source_ref,
)
decorated_function = function_creation
for decorator in decorators:
decorated_function = makeCallNode(
decorator, decorated_function, decorator.getSourceReference()
)
result = StatementAssignmentVariableName(
provider=provider,
variable_name=mangleName(node.name, provider),
source=decorated_function,
source_ref=source_ref,
)
function_body.qualname_setup = result.getVariableName()
# Share the non-local declarations. TODO: This may also apply to generators
# and async generators.
creator_function_body.non_local_declarations = function_body.non_local_declarations
return result
def buildParameterKwDefaults(provider, node, function_body, source_ref):
    # Build keyword-only argument default values. We are hiding here that this
    # is a Python3-only feature.
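    # Illustrative sketch: for "def f(*, a=1, b=2): ..." the kw_defaults expression
    # built below evaluates to the dictionary {'a': 1, 'b': 2}.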
if python_version >= 300:
kw_only_names = function_body.getParameters().getKwOnlyParameterNames()
if kw_only_names:
keys = []
values = []
for kw_only_name, kw_default in zip(kw_only_names, node.args.kw_defaults):
if kw_default is not None:
keys.append(kw_only_name)
values.append(buildNode(provider, kw_default, source_ref))
kw_defaults = makeDictCreationOrConstant2(
keys=keys, values=values, source_ref=source_ref
)
else:
kw_defaults = None
else:
kw_defaults = None
return kw_defaults
def buildParameterAnnotations(provider, node, source_ref):
    # Too many branches, because there are too many cases, pylint: disable=too-many-branches
    # Build annotations. We are hiding here that this is a Python3-only feature.
if python_version < 300:
return None
# Starting with Python 3.4, the names of parameters are mangled in
# annotations as well.
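    # Illustrative sketch: inside "class C", an annotated parameter named "__x" is
    # stored under the mangled key "_C__x".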
if python_version < 340:
mangle = lambda variable_name: variable_name
else:
mangle = lambda variable_name: mangleName(variable_name, provider)
keys = []
values = []
def addAnnotation(key, value):
keys.append(mangle(key))
values.append(value)
def extractArgAnnotation(arg):
if getKind(arg) == "Name":
assert arg.annotation is None
elif getKind(arg) == "arg":
if arg.annotation is not None:
addAnnotation(
key=arg.arg,
value=buildAnnotationNode(provider, arg.annotation, source_ref),
)
elif getKind(arg) == "Tuple":
for sub_arg in arg.elts:
extractArgAnnotation(sub_arg)
else:
assert False, getKind(arg)
if python_version >= 380:
for arg in node.args.posonlyargs:
extractArgAnnotation(arg)
for arg in node.args.args:
extractArgAnnotation(arg)
for arg in node.args.kwonlyargs:
extractArgAnnotation(arg)
if python_version < 340:
if node.args.varargannotation is not None:
addAnnotation(
key=node.args.vararg,
value=buildNode(provider, node.args.varargannotation, source_ref),
)
if node.args.kwargannotation is not None:
addAnnotation(
key=node.args.kwarg,
value=buildNode(provider, node.args.kwargannotation, source_ref),
)
else:
if node.args.vararg is not None:
extractArgAnnotation(node.args.vararg)
if node.args.kwarg is not None:
extractArgAnnotation(node.args.kwarg)
# Return value annotation (not there for lambdas)
if hasattr(node, "returns") and node.returns is not None:
addAnnotation(
key="return", value=buildAnnotationNode(provider, node.returns, source_ref)
)
if keys:
return makeDictCreationOrConstant2(
keys=keys, values=values, source_ref=source_ref
)
else:
return None
def _wrapFunctionWithSpecialNestedArgs(
name, outer_body, parameters, special_args, source_ref
):
inner_name = name.strip("<>") + "$inner"
iter_vars = []
values = []
statements = []
def unpackFrom(source, arg_names):
accesses = []
sub_special_index = 0
iter_var = outer_body.allocateTempVariable(None, "arg_iter_%d" % len(iter_vars))
iter_vars.append(iter_var)
statements.append(
StatementAssignmentVariable(
variable=iter_var,
source=ExpressionBuiltinIter1(value=source, source_ref=source_ref),
source_ref=source_ref,
)
)
for element_index, arg_name in enumerate(arg_names):
if getKind(arg_name) == "Name":
arg_var = outer_body.createProvidedVariable(arg_name.id)
outer_body.getLocalsScope().registerProvidedVariable(arg_var)
statements.append(
StatementAssignmentVariable(
variable=arg_var,
source=ExpressionSpecialUnpack(
value=ExpressionTempVariableRef(
variable=iter_var, source_ref=source_ref
),
count=element_index + 1,
expected=len(arg_names),
starred=False,
source_ref=source_ref,
),
source_ref=source_ref,
)
)
accesses.append(
ExpressionVariableRef(variable=arg_var, source_ref=source_ref)
)
elif getKind(arg_name) == "Tuple":
accesses.extend(
unpackFrom(
source=ExpressionSpecialUnpack(
value=ExpressionTempVariableRef(
variable=iter_var, source_ref=source_ref
),
count=element_index + 1,
expected=len(arg_names),
starred=False,
source_ref=source_ref,
),
arg_names=arg_name.elts,
)
)
sub_special_index += 1
else:
assert False, arg_name
statements.append(
StatementSpecialUnpackCheck(
iterator=ExpressionTempVariableRef(
variable=iter_var, source_ref=source_ref
),
count=len(arg_names),
source_ref=source_ref,
)
)
return accesses
for arg_name in parameters.getParameterNames():
if arg_name.startswith("."):
source = ExpressionVariableNameRef(
provider=outer_body, variable_name=arg_name, source_ref=source_ref
)
values.extend(unpackFrom(source, special_args[arg_name]))
else:
values.append(
ExpressionVariableNameRef(
provider=outer_body, variable_name=arg_name, source_ref=source_ref
)
)
code_body = ExpressionOutlineFunction(
provider=outer_body, name=inner_name, source_ref=source_ref
)
statements.append(StatementReturn(expression=code_body, source_ref=source_ref))
outer_body.setBody(
makeStatementsSequenceFromStatement(
statement=makeTryFinallyStatement(
provider=outer_body,
tried=statements,
final=[
StatementReleaseVariable(variable=variable, source_ref=source_ref)
for variable in sorted(
outer_body.getTempVariables(),
key=lambda variable: variable.getName(),
)
],
source_ref=source_ref,
public_exc=False,
)
)
)
return code_body
def buildFunctionWithParsing(
provider, function_kind, name, function_doc, flags, node, source_ref
):
# This contains a complex re-formulation for nested parameter functions.
# pylint: disable=too-many-locals
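    # Illustrative sketch: nested (tuple) parameters exist only on Python2, e.g.
    # "def f(a, (b, c)): ..."; they are re-formulated via the special ".1", ".2"
    # argument names created below and unpacked inside the function body.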
kind = getKind(node)
assert kind in ("FunctionDef", "Lambda", "AsyncFunctionDef"), (
"unsupported for kind " + kind
)
def extractArg(arg):
if arg is None:
return None
elif type(arg) is str:
return mangleName(arg, provider)
elif getKind(arg) == "Name":
return mangleName(arg.id, provider)
elif getKind(arg) == "arg":
return mangleName(arg.arg, provider)
elif getKind(arg) == "Tuple":
# These are to be re-formulated on the outside.
assert False
else:
assert False, getKind(arg)
special_args = {}
def extractNormalArgs(args):
normal_args = []
for arg in args:
if type(arg) is not str and getKind(arg) == "Tuple":
special_arg_name = ".%d" % (len(special_args) + 1)
special_args[special_arg_name] = arg.elts
normal_args.append(special_arg_name)
else:
normal_args.append(extractArg(arg))
return normal_args
parameters = ParameterSpec(
ps_name=name,
ps_normal_args=extractNormalArgs(node.args.args),
ps_pos_only_args=[extractArg(arg) for arg in node.args.posonlyargs]
if python_version >= 380
else (),
ps_kw_only_args=[extractArg(arg) for arg in node.args.kwonlyargs]
if python_version >= 300
else (),
ps_list_star_arg=extractArg(node.args.vararg),
ps_dict_star_arg=extractArg(node.args.kwarg),
ps_default_count=len(node.args.defaults),
)
message = parameters.checkParametersValid()
if message is not None:
raiseSyntaxError(message, source_ref.atColumnNumber(node.col_offset))
parent_module = provider.getParentModule()
code_object = CodeObjectSpec(
co_name=name,
co_kind=function_kind,
co_varnames=parameters.getParameterNames(),
co_argcount=parameters.getArgumentCount(),
co_posonlyargcount=parameters.getPosOnlyParameterCount(),
co_kwonlyargcount=parameters.getKwOnlyParameterCount(),
co_has_starlist=parameters.getStarListArgumentName() is not None,
co_has_stardict=parameters.getStarDictArgumentName() is not None,
co_filename=parent_module.getRunTimeFilename(),
co_lineno=source_ref.getLineNumber(),
future_spec=parent_module.getFutureSpec(),
)
outer_body = ExpressionFunctionBody(
provider=provider,
name=name,
code_object=code_object,
flags=flags,
doc=function_doc,
parameters=parameters,
auto_release=None,
source_ref=source_ref,
)
# Wrap if necessary for special nested arguments.
if special_args:
code_body = _wrapFunctionWithSpecialNestedArgs(
name=name,
outer_body=outer_body,
parameters=parameters,
special_args=special_args,
source_ref=source_ref,
)
else:
code_body = outer_body
return outer_body, code_body, code_object
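# Editor's illustrative sketch: for a source definition such as
#     def f(a, b=1, *args, **kwargs): ...
# the ParameterSpec built above would carry normal args 'a' and 'b',
# ps_list_star_arg='args', ps_dict_star_arg='kwargs' and ps_default_count=1,
# with every name passed through mangleName for private-name mangling.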
def addFunctionVariableReleases(function):
assert function.isExpressionFunctionBodyBase()
releases = []
# We attach everything to the function definition source location.
source_ref = function.getSourceReference()
for variable in function.getLocalVariables():
# Shared variables are freed by function attachment.
if variable.getOwner() is not function:
continue
releases.append(
StatementReleaseVariable(variable=variable, source_ref=source_ref)
)
if releases:
body = function.getBody()
if body.isStatementsFrame():
body = makeStatementsSequenceFromStatement(statement=body)
body = makeTryFinallyStatement(
provider=function, tried=body, final=releases, source_ref=source_ref
)
function.setBody(makeStatementsSequenceFromStatement(statement=body))
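# Editor's illustrative sketch: the effect of addFunctionVariableReleases is to
# wrap the existing body in a try/finally that drops the function's own locals,
# conceptually:
#
#     try:
#         <original body using x, y>
#     finally:
#         release x    # one StatementReleaseVariable per non-shared local
#         release y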
| 32.579284
| 91
| 0.646779
|
56a43a4b261257454609b6505fa4da65ca1a26b8
| 12,655
|
py
|
Python
|
nasbench2_subset_pred.py
|
debadeepta/zero-cost-nas
|
e14ee81078ed1a2f0e11d4db6e12bddf4b309776
|
[
"Apache-2.0"
] | null | null | null |
nasbench2_subset_pred.py
|
debadeepta/zero-cost-nas
|
e14ee81078ed1a2f0e11d4db6e12bddf4b309776
|
[
"Apache-2.0"
] | null | null | null |
nasbench2_subset_pred.py
|
debadeepta/zero-cost-nas
|
e14ee81078ed1a2f0e11d4db6e12bddf4b309776
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2021 Samsung Electronics Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
import pickle
import torch
import os
import argparse
from scipy.stats import spearmanr
import yaml
from foresight.models import *
from foresight.pruners import *
from foresight.dataset import *
from foresight.weight_initializers import init_net
def get_num_classes(args):
return 100 if args.dataset == 'cifar100' else 10 if args.dataset == 'cifar10' else 120
def parse_arguments():
parser = argparse.ArgumentParser(description='Zero-cost Metrics for NAS-Bench-201 subset')
parser.add_argument('--api_loc', default='data/NAS-Bench-201-v1_0-e61699.pth',
type=str, help='path to API')
parser.add_argument('--outdir', default='./',
type=str, help='output directory')
parser.add_argument('--init_w_type', type=str, default='none', help='weight initialization (before pruning) type [none, xavier, kaiming, zero]')
parser.add_argument('--init_b_type', type=str, default='none', help='bias initialization (before pruning) type [none, xavier, kaiming, zero]')
parser.add_argument('--batch_size', default=64, type=int)
parser.add_argument('--dataset', type=str, default='cifar10', help='dataset to use [cifar10, cifar100, ImageNet16-120]')
parser.add_argument('--gpu', type=int, default=0, help='GPU index to work on')
parser.add_argument('--num_data_workers', type=int, default=2, help='number of workers for dataloaders')
parser.add_argument('--dataload', type=str, default='random', help='random or grasp supported')
parser.add_argument('--dataload_info', type=int, default=1, help='number of batches to use for random dataload or number of samples per class for grasp dataload')
parser.add_argument('--seed', type=int, default=42, help='pytorch manual seed')
parser.add_argument('--write_freq', type=int, default=1, help='frequency of write to file')
parser.add_argument('--start', type=int, default=0, help='start index')
parser.add_argument('--end', type=int, default=0, help='end index')
    parser.add_argument('--noacc', default=False, action='store_true', help='avoid loading NASBench2 api and instead load a pickle file with tuple (index, arch_str)')
args = parser.parse_args()
args.device = torch.device("cuda:"+str(args.gpu) if torch.cuda.is_available() else "cpu")
return args
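# Example invocation (paths and values are illustrative; flags as defined above):
#   python nasbench2_subset_pred.py --api_loc data/NAS-Bench-201-v1_0-e61699.pth \
#       --dataset cifar10 --batch_size 64 --gpu 0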
if __name__ == '__main__':
args = parse_arguments()
if args.noacc:
api = pickle.load(open(args.api_loc,'rb'))
else:
from nas_201_api import NASBench201API as API
api = API(args.api_loc)
torch.manual_seed(args.seed)
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False
train_loader, val_loader = get_cifar_dataloaders(args.batch_size, args.batch_size, args.dataset, args.num_data_workers)
cached_res = []
pre='cf' if 'cifar' in args.dataset else 'im'
pfn=f'nb2_{pre}{get_num_classes(args)}_seed{args.seed}_dl{args.dataload}_dlinfo{args.dataload_info}_initw{args.init_w_type}_initb{args.init_b_type}.p'
op = os.path.join(args.outdir,pfn)
args.end = len(api) if args.end == 0 else args.end
# list of archs to compute measures over
all_archs = [6983, 14708, 1891, 6083, 5727, 10259, 11869, 4336, 5737, 8242, 1759, 12447, 11686, 1604, 8184, 288, 3352, 4026, 6531, 13745, 7319, 3435, 10659, 10850, 9427, 3972, 14392, 8047, 7118, 2491, 4750, 9167, 8039, 13941, 7787, 9438, 9110, 4555, 1835, 9117, 14584, 12112, 9233, 905, 4185, 6744, 9522, 11247, 411, 2778, 6781, 3370, 1032, 11208, 11033, 11652, 2224, 2047, 7572, 5031, 3395, 13911, 9045, 7113, 6050, 2373, 13971, 9650, 13273, 9682, 1944, 1809, 7170, 767, 10236, 3267, 11786, 8702, 3444, 13017, 5557, 14710, 7624, 11751, 6390, 2989, 8246, 8980, 13087, 27, 6607, 9580, 12749, 11341, 13912, 11180, 15543, 7761, 9102, 9509, 9677, 362, 6272, 3079, 13105, 8206, 13708, 191, 10218, 12165, 12613, 920, 6014, 5400, 8279, 13784, 307, 14341, 8797, 11173, 12631, 11363, 7723, 6708, 6918, 9906, 2903, 13869, 15135, 9594, 3088, 9455, 4360, 3201, 7404, 8504, 7933, 7545, 11098, 1554, 7939, 11503, 5637, 3140, 7982, 433, 15460, 12101, 9801, 15617, 6149, 2181, 8306, 15468, 14259, 13431, 2170, 7630, 5541, 5333, 14923, 5828, 5778, 771, 12138, 2924, 59, 14727, 6049, 7341, 13630, 13899, 8200, 13692, 5679, 14861, 14815, 9802, 11801, 2803, 2082, 6621, 1998, 3807, 9691, 7361, 15550, 13775, 851, 6056, 12596, 2884, 4477, 10417, 14455, 10915, 15094, 6180, 5212, 6323, 12684, 15180, 6501, 6890, 5875, 3572, 14526, 12273, 6440, 9078, 1285, 4368, 7383, 9197, 10560, 15439, 3827, 7060, 14000, 12244, 4398, 2685, 7481, 12825, 15299, 6656, 4824, 5903, 11735, 3810, 6137, 11185, 8956, 12813, 1567, 11666, 4478, 12066, 2315, 8517, 2950, 10669, 8096, 10638, 10805, 14524, 14179, 3330, 7045, 2573, 10152, 11851, 4299, 2980, 5099, 6651, 13137, 1902, 1212, 4704, 4522, 3760, 273, 1235, 8407, 8016, 7192, 4429, 3298, 8339, 11255, 7453, 8739, 5756, 3428, 2795, 2729, 13000, 5732, 9962, 658, 11422, 8464, 3455, 15201, 7885, 9053, 5976, 7257, 9598, 3070, 11672, 14113, 21, 12330, 7670, 6608, 3566, 13006, 10356, 7962, 4875, 7656, 7976, 7912, 4099, 6269, 11320, 10035, 3874, 7817, 3829, 11576, 2805, 2377, 8315, 13182, 6214, 7888, 10509, 8877, 1879, 1086, 7977, 3218, 13244, 738, 5122, 8927, 4961, 5367, 2829, 1247, 8994, 8723, 9297, 14924, 3025, 6689, 6605, 14695, 232, 14521, 9323, 12750, 7265, 11262, 3405, 12794, 7949, 5624, 1697, 11802, 3809, 7369, 800, 7650, 2447, 5299, 3803, 190, 9389, 10900, 12754, 13837, 4748, 9186, 7548, 6421, 8657, 11892, 13922, 4159, 11265, 6424, 14754, 1252, 5864, 8146, 9373, 4161, 15541, 8672, 14525, 12957, 14855, 2382, 2309, 7076, 1334, 779, 6986, 3826, 9065, 11874, 10706, 4695, 4686, 13267, 3648, 2844, 12055, 5874, 1528, 2524, 12857, 256, 10265, 8400, 6115, 5348, 4599, 13247, 11399, 15310, 3543, 1430, 4253, 11793, 247, 6413, 12083, 11181, 8864, 2124, 479, 12129, 11743, 15535, 3851, 2640, 10628, 465, 9060, 3415, 7822, 15168, 6490, 14886, 15072, 7440, 15312, 10472, 9397, 10621, 6081, 2818, 5373, 15420, 2348, 9019, 8625, 14961, 15044, 9278, 8011, 4286, 13012, 12213, 14768, 4340, 237, 3684, 12234, 14887, 2559, 5695, 4318, 9903, 14663, 770, 8043, 4699, 10133, 12939, 8614, 860, 967, 14671, 2283, 88, 9704, 14057, 15417, 3953, 14301, 14422, 14472, 5191, 14105, 14730, 4666, 13601, 405, 1510, 11715, 13319, 9932, 6789, 3512, 13861, 7615, 12995, 9855, 3197, 680, 7766, 3514, 4814, 2821, 8057, 3050, 4822, 14086, 12346, 4861, 755, 1089, 1749, 3933, 5957, 1620, 4805, 268, 12627, 6211, 8663, 13140, 12583, 7673, 12497, 1213, 10794, 13780, 4357, 12109, 3091, 11603, 4852, 12291, 10991, 9318, 6091, 14036, 14596, 7179, 7561, 2518, 13257, 1517, 1126, 7094, 901, 487, 8051, 4123, 4295, 13032, 6022, 1307, 6219, 2469, 6080, 6017, 11090, 5678, 
8070, 10442, 3602, 3563, 13637, 1778, 10287, 14690, 6955, 6062, 6363, 14609, 977, 12609, 1029, 10297, 9152, 7529, 3258, 10771, 1518, 2417, 3273, 11205, 4973, 4678, 9170, 4499, 10343, 14581, 10368, 9499, 5579, 5609, 11569, 3587, 10789, 13972, 352, 4399, 2662, 14781, 14478, 13292, 4372, 14184, 8249, 15208, 4652, 7541, 13658, 2621, 8758, 3015, 11726, 15513, 14247, 5462, 6204, 13441, 7129, 14465, 9597, 5042, 13483, 10195, 246, 6995, 6034, 10010, 4948, 1640, 4130, 3086, 15503, 8357, 4638, 6839, 2838, 5359, 4575, 11637, 5262, 2023, 11675, 15161, 12147, 9920, 4174, 11190, 4210, 3484, 6597, 7425, 3559, 1052, 6122, 15578, 7225, 13851, 11438, 12412, 4267, 7695, 2175, 5175, 13347, 1355, 938, 8326, 15559, 6538, 2739, 6898, 9963, 10834, 1708, 11298, 10153, 4657, 3931, 11639, 13926, 12495, 6320, 4417, 2789, 3870, 12201, 12608, 9098, 4679, 8817, 9087, 2326, 7007, 3982, 9137, 2957, 11129, 5550, 11279, 12560, 979, 270, 13031, 1067, 867, 5159, 3204, 8729, 6276, 4618, 6578, 2163, 8006, 986, 5656, 1062, 2509, 15461, 3906, 10955, 4184, 4820, 8601, 818, 7974, 5486, 14090, 2458, 3046, 13386, 5885, 1859, 5502, 911, 13126, 12658, 10128, 12931, 3207, 13510, 5494, 15526, 8304, 9461, 3838, 6850, 11322, 8713, 3491, 12691, 12400, 12178, 6999, 1674, 13812, 12907, 3060, 12520, 12015, 13455, 13632, 5176, 747, 2238, 10043, 10007, 13827, 11485, 6823, 14319, 12766, 12985, 9927, 8520, 4163, 5529, 9352, 13619, 15551, 10038, 5760, 3398, 7585, 4701, 9928, 787, 8084, 1381, 1026, 7223, 2945, 7281, 13364, 6785, 2899, 5300, 11700, 6917, 13968, 4439, 2199, 12391, 4019, 12418, 12639, 8604, 12443, 2670, 9079, 11755, 9246, 4157, 8948, 13638, 3077, 7710, 259, 8095, 7706, 11101, 9940, 13287, 3759, 4914, 8003, 13109, 10464, 15238, 15392, 417, 15556, 7135, 2296, 5168, 9242, 2490, 10177, 13045, 13366, 11993, 10654, 3341, 14190, 11272, 9852, 796, 8819, 5813, 5647, 10056, 9215, 2209, 3618, 11474, 7622, 1184, 7894, 603, 2928, 5951, 11804, 4989, 5455, 4568, 3170, 1332, 5250, 10515, 2302, 4690, 571, 864, 4606, 7607, 9595, 1017, 5096, 14930, 12830, 8963, 5154, 484, 13022, 5595, 14404, 14234, 15504, 9391, 4886, 7896, 14993, 2954, 7726, 2955, 2708, 5752, 11432, 13363, 5102, 10618, 11126, 727, 5698, 6534, 12879, 11253, 9343, 15052, 2407, 7255, 11976, 14103, 10945, 5508, 12270, 641, 1362, 12432, 13539, 15364, 9915, 14811, 13359, 3728, 12548, 4441, 14082, 4154, 5499, 13479, 13365, 3638, 5959, 159, 3550, 10182, 13323, 9559, 1087, 14223, 4062, 14717, 10087, 14130, 11430, 630, 1351, 3365, 3303, 2958, 3353, 7441, 14283, 7563, 12974, 2354, 5052, 7753, 10273, 11730, 2235, 12788, 5658, 8231, 6226, 1602, 9631, 3806, 2609, 5479, 4967, 8835, 1574, 6954, 4351, 2041, 9931, 8744, 7104, 14354, 1445, 6565, 4946, 11687, 10718, 6909, 1668, 12027, 4585, 3995, 11048]
#loop over nasbench2 archs
for i, arch_str in enumerate(api):
# only compute over the subset
if i not in all_archs:
continue
if i < args.start:
continue
if i >= args.end:
break
res = {'i':i, 'arch':arch_str}
net = nasbench2.get_model_from_arch_str(arch_str, get_num_classes(args))
net.to(args.device)
init_net(net, args.init_w_type, args.init_b_type)
arch_str2 = nasbench2.get_arch_str_from_model(net)
if arch_str != arch_str2:
print(arch_str)
print(arch_str2)
raise ValueError
measures = predictive.find_measures(net,
train_loader,
(args.dataload, args.dataload_info, get_num_classes(args)),
args.device)
res['logmeasures']= measures
if not args.noacc:
info = api.get_more_info(i, 'cifar10-valid' if args.dataset=='cifar10' else args.dataset, iepoch=None, hp='200', is_random=False)
trainacc = info['train-accuracy']
valacc = info['valid-accuracy']
testacc = info['test-accuracy']
res['trainacc']=trainacc
res['valacc']=valacc
res['testacc']=testacc
#print(res)
cached_res.append(res)
# write raw results to file for post processing
if i % args.write_freq == 0 or i == len(api)-1 or i == 10:
savename = f'nasbench201_{args.dataset}_subset_all_results.yaml'
with open(savename, 'w') as f:
yaml.dump(cached_res, f)
# compute spearman's correlation between measures
measures_list = ['grad_norm', 'snip', 'fisher', 'grasp', 'jacob_cov', 'plain', 'synflow_bn', 'synflow']
evals_dict = {}
# gather test accuracies
all_reg_evals = [res['testacc'] for res in cached_res]
# gather measures
for m in measures_list:
m_evals = [res['logmeasures'][m] for res in cached_res]
evals_dict[m] = m_evals
spes_dict = {}
for m in measures_list:
assert len(all_reg_evals) == len(evals_dict[m])
spe, _ = spearmanr(all_reg_evals, evals_dict[m])
spes_dict[m] = spe
print(f'{m} measure spe: {spe}')
savename = f'nasbench201_{args.dataset}_subset_overall.yaml'
with open(savename, 'w') as f:
yaml.dump(spes_dict, f)
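    # Worked example of the correlation step above, with invented numbers:
    #   >>> from scipy.stats import spearmanr
    #   >>> accs = [91.2, 88.5, 93.0, 90.1]   # test accuracies
    #   >>> meas = [2.1, 1.7, 2.6, 2.0]       # one zero-cost measure
    #   >>> spearmanr(accs, meas).correlation
    #   1.0   # identical rank ordering gives a perfect Spearman correlation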
| 81.121795
| 6,292
| 0.663216
|
4643e34ee0f7fc963093ab9a445fca3eec3bbe3e
| 486
|
py
|
Python
|
setup.py
|
kvdogan/advanced_tools
|
7e93232374980d83fda8051496a190188c11fe0d
|
[
"MIT"
] | null | null | null |
setup.py
|
kvdogan/advanced_tools
|
7e93232374980d83fda8051496a190188c11fe0d
|
[
"MIT"
] | null | null | null |
setup.py
|
kvdogan/advanced_tools
|
7e93232374980d83fda8051496a190188c11fe0d
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# encoding: utf-8
from setuptools import setup, find_packages
setup(
name='advanced_tools',
version='0.6',
description='Utility functions,scripts for daily operations',
url='http://github.com/kvdogan/advanced_tools',
author='kvdogan',
packages=find_packages(),
include_package_data=True,
license='MIT License',
entry_points={
'console_scripts': ['build-hierarchy=advanced_tools.build_hierarchy_tree:main'],
}
)
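# Editor's note (illustrative, not part of the original setup.py): after
# installation the console_scripts entry point above exposes a shell command:
#   $ pip install .
#   $ build-hierarchy --help   # runs advanced_tools.build_hierarchy_tree:main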
| 24.3
| 88
| 0.699588
|
c666711e0252afa1b07b778137aa6ac2626db0f8
| 1,510
|
py
|
Python
|
aliyunsdkcore/auth/url_encoder.py
|
gikoluo/aliyun-python-sdk-core
|
5c4e79ad5f7668af048ae1a18d424c4919131a9c
|
[
"MIT"
] | 1
|
2019-05-30T03:53:33.000Z
|
2019-05-30T03:53:33.000Z
|
aliyun-python-sdk-release-test/aliyunsdkcore/auth/url_encoder.py
|
liusc27/aliyun-openapi-python-sdk
|
5e3db3535dd21de987dc5981e71151327d5a884f
|
[
"Apache-2.0"
] | null | null | null |
aliyun-python-sdk-release-test/aliyunsdkcore/auth/url_encoder.py
|
liusc27/aliyun-openapi-python-sdk
|
5e3db3535dd21de987dc5981e71151327d5a884f
|
[
"Apache-2.0"
] | 4
|
2017-07-27T11:27:01.000Z
|
2020-09-01T07:49:21.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# coding=utf-8
import urllib
import sys
"""
Acs url encoder module.
Created on 6/16/2015
@author: alex
"""
def get_encode_str(params):
"""
transforms parameters to encoded string
:param params: dict parameters
:return: string
"""
list_params = sorted(params.iteritems(), key=lambda d: d[0])
encode_str = urllib.urlencode(list_params)
if sys.stdin.encoding is None:
res = urllib.quote(encode_str.decode('cp936').encode('utf8'), '')
else:
res = urllib.quote(
encode_str.decode(
sys.stdin.encoding).encode('utf8'), '')
res = res.replace("+", "%20")
res = res.replace("*", "%2A")
res = res.replace("%7E", "~")
return res
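# Illustrative usage (Python 2 only; parameter values invented). With plain
# ASCII input the decode/encode round trip is a no-op, so the call reduces to
# percent-encoding the sorted, urlencoded parameters:
#   >>> get_encode_str({'Action': 'DescribeRegions', 'Version': '2014-05-26'})
#   'Action%3DDescribeRegions%26Version%3D2014-05-26'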
| 29.607843
| 73
| 0.689404
|
185a0d5f3e5bd6a492c6dbc3a52a1fe3e79947f8
| 1,308
|
py
|
Python
|
ecowitt2mqtt/data.py
|
mike240se/ecowitt2mqtt
|
2f922f3d287b0935eb05122da6dba4b557ec97e6
|
[
"MIT"
] | null | null | null |
ecowitt2mqtt/data.py
|
mike240se/ecowitt2mqtt
|
2f922f3d287b0935eb05122da6dba4b557ec97e6
|
[
"MIT"
] | null | null | null |
ecowitt2mqtt/data.py
|
mike240se/ecowitt2mqtt
|
2f922f3d287b0935eb05122da6dba4b557ec97e6
|
[
"MIT"
] | null | null | null |
"""Define helpers to process data from an Ecowitt device."""
import meteocalc
from ecowitt2mqtt.const import (
DATA_POINT_DEWPOINT,
DATA_POINT_FEELSLIKEF,
DATA_POINT_HEATINDEX,
DATA_POINT_HUMIDITY,
DATA_POINT_TEMPF,
DATA_POINT_WINDCHILL,
DATA_POINT_WINDSPEEDMPH,
LOGGER,
)
def process_data_payload(data: dict) -> dict:
"""Process incoming data from an Ecowitt device."""
for ignore_key in ("dateutc", "freq", "model", "stationtype"):
data.pop(ignore_key, None)
humidity = int(data[DATA_POINT_HUMIDITY])
temperature = meteocalc.Temp(data[DATA_POINT_TEMPF], "f")
wind_speed = float(data[DATA_POINT_WINDSPEEDMPH])
dew_point = meteocalc.dew_point(temperature, humidity)
data[DATA_POINT_DEWPOINT] = dew_point.f
heat_index = meteocalc.heat_index(temperature, humidity)
data[DATA_POINT_HEATINDEX] = heat_index.f
try:
wind_chill = meteocalc.wind_chill(temperature, wind_speed)
except ValueError as err:
LOGGER.debug(
"%s (temperature: %s, wind speed: %s)", err, temperature.f, wind_speed,
)
else:
data[DATA_POINT_WINDCHILL] = wind_chill.f
feels_like = meteocalc.feels_like(temperature, humidity, wind_speed)
data[DATA_POINT_FEELSLIKEF] = feels_like.f
return data
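# Illustrative usage (values invented; keys spelled via the imported constants):
#   payload = {DATA_POINT_TEMPF: 68.5, DATA_POINT_HUMIDITY: 45,
#              DATA_POINT_WINDSPEEDMPH: 3.4, "model": "WS2900"}
#   processed = process_data_payload(payload)
#   # "model" is dropped; dew point, heat index, feels-like and (when the
#   # temperature/wind combination is valid) wind chill values are added.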
| 29.727273
| 83
| 0.707951
|
c520583e6d33d9782f60d9ae2cc0d224b918a56c
| 10,208
|
py
|
Python
|
docs/conf.py
|
Unite-Genomics/django-cognito-jwt
|
75aa2a90c91a45233dea418d2a847104aae5a9ec
|
[
"MIT"
] | 139
|
2018-08-15T13:26:21.000Z
|
2022-03-20T15:37:05.000Z
|
docs/conf.py
|
Unite-Genomics/django-cognito-jwt
|
75aa2a90c91a45233dea418d2a847104aae5a9ec
|
[
"MIT"
] | 31
|
2019-01-10T20:48:35.000Z
|
2022-02-10T12:13:07.000Z
|
docs/conf.py
|
Unite-Genomics/django-cognito-jwt
|
75aa2a90c91a45233dea418d2a847104aae5a9ec
|
[
"MIT"
] | 50
|
2018-12-13T10:35:27.000Z
|
2022-03-20T06:40:10.000Z
|
# -*- coding: utf-8 -*-
#
# django-cognito-jwt documentation build configuration file, created by
# sphinx-quickstart on Wed Aug 10 17:06:14 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffixes as a list of strings:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'django-cognito-jwt'
copyright = u'Y, Michael van Tellingen'
author = u'Michael van Tellingen'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
version = '0.0.4'
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# These patterns also affect html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
html_theme_options = {
'github_user': 'LabD',
'github_banner': True,
'github_repo': 'django-cognito-jwt',
'travis_button': True,
'codecov_button': True,
'analytics_id': 'UA-75907833-X',
}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = u'django-cognito-jwt v0.0.4'
# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}
# Custom sidebar templates, maps document names to template names.
html_sidebars = {
'*': [
'sidebar-intro.html',
]
}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}
# If false, no module index is generated.
#
# html_domain_indices = True
# If false, no index is generated.
#
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'django_cognito_jwt-doc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'django_cognito_jwt.tex', u'django-cognito-jwt Documentation',
u'Michael van Tellingen', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False
# If true, show page references after internal links.
#
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
#
# latex_appendices = []
# If false, will not define \strong, \code, \titleref, \crossref ... but only
# \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added
# packages.
#
# latex_keep_old_macro_names = True
# If false, no module index is generated.
#
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'django_cognito_jwt', u'django-cognito-jwt Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'django_cognito_jwt', u'django-cognito-jwt Documentation',
author, 'django_cognito_jwt', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []
# If false, no module index is generated.
#
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#
# texinfo_no_detailmenu = False
| 29
| 80
| 0.702782
|
1c01d2ea71039b8a3339431f7c58939870329af5
| 53,319
|
py
|
Python
|
clients/python-experimental/generated/openapi_client/api_client.py
|
cliffano/jenkins-api-clients-generator
|
522d02b3a130a29471df5ec1d3d22c822b3d0813
|
[
"MIT"
] | null | null | null |
clients/python-experimental/generated/openapi_client/api_client.py
|
cliffano/jenkins-api-clients-generator
|
522d02b3a130a29471df5ec1d3d22c822b3d0813
|
[
"MIT"
] | null | null | null |
clients/python-experimental/generated/openapi_client/api_client.py
|
cliffano/jenkins-api-clients-generator
|
522d02b3a130a29471df5ec1d3d22c822b3d0813
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
Swaggy Jenkins
Jenkins API clients generated from Swagger / Open API specification # noqa: E501
The version of the OpenAPI document: 1.1.2-pre.0
Contact: blah@cliffano.com
Generated by: https://openapi-generator.tech
"""
from dataclasses import dataclass
from decimal import Decimal
import enum
import json
import os
import io
import atexit
from multiprocessing.pool import ThreadPool
import re
import tempfile
import typing
import urllib3
from urllib3._collections import HTTPHeaderDict
from urllib.parse import quote
from urllib3.fields import RequestField as RequestFieldBase
from openapi_client import rest
from openapi_client.configuration import Configuration
from openapi_client.exceptions import ApiTypeError, ApiValueError
from openapi_client.schemas import (
NoneClass,
BoolClass,
Schema,
FileIO,
BinarySchema,
InstantiationMetadata,
date,
datetime,
none_type,
frozendict,
Unset,
unset,
)
class RequestField(RequestFieldBase):
def __eq__(self, other):
if not isinstance(other, RequestField):
return False
return self.__dict__ == other.__dict__
class JSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, (str, int, float)):
# instances based on primitive classes
return obj
elif isinstance(obj, Decimal):
if obj.as_tuple().exponent >= 0:
return int(obj)
return float(obj)
elif isinstance(obj, NoneClass):
return None
elif isinstance(obj, BoolClass):
return bool(obj)
elif isinstance(obj, (dict, frozendict)):
return {key: self.default(val) for key, val in obj.items()}
elif isinstance(obj, (list, tuple)):
return [self.default(item) for item in obj]
raise ApiValueError('Unable to prepare type {} for serialization'.format(obj.__class__.__name__))
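# Illustrative behaviour of the encoder above (values invented):
#   enc = JSONEncoder()
#   enc.default(Decimal('2'))          # -> 2, integral Decimals become int
#   enc.default(Decimal('2.5'))        # -> 2.5, other Decimals become float
#   enc.default(frozendict({'a': 1}))  # -> {'a': 1}, values re-encoded recursively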
class ParameterInType(enum.Enum):
QUERY = 'query'
HEADER = 'header'
PATH = 'path'
COOKIE = 'cookie'
class ParameterStyle(enum.Enum):
MATRIX = 'matrix'
LABEL = 'label'
FORM = 'form'
SIMPLE = 'simple'
SPACE_DELIMITED = 'spaceDelimited'
PIPE_DELIMITED = 'pipeDelimited'
DEEP_OBJECT = 'deepObject'
class ParameterSerializerBase:
@staticmethod
def __serialize_number(
in_data: typing.Union[int, float], name: str, prefix=''
) -> typing.Tuple[typing.Tuple[str, str]]:
return tuple([(name, prefix + str(in_data))])
@staticmethod
def __serialize_str(
in_data: str, name: str, prefix=''
) -> typing.Tuple[typing.Tuple[str, str]]:
return tuple([(name, prefix + quote(in_data))])
@staticmethod
def __serialize_bool(in_data: bool, name: str, prefix='') -> typing.Tuple[typing.Tuple[str, str]]:
if in_data:
return tuple([(name, prefix + 'true')])
return tuple([(name, prefix + 'false')])
@staticmethod
def __urlencode(in_data: typing.Any) -> str:
return quote(str(in_data))
def __serialize_list(
self,
in_data: typing.List[typing.Any],
style: ParameterStyle,
name: str,
explode: bool,
empty_val: typing.Union[typing.Tuple[str, str], typing.Tuple] = tuple(),
prefix: str = '',
separator: str = ',',
) -> typing.Tuple[typing.Union[typing.Tuple[str, str], typing.Tuple], ...]:
if not in_data:
return empty_val
if explode and style in {
ParameterStyle.FORM,
ParameterStyle.MATRIX,
ParameterStyle.SPACE_DELIMITED,
ParameterStyle.PIPE_DELIMITED
}:
if style is ParameterStyle.FORM:
return tuple((name, prefix + self.__urlencode(val)) for val in in_data)
else:
joined_vals = prefix + separator.join(name + '=' + self.__urlencode(val) for val in in_data)
else:
joined_vals = prefix + separator.join(map(self.__urlencode, in_data))
return tuple([(name, joined_vals)])
def __form_item_representation(self, in_data: typing.Any) -> typing.Optional[str]:
if isinstance(in_data, none_type):
return None
elif isinstance(in_data, list):
if not in_data:
return None
raise ApiValueError('Unable to generate a form representation of {}'.format(in_data))
elif isinstance(in_data, dict):
if not in_data:
return None
raise ApiValueError('Unable to generate a form representation of {}'.format(in_data))
elif isinstance(in_data, (bool, bytes)):
raise ApiValueError('Unable to generate a form representation of {}'.format(in_data))
# str, float, int
return self.__urlencode(in_data)
def __serialize_dict(
self,
in_data: typing.Dict[str, typing.Any],
style: ParameterStyle,
name: str,
explode: bool,
empty_val: typing.Union[typing.Tuple[str, str], typing.Tuple] = tuple(),
prefix: str = '',
separator: str = ',',
) -> typing.Tuple[typing.Tuple[str, str]]:
if not in_data:
return empty_val
if all(val is None for val in in_data.values()):
return empty_val
form_items = {}
if style is ParameterStyle.FORM:
for key, val in in_data.items():
new_val = self.__form_item_representation(val)
if new_val is None:
continue
form_items[key] = new_val
if explode:
if style is ParameterStyle.FORM:
return tuple((key, prefix + val) for key, val in form_items.items())
elif style in {
ParameterStyle.SIMPLE,
ParameterStyle.LABEL,
ParameterStyle.MATRIX,
ParameterStyle.SPACE_DELIMITED,
ParameterStyle.PIPE_DELIMITED
}:
joined_vals = prefix + separator.join(key + '=' + self.__urlencode(val) for key, val in in_data.items())
else:
raise ApiValueError(f'Invalid style {style} for dict serialization with explode=True')
elif style is ParameterStyle.FORM:
joined_vals = prefix + separator.join(key + separator + val for key, val in form_items.items())
else:
joined_vals = prefix + separator.join(
key + separator + self.__urlencode(val) for key, val in in_data.items())
return tuple([(name, joined_vals)])
def _serialize_x(
self,
in_data: typing.Union[None, int, float, str, bool, dict, list],
style: ParameterStyle,
name: str,
explode: bool,
empty_val: typing.Union[typing.Tuple[str, str], typing.Tuple] = (),
prefix: str = '',
separator: str = ',',
) -> typing.Tuple[typing.Tuple[str, str], ...]:
if isinstance(in_data, none_type):
return empty_val
elif isinstance(in_data, bool):
# must be before int check
return self.__serialize_bool(in_data, name=name, prefix=prefix)
elif isinstance(in_data, (int, float)):
return self.__serialize_number(in_data, name=name, prefix=prefix)
elif isinstance(in_data, str):
return self.__serialize_str(in_data, name=name, prefix=prefix)
elif isinstance(in_data, list):
return self.__serialize_list(
in_data,
style=style,
name=name,
explode=explode,
empty_val=empty_val,
prefix=prefix,
separator=separator
)
elif isinstance(in_data, dict):
return self.__serialize_dict(
in_data,
style=style,
name=name,
explode=explode,
empty_val=empty_val,
prefix=prefix,
separator=separator
)
class StyleFormSerializer(ParameterSerializerBase):
def _serialize_form(
self,
in_data: typing.Union[None, int, float, str, bool, dict, list],
name: str,
explode: bool,
) -> typing.Tuple[typing.Tuple[str, str], ...]:
return self._serialize_x(in_data, style=ParameterStyle.FORM, name=name, explode=explode)
class StyleSimpleSerializer(ParameterSerializerBase):
def _serialize_simple_tuple(
self,
in_data: typing.Union[None, int, float, str, bool, dict, list],
name: str,
explode: bool,
in_type: ParameterInType,
) -> typing.Tuple[typing.Tuple[str, str], ...]:
if in_type is ParameterInType.HEADER:
empty_val = ()
else:
empty_val = ((name, ''),)
return self._serialize_x(in_data, style=ParameterStyle.SIMPLE, name=name, explode=explode, empty_val=empty_val)
@dataclass
class ParameterBase:
name: str
in_type: ParameterInType
required: bool
style: typing.Optional[ParameterStyle]
explode: typing.Optional[bool]
allow_reserved: typing.Optional[bool]
schema: typing.Optional[typing.Type[Schema]]
content: typing.Optional[typing.Dict[str, typing.Type[Schema]]]
__style_to_in_type = {
ParameterStyle.MATRIX: {ParameterInType.PATH},
ParameterStyle.LABEL: {ParameterInType.PATH},
ParameterStyle.FORM: {ParameterInType.QUERY, ParameterInType.COOKIE},
ParameterStyle.SIMPLE: {ParameterInType.PATH, ParameterInType.HEADER},
ParameterStyle.SPACE_DELIMITED: {ParameterInType.QUERY},
ParameterStyle.PIPE_DELIMITED: {ParameterInType.QUERY},
ParameterStyle.DEEP_OBJECT: {ParameterInType.QUERY},
}
__in_type_to_default_style = {
ParameterInType.QUERY: ParameterStyle.FORM,
ParameterInType.PATH: ParameterStyle.SIMPLE,
ParameterInType.HEADER: ParameterStyle.SIMPLE,
ParameterInType.COOKIE: ParameterStyle.FORM,
}
__disallowed_header_names = {'Accept', 'Content-Type', 'Authorization'}
_json_encoder = JSONEncoder()
_json_content_type = 'application/json'
@classmethod
def __verify_style_to_in_type(cls, style: typing.Optional[ParameterStyle], in_type: ParameterInType):
if style is None:
return
in_type_set = cls.__style_to_in_type[style]
if in_type not in in_type_set:
raise ValueError(
'Invalid style and in_type combination. For style={} only in_type={} are allowed'.format(
style, in_type_set
)
)
def __init__(
self,
name: str,
in_type: ParameterInType,
required: bool = False,
style: typing.Optional[ParameterStyle] = None,
explode: bool = False,
allow_reserved: typing.Optional[bool] = None,
schema: typing.Optional[typing.Type[Schema]] = None,
content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None
):
if schema is None and content is None:
raise ValueError('Value missing; Pass in either schema or content')
if schema and content:
raise ValueError('Too many values provided. Both schema and content were provided. Only one may be input')
if name in self.__disallowed_header_names and in_type is ParameterInType.HEADER:
raise ValueError('Invalid name, name may not be one of {}'.format(self.__disallowed_header_names))
self.__verify_style_to_in_type(style, in_type)
if content is None and style is None:
style = self.__in_type_to_default_style[in_type]
if content is not None and in_type in self.__in_type_to_default_style and len(content) != 1:
raise ValueError('Invalid content length, content length must equal 1')
self.in_type = in_type
self.name = name
self.required = required
self.style = style
self.explode = explode
self.allow_reserved = allow_reserved
self.schema = schema
self.content = content
@staticmethod
def _remove_empty_and_cast(
in_data: typing.Tuple[typing.Tuple[str, str]],
) -> typing.Dict[str, str]:
data = tuple(t for t in in_data if t)
if not data:
return dict()
return dict(data)
def _serialize_json(
self,
in_data: typing.Union[None, int, float, str, bool, dict, list]
) -> typing.Tuple[typing.Tuple[str, str]]:
return tuple([(self.name, json.dumps(in_data))])
class PathParameter(ParameterBase, StyleSimpleSerializer):
def __init__(
self,
name: str,
required: bool = False,
style: typing.Optional[ParameterStyle] = None,
explode: bool = False,
allow_reserved: typing.Optional[bool] = None,
schema: typing.Optional[typing.Type[Schema]] = None,
content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None
):
super().__init__(
name,
in_type=ParameterInType.PATH,
required=required,
style=style,
explode=explode,
allow_reserved=allow_reserved,
schema=schema,
content=content
)
def __serialize_label(
self,
in_data: typing.Union[None, int, float, str, bool, dict, list]
) -> typing.Dict[str, str]:
empty_val = ((self.name, ''),)
prefix = '.'
separator = '.'
return self._remove_empty_and_cast(
self._serialize_x(
in_data,
style=ParameterStyle.LABEL,
name=self.name,
explode=self.explode,
empty_val=empty_val,
prefix=prefix,
separator=separator
)
)
def __serialize_matrix(
self,
in_data: typing.Union[None, int, float, str, bool, dict, list]
) -> typing.Dict[str, str]:
separator = ','
if in_data == '':
prefix = ';' + self.name
elif isinstance(in_data, (dict, list)) and self.explode:
prefix = ';'
separator = ';'
else:
prefix = ';' + self.name + '='
empty_val = ((self.name, ''),)
return self._remove_empty_and_cast(
self._serialize_x(
in_data,
style=ParameterStyle.MATRIX,
name=self.name,
explode=self.explode,
prefix=prefix,
empty_val=empty_val,
separator=separator
)
)
def _serialize_simple(
self,
in_data: typing.Union[None, int, float, str, bool, dict, list],
) -> typing.Dict[str, str]:
tuple_data = self._serialize_simple_tuple(in_data, self.name, self.explode, self.in_type)
return self._remove_empty_and_cast(tuple_data)
def serialize(
self,
in_data: typing.Union[
Schema, Decimal, int, float, str, date, datetime, None, bool, list, tuple, dict, frozendict]
) -> typing.Dict[str, str]:
if self.schema:
cast_in_data = self.schema(in_data)
cast_in_data = self._json_encoder.default(cast_in_data)
"""
simple -> path
path:
returns path_params: dict
label -> path
returns path_params
matrix -> path
returns path_params
"""
if self.style:
if self.style is ParameterStyle.SIMPLE:
return self._serialize_simple(cast_in_data)
elif self.style is ParameterStyle.LABEL:
return self.__serialize_label(cast_in_data)
elif self.style is ParameterStyle.MATRIX:
return self.__serialize_matrix(cast_in_data)
# self.content will be length one
for content_type, schema in self.content.items():
cast_in_data = schema(in_data)
cast_in_data = self._json_encoder.default(cast_in_data)
if content_type == self._json_content_type:
tuple_data = self._serialize_json(cast_in_data)
return self._remove_empty_and_cast(tuple_data)
raise NotImplementedError('Serialization of {} has not yet been implemented'.format(content_type))
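# Rough sketch of the three path styles handled above, for a list value
# ['a', 'b'] in a parameter named 'id' with explode=False (illustrative only):
#   simple -> {'id': 'a,b'}
#   label  -> {'id': '.a.b'}
#   matrix -> {'id': ';id=a,b'}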
class QueryParameter(ParameterBase, StyleFormSerializer):
def __init__(
self,
name: str,
required: bool = False,
style: typing.Optional[ParameterStyle] = None,
explode: bool = False,
allow_reserved: typing.Optional[bool] = None,
schema: typing.Optional[typing.Type[Schema]] = None,
content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None
):
super().__init__(
name,
in_type=ParameterInType.QUERY,
required=required,
style=style,
explode=explode,
allow_reserved=allow_reserved,
schema=schema,
content=content
)
def __serialize_space_delimited(
self,
in_data: typing.Union[None, int, float, str, bool, dict, list]
) -> typing.Tuple[typing.Tuple[str, str], ...]:
separator = '%20'
empty_val = ()
return self._serialize_x(
in_data,
style=ParameterStyle.SPACE_DELIMITED,
name=self.name,
explode=self.explode,
separator=separator,
empty_val=empty_val
)
def __serialize_pipe_delimited(
self,
in_data: typing.Union[None, int, float, str, bool, dict, list]
) -> typing.Tuple[typing.Tuple[str, str], ...]:
separator = '|'
empty_val = ()
return self._serialize_x(
in_data,
style=ParameterStyle.PIPE_DELIMITED,
name=self.name,
explode=self.explode,
separator=separator,
empty_val=empty_val
)
def serialize(
self,
in_data: typing.Union[
Schema, Decimal, int, float, str, date, datetime, None, bool, list, tuple, dict, frozendict]
) -> typing.Tuple[typing.Tuple[str, str]]:
if self.schema:
cast_in_data = self.schema(in_data)
cast_in_data = self._json_encoder.default(cast_in_data)
"""
form -> query
query:
- GET/HEAD/DELETE: could use fields
- PUT/POST: must use urlencode to send parameters
returns fields: tuple
spaceDelimited -> query
returns fields
pipeDelimited -> query
returns fields
deepObject -> query, https://github.com/OAI/OpenAPI-Specification/issues/1706
returns fields
"""
if self.style:
# TODO update query ones to omit setting values when [] {} or None is input
if self.style is ParameterStyle.FORM:
return self._serialize_form(cast_in_data, explode=self.explode, name=self.name)
elif self.style is ParameterStyle.SPACE_DELIMITED:
return self.__serialize_space_delimited(cast_in_data)
elif self.style is ParameterStyle.PIPE_DELIMITED:
return self.__serialize_pipe_delimited(cast_in_data)
# self.content will be length one
for content_type, schema in self.content.items():
cast_in_data = schema(in_data)
cast_in_data = self._json_encoder.default(cast_in_data)
if content_type == self._json_content_type:
return self._serialize_json(cast_in_data)
raise NotImplementedError('Serialization of {} has not yet been implemented'.format(content_type))
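# Rough sketch of the query styles handled above, for a list value ['a', 'b']
# in a parameter named 'tag' (illustrative only):
#   form, explode=True            -> (('tag', 'a'), ('tag', 'b'))
#   form, explode=False           -> (('tag', 'a,b'),)
#   spaceDelimited, explode=False -> (('tag', 'a%20b'),)
#   pipeDelimited, explode=False  -> (('tag', 'a|b'),)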
class CookieParameter(ParameterBase, StyleFormSerializer):
def __init__(
self,
name: str,
required: bool = False,
style: typing.Optional[ParameterStyle] = None,
explode: bool = False,
allow_reserved: typing.Optional[bool] = None,
schema: typing.Optional[typing.Type[Schema]] = None,
content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None
):
super().__init__(
name,
in_type=ParameterInType.COOKIE,
required=required,
style=style,
explode=explode,
allow_reserved=allow_reserved,
schema=schema,
content=content
)
def serialize(
self,
in_data: typing.Union[
Schema, Decimal, int, float, str, date, datetime, None, bool, list, tuple, dict, frozendict]
) -> typing.Tuple[typing.Tuple[str, str]]:
if self.schema:
cast_in_data = self.schema(in_data)
cast_in_data = self._json_encoder.default(cast_in_data)
"""
form -> cookie
returns fields: tuple
"""
if self.style:
return self._serialize_form(cast_in_data, explode=self.explode, name=self.name)
# self.content will be length one
for content_type, schema in self.content.items():
cast_in_data = schema(in_data)
cast_in_data = self._json_encoder.default(cast_in_data)
if content_type == self._json_content_type:
return self._serialize_json(cast_in_data)
raise NotImplementedError('Serialization of {} has not yet been implemented'.format(content_type))
class HeaderParameter(ParameterBase, StyleSimpleSerializer):
def __init__(
self,
name: str,
required: bool = False,
style: typing.Optional[ParameterStyle] = None,
explode: bool = False,
allow_reserved: typing.Optional[bool] = None,
schema: typing.Optional[typing.Type[Schema]] = None,
content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None
):
super().__init__(
name,
in_type=ParameterInType.HEADER,
required=required,
style=style,
explode=explode,
allow_reserved=allow_reserved,
schema=schema,
content=content
)
@staticmethod
def __to_headers(in_data: typing.Tuple[typing.Tuple[str, str], ...]) -> HTTPHeaderDict[str, str]:
data = tuple(t for t in in_data if t)
headers = HTTPHeaderDict()
if not data:
return headers
headers.extend(data)
return headers
def _serialize_simple(
self,
in_data: typing.Union[None, int, float, str, bool, dict, list],
) -> HTTPHeaderDict[str, str]:
tuple_data = self._serialize_simple_tuple(in_data, self.name, self.explode, self.in_type)
return self.__to_headers(tuple_data)
def serialize(
self,
in_data: typing.Union[
Schema, Decimal, int, float, str, date, datetime, None, bool, list, tuple, dict, frozendict]
) -> HTTPHeaderDict[str, str]:
if self.schema:
cast_in_data = self.schema(in_data)
cast_in_data = self._json_encoder.default(cast_in_data)
"""
simple -> header
headers: PoolManager needs a mapping, tuple is close
returns headers: dict
"""
if self.style:
return self._serialize_simple(cast_in_data)
# self.content will be length one
for content_type, schema in self.content.items():
cast_in_data = schema(in_data)
cast_in_data = self._json_encoder.default(cast_in_data)
if content_type == self._json_content_type:
tuple_data = self._serialize_json(cast_in_data)
return self.__to_headers(tuple_data)
raise NotImplementedError('Serialization of {} has not yet been implemented'.format(content_type))
class Encoding:
def __init__(
self,
content_type: str,
headers: typing.Optional[typing.Dict[str, HeaderParameter]] = None,
style: typing.Optional[ParameterStyle] = None,
explode: bool = False,
allow_reserved: bool = False,
):
self.content_type = content_type
self.headers = headers
self.style = style
self.explode = explode
self.allow_reserved = allow_reserved
class MediaType:
"""
Used to store request and response body schema information
encoding:
A map between a property name and its encoding information.
The key, being the property name, MUST exist in the schema as a property.
The encoding object SHALL only apply to requestBody objects when the media type is
multipart or application/x-www-form-urlencoded.
"""
def __init__(
self,
schema: typing.Type[Schema],
encoding: typing.Optional[typing.Dict[str, Encoding]] = None,
):
self.schema = schema
self.encoding = encoding
@dataclass
class ApiResponse:
response: urllib3.HTTPResponse
body: typing.Union[Unset, typing.Type[Schema]]
headers: typing.Union[Unset, typing.List[HeaderParameter]]
def __init__(
self,
response: urllib3.HTTPResponse,
body: typing.Union[Unset, typing.Type[Schema]],
headers: typing.Union[Unset, typing.List[HeaderParameter]]
):
"""
pycharm needs this to prevent 'Unexpected argument' warnings
"""
self.response = response
self.body = body
self.headers = headers
@dataclass
class ApiResponseWithoutDeserialization(ApiResponse):
response: urllib3.HTTPResponse
body: typing.Union[Unset, typing.Type[Schema]] = unset
headers: typing.Union[Unset, typing.List[HeaderParameter]] = unset
class OpenApiResponse:
def __init__(
self,
response_cls: typing.Type[ApiResponse] = ApiResponse,
content: typing.Optional[typing.Dict[str, MediaType]] = None,
headers: typing.Optional[typing.List[HeaderParameter]] = None,
):
self.headers = headers
if content is not None and len(content) == 0:
raise ValueError('Invalid value for content, the content dict must have >= 1 entry')
self.content = content
self.response_cls = response_cls
@staticmethod
def __deserialize_json(response: urllib3.HTTPResponse) -> typing.Any:
decoded_data = response.data.decode("utf-8")
return json.loads(decoded_data)
@staticmethod
def __file_name_from_content_disposition(content_disposition: typing.Optional[str]) -> typing.Optional[str]:
if content_disposition is None:
return None
match = re.search('filename="(.+?)"', content_disposition)
if not match:
return None
return match.group(1)
def __deserialize_application_octet_stream(
self, response: urllib3.HTTPResponse
) -> typing.Union[bytes, io.BufferedReader]:
"""
urllib3 use cases:
1. when preload_content=True (stream=False) then supports_chunked_reads is False and bytes are returned
2. when preload_content=False (stream=True) then supports_chunked_reads is True and
a file will be written and returned
"""
if response.supports_chunked_reads():
file_name = self.__file_name_from_content_disposition(response.headers.get('content-disposition'))
if file_name is None:
_fd, path = tempfile.mkstemp()
else:
path = os.path.join(tempfile.gettempdir(), file_name)
# TODO get file_name from the filename at the end of the url if it exists
with open(path, 'wb') as new_file:
chunk_size = 1024
while True:
data = response.read(chunk_size)
if not data:
break
new_file.write(data)
# release_conn is needed for streaming connections only
response.release_conn()
new_file = open(path, 'rb')
return new_file
else:
return response.data
def deserialize(self, response: urllib3.HTTPResponse, configuration: Configuration) -> ApiResponse:
content_type = response.getheader('content-type')
deserialized_body = unset
streamed = response.supports_chunked_reads()
if self.content is not None:
if content_type == 'application/json':
body_data = self.__deserialize_json(response)
elif content_type == 'application/octet-stream':
body_data = self.__deserialize_application_octet_stream(response)
else:
raise NotImplementedError('Deserialization of {} has not yet been implemented'.format(content_type))
body_schema = self.content[content_type].schema
_instantiation_metadata = InstantiationMetadata(from_server=True, configuration=configuration)
deserialized_body = body_schema._from_openapi_data(
body_data, _instantiation_metadata=_instantiation_metadata)
elif streamed:
response.release_conn()
deserialized_headers = unset
        if self.headers is not None:
            # TODO: header deserialization is not implemented in this generated client
            deserialized_headers = unset
return self.response_cls(
response=response,
headers=deserialized_headers,
body=deserialized_body
)
class ApiClient:
"""Generic API client for OpenAPI client library builds.
OpenAPI generic API client. This client handles the client-
server communication, and is invariant across implementations. Specifics of
the methods and models for each application are generated from the OpenAPI
templates.
NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
:param configuration: .Configuration object for this client
:param header_name: a header to pass when making calls to the API.
:param header_value: a header value to pass when making calls to
the API.
:param cookie: a cookie to include in the header when making calls
to the API
:param pool_threads: The number of threads to use for async requests
to the API. More threads means more concurrent API requests.
"""
_pool = None
__json_encoder = JSONEncoder()
def __init__(
self,
configuration: typing.Optional[Configuration] = None,
header_name: typing.Optional[str] = None,
header_value: typing.Optional[str] = None,
cookie: typing.Optional[str] = None,
pool_threads: int = 1
):
if configuration is None:
configuration = Configuration()
self.configuration = configuration
self.pool_threads = pool_threads
self.rest_client = rest.RESTClientObject(configuration)
self.default_headers = {}
if header_name is not None:
self.default_headers[header_name] = header_value
self.cookie = cookie
# Set default User-Agent.
self.user_agent = 'OpenAPI-Generator/1.0.0/python'
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
self.close()
def close(self):
if self._pool:
self._pool.close()
self._pool.join()
self._pool = None
if hasattr(atexit, 'unregister'):
atexit.unregister(self.close)
@property
def pool(self):
"""Create thread pool on first request
avoids instantiating unused threadpool for blocking clients.
"""
if self._pool is None:
atexit.register(self.close)
self._pool = ThreadPool(self.pool_threads)
return self._pool
@property
def user_agent(self):
"""User agent for this API client"""
return self.default_headers['User-Agent']
@user_agent.setter
def user_agent(self, value):
self.default_headers['User-Agent'] = value
def set_default_header(self, header_name, header_value):
self.default_headers[header_name] = header_value
def __call_api(
self,
resource_path: str,
method: str,
path_params: typing.Optional[typing.Dict[str, typing.Any]] = None,
query_params: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]] = None,
headers: typing.Optional[HTTPHeaderDict] = None,
body: typing.Optional[typing.Union[str, bytes]] = None,
fields: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]] = None,
auth_settings: typing.Optional[typing.List[str]] = None,
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
host: typing.Optional[str] = None,
) -> urllib3.HTTPResponse:
# header parameters
headers = headers or {}
headers.update(self.default_headers)
if self.cookie:
headers['Cookie'] = self.cookie
# path parameters
if path_params:
for k, v in path_params.items():
# specified safe chars, encode everything
resource_path = resource_path.replace(
'{%s}' % k,
quote(str(v), safe=self.configuration.safe_chars_for_path_param)
)
# auth setting
self.update_params_for_auth(headers, query_params,
auth_settings, resource_path, method, body)
# request url
if host is None:
url = self.configuration.host + resource_path
else:
# use server/host defined in path or operation instead
url = host + resource_path
# perform request and return response
response = self.request(
method,
url,
query_params=query_params,
headers=headers,
fields=fields,
body=body,
stream=stream,
timeout=timeout,
)
return response
def call_api(
self,
resource_path: str,
method: str,
path_params: typing.Optional[typing.Dict[str, typing.Any]] = None,
query_params: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]] = None,
headers: typing.Optional[HTTPHeaderDict] = None,
body: typing.Optional[typing.Union[str, bytes]] = None,
fields: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]] = None,
auth_settings: typing.Optional[typing.List[str]] = None,
async_req: typing.Optional[bool] = None,
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
host: typing.Optional[str] = None,
) -> urllib3.HTTPResponse:
"""Makes the HTTP request (synchronous) and returns deserialized data.
To make an async_req request, set the async_req parameter.
:param resource_path: Path to method endpoint.
:param method: Method to call.
:param path_params: Path parameters in the url.
:param query_params: Query parameters in the url.
:param headers: Header parameters to be
placed in the request header.
:param body: Request body.
:param fields: Request post form parameters,
for `application/x-www-form-urlencoded`, `multipart/form-data`.
:param auth_settings: Auth Settings names for the request.
:param async_req: execute request asynchronously
:type async_req: bool, optional
:param stream: if True, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Also when True, if the openapi spec describes a file download,
the data will be written to a local filesystem file and the BinarySchema
instance will also inherit from FileSchema and FileIO
Default is False.
:type stream: bool, optional
:param timeout: timeout setting for this request. If a single
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param host: api endpoint host
:return:
If async_req parameter is True,
the request will be called asynchronously.
The method will return the request thread.
If parameter async_req is False or missing,
then the method will return the response directly.
"""
if not async_req:
return self.__call_api(
resource_path,
method,
path_params,
query_params,
headers,
body,
fields,
auth_settings,
stream,
timeout,
host,
)
return self.pool.apply_async(
self.__call_api,
(
resource_path,
method,
path_params,
query_params,
headers,
body,
fields,
auth_settings,
stream,
timeout,
host,
)
)
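# Minimal usage sketch (illustrative only; the endpoint path, path params and
# header values below are assumptions, not part of this generated client):
#
#   client = ApiClient()
#   response = client.call_api(
#       resource_path='/pets/{petId}',
#       method='GET',
#       path_params={'petId': 42},
#       headers=HTTPHeaderDict({'Accept': 'application/json'}),
#   )   # returns a urllib3.HTTPResponse
#
#   # With async_req=True the call is dispatched to the thread pool and a
#   # multiprocessing.pool.AsyncResult is returned; response = result.get()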
def request(
self,
method: str,
url: str,
query_params: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]] = None,
headers: typing.Optional[HTTPHeaderDict] = None,
fields: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]] = None,
body: typing.Optional[typing.Union[str, bytes]] = None,
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
) -> urllib3.HTTPResponse:
"""Makes the HTTP request using RESTClient."""
if method == "GET":
return self.rest_client.GET(url,
query_params=query_params,
stream=stream,
timeout=timeout,
headers=headers)
elif method == "HEAD":
return self.rest_client.HEAD(url,
query_params=query_params,
stream=stream,
timeout=timeout,
headers=headers)
elif method == "OPTIONS":
return self.rest_client.OPTIONS(url,
query_params=query_params,
headers=headers,
fields=fields,
stream=stream,
timeout=timeout,
body=body)
elif method == "POST":
return self.rest_client.POST(url,
query_params=query_params,
headers=headers,
fields=fields,
stream=stream,
timeout=timeout,
body=body)
elif method == "PUT":
return self.rest_client.PUT(url,
query_params=query_params,
headers=headers,
fields=fields,
stream=stream,
timeout=timeout,
body=body)
elif method == "PATCH":
return self.rest_client.PATCH(url,
query_params=query_params,
headers=headers,
fields=fields,
stream=stream,
timeout=timeout,
body=body)
elif method == "DELETE":
return self.rest_client.DELETE(url,
query_params=query_params,
headers=headers,
stream=stream,
timeout=timeout,
body=body)
else:
raise ApiValueError(
"http method must be `GET`, `HEAD`, `OPTIONS`,"
" `POST`, `PATCH`, `PUT` or `DELETE`."
)
def update_params_for_auth(self, headers, querys, auth_settings,
resource_path, method, body):
"""Updates header and query params based on authentication setting.
:param headers: Header parameters dict to be updated.
:param querys: Query parameters tuple list to be updated.
:param auth_settings: Authentication setting identifiers list.
:param resource_path: A string representation of the HTTP request resource path.
:param method: A string representation of the HTTP request method.
:param body: An object representing the body of the HTTP request.
The object type is the return value of _encoder.default().
"""
if not auth_settings:
return
for auth in auth_settings:
auth_setting = self.configuration.auth_settings().get(auth)
if auth_setting:
if auth_setting['in'] == 'cookie':
headers.add('Cookie', auth_setting['value'])
elif auth_setting['in'] == 'header':
if auth_setting['type'] != 'http-signature':
headers.add(auth_setting['key'], auth_setting['value'])
elif auth_setting['in'] == 'query':
querys.append((auth_setting['key'], auth_setting['value']))
else:
raise ApiValueError(
'Authentication token must be in `query` or `header`'
)
class Api:
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client: typing.Optional[ApiClient] = None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
@staticmethod
def _verify_typed_dict_inputs(cls: typing.Type[typing.TypedDict], data: typing.Dict[str, typing.Any]):
"""
Ensures that:
- required keys are present
- additional properties are not input
- values stored under required keys do not have the value unset
Note: detailed value checking is done in schema classes
"""
missing_required_keys = []
required_keys_with_unset_values = []
for required_key in cls.__required_keys__:
if required_key not in data:
missing_required_keys.append(required_key)
continue
value = data[required_key]
if value is unset:
required_keys_with_unset_values.append(required_key)
if missing_required_keys:
raise ApiTypeError(
'{} missing {} required arguments: {}'.format(
cls.__name__, len(missing_required_keys), missing_required_keys
)
)
if required_keys_with_unset_values:
raise ApiValueError(
'{} contains invalid unset values for {} required keys: {}'.format(
cls.__name__, len(required_keys_with_unset_values), required_keys_with_unset_values
)
)
disallowed_additional_keys = []
for key in data:
if key in cls.__required_keys__ or key in cls.__optional_keys__:
continue
disallowed_additional_keys.append(key)
if disallowed_additional_keys:
raise ApiTypeError(
'{} got {} unexpected keyword arguments: {}'.format(
cls.__name__, len(disallowed_additional_keys), disallowed_additional_keys
)
)
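# Behaviour sketch (PetQuery is a hypothetical TypedDict whose required key is
# 'name' and whose optional key is 'tag'; the errors follow the checks above):
#
#   Api._verify_typed_dict_inputs(PetQuery, {'name': 'rex'})           # ok
#   Api._verify_typed_dict_inputs(PetQuery, {})                        # ApiTypeError: missing required argument
#   Api._verify_typed_dict_inputs(PetQuery, {'name': unset})           # ApiValueError: unset value for required key
#   Api._verify_typed_dict_inputs(PetQuery, {'name': 'rex', 'x': 1})   # ApiTypeError: unexpected keyword argument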
def get_host(
self,
operation_id: str,
servers: typing.Tuple[typing.Dict[str, str], ...] = tuple(),
host_index: typing.Optional[int] = None
) -> typing.Optional[str]:
configuration = self.api_client.configuration
try:
if host_index is None:
index = configuration.server_operation_index.get(
operation_id, configuration.server_index
)
else:
index = host_index
server_variables = configuration.server_operation_variables.get(
operation_id, configuration.server_variables
)
host = configuration.get_host_from_settings(
index, variables=server_variables, servers=servers
)
except IndexError:
if servers:
raise ApiValueError(
"Invalid host index. Must be 0 <= index < %s" %
len(servers)
)
host = None
return host
class SerializedRequestBody(typing.TypedDict, total=False):
body: typing.Union[str, bytes]
fields: typing.Tuple[typing.Union[RequestField, tuple[str, str]], ...]
class RequestBody(StyleFormSerializer):
"""
A request body parameter
content: content_type to MediaType Schema info
"""
__json_encoder = JSONEncoder()
def __init__(
self,
content: typing.Dict[str, MediaType],
required: bool = False,
):
self.required = required
if len(content) == 0:
raise ValueError('Invalid value for content, the content dict must have >= 1 entry')
self.content = content
def __serialize_json(
self,
in_data: typing.Any
) -> typing.Dict[str, bytes]:
in_data = self.__json_encoder.default(in_data)
json_str = json.dumps(in_data, separators=(",", ":"), ensure_ascii=False).encode(
"utf-8"
)
return dict(body=json_str)
@staticmethod
def __serialize_text_plain(in_data: typing.Any) -> typing.Dict[str, str]:
if isinstance(in_data, frozendict):
raise ValueError('Unable to serialize type frozendict to text/plain')
elif isinstance(in_data, tuple):
raise ValueError('Unable to serialize type tuple to text/plain')
elif isinstance(in_data, NoneClass):
raise ValueError('Unable to serialize type NoneClass to text/plain')
elif isinstance(in_data, BoolClass):
raise ValueError('Unable to serialize type BoolClass to text/plain')
return dict(body=str(in_data))
def __multipart_json_item(self, key: str, value: Schema) -> RequestField:
json_value = self.__json_encoder.default(value)
return RequestField(name=key, data=json.dumps(json_value), headers={'Content-Type': 'application/json'})
def __multipart_form_item(self, key: str, value: Schema) -> RequestField:
if isinstance(value, str):
return RequestField(name=key, data=str(value), headers={'Content-Type': 'text/plain'})
elif isinstance(value, bytes):
return RequestField(name=key, data=value, headers={'Content-Type': 'application/octet-stream'})
elif isinstance(value, FileIO):
request_field = RequestField(
name=key,
data=value.read(),
filename=os.path.basename(value.name),
headers={'Content-Type': 'application/octet-stream'}
)
value.close()
return request_field
else:
return self.__multipart_json_item(key=key, value=value)
def __serialize_multipart_form_data(
self, in_data: Schema
) -> typing.Dict[str, typing.Tuple[RequestField, ...]]:
if not isinstance(in_data, frozendict):
raise ValueError(f'Unable to serialize {in_data} to multipart/form-data because it is not a dict of data')
"""
In a multipart/form-data request body, each schema property, or each element of a schema array property,
takes a section in the payload with an internal header as defined by RFC7578. The serialization strategy
for each property of a multipart/form-data request body can be specified in an associated Encoding Object.
When passing in multipart types, boundaries MAY be used to separate sections of the content being
transferred – thus, the following default Content-Types are defined for multipart:
If the (object) property is a primitive, or an array of primitive values, the default Content-Type is text/plain
If the property is complex, or an array of complex values, the default Content-Type is application/json
Question: how is the array of primitives encoded?
If the property is a type: string with a contentEncoding, the default Content-Type is application/octet-stream
"""
fields = []
for key, value in in_data.items():
if isinstance(value, tuple):
if value:
# values use explode = True, so the code makes a RequestField for each item with name=key
for item in value:
request_field = self.__multipart_form_item(key=key, value=item)
fields.append(request_field)
else:
# send an empty array as json because exploding will not send it
request_field = self.__multipart_json_item(key=key, value=value)
fields.append(request_field)
else:
request_field = self.__multipart_form_item(key=key, value=value)
fields.append(request_field)
return dict(fields=tuple(fields))
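# Sketch of the fields produced above for a hypothetical payload, using the
# default Content-Types described in the docstring:
#
#   in_data = frozendict({'name': 'rex', 'photo': b'...', 'tags': ('a', 'b')})
#   -> fields == (RequestField('name', 'rex',   headers={'Content-Type': 'text/plain'}),
#                 RequestField('photo', b'...', headers={'Content-Type': 'application/octet-stream'}),
#                 RequestField('tags', 'a',     headers={'Content-Type': 'text/plain'}),
#                 RequestField('tags', 'b',     headers={'Content-Type': 'text/plain'}))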
def __serialize_application_octet_stream(self, in_data: BinarySchema) -> typing.Dict[str, bytes]:
if isinstance(in_data, bytes):
return dict(body=in_data)
# FileIO type
result = dict(body=in_data.read())
in_data.close()
return result
def __serialize_application_x_www_form_data(
self, in_data: typing.Any
) -> typing.Dict[str, tuple[tuple[str, str], ...]]:
if not isinstance(in_data, frozendict):
raise ValueError(
f'Unable to serialize {in_data} to application/x-www-form-urlencoded because it is not a dict of data')
cast_in_data = self.__json_encoder.default(in_data)
fields = self._serialize_form(cast_in_data, explode=True, name='')
if not fields:
return {}
return {'fields': fields}
def serialize(
self, in_data: typing.Any, content_type: str
) -> SerializedRequestBody:
"""
Serializes in_data for the given content_type and returns a SerializedRequestBody dict.
If the dict contains 'body', that value is sent as the request body.
If the dict contains 'fields', that value is used as the fields input to encode_multipart_formdata.
The key of the returned dict is
- body for application/json, text/plain and application/octet-stream
- fields for multipart/form-data and application/x-www-form-urlencoded
"""
media_type = self.content[content_type]
if isinstance(in_data, media_type.schema):
cast_in_data = in_data
elif isinstance(in_data, (dict, frozendict)) and in_data:
cast_in_data = media_type.schema(**in_data)
else:
cast_in_data = media_type.schema(in_data)
# TODO check for and use encoding if it exists
# and content_type is multipart or application/x-www-form-urlencoded
if content_type == 'application/json':
return self.__serialize_json(cast_in_data)
elif content_type == 'text/plain':
return self.__serialize_text_plain(cast_in_data)
elif content_type == 'multipart/form-data':
return self.__serialize_multipart_form_data(cast_in_data)
elif content_type == 'application/x-www-form-urlencoded':
return self.__serialize_application_x_www_form_data(cast_in_data)
elif content_type == 'application/octet-stream':
return self.__serialize_application_octet_stream(cast_in_data)
raise NotImplementedError('Serialization has not yet been implemented for {}'.format(content_type))
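# Minimal usage sketch (the MediaType/schema wiring below is an assumption for
# illustration; real content maps come from the generated operation modules):
#
#   request_body = RequestBody(
#       content={'application/json': MediaType(schema=AnyTypeSchema)},
#       required=True,
#   )
#   serialized = request_body.serialize({'name': 'rex'}, 'application/json')
#   # serialized is a SerializedRequestBody, e.g. {'body': b'{"name":"rex"}'}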
| 38.862245
| 120
| 0.591403
|
fe99ed590525baa19138b9476290b9752e101ee8
| 3,390
|
py
|
Python
|
pollme/settings.py
|
Sbborashetti/pollme
|
deed93939238e3f59e6c910caddfe10d5644559c
|
[
"MIT"
] | null | null | null |
pollme/settings.py
|
Sbborashetti/pollme
|
deed93939238e3f59e6c910caddfe10d5644559c
|
[
"MIT"
] | null | null | null |
pollme/settings.py
|
Sbborashetti/pollme
|
deed93939238e3f59e6c910caddfe10d5644559c
|
[
"MIT"
] | null | null | null |
"""
Django settings for pollme project.
Generated by 'django-admin startproject' using Django 2.1.5.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'x*za6xf&_80ofdpae!yzq61g9ffikkx9$*iygbl$j7rr4wlf8t'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
ALLOWED_HOSTS = ['127.0.0.1','polllist.herokuapp.com']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'polls.apps.PollsConfig',
'accounts.apps.AccountsConfig',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'whitenoise.middleware.WhiteNoiseMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'pollme.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'pollme.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Asia/Dhaka'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'static')
]
STATIC_ROOT = os.path.join(BASE_DIR, 'assests')
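# Deployment note (assumption, not part of the original project config): with
# WhiteNoiseMiddleware enabled above, static assets are typically collected
# into STATIC_ROOT before serving, e.g.
#   python manage.py collectstatic --noinput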
| 26.484375
| 91
| 0.70118
|
630a02f14b7ce854b9662a2b57ded038b87249d9
| 423
|
py
|
Python
|
users/migrations/0004_profile_social_stackoverflow.py
|
Kimutai01/kimsearch
|
dcf484c91b25d09ad2d5b736e363a6dc3bbe7031
|
[
"MIT"
] | null | null | null |
users/migrations/0004_profile_social_stackoverflow.py
|
Kimutai01/kimsearch
|
dcf484c91b25d09ad2d5b736e363a6dc3bbe7031
|
[
"MIT"
] | null | null | null |
users/migrations/0004_profile_social_stackoverflow.py
|
Kimutai01/kimsearch
|
dcf484c91b25d09ad2d5b736e363a6dc3bbe7031
|
[
"MIT"
] | null | null | null |
# Generated by Django 4.0 on 2022-01-13 09:32
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0003_profile_location_skill'),
]
operations = [
migrations.AddField(
model_name='profile',
name='social_stackoverflow',
field=models.CharField(blank=True, max_length=200, null=True),
),
]
| 22.263158
| 74
| 0.621749
|
e4be150f44ea8de4f803cbec2442d587fa7bf2f5
| 5,097
|
py
|
Python
|
code/nn/act.py
|
niuwk/infonets
|
274e97c9a86144dd52cbe90caffff578a2f5d178
|
[
"BSD-3-Clause"
] | 8
|
2018-06-20T23:20:43.000Z
|
2020-01-12T01:32:06.000Z
|
code/nn/act.py
|
niuwk/infonets
|
274e97c9a86144dd52cbe90caffff578a2f5d178
|
[
"BSD-3-Clause"
] | null | null | null |
code/nn/act.py
|
niuwk/infonets
|
274e97c9a86144dd52cbe90caffff578a2f5d178
|
[
"BSD-3-Clause"
] | 4
|
2018-06-26T20:28:13.000Z
|
2021-06-17T13:39:56.000Z
|
# act.py
"""
Created on Sat May 5 22:24:18 2018
@author: Wentao Huang
"""
#import torch as tc
#from torch.nn import functional as F
#from ..utils.helper import is_number, to_Param
from .base import Base
from .fun import *
class PLU(Base):
r"""
Applies the piecewise linear unit function element-wise,
:math:`\text{PLU}(x) = \max(0, x-margin) + \text{negative_slope} * \min(0, x+margin)`
"""
def __init__(self, margin=0.5, negative_slope=1.0, inplace=False, name='PLU'):
super(PLU, self).__init__()
self.margin = margin
self.negative_slope = negative_slope
self.inplace = inplace
self.set_name(name)
self.training = False
def grad(self, input):
return plu_grad(input, self.margin, self.negative_slope)
def forward(self, input):
return plu(input, self.margin, self.negative_slope, self.inplace)
def extra_repr(self):
return 'margin={}, negative_slope={}, inplace={}, name={}'.format(
self.margin, self.negative_slope, self.inplace, self._name)
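# Worked example of the formula above (numbers follow directly from the
# definition; the actual tensor op lives in .fun.plu):
#   with margin=0.5, negative_slope=0.1:
#     PLU(2.0)  = max(0, 1.5)  + 0.1 * min(0, 2.5)  =  1.5
#     PLU(0.0)  = max(0, -0.5) + 0.1 * min(0, 0.5)  =  0.0
#     PLU(-2.0) = max(0, -2.5) + 0.1 * min(0, -1.5) = -0.15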
class SReLU(Base):
r"""
Applies the soft rectified linear unit function element-wise.
"""
def __init__(self, beta=1.0, inplace=False, name='SReLU'):
super(SReLU, self).__init__()
self.beta = beta
self.inplace = inplace
self.set_name(name)
self.training = False
def grad(self, input):
return srelu_grad(input, self.beta)
def forward(self, input):
return srelu(input, self.beta, self.inplace)
def extra_repr(self):
return 'beta={}, inplace={}, name={}'.format(
self.beta, self.inplace, self._name)
class Sigmoid(Base):
r"""
Applies the sigmoid function element-wise.
"""
def __init__(self, beta=1.0, inplace=False, name='Sigmoid'):
super(Sigmoid, self).__init__()
self.beta = beta
self.inplace = inplace
self.set_name(name)
self.training = False
def grad(self, input):
return sigmoid_grad(input, self.beta)
def forward(self, input):
if self.inplace:
if self.beta == 1.0:
return input.sigmoid_()
return input.mul_(self.beta).sigmoid_()
else:
if self.beta == 1.0:
return input.sigmoid()
return input.mul(self.beta).sigmoid_()
def extra_repr(self):
return 'beta={}, inplace={}, name={}'.format(
self.beta, self.inplace, self._name)
class Tanh(Base):
r"""
Applies the tanh function element-wise.
"""
def __init__(self, beta=1.0, inplace=False, name='Tanh'):
super(Tanh, self).__init__()
self.beta = beta
self.inplace = inplace
self.set_name(name)
self.training = False
def grad(self, input):
return tanh_grad(input, self.beta)
def forward(self, input):
if self.inplace:
if self.beta == 1.0:
return input.tanh_()
return input.mul_(self.beta).tanh_()
else:
if self.beta == 1.0:
return input.tanh()
return input.mul(self.beta).tanh_()
def extra_repr(self):
return 'beta={}, inplace={}, name={}'.format(
self.beta, self.inplace, self._name)
class LPTanh(Base):
r"""
Applies the linear plus tanh function element-wise.
"""
def __init__(self, beta=1.0, inplace=False, name='LPTanh'):
super(LPTanh, self).__init__()
self.beta = beta
self.inplace = inplace
self.set_name(name)
self.training = False
def grad(self, input):
return lptanh_grad(input, self.beta)
def forward(self, input):
return lptanh(input, self.beta, self.inplace)
def extra_repr(self):
return 'beta={}, inplace={}, name={}'.format(
self.beta, self.inplace, self._name)
class LPSReLU(Base):
r"""
Applies the linear plus soft rectified linear unit function element-wise.
"""
def __init__(self, alpha=0.5, beta=1.0, inplace=False, name='LPSReLU'):
super(LPSReLU, self).__init__()
self.alpha = alpha
self.beta = beta
self.inplace = inplace
self.set_name(name)
self.training = False
@property
def alpha(self):
return self._alpha
@alpha.setter
def alpha(self, value):
if value < 0.0:
raise ValueError("{}: The parameter alpha must be greater than zero".format(self._name))
elif value > 1.0:
raise ValueError("{}: The parameter alpha must be less than one".format(self._name))
else:
self._alpha = value
def grad(self, input):
return lpsrelu_grad(input, self.alpha, self.beta)
def forward(self, input):
return lpsrelu(input, self.alpha, self.beta, self.inplace)
def extra_repr(self):
return 'alpha={}, beta={}, inplace={}, name={}'.format(
self.alpha, self.beta, self.inplace, self._name)
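# Usage sketch (assumes Base behaves like a torch.nn.Module so that calling an
# instance dispatches to forward(); the input tensor is illustrative):
#
#   import torch as tc
#   act = SReLU(beta=2.0)
#   x = tc.randn(4, 8)
#   y = act(x)        # element-wise soft rectified linear unit
#   g = act.grad(x)   # element-wise derivative via .fun.srelu_grad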
| 29.462428
| 99
| 0.586031
|