| max_stars_repo_path (string, 3–269 chars) | max_stars_repo_name (string, 4–119 chars) | max_stars_count (int64, 0–191k) | id (string, 1–7 chars) | content (string, 6–1.05M chars) | score (float64, 0.23–5.13) | int_score (int64, 0–5) |
|---|---|---|---|---|---|---|
| interventions/plot_intervention.py | priyald17/emissions-assumptions | 1 | 12779751 |
import seaborn as sns
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
import pandas as pd
import os
dam_cols_ap2 = ['co2_dam', 'so2_dam_ap2', 'nox_dam_ap2', 'pm25_dam_ap2']
dam_cols_eas = ['co2_dam', 'so2_dam_eas', 'nox_dam_eas', 'pm25_dam_eas']
# Plotting total damage stacked plot
def plot_total_damages(dam_type, intervention_effects_df, example_case, title=False):
fontsize=18
plt.rcParams['hatch.linewidth'] = 0.5
sns.set(style="whitegrid", color_codes=True)
dam_cols = dam_cols_eas if dam_type == 'EASIUR' else dam_cols_ap2
se_col = 'dam_{}-se'.format('eas' if dam_type == 'EASIUR' else 'ap2')
df = get_onelabel_formatted(dam_cols, intervention_effects_df, example_case)
if example_case == 'building_lighting':
df = (df.set_index(['spat', 'kind', 'time'])).reset_index()
elif example_case == 'demand_response':
df = (df.set_index(['spat', 'kind', 'time'])/1e6).reset_index() # millions of dollars
elif example_case == 'summer_load':
df = (df.set_index(['spat', 'kind', 'time'])/1e9).reset_index() # billions of dollars
df_cum = df.set_index(['spat', 'kind', 'time']).cumsum(axis=1).reset_index()
# Stacked bar plot
g = sns.FacetGrid(data=df_cum, col='spat', size=3, aspect=1)
hatches = ['||', '///', '', '\\\\\\']
hue_orders = dict([('building_lighting', ['Annual', 'Monthly', 'Monthly TOD']),
('demand_response', ['Annual', 'Monthly']),
('summer_load', ['Annual', 'Monthly', 'Monthly TOD', 'Hourly'])])
colors = dict([('building_lighting', [0,2,3]), ('demand_response', [0,2]), ('summer_load', [0,2,3,4])])
g.map(sns.barplot, 'kind', dam_cols[-1], 'time',
hue_order=hue_orders[example_case], order=['Marginal', 'Average'],
palette=[sns.color_palette('muted')[x] for x in colors[example_case]], edgecolor='black', hatch=hatches[0])
g.map(sns.barplot, 'kind', dam_cols[-2], 'time',
hue_order=hue_orders[example_case], order=['Marginal', 'Average'],
palette=[sns.color_palette('muted')[x] for x in colors[example_case]], edgecolor='black', hatch=hatches[1])
g.map(sns.barplot, 'kind', dam_cols[-3], 'time',
hue_order=hue_orders[example_case], order=['Marginal', 'Average'],
palette=[sns.color_palette('muted')[x] for x in colors[example_case]], edgecolor='black', hatch=hatches[2])
g.map(sns.barplot, 'kind', dam_cols[-4], 'time',
hue_order=hue_orders[example_case], order=['Marginal', 'Average'],
palette=[sns.color_palette('muted')[x] for x in colors[example_case]], edgecolor='black', hatch=hatches[3]).set_titles('{col_name}')
g.despine(left='true')
# Legend, fontsize, and other formatting
xoffset=0.035
for i, ax in enumerate(g.axes.flatten()):
ax.set_xlabel('') # No x-label
if i == 0:
# y label on left plot
dollar_units = dict([('building_lighting', ''), ('demand_response', ' millions'), ('summer_load', ' billions')])
ax.set_ylabel('Total damages\n{}(\${})'.format(
'' if example_case == 'summer_load' else 'avoided ',
dollar_units[example_case]))
# pollutants legend
leg_dict = dict(zip(dam_cols, ['CO$_2$', 'SO$_2$', 'NO$_x$', 'PM$_{{2.5}}$']))
dam_patches = []
for dam, hatch in zip(dam_cols, hatches[::-1]):
patch = mpatches.Patch(facecolor='white', label=leg_dict[dam], edgecolor='black', hatch=hatch)
dam_patches.append(patch)
offsets = dict([('building_lighting', (0.16, -0.175)), ('demand_response', (0.18, -0.17)), ('summer_load', (0.16, -0.175))])
lgd = ax.legend(handles=dam_patches, loc='center left',
bbox_to_anchor=(0.3+xoffset, -0.15), ncol=4, frameon=True, fontsize=fontsize,
bbox_transform=plt.gcf().transFigure)
plt.text(offsets[example_case][0]+xoffset, offsets[example_case][1], 'Pollutants:', transform=plt.gcf().transFigure,
fontsize=fontsize, fontweight='bold')
lgd.get_frame().set_edgecolor('white')
if i == 1:
# temporal scope legend (same length as pollutants legend for alignment)
if example_case == 'building_lighting':
blank_patch = mpatches.Patch(color='white', label='')
ann_patch = mpatches.Patch(color=sns.color_palette('muted')[0], label='Annual', edgecolor='black')
month_patch = mpatches.Patch(color=sns.color_palette('muted')[2], label='Monthly', edgecolor='black')
tod_patch = mpatches.Patch(color=sns.color_palette('muted')[3], label='Monthly TOD', edgecolor='black')
time_patches = [blank_patch, ann_patch, month_patch, tod_patch, blank_patch]
lgd2 = ax.legend(handles=time_patches, loc='center left',
bbox_to_anchor=(0.19+xoffset, -0.025), ncol=5, frameon=True, fontsize=fontsize,
bbox_transform=plt.gcf().transFigure)
plt.text(0.09+xoffset, -0.045, 'Temporal scopes:', transform=plt.gcf().transFigure,
fontsize=fontsize, fontweight='bold')
elif example_case == 'demand_response':
blank_patch = mpatches.Patch(color='white', label='')
ann_patch = mpatches.Patch(color=sns.color_palette('muted')[0], label='Annual', edgecolor='black')
tod_patch = mpatches.Patch(color=sns.color_palette('muted')[2], label='Monthly', edgecolor='black')
time_patches = [blank_patch, ann_patch, tod_patch, blank_patch]
lgd2 = ax.legend(handles=time_patches, loc='center left',
bbox_to_anchor=(0.3+xoffset, -0.0115), ncol=4, frameon=True, fontsize=fontsize,
bbox_transform=plt.gcf().transFigure)
plt.text(0.11+xoffset, -0.0425, 'Temporal scopes:', transform=plt.gcf().transFigure,
fontsize=fontsize, fontweight='bold')
elif example_case == 'summer_load':
blank_patch = mpatches.Patch(color='white', label='')
ann_patch = mpatches.Patch(color=sns.color_palette('muted')[0], label='Annual', edgecolor='black')
month_patch = mpatches.Patch(color=sns.color_palette('muted')[2], label='Monthly', edgecolor='black')
tod_patch = mpatches.Patch(color=sns.color_palette('muted')[3], label='Monthly TOD', edgecolor='black')
hr_patch = mpatches.Patch(color=sns.color_palette('muted')[4], label='Hourly', edgecolor='black')
time_patches = [ann_patch, month_patch, tod_patch, hr_patch]
lgd2 = ax.legend(handles=time_patches, loc='center left',
bbox_to_anchor=(0.27+xoffset, -0.025), ncol=4, frameon=True, fontsize=fontsize-1,
bbox_transform=plt.gcf().transFigure)
plt.text(0.09+xoffset, -0.045, 'Temporal scopes:', transform=plt.gcf().transFigure,
fontsize=fontsize, fontweight='bold')
lgd2.get_frame().set_edgecolor('white')
# Annotate baseline
# For building lighting: PJM fossil-plus marginal monthly TOD
# For demand response: PJM fossil-plus marginal monthly
# For summer load: PJM fossil-plus average monthly TOD
baseline_locs = dict([('building_lighting', (2, 0.27)),
('demand_response', (2, 0.2)),
('summer_load', (2, 1.3))])
if i == baseline_locs[example_case][0]:
baseline_x = baseline_locs[example_case][1]
patch_width = [p.get_width() for p in ax.patches][0]
baseline_y = max([p.get_height() \
for p in ax.patches if abs(p.get_xy()[0]+patch_width/2-baseline_x)<=patch_width/4])
ax.text(s='*', x=baseline_x, y=1.05 * baseline_y,
horizontalalignment='center', verticalalignment='center',
fontsize=fontsize*2, fontweight='bold')
# Set font size
for item in ([
# ax.title,
ax.xaxis.label, ax.yaxis.label] + ax.get_xticklabels() + ax.get_yticklabels()):
item.set_fontsize(fontsize)
ax.title.set_fontsize(fontsize-2)
# Hacky errorbars
df_tot = get_onelabel_formatted(se_col[:-3], intervention_effects_df, example_case)
df_tot_se = get_onelabel_formatted(se_col, intervention_effects_df, example_case)
errbar_locs = dict([('building_lighting', [-0.27, 0, 0.27]), ('demand_response', [-0.2, 0.2]), ('summer_load', [-0.3,-0.1, 0.1])])
for i, ax in enumerate(g.axes[0]):
spat = df_tot['spat'].dtype.categories[i]
df_tot_slice = df_tot.query('spat == @spat').query('kind == "Marginal"')
df_tot_se_slice = df_tot_se.query('spat == @spat').query('kind == "Marginal"')
if example_case == 'building_lighting':
val, errval = df_tot_slice[se_col[:-3]].values, df_tot_se_slice[se_col].values
elif example_case == 'demand_response':
val, errval = df_tot_slice[se_col[:-3]].values/1e6, df_tot_se_slice[se_col].values/1e6
elif example_case == 'summer_load':
val, errval = df_tot_slice[se_col[:-3]].values/1e9, df_tot_se_slice[se_col].values/1e9
ax.errorbar(errbar_locs[example_case], val, yerr=errval, ms=20, color='black',
linewidth=0, elinewidth=2, capsize=2, capthick=2)
# Line around legend
fig = plt.gcf()
if example_case == 'building_lighting':
leg_line = \
mpatches.Rectangle(
(0.073+xoffset, -0.2), 0.8, 0.24, facecolor='none', edgecolor='lightgray',
transform=fig.transFigure, figure=fig)
elif example_case == 'demand_response':
leg_line = \
mpatches.Rectangle(
(0.1+xoffset, -0.195), 0.77, 0.24, facecolor='none', edgecolor='lightgray',
transform=fig.transFigure, figure=fig)
elif example_case == 'summer_load':
leg_line = \
mpatches.Rectangle(
(0.073+xoffset, -0.2), 0.875, 0.24, facecolor='none', edgecolor='lightgray',
transform=fig.transFigure, figure=fig)
fig.patches.extend([leg_line])
extra_artists = (lgd, lgd2)
if title:
sup=fig.text(0.5, 0.9, 'Total damages ({})\n'.format(dam_type),
fontsize=fontsize, fontweight='bold', fontstyle='italic',
transform=fig.transFigure, ha='center')
extra_artists = extra_artists + (sup,)
plt.tight_layout()
dirname = os.path.join('plots', example_case)
if not os.path.exists(dirname): os.makedirs(dirname)
g.fig.savefig(os.path.join(dirname,
'{}-stacked-with-error{}.pdf'.format(dam_type, '-titled' if title else '')),
bbox_extra_artists=extra_artists, bbox_inches='tight')
# Plotting one graph per damage factor
def get_stacked_plot(label, intervention_effects_df, example_case):
df = get_onelabel_formatted(label, intervention_effects_df, example_case)
df_se = get_onelabel_formatted('{}-se'.format(label), intervention_effects_df, example_case)
# Get bar plot
sns.set(style="whitegrid")
colors = dict([('building_lighting', [0,2,3]), ('demand_response', [0,2]), ('summer_load', [0,2,3,4])])
g = sns.catplot(x='kind', y=label, hue='time', col='spat', data=df,
kind='bar', palette=[sns.color_palette('muted')[x] for x in colors[example_case]], legend=False, ci=None,
height=3, aspect=1).set_titles('{col_name}')
g.despine(left=True);
# Adjust font size and add legend
fontsize=18
for i, ax in enumerate(g.axes.flatten()):
ax.set_xlabel('')
for item in ([#ax.title,
ax.xaxis.label, ax.yaxis.label] +
ax.get_xticklabels() + ax.get_yticklabels()):
item.set_fontsize(fontsize)
ax.title.set_fontsize(fontsize-2)
if i == 0:
ncols = dict([('building_lighting', 3), ('demand_response', 2), ('summer_load', 4)])
lgd = ax.legend(loc='center left', bbox_to_anchor=(0.75, -0.3), ncol=ncols[example_case], frameon=True, fontsize=fontsize)
ax.set_ylabel(format_axis(label, example_case))
# Annotate baseline
# For building lighting: PJM fossil-plus marginal monthly TOD
# For demand response: PJM fossil-plus marginal monthly
# For summer load: PJM fossil-plus average monthly TOD
baseline_locs = dict([('building_lighting', (2, 0.27)), ('demand_response', (2, 0.2)), ('summer_load', (2, 1.3))])
if i == baseline_locs[example_case][0]:
baseline_x = baseline_locs[example_case][1]
patch_width = [p.get_width() for p in ax.patches][0]
baseline_y = max([p.get_height() \
for p in ax.patches if abs(p.get_xy()[0]+patch_width/2-baseline_x)<=patch_width/4])
ax.text(s='*', x=baseline_x, y=1.05 * baseline_y,
horizontalalignment='center', verticalalignment='center',
fontsize=fontsize*2, fontweight='bold')
# Hacky errorbars
errbar_locs = dict([('building_lighting', [-0.27, 0, 0.27]), ('demand_response', [-0.2, 0.2]), ('summer_load', [-0.3,-0.1, 0.1])])
for i, ax in enumerate(g.axes[0]):
spat = df['spat'].dtype.categories[i]
df_slice = df.query('spat == @spat').query('kind == "Marginal"')
df_se_slice = df_se.query('spat == @spat').query('kind == "Marginal"')
ax.errorbar(errbar_locs[example_case], df_slice[label].values,
yerr=df_se_slice['{}-se'.format(label)].values, ms=20, color='black',
linewidth=0, elinewidth=2, capsize=2, capthick=2)
# Plot title
fig = plt.gcf()
# sup=fig.text(0.5, 1, format_title(label),
# fontsize=fontsize, fontweight='bold', fontstyle='italic',
# transform=fig.transFigure, ha='center')
plt.tight_layout()
dirname = os.path.join('plots', example_case)
if not os.path.exists(dirname): os.makedirs(dirname)
g.fig.savefig(os.path.join(dirname, 'si-{}.pdf'.format(label)),
bbox_extra_artists=(lgd,), #(lgd,sup),
bbox_inches='tight')
## Formatting helpers
def get_dam_name(dam_abbr):
return 'AP2' if dam_abbr == 'ap2' else 'EASIUR'
FULL_DAMS = ['dam_ap2', 'dam_eas']
def format_title(label):
l = label.split('_')
if label in FULL_DAMS:
t = 'Total damages ({})'.format('AP2' if l[1] == 'ap2' else 'EASIUR')
else:
t = '{0}$_{{{1}}}$ {2}'.format(l[0][:2].upper(), l[0][2:], 'emissions' if l[1] == 'kg' else 'damages')
if len(l) > 2: t += ' ({})'.format('AP2' if l[2] == 'ap2' else 'EASIUR')
return t
def format_axis(label, example_case):
l = label.split('_')
if example_case == 'summer_load':
if label in FULL_DAMS:
t = 'Total damages ($)'
elif len(l) > 2 or l[1] == 'dam':
t = 'Damages ($)'
else:
t = 'Emissions (kg)'
else:
if label in FULL_DAMS:
t = 'Total damages\navoided ($)'
elif len(l) > 2 or l[1] == 'dam':
t = 'Damages\navoided ($)'
else:
t = 'Emissions\navoided (kg)'
return t
# Get formatted df with intervention effects for given label
def get_onelabel_formatted(label, intervention_effects_df, example_case):
kind_map = dict([('MEF', 'Marginal'), ('AEF', 'Average')])
time_map = dict([('YearOnly', 'Annual'), ('MonthTOD', 'Monthly TOD'), ('Month', 'Monthly'), ('Hour', 'Hourly')])
df = intervention_effects_df[label].reset_index()
df['spat'] = df.apply(
lambda x: '{} ({}-{}{})'.format(
x['region'], x['fuel_type'][:-4].lower(), x['fuel_type'][-4:].lower(),
' 2016' if x['year'] == 2016 else ''), axis=1)
df['spat'] = df['spat'].str.replace('fossil-plus', 'fossil+non-emit')
df = df.drop(['region', 'fuel_type', 'year'], axis=1)
df['kind'] = df['kind'].map(lambda x: kind_map[x]).astype(
pd.CategoricalDtype(categories=['Marginal', 'Average'], ordered=True))
times = dict([('building_lighting', ['Annual', 'Monthly', 'Monthly TOD']),
('demand_response', ['Annual', 'Monthly']),
('summer_load', ['Annual', 'Monthly', 'Monthly TOD', 'Hourly'])])
df['time'] = df['time'].map(lambda x: time_map[x]).astype(
pd.CategoricalDtype(categories=times[example_case], ordered=True))
df['spat'] = df['spat'].astype(pd.CategoricalDtype(
categories=['PJM (fossil-only)', 'PJM (fossil+non-emit 2016)', 'PJM (fossil+non-emit)', 'RFC (fossil-only)'],
ordered=True))
df = df.sort_values(['spat', 'kind', 'time'])
return df
| 2.515625 | 3 |
| release/stubs.min/System/ComponentModel/__init___parts/RefreshEventArgs.py | YKato521/ironpython-stubs | 0 | 12779752 |
class RefreshEventArgs(EventArgs):
"""
Provides data for the System.ComponentModel.TypeDescriptor.Refreshed event.
RefreshEventArgs(componentChanged: object)
RefreshEventArgs(typeChanged: Type)
"""
@staticmethod
def __new__(self, *__args):
"""
__new__(cls: type,componentChanged: object)
__new__(cls: type,typeChanged: Type)
"""
pass
ComponentChanged = property(
lambda self: object(), lambda self, v: None, lambda self: None
)
"""Gets the component that changed its properties,events,or extenders.
Get: ComponentChanged(self: RefreshEventArgs) -> object
"""
TypeChanged = property(
lambda self: object(), lambda self, v: None, lambda self: None
)
"""Gets the System.Type that changed its properties or events.
Get: TypeChanged(self: RefreshEventArgs) -> Type
"""
| 2 | 2 |
| authorship_unmasking/unmasking/interfaces.py | torond/unmasking | 5 | 12779753 |
# Copyright (C) 2017-2019 <NAME>, Webis Group
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from authorship_unmasking.features.interfaces import FeatureSet
from authorship_unmasking.event.dispatch import EventBroadcaster, MultiProcessEventContext
from authorship_unmasking.event.events import UnmaskingTrainCurveEvent
from authorship_unmasking.job.interfaces import Strategy
from sklearn.model_selection import cross_validate
from sklearn.svm import LinearSVC
import numpy
from abc import ABCMeta, abstractmethod
import sys
from typing import List
import warnings
class UnmaskingStrategy(Strategy, metaclass=ABCMeta):
"""
Base class for unmasking execution strategies.
Events published by this class:
* `onUnmaskingRoundFinished`: [type: UnmaskingTrainCurveEvent]
fired whenever a single round of unmasking has finished
to update accuracy curves
* `onUnmaskingFinished`: [type: UnmaskingTrainCurveEvent]
fired when unmasking curve generation for a text has finished
"""
def __init__(self):
"""
Initialize unmasking strategy. LinearSVC() is used as default estimator.
"""
self._buffer_curves = True
self._values = []
self._iterations = 10
self._vector_size = 250
self._relative = False
self._folds = 10
self._monotonize = False
self._use_mean_coefs = True
@property
def iterations(self) -> int:
"""Number of unmasking iterations."""
return self._iterations
@iterations.setter
def iterations(self, iterations: int):
"""Set number of unmasking iterations."""
self._iterations = iterations
@property
def vector_size(self) -> int:
"""Feature vector size."""
return self._vector_size
@vector_size.setter
def vector_size(self, vector_size: int):
"""Set feature vector size."""
self._vector_size = vector_size
@property
def relative(self) -> bool:
"""Whether to use relative or absolute feature weights."""
return self._relative
@relative.setter
def relative(self, relative: bool):
"""Set whether to use relative or absolute feature weights."""
self._relative = relative
@property
def folds(self) -> int:
"""Number of cross-validation folds to use for discriminating feature vectors."""
return self._folds
@folds.setter
def folds(self, folds: int):
"""Set number of cross-validation folds to use for discriminating feature vectors."""
self._folds = folds
@property
def use_mean_coefs(self) -> bool:
"""Whether to use mean feature coefficients for vector transformation."""
return self._use_mean_coefs
@use_mean_coefs.setter
def use_mean_coefs(self, use_mean_coefs: bool):
"""Set whether to use mean coefficients"""
self._use_mean_coefs = use_mean_coefs
@property
def monotonize(self) -> bool:
"""Whether to monotonize curves."""
return self._monotonize
@monotonize.setter
def monotonize(self, monotonize: bool):
"""Set whether to monotonize curves."""
self._monotonize = monotonize
@property
def buffer_curves(self) -> bool:
"""Whether to buffer curves."""
return self._buffer_curves
@buffer_curves.setter
def buffer_curves(self, buffer: bool):
"""Set whether to buffer curves. Set to False to send update events after each round."""
if not buffer:
print('WARNING: Curve buffering is turned off.', file=sys.stderr)
print(' Set "buffer_curves" to true in your job config for better performance.\n', file=sys.stderr)
self._buffer_curves = buffer
# noinspection PyPep8Naming
async def run(self, fs: FeatureSet):
"""
Run ``m`` rounds of unmasking on given parametrized feature set.
:param fs: parametrized feature set to run unmasking on
"""
clf = LinearSVC()
X = []
y = []
if self._relative:
it = fs.get_features_relative(self._vector_size)
else:
it = fs.get_features_absolute(self._vector_size)
for row in it:
l = len(row)
X.append(row[0:l // 2])
X.append(row[l // 2:l])
# cls either "text 0" or "text 1" of a pair
y.append(0)
y.append(1)
X = numpy.array(X)
y = numpy.array(y)
group_id = UnmaskingTrainCurveEvent.generate_group_id([self.__class__.__name__ + ":" + fs.pair.pair_id])
event = UnmaskingTrainCurveEvent(group_id, 0, self._iterations, fs.pair, fs.__class__)
values = []
for i in range(self._iterations):
if MultiProcessEventContext().terminate_event.is_set():
return
try:
cv = cross_validate(clf, X, y, cv=self._folds, return_estimator=True, return_train_score=False)
score = max(0.0, (cv['test_score'].mean() - .5) * 2)
cv_models = cv["estimator"]
if self._monotonize:
values.append(score)
else:
values.append(score)
event.values = values
if len(cv_models[0].coef_.shape) > 1:
coef = numpy.array([c.coef_[0] for c in cv_models])
else:
coef = numpy.array([c.coef_ for c in cv_models])
if self._use_mean_coefs:
coef = numpy.mean(coef, axis=0)
if not self._monotonize and not self._buffer_curves:
await EventBroadcaster().publish("onUnmaskingRoundFinished", event, self.__class__)
event = UnmaskingTrainCurveEvent.new_event(event)
if i < self._iterations - 1:
X = await self.transform(X, coef)
if X.size == 0:
# Nothing to do anymore
break
except ValueError:
continue
if self._monotonize:
event.values = self._do_monotonize(values)
if self._monotonize or self._buffer_curves:
await EventBroadcaster().publish("onUnmaskingRoundFinished", event, self.__class__)
event = UnmaskingTrainCurveEvent.new_event(event)
await EventBroadcaster().publish("onUnmaskingFinished", event, self.__class__)
def _do_monotonize(self, values: List[float]):
# monotonize from the left
values_l = numpy.zeros(len(values))
prev_value = 1.0
for i, v in enumerate(values):
values_l[i] = min(prev_value, v)
prev_value = values_l[i]
# monotonize from the right
values_r = numpy.zeros(len(values))
prev_value = 0.0
for i in range(len(values) - 1, -1, -1):
values_r[i] = max(prev_value, values[i])
prev_value = values_r[i]
# calculate squared differences to find the better of both approximations
values_arr = numpy.array(values)
delta_l = numpy.sum(numpy.square(values_arr - values_l))
delta_r = numpy.sum(numpy.square(values_arr - values_r))
if delta_l <= delta_r:
return list(values_l)
return list(values_r)
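# Worked example (illustrative numbers, not from the original sources): for a raw
# accuracy curve values = [1.0, 0.6, 0.9, 0.85, 0.5], monotonizing from the left
# gives [1.0, 0.6, 0.6, 0.6, 0.5] (squared error 0.1525), while monotonizing from
# the right gives [1.0, 0.9, 0.9, 0.85, 0.5] (squared error 0.09), so
# _do_monotonize returns the right-hand approximation in this case.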
@abstractmethod
async def transform(self, data: numpy.ndarray, coefs: numpy.ndarray) -> numpy.ndarray:
"""
Transform the input tensor according to the chosen unmasking strategy.
:param data: input feature matrix of shape (m_samples, n_features)
:param coefs: trained feature coefficients for each CV fold (of shape (k_folds, n_features)
or (1, n_features) if use_mean_coefs is True)
:return: output feature tensor (may have contain different number of features,
but the number of samples must be the same)
"""
pass
| 1.984375 | 2 |
| hwtLib/avalon/sim/mmAgent_test.py | optical-o/hwtLib | 0 | 12779754 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import unittest
from hwt.hdl.constants import READ, WRITE
from hwt.interfaces.utils import addClkRstn
from hwt.simulator.simTestCase import SingleUnitSimTestCase
from hwt.synthesizer.unit import Unit
from hwtLib.avalon.mm import AvalonMM, RESP_OKAY
from hwtLib.avalon.sim.ram import AvalonMMSimRam
from hwtSimApi.constants import CLK_PERIOD
from pyMathBitPrecise.bit_utils import mask
class AvalonMmWire(Unit):
def _declr(self):
addClkRstn(self)
self.s = AvalonMM()
self.m = AvalonMM()._m()
def _impl(self):
self.m(self.s)
class AvalonMmAgentTC(SingleUnitSimTestCase):
@classmethod
def getUnit(cls):
cls.u = AvalonMmWire()
return cls.u
def test_nop(self):
u = self.u
self.runSim(10 * CLK_PERIOD)
self.assertEmpty(u.m._ag.req)
self.assertEmpty(u.m._ag.wData)
self.assertEmpty(u.s._ag.rData)
self.assertEmpty(u.s._ag.wResp)
def test_pass_data(self, N=8):
assert N % 2 == 0, N
u = self.u
m = mask(u.s.DATA_WIDTH // 8)
STEP = u.s.DATA_WIDTH // 8
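# Descriptive note (added, assuming mask(n) yields n set bits): 'm' is the full
# byte-enable mask, one bit per byte lane of the data bus, and STEP is the bus
# width in bytes, used below to space the transaction addresses by one word.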
# rw, address, burstCount
inAddr = [
(READ if (i % 2) == 0 else WRITE, i * STEP, 1)
for i in range(N)
]
u.s._ag.req.extend(inAddr)
# d, be
inW = [
(i + 1, m)
for i in range(N // 2)
]
u.s._ag.wData.extend(inW)
# readData, response
inR = [
(i + 1, RESP_OKAY)
for i in range(N // 2)
]
u.m._ag.rData.extend(inR)
inWResp = [RESP_OKAY for _ in range(N // 2)]
u.m._ag.wResp.extend(inWResp)
t = N + 5
self.runSim(t * CLK_PERIOD)
ae = self.assertValSequenceEqual
ae(u.m._ag.req, inAddr)
ae(u.m._ag.wData, inW)
ae(u.s._ag.rData, inR)
ae(u.s._ag.wResp, inWResp)
def test_sim_ram(self, N=8):
u = self.u
m = mask(u.s.DATA_WIDTH // 8)
STEP = u.s.DATA_WIDTH // 8
mem = AvalonMMSimRam(u.m)
inAddr = [
(READ if (i % 2) == 0 else WRITE, i * STEP, 1)
for i in range(N)
]
for i in range(N):
if i % 2 != 0:
continue
mem.data[i] = i
u.s._ag.req.extend(inAddr)
# d, be
inW = [
(i, m)
for i in range(N) if (i % 2) == 1
]
u.s._ag.wData.extend(inW)
t = N + 5
self.runSim(t * CLK_PERIOD)
self.assertValSequenceEqual([mem.data[i] for i in range(N)],
[i for i in range(N)])
if __name__ == "__main__":
suite = unittest.TestSuite()
# suite.addTest(AvalonMmAgentTC('test_sim_ram'))
suite.addTest(unittest.makeSuite(AvalonMmAgentTC))
runner = unittest.TextTestRunner(verbosity=3)
runner.run(suite)
| 1.953125 | 2 |
| gmaltcli/tests/tools.py | gmalt/hgt2sql | 5 | 12779755 |
class MockCallable(object):
def __init__(self):
self.called = False
self.args = ()
self.kwargs = {}
def __call__(self, *args, **kwargs):
self.called = True
self.args = args
self.kwargs = kwargs
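# Example use (a sketch, not part of the original test suite): MockCallable
# records that it was called and with which arguments, so tests can patch it in
# place of a real function and assert on the captured call afterwards. The file
# name below is illustrative only.
#
# fake = MockCallable()
# fake('N45E006.hgt', overwrite=True)
# assert fake.called
# assert fake.args == ('N45E006.hgt',)
# assert fake.kwargs == {'overwrite': True}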
| 2.765625 | 3 |
| userapp/urls.py | ZiYin-ss/pythonnetshop | 0 | 12779756 |
# coding=utf-8
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^register/$', views.RegisterView.as_view()),
url(r'^checkUname/$', views.CheckUnameView.as_view()),
url(r'^center/$', views.CenterView.as_view()),
url(r'^logout/$', views.LogoutView.as_view()),
url(r'^login/$', views.LoginView.as_view()),
url(r'^loadCode.jpg$', views.LoadCodeView.as_view()),
url(r'^checkcode/$', views.CheckCodeView.as_view()),
url(r'^address/$', views.AddressView.as_view()),
url(r'^loadArea/$', views.LoadAreaView.as_view()),
]
| 1.609375 | 2 |
| main.py | PratikshaJain37/Pixels-Fighting-pygame | 0 | 12779757 |
# Main.py - Pixels Fighting #
# Author: <NAME> #
# ---------------------#
# Imports #
import pygame
from pygame.locals import *
from helpers import *
import random
import numpy as np
import time
# ---------------------#
# Initialize number of rows/columns
INT = 100
INT_SQ = INT*INT
# Initialize size of arrays
SIZE = 5
# Initialize Pygame
pygame.init()
# Initialize screen, status and clock
screen = pygame.display.set_mode((80+INT*SIZE,160+INT*SIZE))
running = True
clock = pygame.time.Clock()
# Defining Colors
COLOR_ALIVE = (random.randint(0,255),random.randint(0,255),random.randint(0,255))
COLOR_DEAD = (random.randint(0,255),random.randint(0,255),random.randint(0,255))
# Initialize Status Array - Making an array with half dead and half alive
zero = np.zeros((INT,INT//2), dtype=int)
one = np.ones((INT,INT//2), dtype=int)
current_status_array = np.concatenate((zero,one), axis=1)
# ---------------------#
# For Title Text to be displayed
# Defining font style and size
font = pygame.font.Font('freesansbold.ttf', 32)
text_title = font.render('Pixels Fighting', True, (255,255,255), (0,0,0))
textRectTitle = text_title.get_rect()
textRectTitle.center = (40+INT*SIZE/2, 40)
# ---------------------#
# Defining Box Class
class Box():
# Status can be dead (0) or alive(1);
def __init__(self, x, y, alive):
self.x = x
self.y = y
self.alive = alive
self.surf = pygame.Surface((SIZE,SIZE))
self.rect = (40 + SIZE*self.y, 100 + SIZE*self.x)
# Function to fill surface with color
def assign_color(self):
if self.alive == 0:
self.surf.fill(COLOR_DEAD)
else:
self.surf.fill(COLOR_ALIVE)
screen.blit(self.surf,self.rect)
# Function to update surface; as per current_status_array
def update(self):
self.alive = current_status_array[self.x][self.y]
self.assign_color()
# ---------------------#
# Creating 'INT_SQ' instances of box class, and appending them to a list for accessibility
boxes = []
for i in range(INT_SQ):
# x,y will be filled sequentially
x = i//INT
y = i%INT
# Alive status depending on current array
boxes.append(Box(x,y,current_status_array[x][y]))
# ---------------------#
# For Ratio Text to be displayed and updated continuously
# Defining font style and size
font = pygame.font.Font('freesansbold.ttf', 25)
def UpdateRatioText():
# For the alive ones
text_alive = font.render('Alive: {:.4f}'.format(IsAliveWinning(current_status_array, INT_SQ)), True, COLOR_ALIVE, (0,0,0))
textRectAlive = text_alive.get_rect()
textRectAlive.x = 80 + INT*SIZE - 210
textRectAlive.y = 115 + INT*SIZE
# For the dead ones
text_dead = font.render('Dead: {:.4f}'.format(1-IsAliveWinning(current_status_array, INT_SQ)), True, COLOR_DEAD, (0,0,0))
textRectDead = text_dead.get_rect()
textRectDead.x = 60
textRectDead.y = 115 + INT*SIZE
# Updating the font on the rect
screen.blit(text_alive, textRectAlive)
screen.blit(text_dead, textRectDead)
# ---------------------#
# Main python loop
while running:
# Main python quit function
for event in pygame.event.get():
if event.type == pygame.QUIT:
running = False
# For updating array and boxes status
current_status_array = UpdateArray(current_status_array, INT)
for box in boxes:
box.update()
# Update Ratio text
UpdateRatioText()
# Display Title
screen.blit(text_title, textRectTitle)
# Refresh screen
pygame.display.update()
# Throttles the loop instead of clock.tick(); this effectively sets the display frame rate
time.sleep(0.1)
# ---------------------#
| 3.171875 | 3 |
| TPS_dice_roller_bot/core/constants.py | PumaConcolor/TPS-dice-roller-bot | 4 | 12779758 |
DICE_ROLL_REGEX = '[\\w|\\s|!-/|:-@]*?[\\s]' \
'([0-9]*[\\s]*)d[\\s]*([0-9]+)' \
'[\\s]*([\\+|\\-][\\s]*[0-9]*)*[\\s]*(.*)[\\s]*$'
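# Illustrative behaviour (an assumption about how the bot applies this pattern,
# e.g. via re.search on the incoming message text; the sample message is made up,
# only the regex itself is from the source): for '/roll 2d20 +5 to hit', the
# captured groups come out roughly as ('2', '20', '+5', 'to hit'), i.e. dice
# count, die size, modifier, and trailing free text.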
SPONGEBOB_CLEANER_REGEX = '\\/spongebob|\\/spongerep|\\/spr|\\/sp'
ZALGO_CLEANER_REGEX = '\\/zalgo|\\/z'
HELP_MESSAGE = ('I can roll dice and do funny stuff!\n\n'
'You can control me by sending these commands:\n\n'
'/help - sends this help message\n'
'/start - sends this help message\n\n'
'To roll dice and other things (each command has a long and short version):\n\n'
'/roll - roll dice as indicated using dice notation\n'
'/r - short version\n\n'
'/ability_scores - rolls six ability scores for use in D&D style game systems\n'
'/as - short version\n\n'
'/alive - returns the emotional state of the bot\n\n'
'/spongebob - takes your sentence and returns a saltier one\n'
'/sp - short version\n\n'
'/spongerep - reply to a message writing this, it will mock the first message\n'
'/spr - short version\n\n'
'/zalgo - takes your message to glitch it\n'
'/z - short version\n\n'
'/edgelord - give your character a reason to brood\n'
'this command is the only one to just have a long version, for added edginess\n\n'
'/character - creates a fully fledged character\n'
'/char - short version\n\n'
'/fabula - create your identity for a Fabula Ultima character\n'
'/f - short version')
ALIVE_SERVICE = [
'Alive and kicking',
'Lock \'n loaded',
'My life for DnD',
'Stop poking me',
'I like when you try to reach me',
'I\'m alive, not by my choice',
'What does "alive" even means?',
'I\'m alive, be grateful to Pelor',
'Who\'s summoning me?',
'(/^▽^)/',
'(つ ͡° ͜ʖ ͡°)つ',
'(╯°□°)╯︵ ┻━┻',
'ಠ_ಠ',
'Pelor sucks'
]
SALTY_ANSWER = [
'eh?',
'Yeah, sure',
'try writing something that makes sense, next time',
'I\'m sure you can do better',
'Wait. Are you sure about that?',
'You may wanna think this over',
'Did you really drink that much?',
'Why do you people keep not reading the instructions?',
'Well, fuck you too',
'ಠ_ಠ',
'...',
'boooring'
]
EDGELORD_PARTS = [
# Edgy antagonist
[
'An evil wizard',
'A dragon',
'The drow',
'Goblins',
'Kobolds',
'A mind flayer',
'Evil cultists',
'Orcs',
'Trolls',
'A banshee',
'A demon lord',
'An archdevil',
'Giants',
'Vampires',
'Gnolls',
'A werewolf',
'A Djinni',
'A mimic',
'A tarrasque',
'A beholder',
'A hag coven',
'A lich',
'Barbarians',
'An aboleth',
'A succubus',
'A criminal organizations',
'A gelatinous cube',
'A necromancer',
'Corrupt nobles',
'A death knight',
'The BBEG',
'The bard',
'Natural selection',
'The DM',
],
# Edgy action
[
'killed',
'murdered',
'slaughtered',
'massacred',
'assassinated',
'brainwashed',
'captured',
'banished',
'enslaved',
'betrayed',
'sacrificed',
'mauled',
'stole',
'blackmailed',
'conned',
'framed',
'humiliated',
'pillaged',
'ruined',
'ate',
'cursed',
'befriended',
'seduced'
],
# Edgy victim
[
'my family',
'my hometown',
'my parents',
'my clan',
'my sibling',
'my mentors',
'my significant other',
'my master',
'my side squeeze',
'my apprentice',
'my friends',
'my previous adventuring party',
'everyone I knew',
'my crew of sailors',
'my crew of pirates',
'my crew of noble outlaws',
'my crew of thieves',
'the tavern I basically lived in',
'my military unit',
'my social status',
'my treasure',
'my aspirations',
'my honour',
'my confidence',
'my imaginary friends'
],
# Edgy outcome
[
'and it will have no effect on how i roleplay my character',
'and now I\'m a murder hobo',
'and now I\'m a lawful good stick in the mud',
'and now I seek vengeance',
'and now I trust no one',
'and now I have a bleak outlook of the world',
'and now I strive to live by their ideals',
'and now I must become stronger',
'and now I seek to bring back what I have lost',
'and now I vow to prevent that from happening to anyone else',
'and now I am haunted by their memory',
'and now I seek to uncover the truth about what happened',
'and now I fear it will happen again',
'and now I am stronger because of it',
'and now I\'m an alcoholic',
'and now I have multiclassed into warlock',
'and now I\'m Batman'
]
]
PG_CREATION_PARTS = {
'Sex': [
'Male',
'Female',
'You choose'
],
'Sexual orientation': [
'Heterosexual',
'Homosexual',
'Bisexual',
'Pansexual',
'Asexual',
'you choose'
],
'Race': [
'Aarakocra',
'Aasimar (Fallen)',
'Aasimar (Protector)',
'Aasimar (Scourge)',
'Bugbear',
'Centaur',
'Changeling',
'Dragonborn (Base)',
'Dragonborn (Draconblood)',
'Dragonborn (Ravenite)',
'Dwarf',
'Dwarf (Duergar)',
'Dwarf (Hill)',
'Dwarf (Mark of Warding)',
'Dwarf (Mountain)',
'Elf (Drow)',
'Elf (Eladrin)',
'Elf (High)',
'Elf (Mark of Shadow)',
'Elf (Pallid)',
'Elf (Sea)',
'Elf (Shadar-kai)',
'Elf (Wood)',
'Firbolg',
'Genasi (Air)',
'Genasi (Earth)',
'Genasi (Fire)',
'Genasi (Water)',
'Gith (Githyanki)',
'Gith (Githzerai)',
'Gnome (Deep)',
'Gnome (Deep/Svirfneblin)',
'Gnome (Forest)',
'Gnome (Mark of Scribing)',
'Gnome (Rock)',
'Goblin',
'Goliath',
'Half-Elf',
'Half-Elf (Variant)',
'Half-Elf (Variant; Aquatic Elf Descent)',
'Half-Elf (Variant; Drow Descent)',
'Half-Elf (Variant; Mark of Detection)',
'Half-Elf (Variant; Mark of Storm)',
'Half-Elf (Variant; Moon Elf or Sun Elf Descent)',
'Half-Elf (Variant; Wood Elf Descent)',
'Half-Orc',
'Half-Orc (Mark of Finding)',
'Halfling (Ghostwise)',
'Halfling (Lightfoot)',
'Halfling (Lotusden)',
'Halfling (Mark of Healing)',
'Halfling (Mark of Hospitality)',
'Halfling (Stout)',
'Hobgoblin',
'Human (Base)',
'Human (Mark of Finding)',
'Human (Mark of Handling)',
'Human (Mark of Making)',
'Human (Mark of Passage)',
'Human (Mark of Sentinel)',
'Human (Variant)',
'Kalashtar',
'Kenku',
'Kobold',
'Leonin',
'Lizardfolk',
'Loxodon',
'Minotaur',
'Orc',
'Satyr',
'Shifter (Beasthide)',
'Shifter (Longtooth)',
'Shifter (Swiftstride)',
'Shifter (Wildhunt)',
'Simic Hybrid',
'Tabaxi',
'Tiefling (Asmodeus)',
'Tiefling (Baalzebul)',
'Tiefling (Base)',
'Tiefling (Dispater)',
'Tiefling (Fierna)',
'Tiefling (Glasya)',
'Tiefling (Levistus)',
'Tiefling (Mammon)',
'Tiefling (Mephistopheles)',
'Tiefling (Variant; Devil\'s Tongue)',
'Tiefling (Variant; Hellfire)',
'Tiefling (Variant; Infernal Legacy)',
'Tiefling (Variant; Winged)',
'Tiefling (Zariel)',
'Triton',
'Vedalken',
'Verdan',
'Warforged',
'Yuan-ti Pureblood',
'You choose'
],
'Class': [
'Artificer',
'Barbarian',
'Bard',
'Cleric',
'Druid',
'Fighter',
'Monk',
'Paladin',
'Ranger',
'Rogue',
'Sorcerer',
'Warlock',
'Wizard',
'You choose'
],
'Background': [
'Acolyte',
'Anthropologist',
'Archaeologist',
'Athlete',
'Augen Trust (Spy)',
'Azorius Functionary',
'Boros Legionnaire',
'Celebrity Adventurer\'s Scion',
'Charlatan',
'City Watch',
'Variant City Watch (Investigator)',
'Clan Crafter',
'Cloistered Scholar',
'Cobalt Scholar (Sage)',
'Courtier',
'Criminal',
'Variant Criminal (Spy)',
'Dimir Operative',
'Entertainer',
'Variant Entertainer (Gladiator)',
'Faceless',
'Faction Agent',
'Failed Merchant',
'Far Traveler',
'Fisher',
'Folk Hero',
'Gambler',
'Golgari Agent',
'Grinner',
'Gruul Anarch',
'Guild Artisan',
'Variant Guild Artisan (Guild Merchant)',
'Haunted One',
'Hermit',
'House Agent',
'Inheritor',
'Izzet Engineer',
'Knight of the Order',
'Luxonborn (Acolyte)',
'Marine',
'Mercenary Veteran',
'Myriad Operative (Criminal)',
'Noble',
'Variant Noble (Knight)',
'Orzhov Representative',
'Outlander',
'Plaintiff',
'Rakdos Cultist',
'Revelry Pirate (Sailor)',
'Rival Intern',
'Sage',
'Sailor',
'Variant Sailor (Pirate)',
'Selesnya Initiate',
'Shipwright',
'Simic Scientist',
'Smuggler',
'Soldier',
'Urban Bounty Hunter',
'Urchin',
'Uthgardt Tribe Member',
'Volstrucker Agent',
'Waterdhavian Noble',
'You choose'
]
}
HEIGHT_TABLE = {
'Aarakocra': [140, 160],
'Aasimar (Fallen)': [150, 190],
'Aasimar (Protector)': [150, 190],
'Aasimar (Scourge)': [150, 190],
'Bugbear': [180, 245],
'Centaur': [180, 210],
'Changeling': [150, 185],
'Dragonborn (Base)': [185],
'Dragonborn (Draconblood)': [185],
'Dragonborn (Ravenite)': [185],
'Dwarf': [120, 150],
'Dwarf (Duergar)': [120, 150],
'Dwarf (Hill)': [120, 150],
'Dwarf (Mark of Warding)': [120, 150],
'Dwarf (Mountain)': [120, 150],
'Elf (Drow)': [140, 185],
'Elf (Eladrin)': [140, 185],
'Elf (High)': [140, 185],
'Elf (Mark of Shadow)': [140, 185],
'Elf (Pallid)': [140, 185],
'Elf (Sea)': [140, 185],
'Elf (Shadar-kai)': [140, 185],
'Elf (Wood)': [140, 185],
'Firbolg': [210, 245],
'Genasi (Air)': [150, 190],
'Genasi (Earth)': [150, 190],
'Genasi (Fire)': [150, 190],
'Genasi (Water)': [150, 190],
'Gith (Githyanki)': [160],
'Gith (Githzerai)': [160],
'Gnome (Deep)': [90, 120],
'Gnome (Deep/Svirfneblin)': [90, 120],
'Gnome (Forest)': [90, 120],
'Gnome (Mark of Scribing)': [90, 120],
'Gnome (Rock)': [90, 120],
'Goblin': [90, 120],
'Goliath': [210, 245],
'Half-Elf': [150, 185],
'Half-Elf (Variant)': [150, 185],
'Half-Elf (Variant; Aquatic Elf Descent)': [150, 185],
'Half-Elf (Variant; Drow Descent)': [150, 185],
'Half-Elf (Variant; Mark of Detection)': [150, 185],
'Half-Elf (Variant; Mark of Storm)': [150, 185],
'Half-Elf (Variant; Moon Elf or Sun Elf Descent)': [150, 185],
'Half-Elf (Variant; Wood Elf Descent)': [150, 185],
'Half-Orc': [155, 195],
'Half-Orc (Mark of Finding)': [155, 195],
'Halfling (Ghostwise)': [75, 105],
'Halfling (Lightfoot)': [75, 105],
'Halfling (Lotusden)': [75, 105],
'Halfling (Mark of Healing)': [75, 105],
'Halfling (Mark of Hospitality)': [75, 105],
'Halfling (Stout)': [75, 105],
'Hobgoblin': [150, 190],
'Human (Base)': [150, 190],
'Human (Mark of Finding)': [150, 190],
'Human (Mark of Handling)': [150, 190],
'Human (Mark of Making)': [150, 190],
'Human (Mark of Passage)': [150, 190],
'Human (Mark of Sentinel)': [150, 190],
'Human (Variant)': [150, 190],
'Kalashtar': [150, 190],
'Kenku': [135, 165],
'Kobold': [60, 90],
'Leonin': [175, 210],
'Lizardfolk': [150, 190],
'Loxodon': [210, 240],
'Minotaur': [175, 195],
'Orc': [175, 215],
'Satyr': [145, 185],
'Shifter (Beasthide)': [150, 190],
'Shifter (Longtooth)': [150, 190],
'Shifter (Swiftstride)': [150, 190],
'Shifter (Wildhunt)': [150, 190],
'Simic Hybrid': ['You choose'],
'Tabaxi': [160, 200],
'Tiefling (Asmodeus)': [150, 190],
'Tiefling (Baalzebul)': [150, 190],
'Tiefling (Base)': [150, 190],
'Tiefling (Dispater)': [150, 190],
'Tiefling (Fierna)': [150, 190],
'Tiefling (Glasya)': [150, 190],
'Tiefling (Levistus)': [150, 190],
'Tiefling (Mammon)': [150, 190],
'Tiefling (Mephistopheles)': [150, 190],
'Tiefling (Variant; Devil\'s Tongue)': [150, 190],
'Tiefling (Variant; Hellfire)': [150, 190],
'Tiefling (Variant; Infernal Legacy)': [150, 190],
'Tiefling (Variant; Winged)': [150, 190],
'Tiefling (Zariel)': [150, 190],
'Triton': [140, 180],
'Vedalken': [175, 195],
'Verdan': [90, 120],
'Warforged': ['You choose'],
'Yuan-ti Pureblood': [150, 190],
'You choose': ['You choose']
}
WEIGHT_TABLE = {
'Aarakocra': [80, 100],
'Aasimar (Fallen)': [110, 200],
'Aasimar (Protector)': [110, 200],
'Aasimar (Scourge)': [110, 200],
'Bugbear': [250, 350],
'Centaur': [600, 700],
'Changeling': [110, 190],
'Dragonborn (Base)': [200, 300],
'Dragonborn (Draconblood)': [200, 300],
'Dragonborn (Ravenite)': [200, 300],
'Dwarf': [130, 170],
'Dwarf (Duergar)': [130, 170],
'Dwarf (Hill)': [125, 165],
'Dwarf (Mark of Warding)': [130, 170],
'Dwarf (Mountain)': [130, 170],
'Elf (Drow)': [80, 160],
'Elf (Eladrin)': [80, 160],
'Elf (High)': [80, 160],
'Elf (Mark of Shadow)': [80, 160],
'Elf (Pallid)': [80, 160],
'Elf (Sea)': [80, 160],
'Elf (Shadar-kai)': [80, 160],
'Elf (Wood)': [80, 160],
'Firbolg': [240, 300],
'Genasi (Air)': [110, 200],
'Genasi (Earth)': [110, 200],
'Genasi (Fire)': [110, 200],
'Genasi (Water)': [110, 200],
'Gith (Githyanki)': [100, 180],
'Gith (Githzerai)': [90, 140],
'Gnome (Deep)': [80, 120],
'Gnome (Deep/Svirfneblin)': [80, 120],
'Gnome (Forest)': [30, 50],
'Gnome (Mark of Scribing)': [30, 50],
'Gnome (Rock)': [30, 50],
'Goblin': [35, 50],
'Goliath': [280, 340],
'Half-Elf': [110, 190],
'Half-Elf (Variant)': [110, 190],
'Half-Elf (Variant; Aquatic Elf Descent)': [110, 190],
'Half-Elf (Variant; Drow Descent)': [110, 190],
'Half-Elf (Variant; Mark of Detection)': [110, 190],
'Half-Elf (Variant; Mark of Storm)': [110, 190],
'Half-Elf (Variant; Moon Elf or Sun Elf Descent)': [110, 190],
'Half-Elf (Variant; Wood Elf Descent)': [110, 190],
'Half-Orc': [140, 240],
'Half-Orc (Mark of Finding)': [140, 240],
'Halfling (Ghostwise)': [140, 240],
'Halfling (Lightfoot)': [30, 50],
'Halfling (Lotusden)': [30, 50],
'Halfling (Mark of Healing)': [30, 50],
'Halfling (Mark of Hospitality)': [30, 50],
'Halfling (Stout)': [30, 50],
'Hobgoblin': [110, 200],
'Human (Base)': [110, 200],
'Human (Mark of Finding)': [110, 200],
'Human (Mark of Handling)': [110, 200],
'Human (Mark of Making)': [110, 200],
'Human (Mark of Passage)': [110, 200],
'Human (Mark of Sentinel)': [110, 200],
'Human (Variant)': [110, 200],
'Kalashtar': [110, 180],
'Kenku': [90, 120],
'Kobold': [25, 35],
'Leonin': [180, 240],
'Lizardfolk': [120, 220],
'Loxodon': [300, 400],
'Minotaur': [180, 240],
'Orc': [230, 280],
'Satyr': [100, 180],
'Shifter (Beasthide)': [90, 160],
'Shifter (Longtooth)': [90, 160],
'Shifter (Swiftstride)': [90, 160],
'Shifter (Wildhunt)': [90, 160],
'Simic Hybrid': ['You choose'],
'Tabaxi': [90, 160],
'Tiefling (Asmodeus)': [110, 200],
'Tiefling (Baalzebul)': [110, 200],
'Tiefling (Base)': [110, 200],
'Tiefling (Dispater)': [110, 200],
'Tiefling (Fierna)': [110, 200],
'Tiefling (Glasya)': [110, 200],
'Tiefling (Levistus)': [110, 200],
'Tiefling (Mammon)': [110, 200],
'Tiefling (Mephistopheles)': [110, 200],
'Tiefling (Variant; Devil\'s Tongue)': [110, 200],
'Tiefling (Variant; Hellfire)': [110, 200],
'Tiefling (Variant; Infernal Legacy)': [110, 200],
'Tiefling (Variant; Winged)': [110, 200],
'Tiefling (Zariel)': [110, 200],
'Triton': [90, 160],
'Vedalken': [140, 200],
'Verdan': [35, 50],
'Warforged': ['You choose'],
'Yuan-ti Pureblood': [110, 200],
'You choose': ['You choose']
}
FABULA_CONCEPT = [
'knight',
'bodyguard',
'animated puppet',
'bounty hunter',
'bandit',
'scavenger',
'martial artist',
'factory worker',
'rebel agent',
'treasure hunter',
'student',
'warrior mage',
'alien',
'painter',
'noble',
'priest/ess',
'magitech engineer',
'duelist',
'professor',
'archer',
'monster hunter',
'samurai',
'occultist',
'medic',
'bard',
'paladin',
'shapeshifter',
'soldier',
'monk',
'pirate',
'inventor',
'gunslinger',
'gambler',
'smuggler',
'black knight',
'rōnin',
'automaton',
'alchemist',
'mercenary',
'ninja',
'airship pilot',
'cook',
'diplomat',
'spy',
'commander',
'thief',
'templar',
'sniper',
'king/queen',
'mechanic',
'athlete',
'mage',
'dancer',
'healer',
'gladiator',
'cannoneer',
'demon hunter',
'prince/ss',
'merchant',
'abomination'
]
FABULA_ADJECTIVES = [
'charming',
'devout',
'oathbreaker',
'last',
'chosen',
'distant',
'former imperial',
'proud',
'troubled',
'wanted',
'brave',
'fearful',
'animal-loving',
'kind',
'respectable',
'amnesiac',
'dashing',
'tainted',
'imperial',
'young',
'free-spirited',
'eccentric',
'loyal',
'well-connected',
'elderly',
'naive',
'chivalrous',
'spoiled',
'smiling',
'gifted',
'no-nonsense',
'royal',
'apprentice',
'reckless',
'influent',
'furtive',
'ill-tempered',
'famous',
'tough',
'non-human'
]
FABULA_DETAIL = [
'from an Ancient Bloodline',
'on the run',
'of the Old Faith',
'seeking justice',
'in disgrace',
'of the Crimson Wings',
'from the High Academy',
'from the Moon',
'of the seven seas',
'from the future',
'looking for answers',
'without a homeland',
'of the Royal Army',
'from another dimension',
'of the Desert Clans',
'of the Storm Knights',
'with a heart of gold',
'from the ancient forest',
'from the past',
'of the Sacred Flame'
]
FABULA_THEME = [
'ambition',
'anger',
'doubt',
'duty',
'guilt',
'hope',
'justice',
'belonging',
'mercy',
'vengeance'
]
| 2.421875 | 2 |
| o3d/documentation/build_docs.py | rwatson/chromium-capsicum | 11 | 12779759 |
#!/usr/bin/python2.4
# Copyright 2009, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Docbuilder for O3D and o3djs."""
import os
import os.path
import sys
import imp
import types
import glob
import subprocess
import shutil
import re
_java_exe = ''
_output_dir = ''
_third_party_dir = ''
_o3d_third_party_dir = ''
_script_path = os.path.dirname(os.path.realpath(__file__))
_js_copyright = """
/*
* Copyright 2009, Google Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
"""
GlobalsDict = { }
def MakePath(*file_paths):
"""Makes a path absolute given a path relative to this script."""
return os.path.join(_script_path, *file_paths)
def MakeCommandName(name):
"""adds '.exe' if on Windows"""
if os.name == 'nt':
return name + '.exe'
return name
def Execute(args):
"""Executes an external program."""
# Comment the next line in for debugging.
# print "Execute: ", ' '.join(args)
if subprocess.call(args) > 0:
raise RuntimeError('FAILED: ' + ' '.join(args))
def AppendBasePath(folder, filenames):
"""Appends a base path to a ist of files"""
return [os.path.join(folder, filename) for filename in filenames]
def RunNixysa(idl_files, generate, output_dir, nixysa_options):
"""Executes Nixysa."""
Execute([
sys.executable,
MakePath(_o3d_third_party_dir, 'nixysa', 'codegen.py'),
'--binding-module=o3d:%s' % MakePath('..', 'plugin', 'o3d_binding.py'),
'--generate=' + generate,
'--force',
'--output-dir=' + output_dir] +
nixysa_options +
idl_files)
def RunJSDocToolkit(js_files, ezt_output_dir, html_output_dir, prefix, mode,
baseURL, topURL, exports_file):
"""Executes the JSDocToolkit."""
list_filename = MakePath(_output_dir, 'doclist.conf')
f = open(list_filename, 'w')
f.write('{\nD:{\n')
f.write('prefix: "%s",\n' % prefix)
f.write('baseURL: "%s",\n' % baseURL)
f.write('topURL: "%s",\n' % topURL)
f.write('mode: "%s",\n' % mode)
f.write('htmlOutDir: "%s",\n' % html_output_dir.replace('\\', '/'))
f.write('exportsFile: "%s",\n' % exports_file.replace('\\', '/'))
f.write('endMarker: ""\n')
f.write('},\n')
f.write('_: [\n')
for filename in js_files:
f.write('"%s",\n' % filename.replace('\\', '/'))
f.write(']\n}\n')
f.close()
files_dir = MakePath(_third_party_dir, 'jsdoctoolkit', 'files')
Execute([
_java_exe,
'-Djsdoc.dir=%s' % files_dir,
'-jar',
MakePath(files_dir, 'jsrun.jar'),
MakePath(files_dir, 'app', 'run.js'),
'-v',
'-t=%s' % MakePath('jsdoc-toolkit-templates'),
'-d=' + ezt_output_dir,
'-c=' + list_filename])
def DeleteOldDocs(docs_js_outpath):
try:
shutil.rmtree(docs_js_outpath);
except:
pass
def BuildJavaScriptForDocsFromIDLs(idl_files, output_dir):
RunNixysa(idl_files, 'jsheader', output_dir,
['--properties-equal-undefined', '--overloaded-function-docs'])
def BuildJavaScriptForExternsFromIDLs(idl_files, output_dir):
if (os.path.exists(output_dir)):
for filename in glob.glob(os.path.join(output_dir, '*.js')):
os.unlink(filename)
RunNixysa(idl_files, 'jsheader', output_dir, ['--no-return-docs'])
def BuildO3DDocsFromJavaScript(js_files, ezt_output_dir, html_output_dir):
RunJSDocToolkit(js_files, ezt_output_dir, html_output_dir,
'classo3d_1_1_', 'o3d', '', '', '')
def BuildO3DClassHierarchy(html_output_dir):
# TODO(gman): We need to make multiple graphs. One for Params, one for
# ParamMatrix4, one for RenderNode, one for everything else.
dot_path = MakePath(_third_party_dir, 'graphviz', 'files', 'bin',
MakeCommandName('dot'))
if os.path.exists(dot_path):
Execute([
dot_path,
'-Tcmapx', '-o' + MakePath(html_output_dir, 'class_hierarchy.map'),
'-Tpng', '-o' + MakePath(html_output_dir, 'class_hierarchy.png'),
MakePath(html_output_dir, 'class_hierarchy.dot')])
def BuildO3DJSDocs(js_files, ezt_output_dir, html_output_dir, exports_file):
# The backslashes below on 'jsdocs/' and '../' must stay.
RunJSDocToolkit(js_files, ezt_output_dir, html_output_dir, 'js_1_0_', 'o3djs',
'jsdocs/', '../', exports_file)
def BuildO3DExternsFile(js_files_dir, extra_externs_file, externs_file):
outfile = open(externs_file, 'w')
filenames = (glob.glob(os.path.join(js_files_dir, '*.js')) +
[extra_externs_file])
for filename in filenames:
print "-----", filename
infile = open(filename, 'r')
lines = infile.readlines()
infile.close()
filtered = []
skipping = False
# strip out @o3dparameter stuff
for line in lines:
if skipping:
if line.startswith(' * @') or line.startswith(' */'):
skipping = False
if not skipping:
if line.startswith(' * @o3dparameter'):
skipping = True
if not skipping:
filtered.append(line)
outfile.write(''.join(filtered))
outfile.close()
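# Illustration (made-up snippet, not from the O3D sources) of the filtering above:
# in a jsdoc block such as
#
#   /**
#    * A Transform node.
#    * @o3dparameter visible ParamBoolean Whether the transform is rendered.
#    * @constructor
#    */
#
# the ' * @o3dparameter' line and any continuation lines after it are dropped
# until the next ' * @' tag or ' */' is reached, so the description and
# '@constructor' still make it into the combined externs file.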
def BuildCompiledO3DJS(o3djs_files,
externs_path,
o3d_externs_js_path,
compiled_o3djs_outpath):
Execute([
_java_exe,
'-jar',
MakePath('..', '..', 'o3d-internal', 'jscomp', 'JSCompiler_deploy.jar'),
'--property_renaming', 'OFF',
'--variable_renaming', 'LOCAL',
'--jscomp_error=visibility',
'--jscomp_error=accessControls',
'--strict',
'--externs=%s' % externs_path,
('--externs=%s' % o3d_externs_js_path),
('--js_output_file=%s' % compiled_o3djs_outpath)] +
['-js=%s' % (x, ) for x in o3djs_files]);
# strip out goog.exportSymbol and move o3djs.require to end
file = open(compiled_o3djs_outpath, 'r')
contents = file.read()
file.close()
contents = re.sub(r'goog.exportSymbol\([^\)]*\);', '', contents)
requires = set(re.findall(r'o3djs.require\([^\)]*\);', contents))
contents = re.sub(r'o3djs.require\([^\)]*\);', '', contents)
file = open(compiled_o3djs_outpath, 'w')
file.write(_js_copyright)
file.write(contents)
file.write('\n')
file.write('\n'.join(requires))
file.close()
def CopyStaticFiles(o3d_docs_ezt_outpath, o3d_docs_html_outpath):
files = ['stylesheet.css',
'prettify.css',
'prettify.js',
'tabs.css',
'tab_l.gif',
'tab_r.gif',
'tab_b.gif']
for file in files:
shutil.copyfile(MakePath('jsdoc-toolkit-templates', 'static', file),
MakePath(os.path.join(o3d_docs_ezt_outpath, file)))
shutil.copyfile(MakePath('jsdoc-toolkit-templates', 'static', file),
MakePath(os.path.join(o3d_docs_html_outpath, file)))
def main(argv):
"""Builds the O3D API docs and externs and the o3djs docs."""
global _java_exe
_java_exe = argv[0]
global _third_party_dir
_third_party_dir = argv[1]
global _o3d_third_party_dir
_o3d_third_party_dir = os.path.normpath(
os.path.join(os.path.dirname(__file__), '..', 'third_party'))
# Fix up the python path of subprocesses by setting PYTHONPATH.
pythonpath = os.pathsep.join([MakePath(_o3d_third_party_dir, 'gflags', 'python'),
MakePath(_o3d_third_party_dir, 'ply')])
orig_pythonpath = os.environ.get('PYTHONPATH')
if orig_pythonpath:
pythonpath = os.pathsep.join([pythonpath, orig_pythonpath])
os.environ['PYTHONPATH'] = pythonpath
js_list_filename = MakePath('..', 'samples', 'o3djs', 'js_list.manifest')
idl_list_filename = MakePath('..', 'plugin', 'idl_list.manifest')
js_list_basepath = os.path.dirname(js_list_filename)
idl_list_basepath = os.path.dirname(idl_list_filename)
global _output_dir
_output_dir = argv[2]
docs_outpath = os.path.join(_output_dir, 'documentation')
docs_js_outpath = MakePath(docs_outpath, 'apijs')
externs_js_outpath = MakePath(_output_dir, 'externs')
o3d_docs_ezt_outpath = MakePath(docs_outpath, 'reference')
o3d_docs_html_outpath = MakePath(docs_outpath, 'local_html')
o3djs_docs_ezt_outpath = MakePath(docs_outpath, 'reference', 'jsdocs')
o3djs_docs_html_outpath = MakePath(docs_outpath, 'local_html', 'jsdocs')
o3d_externs_path = MakePath(_output_dir, 'o3d-externs.js')
o3djs_exports_path = MakePath(_output_dir, 'o3d-exports.js')
compiled_o3djs_outpath = MakePath(docs_outpath, 'base.js')
externs_path = MakePath('externs', 'externs.js')
o3d_extra_externs_path = MakePath('externs', 'o3d-extra-externs.js')
js_list = eval(open(js_list_filename, "r").read())
idl_list = eval(open(idl_list_filename, "r").read())
idl_files = AppendBasePath(idl_list_basepath, idl_list)
o3djs_files = AppendBasePath(js_list_basepath, js_list)
# we need to put base.js first?
o3djs_files = (
filter(lambda x: x.endswith('base.js'), o3djs_files) +
filter(lambda x: not x.endswith('base.js'), o3djs_files))
docs_js_files = [os.path.join(
docs_js_outpath,
os.path.splitext(os.path.basename(f))[0] + '.js')
for f in idl_list]
try:
DeleteOldDocs(MakePath(docs_outpath))
BuildJavaScriptForDocsFromIDLs(idl_files, docs_js_outpath)
BuildO3DDocsFromJavaScript([o3d_extra_externs_path] + docs_js_files,
o3d_docs_ezt_outpath, o3d_docs_html_outpath)
BuildO3DClassHierarchy(o3d_docs_html_outpath)
BuildJavaScriptForExternsFromIDLs(idl_files, externs_js_outpath)
BuildO3DExternsFile(externs_js_outpath,
o3d_extra_externs_path,
o3d_externs_path)
BuildO3DJSDocs(o3djs_files + [o3d_externs_path], o3djs_docs_ezt_outpath,
o3djs_docs_html_outpath, o3djs_exports_path)
CopyStaticFiles(o3d_docs_ezt_outpath, o3d_docs_html_outpath)
BuildCompiledO3DJS(o3djs_files + [o3djs_exports_path],
externs_path,
o3d_externs_path,
compiled_o3djs_outpath)
except Exception:
if os.path.exists(compiled_o3djs_outpath):
os.unlink(compiled_o3djs_outpath)
raise
if __name__ == '__main__':
main(sys.argv[1:])
| 1.476563
| 1
|
setup.py
|
scikit-hep/mplhep_data
| 0
|
12779760
|
<gh_stars>0
#!/usr/bin/env python
# Copyright (c) 2021, <NAME>
#
# Distributed under the 3-clause BSD license, see accompanying file LICENSE
# or https://github.com/Scikit-HEP/mplhep_data for details.
from setuptools import setup
setup()
| 0.886719
| 1
|
LeetCode/Trie/Design Search Autocomplete System.py
|
UtkarshPathrabe/Competitive-Coding
| 13
|
12779761
|
from collections import defaultdict
from typing import List
class TrieNode:
def __init__(self, string = '', times = 0):
self.string = string
self.times = times
self.children = defaultdict(TrieNode)
class AutocompleteSystem:
def __init__(self, sentences: List[str], times: List[int]):
self.root = TrieNode()
self.currentString = ''
for sentence, frequency in zip(sentences, times):
self.__insert(self.root, sentence, frequency)
def __insert(self, currentNode, sentence, frequency):
for char in sentence:
currentNode = currentNode.children.setdefault(char, TrieNode(char))
currentNode.times += frequency
def __lookup(self, currentNode, string):
for char in string:
if char not in currentNode.children:
return []
currentNode = currentNode.children[char]
matchingStrings = []
self.__traverse(currentNode, string, matchingStrings)
return matchingStrings
def __traverse(self, currentNode, string, matchingStrings):
if currentNode.times > 0:
matchingStrings.append((string, currentNode.times))
for key in currentNode.children:
self.__traverse(currentNode.children[key], string + key, matchingStrings)
def input(self, c: str) -> List[str]:
if c == '#':
self.__insert(self.root, self.currentString, 1)
self.currentString = ''
else:
self.currentString += c
listOfTuple = self.__lookup(self.root, self.currentString)
listOfTuple.sort(key = lambda x : (-1 * x[1], x[0]))
result = []
numberOfResults = 0
for Tuple in listOfTuple:
result.append(Tuple[0])
numberOfResults += 1
if numberOfResults >= 3:
break
return result
# Your AutocompleteSystem object will be instantiated and called as such:
# obj = AutocompleteSystem(sentences, times)
# param_1 = obj.input(c)
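# A hedged usage sketch for reference; the sentences, frequencies, and keystrokes below
# are illustrative values, not part of the original submission:
# obj = AutocompleteSystem(["i love you", "island", "iroman", "i love leetcode"], [5, 3, 2, 2])
# obj.input('i')  # -> ["i love you", "island", "i love leetcode"] (top 3 by frequency, ties broken lexicographically)
# obj.input(' ')  # -> ["i love you", "i love leetcode"]
# obj.input('a')  # -> []
# obj.input('#')  # -> [] and "i a" is stored as a new sentence with count 1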
| 3.34375
| 3
|
adv/d_malora.py
|
smashwidget/dl-1
| 0
|
12779762
|
import adv.adv_test
from adv import *
from slot.a import *
def module():
return D_Malora
class D_Malora(Adv):
a1 = ('od',0.13)
conf = {}
conf['slot.a'] = KFM()+FitF()
conf['acl'] = """
`s1
`s2, seq=4
"""
def prerun(this):
if this.condition('buff all team'):
this.s1debuff = Debuff('s1',0.15,15)
else:
this.s1debuff = False
def s1_proc(this, e):
if this.s1debuff:
this.s1debuff.on()
this.dmg_make('s1',4.67,'s')
this.hits += 1
def s2_proc(this, e):
if this.mod('def')!= 1:
this.dmg_make('o_s2_boost',4.32*3*0.8)
if __name__ == '__main__':
conf = {}
adv.adv_test.test(module(), conf, verbose=-2)
| 1.945313
| 2
|
app/search/ui/views.py
|
ExiledNarwal28/glo-2005-project
| 0
|
12779763
|
<filename>app/search/ui/views.py<gh_stars>0
from flask import render_template, redirect, url_for, request, Blueprint
from app.search.forms import GeneralSearchForm
search_blueprint = Blueprint('search', __name__)
@search_blueprint.route('/', methods=('GET', 'POST'))
def search():
form = GeneralSearchForm(request.form)
if request.method == 'POST' and form.validate_on_submit():
search_route = request.form.get('search_route')
return redirect(url_for(search_route), 307)
return render_template('index.html', form=form), 200
| 2.203125
| 2
|
python/test/arrays/read_l2.py
|
mdvx/TimeBase
| 0
|
12779764
|
<gh_stars>0
import dxapi
import time
import sys
from datetime import datetime
def readstream():
# Create timebase connection
db = dxapi.TickDb.createFromUrl("dxtick://localhost:8023")
try:
# Open in read-write mode
db.open(True)
# Get the data stream
stream = db.getStream("l2")
# Create cursor using defined "ALL" subscription
cursor = stream.select(0, dxapi.SelectionOptions(), None, None)
# Iterate first 100 messages in available in cursor
for num in range(0,100):
if cursor.next():
message = cursor.getMessage()
print(toTimeString(message.timestamp) + ": " + str(message))
if message.typeName == 'deltix.timebase.api.messages.L2Message':
for action in message.actions:
print('Action: ' + str(action))
# cursor should be closed after use
cursor.close()
finally:
# database connection should be closed
db.close()
def toTimeString(timestamp):
# Message timestamp is Epoch time in nanoseconds
seconds = int(timestamp/1000000000)
nanoseconds = int(timestamp % 1000000000)
time = ""
while nanoseconds > 0:
r = nanoseconds % 1000
if r > 0:
time = str(r) + time
nanoseconds = int(nanoseconds/1000)
return str(datetime.utcfromtimestamp(seconds)) + ("." if len(time) > 0 else "") + time
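# Worked example (hedged, illustration only): toTimeString(1500000000123456789) returns
# "2017-07-14 02:40:00.123456789". Note that trailing all-zero groups of three digits are
# dropped by the loop above, so a timestamp with 120000000 ns prints as ".120".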
readstream()
| 2.453125
| 2
|
solutions/130/130-yongjoonseo.py
|
iknoom/LeetCode-Solutions
| 4
|
12779765
|
<filename>solutions/130/130-yongjoonseo.py
from collections import deque
from typing import List
class Solution:
def BFS(self, board, sy, sx, visited, n, m):
q = deque([(sy, sx)])
visited[sy][sx] = 1
candis = [(sy, sx)]
dy = [0, 1, 0, -1]
dx = [1, 0, -1, 0]
out = False
while q:
y, x = q.popleft()
for i in range(4):
ny, nx = y + dy[i], x + dx[i]
if 0 <= ny < n and 0 <= nx < m:
if not visited[ny][nx] and board[ny][nx] == 'O':
visited[ny][nx] = 1
candis.append((ny, nx))
q.append((ny, nx))
                else:
                    # (ny, nx) is off the board: this region touches the border, keep it
                    out = True
        # flip only regions that never reached the border; returning early inside the
        # loop could leave part of a border-connected region unvisited and wrongly flipped later
        if out: return
        for cy, cx in candis:
            board[cy][cx] = 'X'
def solve(self, board: List[List[str]]) -> None:
"""
Do not return anything, modify board in-place instead.
"""
if not board: return
n, m = len(board), len(board[0])
visited = [[0] * m for i in range(n)]
for i in range(n):
for j in range(m):
if board[i][j] == 'O' and not visited[i][j]:
self.BFS(board, i, j, visited, n, m)
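# Hedged example (the board contents are illustrative):
# board = [["X","X","X","X"],
#          ["X","O","O","X"],
#          ["X","X","O","X"],
#          ["X","O","X","X"]]
# Solution().solve(board)
# board is now [["X","X","X","X"],
#               ["X","X","X","X"],
#               ["X","X","X","X"],
#               ["X","O","X","X"]]  # the bottom 'O' touches the border, so it is kept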
| 3.21875
| 3
|
modules/flow0d/cardiovascular0D_coronary.py
|
marchirschvogel/amb
| 0
|
12779766
|
#!/usr/bin/env python3
# Copyright (c) 2019-2021, Dr.-Ing. <NAME>
# All rights reserved.
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
import sys
import sympy as sp
# coronary model with a 3-element Windkessel (ZCR, proximal part) in series with a 2-element Windkessel (CR, distal part)
# according to Vieira et al. (2018) "Patient-specific modeling of right coronary circulation vulnerability post-liver transplant in Alagille’s syndrome", PLoS ONE 13(11), e0205829
# here their R_e is Z_corp_sys, C_e is C_corp_sys, R_p is R_corp_sys, C_i is C_cord_sys, and R_d is R_cord_sys
# the distal compliance is fed by the left ventricular pressure in order to have a phase-dependent tone of the coronary
# (coronaries almost entirely fill in diastole, not during systole)
#\begin{align}
#&C_{\mathrm{cor,p}}^{\mathrm{sys}} \left(\frac{\mathrm{d}p_{\mathrm{ar}}^{\mathrm{sys}}}{\mathrm{d}t}-Z_{\mathrm{cor,p}}^{\mathrm{sys}}\frac{\mathrm{d}q_{\mathrm{cor,p,in}}^{\mathrm{sys}}}{\mathrm{d}t}\right) = q_{\mathrm{cor,p,in}}^{\mathrm{sys}} - q_{\mathrm{cor,p}}^{\mathrm{sys}}\\
#&R_{\mathrm{cor,p}}^{\mathrm{sys}}\,q_{\mathrm{cor,p}}^{\mathrm{sys}}=p_{\mathrm{ar}}^{\mathrm{sys}}-p_{\mathrm{cor,d}}^{\mathrm{sys}} - Z_{\mathrm{cor,p}}^{\mathrm{sys}}\,q_{\mathrm{cor,p,in}}^{\mathrm{sys}}\\
#&C_{\mathrm{cor,d}}^{\mathrm{sys}} \frac{\mathrm{d}(p_{\mathrm{cor,d}}^{\mathrm{sys}}-p_{\mathrm{v}}^{\ell})}{\mathrm{d}t} = q_{\mathrm{cor,p}}^{\mathrm{sys}} - q_{\mathrm{cor,d}}^{\mathrm{sys}}\\
#&R_{\mathrm{cor,d}}^{\mathrm{sys}}\,q_{\mathrm{cor,d}}^{\mathrm{sys}}=p_{\mathrm{cor,d}}^{\mathrm{sys}}-p_{\mathrm{at}}^{r}
#\end{align}
class coronary_circ_ZCRp_CRd():
def __init__(self, params, varmap, auxmap, vs):
self.Z_corp_sys = params['Z_corp_sys']
self.C_corp_sys = params['C_corp_sys']
self.R_corp_sys = params['R_corp_sys']
self.C_cord_sys = params['C_cord_sys']
self.R_cord_sys = params['R_cord_sys']
try: self.V_corp_sys_u = params['V_corp_sys_u']
except: self.V_corp_sys_u = 0
try: self.V_cord_sys_u = params['V_cord_sys_u']
except: self.V_cord_sys_u = 0
self.ndcor = 4
self.varmap = varmap
self.auxmap = auxmap
self.vs = vs
def equation_map(self, vindex, aindex, x_, a_, df_, f_, p_ar_, p_v_, p_at_):
self.varmap['q_corp_sys_in'] = vindex
self.varmap['q_corp_sys'] = vindex+1
self.varmap['p_cord_sys'] = vindex+2
self.varmap['q_ven'+str(self.vs+1)+'_sys'] = vindex+3
q_corp_sys_in_ = sp.Symbol('q_corp_sys_in_')
q_corp_sys_ = sp.Symbol('q_corp_sys_')
p_cord_sys_ = sp.Symbol('p_cord_sys_')
q_cord_sys_ = sp.Symbol('q_ven'+str(self.vs+1)+'_sys_')
x_[self.varmap['q_corp_sys_in']] = q_corp_sys_in_
x_[self.varmap['q_corp_sys']] = q_corp_sys_
x_[self.varmap['p_cord_sys']] = p_cord_sys_
x_[self.varmap['q_ven'+str(self.vs+1)+'_sys']] = q_cord_sys_
# populate df_ and f_ arrays
df_[vindex] = self.C_corp_sys * (p_ar_[0] - self.Z_corp_sys * q_corp_sys_in_) # coronary proximal volume rate
df_[vindex+1] = 0.
df_[vindex+2] = self.C_cord_sys * (p_cord_sys_ - p_v_) # coronary distal volume rate
df_[vindex+3] = 0.
f_[vindex] = q_corp_sys_ - q_corp_sys_in_ # coronary proximal flow balance
f_[vindex+1] = (p_cord_sys_ - p_ar_[0] + self.Z_corp_sys * q_corp_sys_in_)/self.R_corp_sys + q_corp_sys_ # coronary proximal momentum
f_[vindex+2] = q_cord_sys_ - q_corp_sys_ # coronary distal flow balance
f_[vindex+3] = (p_at_ - p_cord_sys_)/self.R_cord_sys + q_cord_sys_ # coronary distal momentum
# auxiliary map and variables
self.auxmap['V_corp_sys'] = aindex
self.auxmap['V_cord_sys'] = aindex+1
a_[self.auxmap['V_corp_sys']] = self.C_corp_sys * (p_ar_[0] - self.Z_corp_sys * q_corp_sys_in_) + self.V_corp_sys_u
a_[self.auxmap['V_cord_sys']] = self.C_cord_sys * (p_cord_sys_ - p_v_) + self.V_cord_sys_u
# safety check that we don't hand in a zero symbol for p_v
if p_v_ is sp.S.Zero: raise ValueError("Zero symbol for left ventricular pressure!")
return [q_corp_sys_in_], q_cord_sys_
def initialize(self, var, iniparam):
try: var[self.varmap['q_corp_sys_in']] = iniparam['q_corp_sys_in_0']
except: var[self.varmap['q_corp_sys_in']] = iniparam['q_corp_sys_0']
var[self.varmap['q_corp_sys']] = iniparam['q_corp_sys_0']
var[self.varmap['p_cord_sys']] = iniparam['p_cord_sys_0']
try: var[self.varmap['q_ven'+str(self.vs+1)+'_sys']] = iniparam['q_ven'+str(self.vs+1)+'_sys_0']
except: var[self.varmap['q_ven'+str(self.vs+1)+'_sys']] = iniparam['q_ven_sys_0']
def print_to_screen(self, var_sq, aux):
print("Output of 0D coronary model (ZCRp_CRd):")
print('{:<10s}{:<3s}{:<7.3f}'.format('p_cord_sys',' = ',var_sq[self.varmap['p_cord_sys']]))
sys.stdout.flush()
# equivalent model to ZCRp_CRd, but individually for left and right coronary arteries
#\begin{align}
#&C_{\mathrm{cor,p}}^{\mathrm{sys},\ell} \left(\frac{\mathrm{d}p_{\mathrm{ar}}^{\mathrm{sys},\ell}}{\mathrm{d}t}-Z_{\mathrm{cor,p}}^{\mathrm{sys},\ell}\frac{\mathrm{d}q_{\mathrm{cor,p,in}}^{\mathrm{sys},\ell}}{\mathrm{d}t}\right) = q_{\mathrm{cor,p,in}}^{\mathrm{sys},\ell} - q_{\mathrm{cor,p}}^{\mathrm{sys},\ell}\\
#&R_{\mathrm{cor,p}}^{\mathrm{sys},\ell}\,q_{\mathrm{cor,p}}^{\mathrm{sys},\ell}=p_{\mathrm{ar}}^{\mathrm{sys}}-p_{\mathrm{cor,d}}^{\mathrm{sys},\ell} - Z_{\mathrm{cor,p}}^{\mathrm{sys},\ell}\,q_{\mathrm{cor,p,in}}^{\mathrm{sys},\ell}\\
#&C_{\mathrm{cor,d}}^{\mathrm{sys},\ell} \frac{\mathrm{d}(p_{\mathrm{cor,d}}^{\mathrm{sys},\ell}-p_{\mathrm{v}}^{\ell})}{\mathrm{d}t} = q_{\mathrm{cor,p}}^{\mathrm{sys},\ell} - q_{\mathrm{cor,d}}^{\mathrm{sys},\ell}\\
#&R_{\mathrm{cor,d}}^{\mathrm{sys},\ell}\,q_{\mathrm{cor,d}}^{\mathrm{sys},\ell}=p_{\mathrm{cor,d}}^{\mathrm{sys},\ell}-p_{\mathrm{at}}^{r}\\
#&C_{\mathrm{cor,p}}^{\mathrm{sys},r} \left(\frac{\mathrm{d}p_{\mathrm{ar}}^{\mathrm{sys},r}}{\mathrm{d}t}-Z_{\mathrm{cor,p}}^{\mathrm{sys},r}\frac{\mathrm{d}q_{\mathrm{cor,p,in}}^{\mathrm{sys},r}}{\mathrm{d}t}\right) = q_{\mathrm{cor,p,in}}^{\mathrm{sys},r} - q_{\mathrm{cor,p}}^{\mathrm{sys},r}\\
#&R_{\mathrm{cor,p}}^{\mathrm{sys},r}\,q_{\mathrm{cor,p}}^{\mathrm{sys},r}=p_{\mathrm{ar}}^{\mathrm{sys}}-p_{\mathrm{cor,d}}^{\mathrm{sys},r} - Z_{\mathrm{cor,p}}^{\mathrm{sys},r}\,q_{\mathrm{cor,p,in}}^{\mathrm{sys},r}\\
#&C_{\mathrm{cor,d}}^{\mathrm{sys},r} \frac{\mathrm{d}(p_{\mathrm{cor,d}}^{\mathrm{sys},r}-p_{\mathrm{v}}^{\ell})}{\mathrm{d}t} = q_{\mathrm{cor,p}}^{\mathrm{sys},r} - q_{\mathrm{cor,d}}^{\mathrm{sys},r}\\
#&R_{\mathrm{cor,d}}^{\mathrm{sys},r}\,q_{\mathrm{cor,d}}^{\mathrm{sys},r}=p_{\mathrm{cor,d}}^{\mathrm{sys},r}-p_{\mathrm{at}}^{r}\\
#&0=q_{\mathrm{cor,d}}^{\mathrm{sys},\ell}+q_{\mathrm{cor,d}}^{\mathrm{sys},r}-q_{\mathrm{cor,d,out}}^{\mathrm{sys}}
#\end{align}
class coronary_circ_ZCRp_CRd_lr():
def __init__(self, params, varmap, auxmap, vs):
self.Z_corp_sys_l = params['Z_corp_sys_l']
self.C_corp_sys_l = params['C_corp_sys_l']
self.R_corp_sys_l = params['R_corp_sys_l']
self.C_cord_sys_l = params['C_cord_sys_l']
self.R_cord_sys_l = params['R_cord_sys_l']
self.Z_corp_sys_r = params['Z_corp_sys_r']
self.C_corp_sys_r = params['C_corp_sys_r']
self.R_corp_sys_r = params['R_corp_sys_r']
self.C_cord_sys_r = params['C_cord_sys_r']
self.R_cord_sys_r = params['R_cord_sys_r']
try: self.V_corp_sys_l_u = params['V_corp_sys_l_u']
except: self.V_corp_sys_l_u = 0
try: self.V_cord_sys_l_u = params['V_cord_sys_l_u']
except: self.V_cord_sys_l_u = 0
try: self.V_corp_sys_r_u = params['V_corp_sys_r_u']
except: self.V_corp_sys_r_u = 0
try: self.V_cord_sys_r_u = params['V_cord_sys_r_u']
except: self.V_cord_sys_r_u = 0
self.ndcor = 9
self.varmap = varmap
self.auxmap = auxmap
self.vs = vs
def equation_map(self, vindex, aindex, x_, a_, df_, f_, p_ar_, p_v_, p_at_):
self.varmap['q_corp_sys_l_in'] = vindex
self.varmap['q_corp_sys_l'] = vindex+1
self.varmap['p_cord_sys_l'] = vindex+2
self.varmap['q_cord_sys_l'] = vindex+3
self.varmap['q_corp_sys_r_in'] = vindex+4
self.varmap['q_corp_sys_r'] = vindex+5
self.varmap['p_cord_sys_r'] = vindex+6
self.varmap['q_cord_sys_r'] = vindex+7
self.varmap['q_ven'+str(self.vs+1)+'_sys'] = vindex+8
q_corp_sys_l_in_ = sp.Symbol('q_corp_sys_l_in_')
q_corp_sys_l_ = sp.Symbol('q_corp_sys_l_')
p_cord_sys_l_ = sp.Symbol('p_cord_sys_l_')
q_cord_sys_l_ = sp.Symbol('q_cord_sys_l_')
q_corp_sys_r_in_ = sp.Symbol('q_corp_sys_r_in_')
q_corp_sys_r_ = sp.Symbol('q_corp_sys_r_')
p_cord_sys_r_ = sp.Symbol('p_cord_sys_r_')
q_cord_sys_r_ = sp.Symbol('q_cord_sys_r_')
q_cord_sys_out_ = sp.Symbol('q_ven'+str(self.vs+1)+'_sys_')
x_[self.varmap['q_corp_sys_l_in']] = q_corp_sys_l_in_
x_[self.varmap['q_corp_sys_l']] = q_corp_sys_l_
x_[self.varmap['p_cord_sys_l']] = p_cord_sys_l_
x_[self.varmap['q_cord_sys_l']] = q_cord_sys_l_
x_[self.varmap['q_corp_sys_r_in']] = q_corp_sys_r_in_
x_[self.varmap['q_corp_sys_r']] = q_corp_sys_r_
x_[self.varmap['p_cord_sys_r']] = p_cord_sys_r_
x_[self.varmap['q_cord_sys_r']] = q_cord_sys_r_
x_[self.varmap['q_ven'+str(self.vs+1)+'_sys']] = q_cord_sys_out_
# populate df_ and f_ arrays
df_[vindex] = self.C_corp_sys_l * (p_ar_[0] - self.Z_corp_sys_l * q_corp_sys_l_in_) # left coronary proximal volume rate
df_[vindex+1] = 0.
df_[vindex+2] = self.C_cord_sys_l * (p_cord_sys_l_ - p_v_) # left coronary distal volume rate
df_[vindex+3] = 0.
df_[vindex+4] = self.C_corp_sys_r * (p_ar_[1] - self.Z_corp_sys_r * q_corp_sys_r_in_) # right coronary proximal volume rate
df_[vindex+5] = 0.
df_[vindex+6] = self.C_cord_sys_r * (p_cord_sys_r_ - p_v_) # right coronary distal volume rate
df_[vindex+7] = 0.
df_[vindex+8] = 0.
f_[vindex] = q_corp_sys_l_ - q_corp_sys_l_in_ # left coronary proximal flow balance
f_[vindex+1] = (p_cord_sys_l_ - p_ar_[0] + self.Z_corp_sys_l * q_corp_sys_l_in_)/self.R_corp_sys_l + q_corp_sys_l_ # left coronary proximal momentum
f_[vindex+2] = q_cord_sys_l_ - q_corp_sys_l_ # left coronary distal flow balance
f_[vindex+3] = (p_at_ - p_cord_sys_l_)/self.R_cord_sys_l + q_cord_sys_l_ # left coronary distal momentum
f_[vindex+4] = q_corp_sys_r_ - q_corp_sys_r_in_ # right coronary proximal flow balance
f_[vindex+5] = (p_cord_sys_r_ - p_ar_[1] + self.Z_corp_sys_r * q_corp_sys_r_in_)/self.R_corp_sys_r + q_corp_sys_r_ # right coronary proximal momentum
f_[vindex+6] = q_cord_sys_r_ - q_corp_sys_r_ # right coronary distal flow balance
f_[vindex+7] = (p_at_ - p_cord_sys_r_)/self.R_cord_sys_r + q_cord_sys_r_ # right coronary distal momentum
f_[vindex+8] = q_cord_sys_out_ - q_cord_sys_l_ - q_cord_sys_r_ # coronary sinus flow balance
# auxiliary map and variables
self.auxmap['V_corp_sys_l'] = aindex
self.auxmap['V_cord_sys_l'] = aindex+1
self.auxmap['V_corp_sys_r'] = aindex+2
self.auxmap['V_cord_sys_r'] = aindex+3
a_[self.auxmap['V_corp_sys_l']] = self.C_corp_sys_l * (p_ar_[0] - self.Z_corp_sys_l * q_corp_sys_l_in_) + self.V_corp_sys_l_u
a_[self.auxmap['V_cord_sys_l']] = self.C_cord_sys_l * (p_cord_sys_l_ - p_v_) + self.V_cord_sys_l_u
a_[self.auxmap['V_corp_sys_r']] = self.C_corp_sys_r * (p_ar_[1] - self.Z_corp_sys_r * q_corp_sys_r_in_) + self.V_corp_sys_r_u
a_[self.auxmap['V_cord_sys_r']] = self.C_cord_sys_r * (p_cord_sys_r_ - p_v_) + self.V_cord_sys_r_u
# safety check that we don't hand in a zero symbol for p_v
if p_v_ is sp.S.Zero: raise ValueError("Zero symbol for left ventricular pressure!")
return [q_corp_sys_l_in_,q_corp_sys_r_in_], q_cord_sys_out_
def initialize(self, var, iniparam):
try: var[self.varmap['q_corp_sys_l_in']] = iniparam['q_corp_sys_l_in_0']
except: var[self.varmap['q_corp_sys_l_in']] = iniparam['q_corp_sys_l_0']
var[self.varmap['q_corp_sys_l']] = iniparam['q_corp_sys_l_0']
var[self.varmap['p_cord_sys_l']] = iniparam['p_cord_sys_l_0']
var[self.varmap['q_cord_sys_l']] = iniparam['q_cord_sys_l_0']
try: var[self.varmap['q_corp_sys_r_in']] = iniparam['q_corp_sys_r_in_0']
except: var[self.varmap['q_corp_sys_r_in']] = iniparam['q_corp_sys_r_0']
var[self.varmap['q_corp_sys_r']] = iniparam['q_corp_sys_r_0']
var[self.varmap['p_cord_sys_r']] = iniparam['p_cord_sys_r_0']
var[self.varmap['q_cord_sys_r']] = iniparam['q_cord_sys_r_0']
try: var[self.varmap['q_ven'+str(self.vs+1)+'_sys']] = iniparam['q_ven'+str(self.vs+1)+'_sys_0']
except: var[self.varmap['q_ven'+str(self.vs+1)+'_sys']] = iniparam['q_ven_sys_0']
def print_to_screen(self, var_sq, aux):
print("Output of 0D coronary model (ZCRp_CRd_lr):")
print('{:<12s}{:<3s}{:<7.3f}{:<3s}{:<12s}{:<3s}{:<7.3f}'.format('p_cord_sys_l',' = ',var_sq[self.varmap['p_cord_sys_l']],' ','p_cord_sys_r',' = ',var_sq[self.varmap['p_cord_sys_r']]))
sys.stdout.flush()
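# Hedged construction sketch (the parameter values, empty maps, and venous-segment index
# below are illustrative, not taken from an actual model setup):
# params = {'Z_corp_sys': 3.2e-3, 'C_corp_sys': 0.01, 'R_corp_sys': 0.05,
#           'C_cord_sys': 0.08, 'R_cord_sys': 0.3}
# cor = coronary_circ_ZCRp_CRd(params, varmap={}, auxmap={}, vs=1)
# The surrounding 0D solver then calls cor.equation_map(...) to register the four coronary
# unknowns and their residual equations, and cor.initialize(...) to set initial values.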
| 2.21875
| 2
|
APPRoot/loveword/migrations/0022_auto_20210217_1455.py
|
1633743096/-
| 9
|
12779767
|
<reponame>1633743096/-
# Generated by Django 2.2.1 on 2021-02-17 06:55
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('loveword', '0021_quotation'),
]
operations = [
migrations.AlterField(
model_name='quotation',
name='content',
field=models.CharField(blank=True, max_length=255, null=True, verbose_name='语录内容'),
),
]
| 1.460938
| 1
|
sentenceEmbedder.py
|
varghesepaul-cp/NLP_Sentence_Similarity
| 0
|
12779768
|
import gensim
from gensim.models.doc2vec import Doc2Vec, TaggedDocument
from sklearn.metrics.pairwise import cosine_similarity
f = open('dataset.txt', 'r')
print(f.read())
f.close()
corpus = [
"This is first Sentence",
"This is second Sentence",
"This is third Sentence",
"This is fourth Sentence",
"This is fifth Sentence",
]
documents = [TaggedDocument(doc,[i]) for i, doc in enumerate(corpus)]
model = Doc2Vec(documents , vector_size = 10 , window = 2 , min_count = 1 , workers =4)
model.save('sentenceEmbedderModel.pkl')
print('Model Creation Successful.' + repr(model))
vector = model.infer_vector(['this is not a sentence'])
vector_2 = model.infer_vector(['this is not a first sentence'])
vector_3 = model.infer_vector(['this is not a sentence'])
print("vector is " + repr(vector))
print("1 vs 2 " + repr(cosine_similarity([vector],[vector_2])))
print("1 vs 3 " + repr(cosine_similarity([vector],[vector_3])))
| 2.59375
| 3
|
Pillow-4.3.0/Tests/test_file_bmp.py
|
leorzz/simplemooc
| 0
|
12779769
|
from helper import unittest, PillowTestCase, hopper
from PIL import Image, BmpImagePlugin
import io
class TestFileBmp(PillowTestCase):
def roundtrip(self, im):
outfile = self.tempfile("temp.bmp")
im.save(outfile, 'BMP')
reloaded = Image.open(outfile)
reloaded.load()
self.assertEqual(im.mode, reloaded.mode)
self.assertEqual(im.size, reloaded.size)
self.assertEqual(reloaded.format, "BMP")
def test_sanity(self):
self.roundtrip(hopper())
self.roundtrip(hopper("1"))
self.roundtrip(hopper("L"))
self.roundtrip(hopper("P"))
self.roundtrip(hopper("RGB"))
def test_invalid_file(self):
with open("Tests/images/flower.jpg", "rb") as fp:
self.assertRaises(SyntaxError,
BmpImagePlugin.BmpImageFile, fp)
def test_save_to_bytes(self):
output = io.BytesIO()
im = hopper()
im.save(output, "BMP")
output.seek(0)
reloaded = Image.open(output)
self.assertEqual(im.mode, reloaded.mode)
self.assertEqual(im.size, reloaded.size)
self.assertEqual(reloaded.format, "BMP")
def test_dpi(self):
dpi = (72, 72)
output = io.BytesIO()
im = hopper()
im.save(output, "BMP", dpi=dpi)
output.seek(0)
reloaded = Image.open(output)
self.assertEqual(reloaded.info["dpi"], dpi)
def test_save_bmp_with_dpi(self):
# Test for #1301
# Arrange
outfile = self.tempfile("temp.jpg")
im = Image.open("Tests/images/hopper.bmp")
# Act
im.save(outfile, 'JPEG', dpi=im.info['dpi'])
# Assert
reloaded = Image.open(outfile)
reloaded.load()
self.assertEqual(im.info['dpi'], reloaded.info['dpi'])
self.assertEqual(im.size, reloaded.size)
self.assertEqual(reloaded.format, "JPEG")
def test_load_dib(self):
# test for #1293, Imagegrab returning Unsupported Bitfields Format
im = BmpImagePlugin.DibImageFile('Tests/images/clipboard.dib')
target = Image.open('Tests/images/clipboard_target.png')
self.assert_image_equal(im, target)
if __name__ == '__main__':
unittest.main()
| 2.3125
| 2
|
exp2/layers.py
|
raghakot/deep-learning-experiments
| 7
|
12779770
|
from keras.layers.convolutional import Convolution2D
from keras import backend as K
import tensorflow as tf
# index permutation that rotates the outer ring of a 3x3 kernel one step (45 degrees);
# applying it twice rotates the kernel by 90 degrees
permutation = [[1, 0], [0, 0], [0, 1], [2, 0], [1, 1], [0, 2], [2, 1], [2, 2], [1, 2]]
def shift_rotate(w, shift=1):
shape = w.get_shape()
for i in range(shift):
w = tf.reshape(tf.gather_nd(w, permutation), shape)
return w
class Convolution2D_4(Convolution2D):
def call(self, x, mask=None):
w = self.W
w_rot = [w]
for i in range(3):
w = shift_rotate(w, shift=2)
w_rot.append(w)
outputs = tf.stack([K.conv2d(x, w_i, strides=self.subsample,
border_mode=self.border_mode,
dim_ordering=self.dim_ordering,
filter_shape=self.W_shape) for w_i in w_rot])
output = K.max(outputs, 0)
if self.bias:
if self.dim_ordering == 'th':
output += K.reshape(self.b, (1, self.nb_filter, 1, 1))
elif self.dim_ordering == 'tf':
output += K.reshape(self.b, (1, 1, 1, self.nb_filter))
else:
raise ValueError('Invalid dim_ordering:', self.dim_ordering)
output = self.activation(output)
return output
class Convolution2D_8(Convolution2D):
def call(self, x, mask=None):
w = self.W
w_rot = [w]
for i in range(7):
w = shift_rotate(w)
w_rot.append(w)
outputs = tf.stack([K.conv2d(x, w_i, strides=self.subsample,
border_mode=self.border_mode,
dim_ordering=self.dim_ordering,
filter_shape=self.W_shape) for w_i in w_rot])
output = K.max(outputs, 0)
if self.bias:
if self.dim_ordering == 'th':
output += K.reshape(self.b, (1, self.nb_filter, 1, 1))
elif self.dim_ordering == 'tf':
output += K.reshape(self.b, (1, 1, 1, self.nb_filter))
else:
raise ValueError('Invalid dim_ordering:', self.dim_ordering)
output = self.activation(output)
return output
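# Hedged usage sketch (the arguments follow the old Keras 1.x Convolution2D signature that
# this module assumes; the filter count and input shape are illustrative):
# from keras.models import Sequential
# model = Sequential()
# model.add(Convolution2D_8(32, 3, 3, border_mode='same', input_shape=(64, 64, 3)))
# Each call convolves with the kernel and its seven 45-degree rotations and keeps the
# element-wise maximum, so the layer responds (approximately) independently of rotation.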
| 2.71875
| 3
|
model-service/model_manager.py
|
leifan89/model-service
| 0
|
12779771
|
<filename>model-service/model_manager.py<gh_stars>0
from typing import Any
from typing import Dict
from .model.classifier import Classifier
class ModelManager:
def __init__(self, models: Dict[str, Classifier]):
self.models = models
def add_model(self, name: str, model: Classifier) -> None:
if name in self.models:
raise RuntimeError(f"Failed to add model with name {name}, already exists")
self.models[name] = model
def train_model(self, name: str, X, y) -> None:
if name in self.models:
self.models[name].train(X, y)
else:
raise RuntimeError(f"Model {name} is not found")
def classify_with_model(self, name: str, X) -> Any:
if name in self.models:
return self.models[name].classify(X)
else:
raise RuntimeError(f"Model {name} is not found")
def refresh_model(self, name: str) -> None:
if name in self.models:
self.models[name].refresh()
else:
raise RuntimeError(f"Model {name} is not found")
def shutdown(self) -> None:
for model in self.models.values():
model.shutdown()
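# Hedged usage sketch (SomeClassifier stands in for any concrete Classifier implementation;
# the model name and data below are illustrative):
# manager = ModelManager(models={})
# manager.add_model("intent", SomeClassifier())
# manager.train_model("intent", X_train, y_train)
# predictions = manager.classify_with_model("intent", X_test)
# manager.shutdown()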
| 2.828125
| 3
|
utils/flooder.py
|
bentettmar/kahoot-flooder
| 1
|
12779772
|
import kahoot
import threading
import utils
class Flooder:
def __init__(self, gamepin, botname, amount, delay, window):
self.gamepin = gamepin
self.botname = botname
self.amount = amount
self.delay = delay
self.window = window
self.suffix = 0
self.bot = kahoot.client()
def loop(self):
if self.suffix < int(self.amount):
self.suffix += 1
self.bot.join(int(self.gamepin), f"{self.botname} [{self.suffix}]")
self.bot.on("joined")
            self.window.after(int(self.delay), self.loop)  # pass the callable; calling loop() here would recurse immediately
def start(self):
notifier = utils.Notifier()
notifier.send("Kahoot Flooder", f"Starting flood with {self.amount} bots, GUI may hang.")
        self.window.after(int(self.delay), self.loop)
| 2.953125
| 3
|
utils/json.py
|
visinf/mnvi
| 0
|
12779773
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json as jsn
import os
import sys
import unicodedata
from utils import six
def read_json(filename):
def _convert_from_unicode(data):
new_data = dict()
for name, value in six.iteritems(data):
if isinstance(name, six.string_types):
name = unicodedata.normalize('NFKD', name).encode(
'ascii', 'ignore')
if isinstance(value, six.string_types):
value = unicodedata.normalize('NFKD', value).encode(
'ascii', 'ignore')
if isinstance(value, dict):
value = _convert_from_unicode(value)
new_data[name] = value
return new_data
output_dict = None
with open(filename, "r") as f:
lines = f.readlines()
try:
output_dict = jsn.loads(''.join(lines), encoding='utf-8')
except:
raise ValueError('Could not read %s. %s' % (filename, sys.exc_info()[1]))
output_dict = _convert_from_unicode(output_dict)
return output_dict
def _replace_quotes(x):
return x.replace("\'", "\"")
def _parse_value(value):
if isinstance(value, tuple):
value = list(value)
if value is None:
return "null"
if isinstance(value, str):
if value.lower() == "none":
return "null"
if value.lower() == "false":
return "false"
if value.lower() == "true":
return "true"
value = value.replace("\'", "\"")
return "\"%s\"" % _replace_quotes(value)
if isinstance(value, bool):
return str(value).lower()
if isinstance(value, list):
result = "["
for i, item in enumerate(value):
result += _parse_value(item)
if i < len(value) - 1:
result += ", "
result += "]"
return result
if isinstance(value, dict):
result = "{"
item_iterator = six.itersorteditems(value)
for i, (dict_key, dict_value) in enumerate(item_iterator):
result += "\"%s\": %s" % (dict_key, _parse_value(dict_value))
if i < len(value) - 1:
result += ", "
result += "}"
return result
return "%s" % _replace_quotes(str(value))
# ----------------------------------------------------------------------------
# Writes all pairs to a filename for book keeping
# Either .txt or .json
# ----------------------------------------------------------------------------
def write_dictionary_to_file(input_dict, filename, sortkeys=False):
# ensure dir
d = os.path.dirname(filename)
if not os.path.exists(d):
os.makedirs(d)
item_iterator = six.itersorteditems(input_dict) if sortkeys else six.iteritems(input_dict)
# check for json extension
ext = os.path.splitext(filename)[1]
if ext == ".json":
with open(filename, 'w') as file:
file.write("{\n")
for i, (key, value) in enumerate(item_iterator):
file.write(" \"%s\": %s" % (key, _parse_value(value)))
if i < len(input_dict) - 1:
file.write(',\n')
else:
file.write('\n')
file.write("}\n")
else:
with open(filename, 'w') as file:
for key, value in item_iterator:
file.write('%s: %s\n' % (key, value))
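# Minimal, hedged usage sketch (the dictionary contents and output path are illustrative);
# note that None values are rendered as JSON null by _parse_value:
# write_dictionary_to_file({"learning_rate": 1e-4, "model": "resnet", "augment": None},
#                          "/tmp/example_args.json", sortkeys=True)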
| 2.65625
| 3
|
mkt/reviewers/tasks.py
|
oremj/zamboni
| 0
|
12779774
|
<reponame>oremj/zamboni<filename>mkt/reviewers/tasks.py
import datetime
import logging
from django.conf import settings
from celeryutils import task
from tower import ugettext as _
import amo
from amo.utils import send_mail_jinja
import mkt.constants.reviewers as rvw
log = logging.getLogger('z.task')
@task
def send_mail(cleaned_data, theme_lock):
"""
Send emails out for respective review actions taken on themes.
"""
theme = cleaned_data['theme']
action = cleaned_data['action']
reject_reason = cleaned_data['reject_reason']
reason = None
if reject_reason:
reason = rvw.THEME_REJECT_REASONS[reject_reason]
comment = cleaned_data['comment']
emails = set(theme.addon.authors.values_list('email', flat=True))
context = {
'theme': theme,
'base_url': settings.SITE_URL,
'reason': reason,
'comment': comment
}
subject = None
if action == rvw.ACTION_APPROVE:
subject = _('Thanks for submitting your Theme')
template = 'reviewers/themes/emails/approve.html'
theme.addon.update(status=amo.STATUS_PUBLIC)
elif action == rvw.ACTION_REJECT:
subject = _('A problem with your Theme submission')
template = 'reviewers/themes/emails/reject.html'
theme.addon.update(status=amo.STATUS_REJECTED)
reason = (rvw.THEME_REJECT_REASONS[reject_reason])
elif action == rvw.ACTION_DUPLICATE:
subject = _('A problem with your Theme submission')
template = 'reviewers/themes/emails/reject.html'
theme.addon.update(status=amo.STATUS_REJECTED)
reason = 'Duplicate'
elif action == rvw.ACTION_FLAG:
subject = _('Theme submission flagged for review')
template = 'reviewers/themes/emails/flag_reviewer.html'
emails = [settings.SENIOR_EDITORS_EMAIL]
theme.addon.update(status=amo.STATUS_REVIEW_PENDING)
# Send another email to the user notifying them that their Theme has
# been flagged.
send_mail_jinja(_('A problem with your Theme submission'),
'reviewers/themes/emails/flag_user.html', context,
recipient_list=emails,
headers={'Reply-To': theme_lock.reviewer.email})
elif action == rvw.ACTION_MOREINFO:
subject = _('A question about your Theme submission')
template = 'reviewers/themes/emails/moreinfo.html'
context['reviewer_email'] = theme_lock.reviewer.email
theme.addon.update(status=amo.STATUS_REVIEW_PENDING)
amo.log(amo.LOG.THEME_REVIEW, theme.addon, details={
'action': action,
'reject_reason': reject_reason,
'comment': comment}, user=theme_lock.reviewer)
log.info('Theme %s (%s) - %s' % (theme.addon.name, theme.id, action))
theme.approve = datetime.datetime.now()
theme.save()
send_mail_jinja(subject, template, context,
recipient_list=emails,
headers={'Reply-To': theme_lock.reviewer.email})
| 1.84375
| 2
|
47/test_password.py
|
alehpineda/bitesofpy
| 0
|
12779775
|
<gh_stars>0
from password import validate_password, used_passwords
def test_password_len():
assert not validate_password("<PASSWORD>")
assert not validate_password("<PASSWORD>")
def test_password_missing_chars():
assert not validate_password("UPPERCASE")
assert not validate_password("lowercase")
assert not validate_password("<PASSWORD>")
assert not validate_password("<PASSWORD>")
assert not validate_password("_password_")
assert not validate_password("@#$$)==1")
def test_password_only_one_letter():
assert not validate_password("@#$$)==1a")
def test_validate_password_good_pws():
assert validate_password("<PASSWORD>")
assert validate_password("<PASSWORD>")
assert validate_password("<PASSWORD>")
assert validate_password("<PASSWORD>")
def test_password_not_used_before():
assert not validate_password("<PASSWORD>")
assert not validate_password("<PASSWORD>")
def test_password_cache_cannot_reuse():
num_passwords_use = len(used_passwords)
assert validate_password("<PASSWORD>")
assert len(used_passwords) == num_passwords_use + 1
assert not validate_password("<PASSWORD>")
| 2.921875
| 3
|
tests/record/test_enums.py
|
Ellipsanime/openpype-shotgun
| 6
|
12779776
|
<reponame>Ellipsanime/openpype-shotgun<filename>tests/record/test_enums.py<gh_stars>1-10
import uuid
from assertpy import assert_that
from shotgrid_leecher.record.enums import QueryStringType
def test_query_string_type_from_unknown_type():
# Arrange
guid = uuid.uuid4()
# Act
actual = QueryStringType.from_param(str(type(guid)))
# Assert
assert_that(actual.value).is_equal_to(str)
def test_query_string_type_from_string():
# Arrange
# Act
actual = QueryStringType.from_param("StR")
# Assert
assert_that(actual.value).is_equal_to(str)
def test_query_string_type_from_float():
# Arrange
# Act
actual = QueryStringType.from_param("flOaT ")
# Assert
assert_that(actual.value).is_equal_to(float)
def test_query_string_type_from_int():
# Arrange
# Act
actual = QueryStringType.from_param(" InT")
# Assert
assert_that(actual.value).is_equal_to(int)
| 2.609375
| 3
|
day1/part1.py
|
cheddar-cheeze/advent-of-code-2018
| 0
|
12779777
|
<filename>day1/part1.py
#! /bin/python3
import requests
text = """
+6
-17
+16
+7
+12
+2
-7
-5
-4
-16
+2
+12
-16
-1
-12
-3
+8
-12
+8
-3
+18
-9
+1
-20
+15
-11
-18
-8
+18
-4
+10
+1
-2
+13
+12
+16
-6
+12
+2
+11
+5
+1
-14
+16
-20
-5
+20
+6
+13
+11
+3
-9
-15
+1
+19
+8
+19
-16
+19
-17
-17
+19
+2
+5
+16
+2
-4
+19
+6
-16
+15
+7
+5
-18
+19
-8
+9
+15
-3
-3
-15
+13
-1
+10
+13
+16
-4
+10
-12
+10
+17
-2
-17
+10
+17
+8
+7
+5
+17
+10
+14
-17
-13
+19
+6
-11
+10
+8
+1
-18
-5
-8
-19
+12
+12
-4
+8
-10
-9
+2
+11
-6
+23
+17
-10
+5
+9
+19
+4
-1
+6
+12
-16
+9
+6
-9
+4
-2
-14
-5
-9
+13
+10
-2
+17
+10
+13
-5
-12
+19
+10
+16
+13
-19
-17
+4
+12
+18
-14
+5
+17
-5
-4
+18
-5
-1
-18
+11
+12
+15
-9
+3
-7
+8
-16
+9
-13
-10
-11
+3
+2
+19
+21
+6
+6
+2
-5
-6
+13
-18
-15
-16
+8
-13
-14
+9
-3
-8
-11
+9
-15
-16
+4
+13
-18
+10
-3
+15
+8
-18
+13
-5
+18
+15
-20
+19
+26
+7
-4
+25
-8
-19
+13
-2
-8
+11
+22
-16
+9
-1
+20
+12
-9
+10
-8
+9
+19
+17
+10
+4
-11
+15
+5
+4
-11
+19
-7
-6
-9
-18
+2
-9
+16
-17
+21
+16
+7
+1
+11
-4
-20
+31
-1
+2
+17
-12
-12
+18
-16
+18
-6
+19
-5
+1
+11
-8
-1
-16
+15
+17
+4
+1
-8
+15
-3
+18
-9
-15
-16
-18
+7
-22
+21
-13
-19
+3
-25
+5
-8
-19
-18
-6
+12
-11
+19
+6
+19
+70
-12
-15
+3
+25
+1
+18
-12
+21
+14
+2
+6
-13
-12
+6
+3
+13
+2
+11
-9
+1
-13
+15
+15
+1
+10
-9
+5
+13
-17
+10
+3
+12
-7
-4
+6
+3
-16
+15
+13
-9
+12
+20
+10
+3
+14
+16
-1
-13
-9
+8
+2
+17
-13
+10
+1
+11
+20
+16
-14
+11
-17
-2
+20
+7
-24
-7
+14
-1
+5
+10
+26
-13
+30
+9
-6
+13
+31
+14
-1
+4
-24
-59
-10
+8
-20
-11
-14
-10
-6
-15
-3
-14
+10
+16
+2
-5
+4
-18
-13
-7
-11
-5
-7
-16
-15
-1
-10
+16
+8
-17
+12
-18
-5
-25
+17
-12
-10
-3
-10
+11
+4
-7
+19
+51
+4
+6
-5
-11
-2
+15
-6
-5
-29
-20
+6
+6
-4
-28
-13
-2
-15
-16
-64
+23
-36
-25
-21
-52
-118
-18
-28
+25
-6
+29
+26
+46
-69
-37
-75
-100
-463
-63213
+11
+13
-21
-4
+17
+29
-10
-16
+2
-16
+7
+11
-16
-9
-21
+6
-1
-8
-3
+7
-5
-18
+14
-10
+1
+7
+9
-19
+7
-18
-7
+8
-7
+10
+4
+7
+9
+18
+55
+21
+16
-13
-61
+11
-45
-35
-14
-8
-10
-12
+6
-7
+17
-2
-13
-20
+11
-13
+12
+13
-2
-8
+13
-23
-8
-9
+4
+2
+4
-12
-11
-11
-9
+11
+14
-18
-16
+19
-13
-14
+7
-6
-9
-16
-5
+15
+1
-8
+15
+15
+12
+10
+16
-18
+3
-17
-10
-16
-15
-16
+1
+4
+14
-10
-6
+12
-9
-8
-20
-7
+4
+6
-22
+10
-15
-13
+15
-12
+3
-14
-17
+18
-20
-19
+14
+2
-9
-16
+11
-19
-1
-19
-3
-12
-16
-4
-2
+7
-12
+14
+4
+14
+13
+1
+4
+10
+14
-16
-2
-19
-8
-8
-22
-23
+10
+1
-14
-17
-17
-6
+9
-16
+4
+11
-14
+11
-17
-9
-8
-17
-13
+17
-15
-8
-16
-3
-5
-8
-17
+2
+5
+2
-19
-1
-12
+1
+16
-8
-19
-18
-2
-15
+7
-11
-6
+12
-14
-18
+15
-18
-12
+10
+12
-17
-13
-18
+13
-11
+15
-9
+8
+11
-20
+17
+9
+18
-29
+3
-14
-14
+2
-12
-5
-19
-19
-16
-15
+13
-8
-3
-17
-4
-9
+17
-12
+17
-10
+22
+6
-9
+6
-5
+17
+17
-5
-16
-16
-13
-13
+5
+6
-24
-3
-16
-12
+7
+18
-4
+13
-8
+13
+8
+13
-18
-5
-14
+17
-11
+1
-21
-13
-10
-16
+4
+16
-18
+13
-19
-10
-2
+7
-23
+8
-3
-2
-36
-5
-6
+10
-19
+11
-19
+7
+17
+13
-6
+12
-3
+14
+11
+18
+7
+13
+4
+19
-1
+12
+1
-5
-6
-6
+1
+2
+7
-14
-3
-2
+33
+14
+25
+28
+8
+8
+11
+19
-6
+12
-16
-13
-9
+7
-2
+9
-19
-19
+12
-8
+26
-16
-16
+19
+18
+19
-18
+15
-10
-2
+8
-5
+1
-9
-25
+7
+16
+17
+12
+7
+3
+2
-16
+30
+4
+5
-13
-14
-16
+7
-12
+18
+6
+25
+24
+1
+9
+6
-3
-11
+20
-2
+18
+12
+18
-11
-2
+15
+8
-11
-16
-6
+20
+11
+11
+16
+2
+18
+12
-17
+12
+11
-9
+16
+8
-10
-7
+19
+2
+13
+12
-20
-25
+2
+9
-13
+19
-4
-10
-17
+7
-9
+10
+15
-13
-25
-1
-10
+13
-12
+7
-18
-7
-17
+9
-4
-17
-11
-7
-5
-1
-16
+14
+20
-13
+42
-25
-27
-39
+12
-41
+3
+19
-2
-23
-12
-48
-35
+6
-116
+16
+18
-2
+7
-11
-40
+41
-155
+5
+125
-238
-1
-63346
-9
-18
-3
-11
+13
-4
-3
-4
-3
+23
+11
+14
+12
+3
+7
+1
-18
+15
-7
-15
-1
-9
+14
-10
-22
-1
-24
-13
+6
-14
-7
-8
-7
-1
-5
+9
+6
+19
+13
+3
-19
-13
-15
-2
+15
+3
+24
+26
+72
-16
+14
+16
+17
+14
-4
-3
+10
+3
+11
+12
+15
+9
+12
+5
+11
-35
-23
+11
-10
-5
+3
-15
-15
+128514
"""
a = 0
for num in text.splitlines():
if '-' in num:
a = a - int(num.replace('-', ''))
if '+' in num:
a = a + int(num.replace('+', ''))
else:
pass
print("the value is {}".format(a,))
| 2.484375
| 2
|
mvc/__init__.py
|
PyXRD/pyxrd
| 27
|
12779778
|
<reponame>PyXRD/pyxrd
# coding=UTF-8
# ex:ts=4:sw=4:et=on
# -------------------------------------------------------------------------
# Copyright (C) 2014 by <NAME> <mathijs dot dumon at gmail dot com>
# Copyright (C) 2005 by <NAME> <<EMAIL>>
#
# mvc is a framework derived from the original pygtkmvc framework
# hosted at: <http://sourceforge.net/projects/pygtkmvc/>
#
# mvc is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# mvc is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor,
# Boston, MA 02110, USA.
# -------------------------------------------------------------------------
# FIXME clean this mess up
"""
Shortcuts are provided to the following classes defined in submodules:
.. class:: Model
:noindex:
.. class:: TreeStoreModel
:noindex:
.. class:: ListStoreModel
:noindex:
.. class:: TextBufferModel
:noindex:
.. class:: ModelMT
:noindex:
.. class:: Controller
:noindex:
.. class:: View
:noindex:
.. class:: Observer
:noindex:
.. class:: Observable
:noindex:
"""
from .support.version import LooseVersion
from .__version import __version__
# Class shortcuts:
from .observers import Observer
from .support.observables import Signal, Observable
from .models import *
try:
from .view import View
from .controller import Controller
except ImportError:
import logging
logging.getLogger(__name__).warning("ImportError when trying to load View and/or Controller: do you have PyGTK/GObject installed?")
| 1.304688
| 1
|
app/main/handlers/main.py
|
chrisrowles/raspi-sh
| 0
|
12779779
|
<filename>app/main/handlers/main.py<gh_stars>0
import io
import socket
import weakref
import paramiko
import tornado.websocket
from tornado.ioloop import IOLoop
from app.main.worker import Worker
from app.main.handlers.base import BaseHandler
DELAY = 3
workers = {}
def recycle(worker):
if worker.handler:
return
workers.pop(worker.id, None)
worker.close()
class HttpHandler(BaseHandler):
def get_privatekey(self):
try:
data = self.request.files.get('privatekey')[0]['body']
except TypeError:
return
return data.decode('utf-8')
def get_specific_pkey(self, pkeycls, privatekey, password):
try:
pkey = pkeycls.from_private_key(io.StringIO(privatekey),
password=password)
except paramiko.PasswordRequiredException:
raise ValueError('Need password to decrypt the private key.')
except paramiko.SSHException:
pass
else:
return pkey
def get_pkey(self, privatekey, password):
password = password.encode('utf-8') if password else None
pkey = self.get_specific_pkey(paramiko.RSAKey, privatekey, password)\
or self.get_specific_pkey(paramiko.DSSKey, privatekey, password)\
or self.get_specific_pkey(paramiko.ECDSAKey, privatekey, password)\
or self.get_specific_pkey(paramiko.Ed25519Key, privatekey,
password)
if not pkey:
raise ValueError('Not a valid private key file or '
'wrong password for decrypting the private key.')
return pkey
def get_port(self):
value = self.get_value('port')
try:
port = int(value)
except ValueError:
port = 0
if 0 < port < 65536:
return port
raise ValueError('Invalid port {}'.format(value))
def get_value(self, name):
value = self.get_argument(name)
if not value:
raise ValueError('Empty {}'.format(name))
return value
def get_args(self):
hostname = self.get_value('hostname')
port = self.get_port()
username = self.get_value('username')
password = self.get_argument('password')
privatekey = self.get_privatekey()
pkey = self.get_pkey(privatekey, password) if privatekey else None
args = (hostname, port, username, password, pkey)
return args
def ssh_connect(self):
ssh = paramiko.SSHClient()
ssh.load_system_host_keys()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
args = self.get_args()
dest_addr = '{}:{}'.format(*args[:2])
try:
ssh.connect(*args, timeout=6)
except socket.error:
raise ValueError('Unable to connect to {}'.format(dest_addr))
except paramiko.BadAuthenticationType:
raise ValueError('Authentication failed.')
channel = ssh.invoke_shell(term='xterm')
channel.setblocking(0)
worker = Worker(ssh, channel, dest_addr)
IOLoop.current().call_later(DELAY, recycle, worker)
return worker
def post(self):
worker_id = None
status = None
try:
worker = self.ssh_connect()
except Exception as e:
status = str(e)
else:
worker_id = worker.id
workers[worker_id] = worker
self.write(dict(id=worker_id, status=status))
class WebsocketHandler(tornado.websocket.WebSocketHandler):
def data_received(self, chunk):
pass
def __init__(self, *args, **kwargs):
self.loop = IOLoop.current()
self.worker_ref = None
super().__init__(*args, **kwargs)
def check_origin(self, origin):
return True
def get_addr(self):
ip = self.request.headers.get_list('X-Real-Ip')
port = self.request.headers.get_list('X-Real-Port')
addr = ':'.join(ip + port)
if not addr:
addr = '{}:{}'.format(*self.stream.socket.getpeername())
return addr
def open(self):
self.src_addr = self.get_addr()
worker = workers.pop(self.get_argument('id'), None)
if not worker:
self.close(reason='Invalid worker id')
return
self.set_nodelay(True)
worker.set_handler(self)
self.worker_ref = weakref.ref(worker)
self.loop.add_handler(worker.fd, worker, IOLoop.READ)
def on_message(self, message):
worker = self.worker_ref()
worker.data_to_dst.append(message)
worker.on_write()
def on_close(self):
worker = self.worker_ref() if self.worker_ref else None
if worker:
worker.close()
| 2.25
| 2
|
purpledrop/electrode_board.py
|
uwmisl/purpledrop-driver
| 0
|
12779780
|
import json
import numpy as np
import os
import pkg_resources
import re
from typing import Any, AnyStr, Dict, List, Optional, Tuple
def load_peripheral(pdata, templates=None):
"""Load a peripheral from a dict
This loads a peripheral with support for templates, as used in the board
definition file format
Args:
pdata: A dict containing the peripheral definition
templates: A dict mapping types to template definitions
"""
if not 'type' in pdata:
raise ValueError("Peripheral definition requires a type field")
template = None
if templates is not None and pdata['type'] in templates:
template = templates[pdata['type']]
periph = pdata
# Override electrodes with fields from template
def map_electrode(e):
eid = e['id']
if template is None:
return e
e_template = next((x for x in template['electrodes'] if x['id'] == eid), None)
if e_template is None:
return e
# Merge dicts, with values in e taking priority in case of duplicate keys
return {**e_template, **e}
periph['electrodes'] = [map_electrode(e) for e in periph['electrodes']]
return periph
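# Hedged example of the template merge above (all field values are illustrative):
# templates = {"reservoir": {"electrodes": [{"id": 7, "polygon": [[0, 0], [1, 0], [1, 1], [0, 1]]}]}}
# pdata = {"type": "reservoir", "origin": [10.0, 2.0], "electrodes": [{"id": 7, "pin": 42}]}
# load_peripheral(pdata, templates)["electrodes"][0]
# # -> {"id": 7, "polygon": [[0, 0], [1, 0], [1, 1], [0, 1]], "pin": 42}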
class Fiducial(object):
"""Represents a fiducial location
"""
def __init__(self, corners: List[List[int]], label: str=""):
self.corners = corners
self.label = label
@staticmethod
def from_dict(data):
return Fiducial(**data)
def to_dict(self):
return {
'corners': self.corners,
'label': self.label
}
class ControlPoint(object):
"""Represents a control point in an image
A control point is a pair of corresponding points -- one in image coordinates
and the other in grid coordinates -- used to calibrate the position of
the electrode grid relative to fiducials.
"""
def __init__(self, grid_coord: Tuple[float, float], image_coord: Tuple[float, float]):
self.grid = grid_coord
self.image = image_coord
    @staticmethod
    def from_dict(data):
if not 'grid' in data:
raise ValueError(f'A control point must have a `grid` and `image` attribute: {data}')
if not 'image' in data:
raise ValueError(f'A control point must have a `grid` and `image` attribute: {data}')
return ControlPoint(data['grid'], data['image'])
class Registration(object):
"""A registration is a collection of fiducials and control points which
together define relationship between the electrode locations and fiducials
"""
def __init__(self, data: dict):
        if not 'fiducials' in data:
            raise ValueError(f'A Registration requires a `fiducials` attribute, not found in: {data}')
        if not 'control_points' in data:
            raise ValueError(f'A Registration requires a `control_points` attribute, not found in: {data}')
        if not isinstance(data['fiducials'], list):
            raise ValueError(f'A Registration `fiducials` attribute must be a list: {data}')
        if not isinstance(data['control_points'], list):
            raise ValueError(f'A Registration `control_points` attribute must be a list: {data}')
self.fiducials = [Fiducial.from_dict(f) for f in data['fiducials']]
self.control_points = [ControlPoint.from_dict(cp) for cp in data['control_points']]
class Layout(object):
"""Represents the 'layout' property of a baord definition
A layout defines the placement and pin mapping for the electrodes on the
board.
"""
def __init__(self, layout_def: Dict[str, Any]):
self.peripherals = None
self.grids = []
def intify_pins(grid_pins):
result = []
for row in grid_pins:
new_row: List[Optional[int]] = []
for pin in row:
if pin == -1 or pin is None:
new_row.append(None)
else:
new_row.append(int(pin))
result.append(new_row)
return result
# Old format files use 'grid' to define a single grid
# New format uses an array of objects, under the key 'grids'
if 'grid' in layout_def:
self.grids.append({
'origin': [0.0, 0.0],
'pitch': 1.0,
'pins': intify_pins(layout_def['grid'])
})
elif 'grids' in layout_def:
for g in layout_def['grids']:
self.grids.append({
'origin': g['origin'],
'pitch': g['pitch'],
'pins': intify_pins(g['pins']),
})
if 'peripherals' in layout_def:
self.peripherals = [load_peripheral(p, layout_def.get('peripheral_templates', None)) for p in layout_def['peripherals']]
def grid_location_to_pin(self, x: int, y: int, grid_number:int =0):
"""Return the pin number at given grid location, or None if no pin is
defined there.
"""
if grid_number < len(self.grids):
grid = self.grids[grid_number]['pins']
else:
grid = [[]] # Empty grid
if y < 0 or y >= len(grid):
return None
row = grid[y]
if x < 0 or x >= len(row):
return None
return grid[y][x]
def pin_to_grid_location(self, pin: int) -> Optional[Tuple[Tuple[int, int], int]]:
"""Return the grid location of a given pin number
"""
for g, grid in enumerate(self.grids):
for y, row in enumerate(grid['pins']):
for x, p in enumerate(row):
if p == pin:
return ((x, y), g)
return None
def pin_polygon(self, pin: int) -> Optional[List[Tuple[int, int]]]:
"""Get the polygon defining a pin in board coordinates
"""
# Try to find the pin in a grid
grid_info = self.pin_to_grid_location(pin)
if grid_info is not None:
loc, grid_idx = grid_info
square = np.array([[0., 0.], [0., 1.], [1., 1.], [1., 0.]])
grid = self.grids[grid_idx]
polygon = (square + loc) * grid['pitch'] + grid['origin']
return polygon.tolist()
# Try to find the pin in a peripheral
if self.peripherals is None:
return None
for periph in self.peripherals:
for el in periph['electrodes']:
if el['pin'] == pin:
polygon = np.array(el['polygon'])
rotation = np.deg2rad(periph.get('rotation', 0.0))
R = np.array([[np.cos(rotation), -np.sin(rotation)], [np.sin(rotation), np.cos(rotation)]])
polygon = np.dot(R, polygon.T).T
return (polygon + periph['origin']).tolist()
return None
def as_dict(self) -> dict:
"""Return a serializable dict version of the board definition
"""
return {
"grids": self.grids,
"peripherals": self.peripherals
}
class Board(object):
"""Represents the top-level object in an electrode board definition file
"""
def __init__(self, board_def: Dict[str, Any]):
self.registration: Optional[Registration] = None
if not 'layout' in board_def:
raise RuntimeError("Board definition file must contain a 'layout' object")
self.layout = Layout(board_def['layout'])
self.oversized_electrodes = board_def.get('oversized_electrodes', [])
if 'registration' in board_def:
self.registration = Registration(board_def['registration'])
@staticmethod
def load_from_file(filepath):
"""Create a Board from a board definition file
"""
with open(filepath, 'r') as f:
data = json.loads(f.read())
return Board(data)
@staticmethod
def load_from_string(data: AnyStr) -> 'Board':
"""Create a board from a JSON string in memory
"""
return Board(json.loads(data))
def as_dict(self) -> dict:
"""Return a serializable dict representation of the board
"""
return {
'layout': self.layout.as_dict(),
'oversized_electrodes': self.oversized_electrodes,
}
def list_boards():
"""Find all available board definitions.
Uses same search rules as load_board; see :func:`load_board`.
Returns:
A list of board names, which can be passed to `load_board`
"""
config_dir = os.path.expanduser("~/.config/purpledrop/boards")
package_files = pkg_resources.resource_listdir('purpledrop', 'boards')
if os.path.isdir(config_dir):
config_files = os.listdir(config_dir)
else:
config_files = []
board_names = []
def add_files(files):
for f in files:
print(f"Checking {f}")
match = re.match(r'(.+).json', os.path.basename(f))
if match:
board_names.append(match.group(1))
# Config files take priority, if there are any duplicates
add_files(package_files)
add_files(config_files)
return board_names
def load_board(name) -> Optional[Board]:
"""Load a board definition by name or path
Attempt to load a board definition from the name, using the following
priorities (the first to succeed is returned):
1. Load as a full path
2. Load from ~/.config/purpledrop/boards/{name}.json
3. Load from package resources (`purpledrop/boards` in repo)
"""
if os.path.isfile(name):
return Board.load_from_file(name)
home_path = os.path.expanduser(f"~/.config/purpledrop/boards/{name}.json")
if os.path.isfile(home_path):
return Board.load_from_file(home_path)
try:
resource_data = pkg_resources.resource_string('purpledrop', f"boards/{name}.json")
return Board.load_from_string(resource_data)
except FileNotFoundError:
pass
return None
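# Hedged usage sketch: "misl_v4" is an illustrative board name that may or may not be
# installed; list_boards() reports what is actually available.
if __name__ == "__main__":
    board = load_board("misl_v4")
    if board is None:
        print("Board not found; available boards:", list_boards())
    else:
        print("Pin at grid (0, 0):", board.layout.grid_location_to_pin(0, 0))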
| 3
| 3
|
example/FT600_massive_send/Python/usb_rx_check.py
|
Janet-ZHU/FTDI-245fifo-interface
| 0
|
12779781
|
<gh_stars>0
#-*- coding:utf-8 -*-
# Python2.7 x86
# Function: Verify the correctness of the data received by the host computer
import sys, time
import numpy as np
from warnings import filterwarnings
datatype = np.uint16
def openUSB():
import ftd3xx
if sys.platform == 'win32':
import ftd3xx._ftd3xx_win32 as _ft
elif sys.platform == 'linux2':
import ftd3xx._ftd3xx_linux as _ft
usb = ftd3xx.create(0, _ft.FT_OPEN_BY_INDEX)
if usb is None:
print("*** ERROR: Can't find or open Device!")
return False, None
if (sys.platform == 'win32' and usb.getDriverVersion() < 0x01020006):
print("*** ERROR: Old kernel driver version. Please update driver!")
usb.close()
return False, None
if usb.getDeviceDescriptor().bcdUSB < 0x300:
print("*** Warning: Device is NOT connected using USB3.0 cable or port!")
return False, None
cfg = usb.getChipConfiguration()
numChannels = [4, 2, 1, 0, 0]
numChannel = numChannels[cfg.ChannelConfig]
if numChannel != 1:
print("*** ERROR:Number of Channels invalid! (numChannel=%d)" % (numChannel,) )
return False, None
return True, usb
if __name__ == '__main__':
ret, usb = openUSB()
if not ret:
sys.exit()
print("\n Reading...")
datas = []
tx_data = bytearray(16)
rx_cnt = 0
time_start = time.time()
for ii in range(4):
for jj in range(4):
data = bytes(65536*8)
usb.writePipe(0x02+0, bytes(tx_data), len(tx_data)) # While receiving massive data, scattered transmit a few data to verify the stability of FPGA code
tx_data[-1] += 1
rxc = usb.readPipe(0x82, data, len(data))
rx_cnt += rxc
datas.append(data[:rxc])
print(" recieved %dB" % rx_cnt)
time_cost = time.time() - time_start
print("\n time:%.2fs rate:%.2fMBps" % (time_cost,rx_cnt/time_cost/1000000.0) )
usb.close()
print("\n Verify...")
    # combine all received data into one numpy array, merging each pair of adjacent bytes into a uint16 element
for ii,data in enumerate(datas):
arr_tmp = np.frombuffer(data, dtype=datatype)
if ii==0:
arr = arr_tmp
else:
arr = np.append(arr,arr_tmp)
filterwarnings("ignore",category=RuntimeWarning)
    # the device sends an incrementing sequence; verify on the host that the array is strictly incrementing to check USB transfer correctness
    correct_cnt, error_cnt = 0, 0
    for i in range(len(arr)-1):
        if arr[i+1]-arr[i] == datatype(1):
            correct_cnt += 1
        else:
            #print(' Error at %d: %08x %08x' % (i+1, arr[i], arr[i+1]) )
            error_cnt += 1
    print(' correct count %d error count %d\n' % (correct_cnt, error_cnt))
| 2.84375
| 3
|
master.py
|
jet-black/ppo-lstm-parallel
| 38
|
12779782
|
from multiprocessing import Queue, Process
from threading import Thread
import numpy as np
import utils
from agent import PPOAgent
from policy import get_policy
from worker import Worker
import environments
class SimpleMaster:
def __init__(self, env_producer):
self.env_name = env_producer.get_env_name()
self.config = environments.get_config(self.env_name)
self.worker_size = self.config["worker_num"]
self.env_producer = env_producer
self.queues = []
self.w_in_queue = Queue()
self.init_workers()
self.session = None
self.trainable_vars = None
self.accum_vars = None
self.p_opt_vars = None
self.v_opt_vars = None
self.assign_op = None
self.agent = None
self.saver = None
self.summary_writer = None
self.beta = 1
self.lr_multiplier = 1.0
self.iter_count = 1
self.variables_file_path = "models/%s/variables.txt" % self.env_name
self.model_path = "models/%s/model" % self.env_name
self.initialized = False
self.cur_step = -1
self.start()
def init_workers(self):
for i in range(self.worker_size):
q = Queue()
self.queues.append(q)
t = Process(target=make_worker, args=(self.env_producer, i, q, self.w_in_queue))
t.start()
def start(self):
import tensorflow as tf
env_opts = environments.get_env_options(self.env_name, self.env_producer.get_use_gpu())
self.summary_writer = tf.summary.FileWriter("logs/%s" % self.env_name)
self.session = utils.create_session(env_opts, True)
with tf.variable_scope("master-0"):
pol = get_policy(env_opts, self.session)
self.agent = PPOAgent(pol, self.session, "master-0", env_opts)
self.trainable_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, "master-0")
self.accum_vars = [tf.Variable(tf.zeros_like(tv.initialized_value()), trainable=False) for tv in
self.trainable_vars]
p_vars = self.agent.p_opt.variables()
v_vars = self.agent.v_opt.variables()
self.p_opt_vars = [tf.Variable(tf.zeros_like(tv.initialized_value()), trainable=False) for tv in p_vars]
self.v_opt_vars = [tf.Variable(tf.zeros_like(tv.initialized_value()), trainable=False) for tv in v_vars]
p_assign_ops = [p_vars[i].assign(self.p_opt_vars[i]) for i in range(len(p_vars))]
v_assign_ops = [v_vars[i].assign(self.v_opt_vars[i]) for i in range(len(v_vars))]
assign_ops = [self.trainable_vars[i].assign(self.accum_vars[i]) for i in
range(len(self.trainable_vars))]
self.assign_op = tf.group(assign_ops + p_assign_ops + v_assign_ops)
self.restore_variables()
self.saver = tf.train.Saver(max_to_keep=1)
self.session.run(tf.global_variables_initializer())
try:
self.saver = tf.train.import_meta_graph(
tf.train.latest_checkpoint("models/%s/" % env_opts["env_name"]) + ".meta")
self.saver.restore(self.session,
tf.train.latest_checkpoint("models/%s/" % env_opts["env_name"]))
except:
print("failed to restore model")
while True:
if self.iter_count % 10 == 0:
print("Saving model...")
self.save_variables()
self.saver.save(self.session, self.model_path, self.iter_count)
print("Model saved")
self.broadcast_weights()
self.merge_weights()
self.iter_count += 1
def restore_variables(self):
try:
lines = open(self.variables_file_path).readlines()
result = {}
for l in lines:
a, b = l.split("=")
b = b.strip()
result[a] = b
self.iter_count = int(result["global_step"]) + 1
self.beta = float(result["beta"])
self.lr_multiplier = float(result["lr_multiplier"])
except:
print("failed to restore variables")
def save_variables(self):
f = open(self.variables_file_path, "w")
lines = []
lines.append("global_step=%s\n" % self.iter_count)
lines.append("beta=%s\n" % self.beta)
lines.append("lr_multiplier=%s\n" % self.lr_multiplier)
f.writelines(lines)
f.close()
def broadcast_weights(self):
weights, p_opt_weights, v_opt_weights = self.session.run([self.trainable_vars,
self.agent.p_opt.variables(),
self.agent.v_opt.variables()])
arr = [self.beta, self.lr_multiplier, p_opt_weights, v_opt_weights, weights]
for q in self.queues:
q.put(arr)
def merge_weights(self):
results = []
for i in range(self.worker_size):
results.append(self.w_in_queue.get())
self.beta = np.mean([x[0] for x in results])
self.lr_multiplier = np.mean([x[1] for x in results])
p_opt_weights = self.make_means([x[2] for x in results])
v_opt_weights = self.make_means([x[3] for x in results])
weights = self.make_means([x[4] for x in results])
first_worker = [x for x in results if x[5]["idx"] == 0][0]
self.record_stats(first_worker[5])
fd = {}
for i, t in enumerate(self.accum_vars):
fd[t] = weights[i]
for i, t in enumerate(self.p_opt_vars):
fd[t] = p_opt_weights[i]
for i, t in enumerate(self.v_opt_vars):
fd[t] = v_opt_weights[i]
self.session.run(self.assign_op, feed_dict=fd)
def make_means(self, weights):
result = []
for i in range(len(weights[0])):
acc = []
for j in range(len(weights)):
acc.append(weights[j][i])
acc = np.mean(acc, axis=0)
result.append(acc)
return result
def record_stats(self, stats):
if self.cur_step == stats["step"]:
return
self.cur_step = stats["step"]
self.record_losses(stats["kl"], stats["entropy"], stats["hinge"], stats["src_policy_loss"],
stats["vloss"], stats["ploss"], stats["step"])
cum_rew = 0
for s in stats["stats"]:
self.log_summary(s["reward"], s["step"], s["a_probs"], s["picked_a"], s["a_dim"], s["discrete"])
cum_rew += s["reward"]
cum_rew /= max(1, len(stats["stats"]))
print("Average reward: %s" % cum_rew)
def record_losses(self, cur_kl, entropy, hinge, src_policy_loss, vloss, ploss, step):
import tensorflow as tf
summary = tf.Summary()
summary.value.add(tag='Losses/value_loss', simple_value=vloss)
summary.value.add(tag='Losses/policy_loss', simple_value=ploss)
summary.value.add(tag='Losses/kl_divergence', simple_value=cur_kl)
summary.value.add(tag='Losses/entropy', simple_value=entropy)
summary.value.add(tag='Losses/src_policy_loss', simple_value=src_policy_loss)
summary.value.add(tag='Losses/hinge', simple_value=hinge)
summary.value.add(tag='Vars/beta', simple_value=self.beta)
summary.value.add(tag='Vars/lr_multiplier', simple_value=self.lr_multiplier)
self.summary_writer.add_summary(summary, step)
self.summary_writer.flush()
def log_summary(self, reward, step, a_probs, picked_a, a_dim, discrete):
import tensorflow as tf
summary = tf.Summary()
summary.value.add(tag='Reward/per_episode', simple_value=float(reward))
if not discrete:
for i in range(a_dim):
prefix = "Action" + str(i)
summary.value.add(tag=prefix + '/mean', simple_value=float(a_probs[i]))
summary.value.add(tag=prefix + "/std", simple_value=float(a_probs[i + a_dim]))
summary.value.add(tag=prefix + '/picked', simple_value=float(picked_a[i]))
else:
for i in range(a_dim):
prefix = "Action" + str(i)
summary.value.add(tag=prefix + '/prob', simple_value=float(a_probs[i]))
summary.value.add(tag='Action/picked', simple_value=float(picked_a))
self.summary_writer.add_summary(summary, step)
self.summary_writer.flush()
def make_worker(env_producer, i, q, w_in_queue):
return Worker(env_producer, i, q, w_in_queue)
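# Hedged illustration (not part of the original file): make_means above averages
# corresponding weight arrays across workers element-wise. For example, with two
# workers' weight lists
#
#   worker_0 = [np.array([1.0, 2.0]), np.array([10.0])]
#   worker_1 = [np.array([3.0, 4.0]), np.array([30.0])]
#
# the result would be [array([2., 3.]), array([20.])], which is what
# merge_weights feeds back into the master graph via the assign op.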
| 2.390625
| 2
|
trex/management/commands/secretkey.py
|
bjoernricks/trex
| 0
|
12779783
|
<filename>trex/management/commands/secretkey.py
# -*- coding: utf-8 -*-
#
# (c) 2014 <NAME> <<EMAIL>>
#
# See LICENSE comming with the source of 'trex' for details.
#
from optparse import make_option
from django.core.management.base import BaseCommand
from django.utils.crypto import get_random_string
class Command(BaseCommand):
help = "Create a SECRET_KEY for settings.py"
option_list = BaseCommand.option_list + (
make_option("-o", "--out", dest="file",
help="File to write the generated key"),
)
def handle(self, *args, **options):
# see django/core/management/commands/startproject.py
chars = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'
secret_key = get_random_string(50, chars)
filename = options.get("file")
if filename:
with open(filename, "w") as f:
f.write(secret_key)
f.write("\n")
else:
print(secret_key)
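# Hedged usage sketch (not part of the original file; assumes the app providing
# this command is listed in INSTALLED_APPS of a Django project):
#
#   python manage.py secretkey              # print the generated key to stdout
#   python manage.py secretkey -o key.txt   # write the generated key to key.txt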
| 2.3125
| 2
|
app/resources/api.py
|
PopuriAO29/mp3-reverser
| 0
|
12779784
|
import os
from flask import abort
from flask import request
from flask import send_from_directory
from app import app
from app.main.RequestParameters import RequestParameters
from app.main.Session import Session
from app.main.Session import Status
from app.main.exceptions.exceptions import InvalidSessionIdError
API_PATH_PREFIX = '/api'
def _get_session(session_id):
try:
return Session(session_id)
except InvalidSessionIdError:
abort(404)
def _get_status_url(session_id):
return API_PATH_PREFIX + '/status/' + session_id
def _get_result_file_url(session_id):
return API_PATH_PREFIX + '/results/' + session_id
@app.route(API_PATH_PREFIX + '/process', methods=['POST'])
def new_request():
try:
file = request.files['file']
if not file:
abort(400)
# TODO Validate file
session = Session()
session.store_input_file(file)
request_parameters = RequestParameters.parse(request.form)
session.start_processing(request_parameters)
return {
'status_url': _get_status_url(session.session_id),
}
except ValueError:
abort(400)
@app.route(API_PATH_PREFIX + '/health')
def health():
return 'health'
@app.route(API_PATH_PREFIX + '/status/<path:session_id>')
def check_status(session_id):
session = _get_session(session_id)
status_result = {
'status': 'in_progress',
}
status = session.get_status()
if status == Status.SUCCESS:
status_result['status'] = 'done'
elif status == Status.FAILURE:
status_result['status'] = 'error'
is_done = status == Status.SUCCESS
if is_done:
status_result['result_url'] = _get_result_file_url(session_id)
return status_result
@app.route(API_PATH_PREFIX + '/results/<path:session_id>')
def results(session_id):
session = _get_session(session_id)
return send_from_directory(
os.path.join('..', session.get_session_directory()),
session.get_result_file_name(),
)
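# Hedged client sketch (not part of the original module): one plausible way to
# drive the endpoints above from a separate process with the 'requests' library.
# The base URL and the input/output file names are assumptions for illustration.
#
#   import time
#   import requests
#
#   base = "http://localhost:5000"
#   with open("input.mp3", "rb") as f:
#       resp = requests.post(base + "/api/process", files={"file": f})
#   status_url = base + resp.json()["status_url"]
#   while True:
#       status = requests.get(status_url).json()
#       if status["status"] != "in_progress":
#           break
#       time.sleep(1)
#   if status["status"] == "done":
#       result = requests.get(base + status["result_url"])
#       with open("output.mp3", "wb") as out:
#           out.write(result.content)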
| 2.484375
| 2
|
tests/extractor.py
|
benstobbs/winevt-kb
| 9
|
12779785
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for the Windows Event Log message resource extractor class."""
import unittest
from dfvfs.helpers import fake_file_system_builder
from dfvfs.helpers import windows_path_resolver
from dfvfs.lib import definitions as dfvfs_definitions
from dfvfs.path import factory as path_spec_factory
from winevtrc import extractor
from winevtrc import resources
from tests import test_lib as shared_test_lib
class TestOutputWriter(object):
"""Class that defines a test output writer.
Attributes:
event_log_providers (list[EventLogProvider]): event log providers.
message_files (list[MessageFile]): message files.
"""
def __init__(self):
"""Initializes the test output writer."""
super(TestOutputWriter, self).__init__()
self.event_log_providers = []
self.message_files = []
def Close(self):
"""Closes the output writer."""
return
def Open(self):
"""Opens the output writer.
Returns:
bool: True if successful or False if not.
"""
return True
def WriteEventLogProvider(self, event_log_provider):
"""Writes the Event Log provider.
Args:
event_log_provider (EventLogProvider): event log provider.
"""
self.event_log_providers.append(event_log_provider)
# pylint: disable=unused-argument
def WriteMessageFile(
self, event_log_provider, message_resource_file, message_filename,
message_file_type):
"""Writes the Windows Message Resource file.
Args:
event_log_provider (EventLogProvider): event log provider.
message_resource_file (MessageResourceFile): message resource file.
message_filename (str): message filename.
message_file_type (str): message file type.
"""
self.message_files.append(message_resource_file)
class EventMessageStringRegistryFileReaderTest(shared_test_lib.BaseTestCase):
"""Tests for the Windows Registry file reader."""
def testOpen(self):
"""Tests the Open function."""
volume_scanner = extractor.EventMessageStringExtractor()
file_reader = extractor.EventMessageStringRegistryFileReader(
volume_scanner)
test_file_path = self._GetTestFilePath(['SOFTWARE'])
# TODO: implement tests.
# file_reader.Open(test_file_path)
# file_reader.Open('bogus')
_ = file_reader
_ = test_file_path
@shared_test_lib.skipUnlessHasTestFile(['SOFTWARE'])
@shared_test_lib.skipUnlessHasTestFile(['SYSTEM'])
class EventMessageStringExtractorTest(shared_test_lib.BaseTestCase):
"""Tests for the Windows Event Log message resource extractor."""
# pylint: disable=protected-access
def _CreateTestEventMessageStringExtractor(self):
"""Creates an event message string extractor for testing.
Returns:
EventMessageStringExtractor: an event message string extractor.
"""
file_system_builder = fake_file_system_builder.FakeFileSystemBuilder()
test_file_path = self._GetTestFilePath(['SOFTWARE'])
file_system_builder.AddFileReadData(
'/Windows/System32/config/SOFTWARE', test_file_path)
test_file_path = self._GetTestFilePath(['SYSTEM'])
file_system_builder.AddFileReadData(
'/Windows/System32/config/SYSTEM', test_file_path)
mount_point = path_spec_factory.Factory.NewPathSpec(
dfvfs_definitions.TYPE_INDICATOR_FAKE, location='/')
extractor_object = extractor.EventMessageStringExtractor()
extractor_object._file_system = file_system_builder.file_system
extractor_object._path_resolver = (
windows_path_resolver.WindowsPathResolver(
file_system_builder.file_system, mount_point))
extractor_object._windows_directory = 'C:\\Windows'
extractor_object._path_resolver.SetEnvironmentVariable(
'SystemRoot', extractor_object._windows_directory)
extractor_object._path_resolver.SetEnvironmentVariable(
'WinDir', extractor_object._windows_directory)
return extractor_object
def testWindowsVersionProperty(self):
"""Tests the windows_version property."""
extractor_object = self._CreateTestEventMessageStringExtractor()
windows_version = extractor_object.windows_version
# TODO: improve test.
self.assertIsNone(windows_version)
def testCollectEventLogTypes(self):
"""Tests the _CollectEventLogTypes function."""
extractor_object = self._CreateTestEventMessageStringExtractor()
event_log_types = extractor_object._CollectEventLogTypes()
self.assertEqual(len(event_log_types), 3)
self.assertEqual(len(event_log_types['Application']), 65)
self.assertEqual(len(event_log_types['Security']), 7)
self.assertEqual(len(event_log_types['System']), 186)
# TODO: hide duplication warnings.
event_log_types = extractor_object._CollectEventLogTypes(
all_control_sets=True)
self.assertEqual(len(event_log_types), 3)
self.assertEqual(len(event_log_types['Application']), 65)
self.assertEqual(len(event_log_types['Security']), 7)
self.assertEqual(len(event_log_types['System']), 186)
def testCollectEventLogProvidersFromKey(self):
"""Tests the _CollectEventLogProvidersFromKey function."""
extractor_object = self._CreateTestEventMessageStringExtractor()
generator = extractor_object._CollectEventLogProvidersFromKey(None)
# TODO: fix generator method.
self.assertIsNotNone(generator)
def testExtractMessageFile(self):
"""Tests the _ExtractMessageFile function."""
extractor_object = self._CreateTestEventMessageStringExtractor()
# TODO: improve test.
output_writer = TestOutputWriter()
processed_message_filenames = []
event_log_provider = resources.EventLogProvider(
'log_type', 'log_source', 'provider_guid')
message_filename = ''
message_file_type = ''
extractor_object._ExtractMessageFile(
output_writer, processed_message_filenames, event_log_provider,
message_filename, message_file_type)
self.assertEqual(len(output_writer.event_log_providers), 0)
self.assertEqual(len(output_writer.message_files), 0)
def testGetEventLogProviders(self):
"""Tests the _GetEventLogProviders function."""
extractor_object = self._CreateTestEventMessageStringExtractor()
event_log_providers = list(extractor_object._GetEventLogProviders())
self.assertEqual(len(event_log_providers), 258)
event_log_providers = list(
extractor_object._GetEventLogProviders(all_control_sets=True))
self.assertEqual(len(event_log_providers), 516)
def testGetSystemRoot(self):
"""Tests the _GetSystemRoot function."""
extractor_object = self._CreateTestEventMessageStringExtractor()
system_root = extractor_object._GetSystemRoot()
self.assertEqual(system_root, 'C:\\WINDOWS')
def testGetWindowsVersion(self):
"""Tests the _GetWindowsVersion function."""
extractor_object = self._CreateTestEventMessageStringExtractor()
windows_version = extractor_object._GetWindowsVersion()
# TODO: improve test.
self.assertIsNone(windows_version)
def testOpenMessageResourceFile(self):
"""Tests the _OpenMessageResourceFile function."""
extractor_object = self._CreateTestEventMessageStringExtractor()
# TODO: improve test.
message_resource_file = extractor_object._OpenMessageResourceFile(
'C:\\Windows\\System32\\wrc_test.dll')
self.assertIsNone(message_resource_file)
# TODO: test _OpenMessageResourceFileByPathSpec
def testExtractEventLogMessageStrings(self):
"""Tests the ExtractEventLogMessageStrings function."""
extractor_object = self._CreateTestEventMessageStringExtractor()
output_writer = TestOutputWriter()
extractor_object.ExtractEventLogMessageStrings(output_writer)
self.assertEqual(len(output_writer.event_log_providers), 258)
self.assertEqual(len(output_writer.message_files), 0)
if __name__ == '__main__':
unittest.main()
| 2.28125
| 2
|
apps/zblog/api_view/views.py
|
zhengze/zblogsite
| 3
|
12779786
|
<gh_stars>1-10
from apps.zblog.models import (Article,
Category,
Tag,
Album,
Photo,
Music
)
from rest_framework import viewsets
from apps.zblog.serializers import (ArticleSerializer,
CategorySerializer,
TagSerializer,
AlbumSerializer,
PhotoSerializer,
MusicSerializer
)
class ArticleViewSet(viewsets.ModelViewSet):
queryset = Article.objects.all()
serializer_class = ArticleSerializer
class CategoryViewSet(viewsets.ModelViewSet):
queryset = Category.objects.all()
serializer_class = CategorySerializer
class TagViewSet(viewsets.ModelViewSet):
queryset = Tag.objects.all()
serializer_class = TagSerializer
class AlbumViewSet(viewsets.ModelViewSet):
queryset = Album.objects.all()
serializer_class = AlbumSerializer
class PhotoViewSet(viewsets.ModelViewSet):
queryset = Photo.objects.all()
serializer_class = PhotoSerializer
class MusicViewSet(viewsets.ModelViewSet):
queryset = Music.objects.all()
serializer_class = MusicSerializer
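# Hedged wiring sketch (not part of the original module): these ModelViewSets
# would typically be exposed through a DRF router, e.g. in urls.py. The URL
# prefixes below are assumptions for illustration only.
#
#   from rest_framework import routers
#
#   router = routers.DefaultRouter()
#   router.register(r'articles', ArticleViewSet)
#   router.register(r'categories', CategoryViewSet)
#   router.register(r'tags', TagViewSet)
#   urlpatterns = router.urls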
| 2.109375
| 2
|
murano/dsl/macros.py
|
OndrejVojta/murano
| 1
|
12779787
|
<reponame>OndrejVojta/murano
# Copyright (c) 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import types
import eventlet.greenpool as greenpool
import yaql.context
import murano.dsl.dsl_exception as dsl_exception
import murano.dsl.exceptions as exceptions
import murano.dsl.expressions as expressions
import murano.dsl.helpers as helpers
import murano.dsl.yaql_expression as yaql_expression
class CodeBlock(expressions.DslExpression):
def __init__(self, body):
if not isinstance(body, types.ListType):
body = [body]
self.code_block = map(expressions.parse_expression, body)
def execute(self, context, murano_class):
for expr in self.code_block:
def action():
try:
expr.execute(context, murano_class)
except (dsl_exception.MuranoPlException,
exceptions.InternalFlowException):
raise
except Exception as ex:
raise dsl_exception.MuranoPlException.\
from_python_exception(ex, context)
if hasattr(expr, 'virtual_instruction'):
instruction = expr.virtual_instruction
helpers.execute_instruction(instruction, action, context)
else:
action()
class MethodBlock(CodeBlock):
def __init__(self, body, name=None):
super(MethodBlock, self).__init__(body)
self._name = name
def execute(self, context, murano_class):
new_context = yaql.context.Context(context)
new_context.set_data(self._name, '?currentMethod')
try:
super(MethodBlock, self).execute(new_context, murano_class)
except exceptions.ReturnException as e:
return e.value
except exceptions.BreakException:
raise exceptions.DslInvalidOperationError(
'Break cannot be used on method level')
except exceptions.ContinueException:
raise exceptions.DslInvalidOperationError(
'Continue cannot be used on method level')
else:
return None
class ReturnMacro(expressions.DslExpression):
def __init__(self, Return):
self._value = Return
def execute(self, context, murano_class):
raise exceptions.ReturnException(
helpers.evaluate(self._value, context))
class BreakMacro(expressions.DslExpression):
def __init__(self, Break):
if Break:
raise exceptions.DslSyntaxError('Break cannot have value')
def execute(self, context, murano_class):
raise exceptions.BreakException()
class ContinueMacro(expressions.DslExpression):
def __init__(self, Continue):
if Continue:
raise exceptions.DslSyntaxError('Continue cannot have value')
def execute(self, context, murano_class):
raise exceptions.ContinueException()
class ParallelMacro(CodeBlock):
def __init__(self, Parallel, Limit=None):
super(ParallelMacro, self).__init__(Parallel)
if Limit:
self._limit = yaql_expression.YaqlExpression(str(Limit))
else:
self._limit = len(self.code_block)
def execute(self, context, murano_class):
if not self.code_block:
return
limit = helpers.evaluate(self._limit, context)
gpool = greenpool.GreenPool(helpers.evaluate(limit, context))
for expr in self.code_block:
gpool.spawn_n(expr.execute, context, murano_class)
gpool.waitall()
class IfMacro(expressions.DslExpression):
def __init__(self, If, Then, Else=None):
if not isinstance(If, yaql_expression.YaqlExpression):
raise exceptions.DslSyntaxError(
'Condition must be of expression type')
self._code1 = CodeBlock(Then)
self._code2 = None if Else is None else CodeBlock(Else)
self._condition = If
def execute(self, context, murano_class):
res = self._condition.evaluate(context)
if not isinstance(res, types.BooleanType):
raise exceptions.DslInvalidOperationError(
'Condition must be evaluated to boolean type')
if res:
self._code1.execute(context, murano_class)
elif self._code2 is not None:
self._code2.execute(context, murano_class)
class WhileDoMacro(expressions.DslExpression):
def __init__(self, While, Do):
if not isinstance(While, yaql_expression.YaqlExpression):
raise TypeError()
self._code = CodeBlock(Do)
self._condition = While
def execute(self, context, murano_class):
while True:
res = self._condition.evaluate(context)
if not isinstance(res, types.BooleanType):
raise exceptions.DslSyntaxError(
'Condition must be of expression type')
try:
if res:
self._code.execute(context, murano_class)
else:
break
except exceptions.BreakException:
break
except exceptions.ContinueException:
continue
class ForMacro(expressions.DslExpression):
def __init__(self, For, In, Do):
if not isinstance(For, types.StringTypes):
raise exceptions.DslSyntaxError(
'For value must be of string type')
self._code = CodeBlock(Do)
self._var = For
self._collection = In
def execute(self, context, murano_class):
collection = helpers.evaluate(self._collection, context)
for t in collection:
context.set_data(t, self._var)
try:
self._code.execute(context, murano_class)
except exceptions.BreakException:
break
except exceptions.ContinueException:
continue
class RepeatMacro(expressions.DslExpression):
def __init__(self, Repeat, Do):
if not isinstance(Repeat, (int, yaql_expression.YaqlExpression)):
raise exceptions.DslSyntaxError(
'Repeat value must be either int or expression')
self._count = Repeat
self._code = CodeBlock(Do)
def execute(self, context, murano_class):
count = helpers.evaluate(self._count, context)
for t in range(0, count):
try:
self._code.execute(context, murano_class)
except exceptions.BreakException:
break
except exceptions.ContinueException:
continue
class MatchMacro(expressions.DslExpression):
def __init__(self, Match, Value, Default=None):
if not isinstance(Match, types.DictionaryType):
raise exceptions.DslSyntaxError(
'Match value must be of dictionary type')
self._switch = Match
self._value = Value
self._default = None if Default is None else CodeBlock(Default)
def execute(self, context, murano_class):
match_value = helpers.evaluate(self._value, context)
for key, value in self._switch.iteritems():
if key == match_value:
CodeBlock(value).execute(context, murano_class)
return
if self._default is not None:
self._default.execute(context, murano_class)
class SwitchMacro(expressions.DslExpression):
def __init__(self, Switch, Default=None):
if not isinstance(Switch, types.DictionaryType):
raise exceptions.DslSyntaxError(
'Switch value must be of dictionary type')
self._switch = Switch
for key, value in self._switch.iteritems():
if not isinstance(key, (yaql_expression.YaqlExpression,
types.BooleanType)):
raise exceptions.DslSyntaxError(
'Switch cases must be either '
'boolean or expression')
self._default = None if Default is None else CodeBlock(Default)
def execute(self, context, murano_class):
matched = False
for key, value in self._switch.iteritems():
res = helpers.evaluate(key, context)
if not isinstance(res, types.BooleanType):
raise exceptions.DslInvalidOperationError(
'Switch case must be evaluated to boolean type')
if res:
matched = True
CodeBlock(value).execute(context, murano_class)
if self._default is not None and not matched:
self._default.execute(context, murano_class)
class DoMacro(expressions.DslExpression):
def __init__(self, Do):
self._code = CodeBlock(Do)
def execute(self, context, murano_class):
self._code.execute(context, murano_class)
def register():
expressions.register_macro(DoMacro)
expressions.register_macro(ReturnMacro)
expressions.register_macro(BreakMacro)
expressions.register_macro(ContinueMacro)
expressions.register_macro(ParallelMacro)
expressions.register_macro(IfMacro)
expressions.register_macro(WhileDoMacro)
expressions.register_macro(ForMacro)
expressions.register_macro(RepeatMacro)
expressions.register_macro(MatchMacro)
expressions.register_macro(SwitchMacro)
| 1.8125
| 2
|
project/db.py
|
Melchizedek13/CS50
| 0
|
12779788
|
<filename>project/db.py
import os
import sqlite3
from typing import Dict, List, Tuple
conn = sqlite3.connect(os.path.join('db', 'expenses.db'))
cursor = conn.cursor()
def insert(table: str, column_values: Dict):
columns = ', '.join( column_values.keys() )
values = [tuple(column_values.values())]
placeholders = ", ".join("?" * len(column_values.keys()))
cursor.executemany(
f"INSERT INTO {table} "
f"({columns}) "
f"VALUES ({placeholders})",
values)
conn.commit()
def fetchall(table: str, columns: List[str]) -> List[Tuple]:
columns_joined = ", ".join(columns)
cursor.execute(f"SELECT {columns_joined} FROM {table}")
rows = cursor.fetchall()
result = []
for row in rows:
dict_row = {}
for index, column in enumerate(columns):
dict_row[column] = row[index]
result.append(dict_row)
return result
def delete(table: str, row_id: int) -> None:
row_id = int(row_id)
cursor.execute(f"delete from {table} where id={row_id}")
conn.commit()
def get_cursor():
return cursor
def _init_expenses_tabs():
with open('dbaccountingddl.sql', 'r', encoding='utf8') as f:
sql_stmt = f.read()
cursor.executescript(sql_stmt)
conn.commit()
def check_expenses_tabs_exists():
cursor.execute("select name from sqlite_master where type='table' and name='expense'")
table_exists = cursor.fetchall()
if table_exists:
return
_init_expenses_tabs()
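# Hedged usage sketch (not part of the original module; the 'expense' table and
# its columns are assumptions inferred from check_expenses_tabs_exists above):
#
#   check_expenses_tabs_exists()
#   insert("expense", {"amount": 250, "category": "food"})
#   for row in fetchall("expense", ["id", "amount", "category"]):
#       print(row)
#   delete("expense", row_id=1)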
| 3.484375
| 3
|
log.py
|
SwaksharDeb/classification-with-costly-features
| 0
|
12779789
|
<gh_stars>0
import numpy as np
import time, sys, utils
from consts import *
#==============================
class PerfAgent():
def __init__(self, env, brain):
self.env = env
self.brain = brain
self.agents = self.env.agents
self.done = np.zeros(self.agents, dtype=np.bool)
self.total_r = np.zeros(self.agents)
self.total_len = np.zeros(self.agents, dtype=np.int32)
self.total_corr = np.zeros(self.agents, dtype=np.int32)
self.s = self.env.reset()
def act(self, s):
m = np.zeros((self.agents, ACTION_DIM)) # create max_mask
m[:, CLASSES:] = s[:, FEATURE_DIM:]
p = self.brain.predict_np(s) - MAX_MASK_CONST * m # select an action not considering those already performed
a = np.argmax(p, axis=1)
return a
def step(self):
a = self.act(self.s)
s_, r, done = self.env.step(a)
self.s = s_
newly_finished = ~self.done & done
self.done = self.done | done
self.total_r = self.total_r + r * (newly_finished | ~done)
self.total_len = self.total_len + ~done
self.total_corr = self.total_corr + (r == REWARD_CORRECT) * newly_finished
def run(self):
while not np.all(self.done):
# utils.print_progress(np.sum(self.done), self.agents, step=1)
self.step()
avg_r = np.mean(self.total_r)
avg_len = np.mean(self.total_len)
avg_corr = np.mean(self.total_corr)
return avg_r, avg_len, avg_corr
#==============================
class PerfEnv:
def __init__(self, data, costs, ff):
data_val_idx = np.random.choice(len(data), LOG_PERF_VAL_SIZE)
self.x = data.iloc[data_val_idx, 0:-1].astype('float32').values
self.y = data.iloc[data_val_idx, -1].astype('int32').values
self.costs = costs.values
self.agents = LOG_PERF_VAL_SIZE
self.lin_array = np.arange(self.agents)
self.ff = ff
def reset(self):
self.mask = np.zeros( (self.agents, FEATURE_DIM) )
self.done = np.zeros( self.agents, dtype=np.bool )
return self._get_state()
def step(self, action):
self.mask[self.lin_array, action - CLASSES] = 1
r = -self.costs[action - CLASSES] * self.ff
for i in np.where(action < CLASSES)[0]:
r[i] = REWARD_CORRECT if action[i] == self.y[i] else REWARD_INCORRECT
self.done[i] = 1
s_ = self._get_state()
return (s_, r, self.done)
def _get_state(self):
x_ = self.x * self.mask
x_ = np.concatenate( (x_, self.mask), axis=1 ).astype(np.float32)
return x_
#==============================
class Log:
def __init__(self, data_val, costs, ff, brain):
self.env = PerfEnv(data_val, costs, ff)
self.brain = brain
self.LOG_TRACKED_STATES = np.vstack(LOG_TRACKED_STATES).astype(np.float32)
self.LEN = len(self.LOG_TRACKED_STATES)
if BLANK_INIT:
mode = "w"
else:
mode = "a"
self.files = []
for i in range(self.LEN):
self.files.append( open("run_%d.dat" % i, mode) )
self.perf_file = open("run_perf.dat", mode)
self.time = 0
def log(self):
val = self.brain.predict_np(self.LOG_TRACKED_STATES)
for i in range(self.LEN):
w = val[i].data
for k in w:
self.files[i].write('%.4f ' % k)
self.files[i].write('\n')
self.files[i].flush()
def print_speed(self):
if self.time == 0:
self.time = time.perf_counter()
return
now = time.perf_counter()
elapsed = now - self.time
self.time = now
samples_processed = LOG_EPOCHS * EPOCH_STEPS * AGENTS
updates_processed = LOG_EPOCHS
updates_total = LOG_EPOCHS * BATCH_SIZE
fps_smpl = samples_processed / elapsed
fps_updt = updates_processed / elapsed
fps_updt_t = updates_total / elapsed
print("Perf.: {:.0f} smp/s, {:.1f} upd/s, {:.1f} upd_smp/s".format(fps_smpl, fps_updt, fps_updt_t))
def log_perf(self):
agent = PerfAgent(self.env, self.brain)
avg_r, avg_len, avg_corr = agent.run()
print("{:.3f} {:.3f} {:.3f}".format(avg_r, avg_len, avg_corr), file=self.perf_file, flush=True)
| 2.359375
| 2
|
src/pyrobot/kinect2/camera.py
|
wangcongrobot/pyrobot
| 0
|
12779790
|
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import os
import rospkg
import threading
import yaml
from copy import deepcopy
import message_filters
import numpy as np
import pyrobot.utils.util as prutil
import rospy
from pyrobot.core import Camera
from sensor_msgs.msg import CameraInfo
from sensor_msgs.msg import Image
from std_msgs.msg import Float64
import sys
ros_path = '/opt/ros/kinetic/lib/python2.7/dist-packages'
if ros_path in sys.path:
sys.path.remove(ros_path)
import cv2
sys.path.append(ros_path)
from cv_bridge import CvBridge, CvBridgeError
class Kinect2Camera(Camera):
"""
This is the camera class that interfaces with the KinectV2 camera.
"""
def __init__(self, configs):
"""
Constructor of the Kinect2Camera class.
:param configs: Camera specific configuration object
:type configs: YACS CfgNode
"""
super(Kinect2Camera, self).__init__(configs=configs)
self.cv_bridge = CvBridge()
self.camera_info_lock = threading.RLock()
self.camera_img_lock = threading.RLock()
self.rgb_img = None
self.depth_img = None
self.camera_info = None
self.camera_P = None
rospy.Subscriber(self.configs.CAMERA.ROSTOPIC_CAMERA_INFO_STREAM,
CameraInfo,
self._camera_info_callback)
rgb_topic = self.configs.CAMERA.ROSTOPIC_CAMERA_RGB_STREAM
self.rgb_sub = message_filters.Subscriber(rgb_topic, Image)
depth_topic = self.configs.CAMERA.ROSTOPIC_CAMERA_DEPTH_STREAM
self.depth_sub = message_filters.Subscriber(depth_topic, Image)
img_subs = [self.rgb_sub, self.depth_sub]
self.sync = message_filters.ApproximateTimeSynchronizer(img_subs,
queue_size=10,
slop=0.2)
self.sync.registerCallback(self._sync_callback)
self.DepthMapFactor = float(self.configs.CAMERA.DEPTH_MAP_FACTOR)
self.intrinsic_mat = None
def _sync_callback(self, rgb, depth):
self.camera_img_lock.acquire()
try:
self.rgb_img = self.cv_bridge.imgmsg_to_cv2(rgb, "bgr8")
self.rgb_img = self.rgb_img[:, :, ::-1]
self.depth_img = self.cv_bridge.imgmsg_to_cv2(depth, "passthrough")
except CvBridgeError as e:
rospy.logerr(e)
self.camera_img_lock.release()
def _camera_info_callback(self, msg):
self.camera_info_lock.acquire()
self.camera_info = msg
self.camera_P = np.array(msg.P).reshape((3, 4))
self.camera_info_lock.release()
def get_rgb(self):
'''
This function returns the RGB image perceived by the camera.
:rtype: np.ndarray or None
'''
self.camera_img_lock.acquire()
rgb = deepcopy(self.rgb_img)
self.camera_img_lock.release()
return rgb
def get_depth(self):
'''
This function returns the depth image perceived by the camera.
:rtype: np.ndarray or None
'''
self.camera_img_lock.acquire()
depth = deepcopy(self.depth_img)
self.camera_img_lock.release()
return depth
def get_rgb_depth(self):
'''
This function returns both the RGB and depth
images perceived by the camera.
:rtype: np.ndarray or None
'''
self.camera_img_lock.acquire()
rgb = deepcopy(self.rgb_img)
depth = deepcopy(self.depth_img)
self.camera_img_lock.release()
return rgb, depth
def get_intrinsics(self):
"""
This function returns the camera intrinsics.
:rtype: np.ndarray
"""
if self.camera_P is None:
return self.camera_P
self.camera_info_lock.acquire()
P = deepcopy(self.camera_P)
self.camera_info_lock.release()
return P[:3, :3]
def get_current_pcd(self):
"""
Return the point cloud at current time step (one frame only)
:returns: tuple (pts, colors)
pts: point coordinates in camera frame (shape: :math:`[N, 3]`)
colors: rgb values for pts_in_cam (shape: :math:`[N, 3]`)
:rtype: tuple(np.ndarray, np.ndarray)
"""
rgb_im, depth_im = self.get_rgb_depth()
depth = depth_im.reshape(-1) / self.DepthMapFactor
rgb = rgb_im.reshape(-1, 3)
if self.intrinsic_mat is None:
self.intrinsic_mat = self.get_intrinsics()
self.intrinsic_mat_inv = np.linalg.inv(self.intrinsic_mat)
#TODO: image height --> rgb_im.shape[0] and width--> rgb_im.shape[1]
img_pixs = np.mgrid[0: rgb_im.shape[0]: 1,
0: rgb_im.shape[1]: 1]
img_pixs = img_pixs.reshape(2, -1)
img_pixs[[0, 1], :] = img_pixs[[1, 0], :]
self.uv_one = np.concatenate((img_pixs,
np.ones((1, img_pixs.shape[1]))))
self.uv_one_in_cam = np.dot(self.intrinsic_mat_inv, self.uv_one)
pts_in_cam = np.multiply(self.uv_one_in_cam, depth)
pts_in_cam = np.concatenate((pts_in_cam,
np.ones((1, pts_in_cam.shape[1]))),
axis=0)
pts = pts_in_cam[:3, :].T
return pts, rgb
def pix_to_3dpt(self, rs, cs, reduce = 'none', k=5):
"""
Get the 3D points of the pixels in RGB images.
:param rs: rows of interest in the RGB image.
It can be a list or 1D numpy array
which contains the row indices.
The default value is None,
which means all rows.
:param cs: columns of interest in the RGB image.
It can be a list or 1D numpy array
which contains the column indices.
The default value is None,
which means all columns.
:param reduce: whether to consider the depth at nearby pixels
'none': no neighbour consideration
'mean': depth based on the mean of kernel sized k centered at [rs,cs]
'max': depth based on the max of kernel sized k centered at [rs,cs]
'min': depth based on the min of kernel sized k centered at [rs,cs]
:param k: kernel size for reduce type['mean', 'max', 'min']
:type rs: list or np.ndarray
:type cs: list or np.ndarray
:type reduce: str
:type k: int
:returns: tuple (pts, colors)
pts: point coordinates in world frame
(shape: :math:`[N, 3]`)
colors: rgb values for pts_in_cam
(shape: :math:`[N, 3]`)
:rtype: tuple(np.ndarray, np.ndarray)
"""
assert isinstance(rs,
int) or isinstance(rs,
list) or isinstance(rs,
np.ndarray)
assert isinstance(cs,
int) or isinstance(cs,
list) or isinstance(cs,
np.ndarray)
if isinstance(rs, int):
rs = [rs]
if isinstance(cs, int):
cs = [cs]
if isinstance(rs, np.ndarray):
rs = rs.flatten()
if isinstance(cs, np.ndarray):
cs = cs.flatten()
rgb_im, depth_im = self.get_rgb_depth()
R,C,_ = rgb_im.shape
if reduce == 'none':
depth_im = depth_im[rs, cs]
elif reduce == 'mean':
depth_im = np.array([np.mean(depth_im[max(i-k,0):min(i+k,R), max(j-k,0):min(j+k,C)]) for i,j in zip(rs,cs)])
elif reduce == 'max':
depth_im = np.array([np.max(depth_im[max(i-k,0):min(i+k,R), max(j-k,0):min(j+k,C)]) for i,j in zip(rs,cs)])
elif reduce == 'min':
depth_im = np.array([np.min(depth_im[max(i-k,0):min(i+k,R), max(j-k,0):min(j+k,C)]) for i,j in zip(rs,cs)])
else:
raise ValueError('Invalid reduce name provided, only the following'
' are currently available: [{}, {}, {}, {}]'.format('none','mean', 'max', 'min'))
#depth_im = depth_im[rs, cs]
depth = depth_im.reshape(-1) / self.DepthMapFactor
img_pixs = np.stack((rs, cs)).reshape(2, -1)
img_pixs[[0, 1], :] = img_pixs[[1, 0], :]
uv_one = np.concatenate((img_pixs,
np.ones((1, img_pixs.shape[1]))))
if self.intrinsic_mat is None:
self.intrinsic_mat = self.get_intrinsics()
self.intrinsic_mat_inv = np.linalg.inv(self.intrinsic_mat)
uv_one_in_cam = np.dot(self.intrinsic_mat_inv, uv_one)
pts_in_cam = np.multiply(uv_one_in_cam, depth)
pts_in_cam = np.concatenate((pts_in_cam,
np.ones((1, pts_in_cam.shape[1]))),
axis=0)
pts = pts_in_cam[:3, :].T
colors = rgb_im[rs, cs].reshape(-1, 3)
return pts, colors
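# Hedged usage sketch (not part of the original module; assumes a running ROS
# setup publishing the configured Kinect topics and a pyrobot camera config
# object provided elsewhere):
#
#   camera = Kinect2Camera(configs=camera_configs)
#   rgb, depth = camera.get_rgb_depth()
#   pts, colors = camera.pix_to_3dpt([240], [320], reduce='mean', k=5)
#   cloud, cloud_colors = camera.get_current_pcd()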
| 2.1875
| 2
|
models/pose/loss/pose_modules.py
|
raviv/torchcv
| 308
|
12779791
|
<reponame>raviv/torchcv
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# Author: <NAME>(<EMAIL>)
# Loss function for Pose Estimation.
import torch
import torch.nn as nn
from torch.autograd import Variable
class OPMseLoss(nn.Module):
def __init__(self, configer):
super(OPMseLoss, self).__init__()
self.configer = configer
reduction = 'elementwise_mean'
if self.configer.exists('loss', 'params') and 'mse_reduction' in self.configer.get('loss', 'params'):
reduction = self.configer.get('loss', 'params')['mse_reduction']
self.mse_loss = nn.MSELoss(reduction=reduction)
def forward(self, inputs, *targets, mask=None, weights=None):
loss = 0.0
if isinstance(inputs, list):
if weights is not None:
for i in range(len(inputs)):
if mask is not None:
loss += weights[i] * self.mse_loss(inputs[i]*mask, targets)
else:
loss += weights[i] * self.mse_loss(inputs[i], targets)
else:
for i in range(len(inputs)):
if mask is not None:
loss += self.mse_loss(inputs[i]*mask, targets)
else:
loss += self.mse_loss(inputs[i], targets)
else:
if mask is not None:
loss = self.mse_loss(inputs*mask, targets)
else:
loss = self.mse_loss(inputs, targets)
if self.configer.get('mse_loss', 'reduction') == 'sum':
loss = loss / targets.size(0)
return loss
class PartLoss(nn.Module):
def __init__(self, configer):
super(PartLoss, self).__init__()
self.configer = configer
self.mse_loss = nn.MSELoss(size_average=False)
def forward(self, inputs, targets, mask=None):
inputs = inputs.view(inputs.size(0), -1, 6, inputs.size(2), inputs.size(3))
targets = targets.view(targets.size(0), -1, 6, targets.size(2), targets.size(3))
paf_loss = self.mse_loss(inputs[:, :, 0:2, :, :], targets[:, :, 0:2, :, :])
part_loss = self.mse_loss(inputs[:, :, 2:6, :, :], targets[:, :, 2:6, :, :])
loss = paf_loss + part_loss * 6.0
loss = loss / targets.size(0)
return loss
class CapsuleLoss(nn.Module):
def __init__(self, configer):
super(CapsuleLoss, self).__init__()
self.configer = configer
self.mse_loss = nn.MSELoss(reduction=self.configer.get('capsule_loss', 'reduction'))
def forward(self, inputs, targets, masks=None, is_focal=False):
preds = torch.sqrt((inputs**2).sum(dim=1, keepdim=False))
if masks is not None:
preds = preds * masks
if is_focal:
loss = self.mse_loss(preds, targets)
else:
diff = preds - targets
diff = diff ** 2
alpha = 2.0
weights = targets * alpha
weights = torch.exp(weights)
diff = weights * diff
loss = diff.mean()
return loss
class EmbeddingLoss(nn.Module):
def __init__(self, configer):
super(EmbeddingLoss, self).__init__()
self.configer = configer
self.num_keypoints = self.configer.get('data', 'num_keypoints')
self.l_vec = self.configer.get('capsule', 'l_vec')
self.mse_loss = nn.MSELoss(size_average=False)
def forward(self, inputs, tags, numH, sigma=0.1):
batch_size = inputs.size(0)
h_tag_means = [[Variable(torch.zeros(self.l_vec,), requires_grad=True).cuda()
for h in range(numH[b].numpy()[0])] for b in range(inputs.size()[0])]
for b in range(batch_size):
for n in range(numH[b].numpy()[0]):
valik = 0
for k in range(self.num_keypoints):
tag = inputs[b].masked_select(tags[b][k].eq(n+1).unsqueeze(0))
if tag.size() != torch.Size([]):
h_tag_means[b][n] += tag
valik = valik + 1
h_tag_means[b][n] = h_tag_means[b][n] / max(valik, 1)
loss_list = list()
for b in range(batch_size):
for n in range(numH[b].numpy()[0]):
for k in range(self.num_keypoints):
tag = inputs[b].masked_select(tags[b][k].eq(n+1).unsqueeze(0))
if tag.size() != torch.Size([]):
loss_list.append(self.mse_loss(tag, h_tag_means[b][n]))
for b in range(batch_size):
for n1 in range(numH[b].numpy()[0]):
for n2 in range(numH[b].numpy()[0]):
if n1 != n2:
loss_same = torch.exp(-self.mse_loss(h_tag_means[b][n1], h_tag_means[b][n2]) / sigma / sigma)
loss_list.append(loss_same)
if len(loss_list) == 0:
loss = 0.0
else:
loss = loss_list[0]
for i in range(len(loss_list)-1):
loss += loss_list[i+1]
return loss
| 2.546875
| 3
|
Scripts/SeqGui/SeqGui.py
|
eoc21/biopython
| 3
|
12779792
|
<reponame>eoc21/biopython
import string
from Bio import Seq
from Bio import Alphabet
from Bio.Alphabet import IUPAC
from wxPython.wx import *
from Bio import Translate
from Bio import Transcribe
ID_APPLY = 101
ID_CLEAR = 102
ID_EXIT = 103
ID_CLOSE = 104
ID_ABOUT = 105
ID_CODON = 106
ID_TRANSFORM = 107
class ParamsPanel( wxPanel ):
def __init__(self, parent, log):
wxPanel.__init__(self, parent, -1)
codon_table_static = wxStaticText( self, -1, 'Codon Tables', \
style = wxALIGN_CENTRE )
lc = wxLayoutConstraints()
lc.top.SameAs( self, wxTop, 5 )
lc.left.SameAs( self, wxLeft, 5 )
lc.height.AsIs()
lc.right.SameAs( self, wxRight, 5 )
codon_table_static.SetConstraints( lc )
codon_table_lb = wxListBox( self, ID_CODON )
lc = wxLayoutConstraints()
lc.top.Below( codon_table_static, 5 )
lc.left.SameAs( self, wxLeft, 5 )
lc.height.PercentOf( self, wxHeight, 30 )
lc.right.SameAs( self, wxRight, 5 )
codon_table_lb.SetConstraints( lc )
self.codon_table_lb = codon_table_lb
codon_table_lb.Append( 'Standard' )
codon_table_lb.Append( 'Vertebrate Mitochondrial' )
codon_table_lb.Append( 'Yeast Mitochondrial' )
codon_table_lb.Append( 'Mold Mitochondrial' )
codon_table_lb.Append( 'Invertebrate Mitochondrial' )
codon_table_lb.Append( 'Echinoderm Mitochondrial' )
codon_table_lb.Append( 'Euplotid Nuclear' )
codon_table_lb.Append( 'Bacterial' )
codon_table_lb.Append( 'Alternative Yeast Nuclear' )
codon_table_lb.Append( 'Ascidian Mitochondrial' )
codon_table_lb.Append( 'Flatworm Mitochondrial' )
codon_table_lb.Append( 'Blepharisma Macronuclear' )
codon_table_lb.SetSelection( 0 )
transform_static = wxStaticText( self, -1, 'Transformation', \
style = wxALIGN_CENTRE )
lc = wxLayoutConstraints()
lc.top.Below( codon_table_lb, 5 )
lc.left.SameAs( self, wxLeft, 5 )
lc.height.AsIs()
lc.right.SameAs( self, wxRight, 5 )
transform_static.SetConstraints( lc )
transform_lb = wxListBox( self, ID_TRANSFORM )
lc = wxLayoutConstraints()
lc.top.Below( transform_static, 5 )
lc.left.SameAs( self, wxLeft, 5 )
lc.height.PercentOf( self, wxHeight, 30 )
lc.right.SameAs( self, wxRight, 5 )
transform_lb.SetConstraints( lc )
transform_lb.Append( 'Transcribe' )
transform_lb.Append( 'Translate' )
transform_lb.Append( 'Back translate' )
transform_lb.Append( 'Back transcribe' )
transform_lb.SetSelection( 1 )
self.transform_lb = transform_lb
class SeqPanel( wxPanel ):
def __init__(self, parent, log):
self.parent = parent
wxPanel.__init__(self, parent, -1)
apply_button = wxButton( self, ID_APPLY, "Apply" )
clear_button = wxButton( self, ID_CLEAR, "Clear" )
close_button = wxButton( self, ID_CLOSE, "Close" )
EVT_BUTTON( self, ID_CLOSE, self.OnClose )
EVT_BUTTON( self, ID_APPLY, self.OnApply )
EVT_BUTTON( self, ID_CLEAR, self.OnClear )
lc = wxLayoutConstraints()
lc.bottom.SameAs( self, wxBottom, 10 )
lc.left.SameAs( self, wxLeft, 10 )
lc.height.AsIs( )
lc.width.PercentOf( self, wxWidth, 25 )
apply_button.SetConstraints( lc )
lc = wxLayoutConstraints()
lc.bottom.SameAs( self, wxBottom, 10 )
lc.left.RightOf( apply_button, 5 )
lc.height.AsIs()
lc.width.PercentOf( self, wxWidth, 25 )
clear_button.SetConstraints( lc )
lc = wxLayoutConstraints()
lc.bottom.SameAs( self, wxBottom, 10 )
lc.left.RightOf( clear_button, 5 )
lc.height.AsIs()
lc.width.PercentOf( self, wxWidth, 25 )
close_button.SetConstraints( lc )
src_static = wxStaticText( self, -1, 'Original Sequence', \
style = wxALIGN_CENTRE )
lc = wxLayoutConstraints()
lc.top.SameAs( self, wxTop, 5 )
lc.left.SameAs( self, wxLeft, 5 )
lc.height.AsIs()
lc.right.SameAs( self, wxRight, 5 )
src_static.SetConstraints( lc )
src_text = wxTextCtrl( self, -1, '', style = wxTE_MULTILINE )
lc = wxLayoutConstraints()
lc.top.Below( src_static, 5 )
lc.left.SameAs( self, wxLeft, 5 )
lc.height.PercentOf( self, wxHeight, 30 )
lc.right.SameAs( self, wxRight, 5 )
src_text.SetConstraints( lc )
self.src_text = src_text
dest_static = wxStaticText( self, -1, 'Transformed Sequence', \
style = wxALIGN_CENTRE )
lc = wxLayoutConstraints()
lc.top.Below( src_text, 5 )
lc.left.SameAs( self, wxLeft, 5 )
lc.height.AsIs()
lc.right.SameAs( self, wxRight, 5 )
dest_static.SetConstraints( lc )
dest_text = wxTextCtrl( self, -1, '', style = wxTE_MULTILINE )
lc = wxLayoutConstraints()
lc.top.Below( dest_static, 5 )
lc.left.SameAs( self, wxLeft, 5 )
lc.height.PercentOf( self, wxHeight, 30 )
lc.right.SameAs( self, wxRight, 5 )
dest_text.SetConstraints( lc )
self.dest_text = dest_text
def OnClose( self, event ):
parent = self.GetParent()
parent.Destroy()
def OnApply( self, event ):
codon_table_lb = self.parent.params_panel.codon_table_lb
selection = codon_table_lb.GetStringSelection()
print selection
codon_table = selection[:]
transform_lb = self.parent.params_panel.transform_lb
selection = transform_lb.GetStringSelection()
transform = selection[:]
print transform
if( transform == 'Translate' ):
self.translate( codon_table )
elif( transform == 'Back translate' ):
self.back_translate( codon_table )
elif( transform == 'Transcribe' ):
self.transcribe()
elif( transform == 'Back transcribe' ):
self.back_transcribe()
def OnClear( self, event ):
self.src_text.Clear()
self.dest_text.Clear()
def translate( self, codon_table ):
trans = Translate.unambiguous_dna_by_name[ codon_table ]
text = self.src_text.GetValue()
seq = text[:]
seq = string.join( string.split( seq ) )
dna = Seq.Seq( seq, IUPAC.unambiguous_dna )
print dna
protein = trans.translate_to_stop( dna )
self.dest_text.Clear()
self.dest_text.SetValue( protein.tostring() )
def back_translate( self, codon_table ):
trans = Translate.unambiguous_dna_by_name[ codon_table ]
text = self.src_text.GetValue()
seq = text[:]
seq = string.join( string.split( seq ) )
protein = Seq.Seq( seq, IUPAC.unambiguous_dna )
print protein
dna = trans.back_translate( protein )
self.dest_text.Clear()
self.dest_text.SetValue( dna.tostring() )
def transcribe( self ):
trans = Transcribe.unambiguous_transcriber
text = self.src_text.GetValue()
seq = text[:]
seq = string.join( string.split( seq ) )
dna = Seq.Seq( seq, IUPAC.unambiguous_dna )
print dna
rna = trans.transcribe( dna )
self.dest_text.Clear()
self.dest_text.SetValue( rna.tostring() )
def back_transcribe( self ):
trans = Transcribe.unambiguous_transcriber
text = self.src_text.GetValue()
seq = text[:]
seq = string.join( string.split( seq ) )
rna = Seq.Seq( seq, IUPAC.unambiguous_rna )
print rna
dna = trans.back_transcribe( rna )
self.dest_text.Clear()
self.dest_text.SetValue( dna.tostring() )
class SeqFrame(wxFrame):
def __init__(self, parent, ID, title):
wxFrame.__init__(self, parent, ID, title,
wxDefaultPosition, wxSize(500, 400))
self.SetAutoLayout( true )
self.CreateStatusBar()
self.SetStatusText("This is the statusbar")
menu = wxMenu()
menu.Append(ID_ABOUT, "&About",
"More information about this program")
menu.AppendSeparator()
menu.Append(ID_EXIT, "E&xit", "Terminate the program")
menuBar = wxMenuBar()
menuBar.Append(menu, "&File");
self.SetMenuBar(menuBar)
params_panel = ParamsPanel(self, -1)
lc = wxLayoutConstraints()
lc.top.SameAs( self, wxTop, 10 )
lc.left.SameAs( self, wxLeft, 5 )
lc.bottom.SameAs( self, wxBottom, 5 )
lc.width.PercentOf( self, wxWidth, 40 )
params_panel.SetConstraints( lc )
seq_panel = SeqPanel(self, -1)
lc = wxLayoutConstraints()
lc.top.SameAs( self, wxTop, 10 )
lc.left.RightOf( params_panel, 5 )
lc.bottom.SameAs( self, wxBottom, 5 )
lc.right.SameAs( self, wxRight )
seq_panel.SetConstraints( lc )
self.seq_panel = seq_panel
self.params_panel = params_panel
EVT_MENU( self, ID_EXIT, self.exit )
def exit( self, event ):
self.Close( true )
class MyApp(wxApp):
def OnInit(self):
frame = SeqFrame(NULL, -1, "Greetings from biopython")
frame.Show(true)
self.SetTopWindow(frame)
return true
app = MyApp(0)
app.MainLoop()
| 2.21875
| 2
|
stackvm/devices/stdio.py
|
Dentosal/StackVM
| 1
|
12779793
|
from ..device import Device
from ..byteutil import *
class StdioDevice(Device):
VERSION = "1.0.0"
def write(self, pop_fn):
count = pop_fn()
data = bytes(pop_fn() for _ in range(count)).decode("utf-8")
print(data, end="")
def read(self, push_fn):
data = input()
cells = str_to_u32unicode(data)
for v in reversed(cells):
push_fn(int_from_bytes(v))
push_fn(len(cells))
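# Hedged illustration (not part of the original module): write() pops a count
# and then that many byte values from the VM stack; read() pushes the input
# line's cells in reverse order, then their count on top. With a plain Python
# list acting as the stack and an already-constructed StdioDevice 'device':
#
#   stack = [105, 72, 2]        # 'i', 'H', then the count 2 on top
#   device.write(stack.pop)     # pops 2, then 72 ('H') and 105 ('i'); prints "Hi"
#   device.read(stack.append)   # pushes the typed line's cells, then their count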
| 2.78125
| 3
|
renconstruct/tasks/clean.py
|
devorbitus/renconstruct
| 0
|
12779794
|
<reponame>devorbitus/renconstruct
### System ###
import os
from glob import glob
from subprocess import run
### Logging ###
from renconstruct import logger
class CleanTask:
# The higher priority, the earlier the task runs
# This is relative to all other enabled tasks
PRIORITY = -1000
def __init__(self, name, config):
self.name = name
self.config = config
def post_build(self):
run(
"renutil clean {}".format(self.config["renutil"]["version"]),
capture_output=True,
shell=True,
)
unused_apks = [
item
for item in glob(os.path.join(self.config["output"], "*.apk"))
if not item.endswith("-universal-release.apk")
]
for file in unused_apks:
logger.debug("Removing file '{}'".format(os.path.basename(file)))
os.remove(file)
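# Hedged usage sketch (not part of the original module; the config values below
# are assumptions matching the keys accessed in post_build above):
#
#   config = {"renutil": {"version": "7.4.11"}, "output": "dist"}
#   task = CleanTask("clean", config)
#   task.post_build()   # runs 'renutil clean <version>' and removes non-universal APKs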
| 2.28125
| 2
|
pyabc/visualization/sample.py
|
Pat-Laub/pyABC
| 0
|
12779795
|
import matplotlib.pyplot as plt
import numpy as np
from typing import List, Union
from ..storage import History
from .util import to_lists_or_default
def plot_sample_numbers(
histories: Union[List, History],
labels: Union[List, str] = None,
rotation: int = 0,
title: str = "Total required samples",
size: tuple = None):
"""
Plot required numbers of samples over all iterations.
Parameters
----------
histories: Union[List, History]
The histories to plot from. History ids must be set correctly.
labels: Union[List, str], optional
Labels corresponding to the histories. If None are provided,
indices are used as labels.
rotation: int, optional (default = 0)
Rotation to apply to the plot's x tick labels. For longer labels,
a rotation of 45 or even 90 degrees can be preferable.
title: str, optional (default = "Total required samples")
Title for the plot.
size: tuple of float, optional
The size of the plot in inches.
Returns
-------
ax: Axis of the generated plot.
"""
# preprocess input
histories, labels = to_lists_or_default(histories, labels)
# create figure
fig, ax = plt.subplots()
n_run = len(histories)
# extract sample numbers
samples = []
for history in histories:
# note: the first entry corresponds to the calibration and should
# be included here to be fair against methods not requiring
# calibration
samples.append(np.array(history.get_all_populations()['samples']))
# create matrix
n_pop = max(len(sample) for sample in samples)
matrix = np.zeros((n_pop, n_run))
for i_sample, sample in enumerate(samples):
matrix[:len(sample), i_sample] = sample
# plot bars
for i_pop in range(n_pop):
ax.bar(x=np.arange(n_run),
height=matrix[i_pop, :],
bottom=np.sum(matrix[:i_pop, :], axis=0))
# add labels
ax.set_xticks(np.arange(n_run))
ax.set_xticklabels(labels, rotation=rotation)
ax.set_title(title)
ax.set_ylabel("Samples")
ax.set_xlabel("Run")
# set size
if size is not None:
fig.set_size_inches(size)
fig.tight_layout()
return ax
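# Hedged usage sketch (not part of the original module; the database paths and
# labels are assumptions for illustration only):
#
#   from pyabc.storage import History
#
#   histories = [History("sqlite:///run1.db"), History("sqlite:///run2.db")]
#   ax = plot_sample_numbers(histories, labels=["run 1", "run 2"], rotation=45)
#   ax.get_figure().savefig("total_samples.png")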
| 3.140625
| 3
|
adclassifier/feature_selection.py
|
BoudhayanBanerjee/political-ad-classifier
| 2
|
12779796
|
<reponame>BoudhayanBanerjee/political-ad-classifier
from sklearn.feature_selection import VarianceThreshold
from sklearn.feature_selection import SelectKBest
from sklearn.feature_selection import chi2
from sklearn.feature_selection import mutual_info_classif
from sklearn.feature_selection import RFE
from sklearn.feature_selection import SelectFromModel
# TODO
| 1.851563
| 2
|
mojo/devtools/common/android_gdb/session.py
|
zbowling/mojo
| 1
|
12779797
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Manages a debugging session with GDB.
This module is meant to be imported from inside GDB. Once loaded, the
|DebugSession| attaches GDB to a running Mojo Shell process on an Android
device using a remote gdbserver.
At startup and each time the execution stops, |DebugSession| associates
debugging symbols with every frame. For more information, see |DebugSession|
documentation.
"""
import gdb
import glob
import itertools
import logging
import os
import os.path
import shutil
import subprocess
import sys
import tempfile
import traceback
import urllib2
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import android_gdb.config as config
from android_gdb.remote_file_connection import RemoteFileConnection
from android_gdb.signatures import get_signature
logging.getLogger().setLevel(logging.INFO)
def _gdb_execute(command):
"""Executes a GDB command."""
return gdb.execute(command, to_string=True)
class Mapping(object):
"""Represents a mapped memory region."""
def __init__(self, line):
self.start = int(line[0], 16)
self.end = int(line[1], 16)
self.size = int(line[2], 16)
self.offset = int(line[3], 16)
self.filename = line[4]
def _get_mapped_files():
"""Retrieves all the files mapped into the debugged process memory.
Returns:
List of mapped memory regions grouped by files.
"""
# info proc map returns a space-separated table with the following fields:
# start address, end address, size, offset, file path.
mappings = [Mapping(x) for x in
[x.split() for x in
_gdb_execute("info proc map").split('\n')]
if len(x) == 5 and x[4][0] == '/']
res = {}
for m in mappings:
libname = m.filename[m.filename.rfind('/') + 1:]
res[libname] = res.get(libname, []) + [m]
return res.values()
class DebugSession(object):
def __init__(self, build_directory_list, package_name, pyelftools_dir, adb):
build_directories = build_directory_list.split(',')
if len(build_directories) == 0 or not all(map(os.path.exists,
build_directories)):
logging.fatal("Please pass a list of valid build directories")
sys.exit(1)
self._package_name = package_name
self._adb = adb
self._remote_file_cache = os.path.join(os.getenv('HOME'), '.mojosymbols')
if pyelftools_dir != None:
sys.path.append(pyelftools_dir)
try:
import elftools.elf.elffile as elffile
except ImportError:
logging.fatal("Unable to find elftools module; please install pyelftools "
"and specify its path on the command line using "
"--pyelftools-dir.")
sys.exit(1)
self._elffile_module = elffile
self._libraries = self._find_libraries(build_directories)
self._rfc = RemoteFileConnection('localhost', 10000)
self._remote_file_reader_process = None
if not os.path.exists(self._remote_file_cache):
os.makedirs(self._remote_file_cache)
self._done_mapping = set()
self._downloaded_files = []
def __del__(self):
# Note that, per python interpreter documentation, __del__ is not
# guaranteed to be called when the interpreter (GDB, in our case) quits.
# Also, most (all?) globals are no longer available at this time (launching
# a subprocess does not work).
self.stop()
def stop(self, _unused_return_value=None):
if self._remote_file_reader_process != None:
self._remote_file_reader_process.kill()
def _find_libraries(self, lib_dirs):
"""Finds all libraries in |lib_dirs| and key them by their signatures.
"""
res = {}
for lib_dir in lib_dirs:
for fn in glob.glob('%s/*.so' % lib_dir):
with open(fn, 'r') as f:
s = get_signature(f, self._elffile_module)
if s is not None:
res[s] = fn
return res
def _associate_symbols(self, mapping, local_file):
with open(local_file, "r") as f:
elf = self._elffile_module.ELFFile(f)
s = elf.get_section_by_name(".text")
text_address = mapping[0].start + s['sh_offset']
_gdb_execute("add-symbol-file %s 0x%x" % (local_file, text_address))
def _download_file(self, signature, remote):
"""Downloads a remote file either from the cloud or through GDB connection.
Returns:
The filename of the downloaded file
"""
temp_file = tempfile.NamedTemporaryFile()
logging.info("Trying to download symbols from the cloud.")
symbols_url = "http://storage.googleapis.com/mojo/symbols/%s" % signature
try:
symbol_file = urllib2.urlopen(symbols_url)
try:
with open(temp_file.name, "w") as dst:
shutil.copyfileobj(symbol_file, dst)
logging.info("Getting symbols for %s at %s." % (remote, symbols_url))
# This allows the deletion of temporary files on disk when the
# debugging session terminates.
self._downloaded_files.append(temp_file)
return temp_file.name
finally:
symbol_file.close()
except urllib2.HTTPError:
pass
logging.info("Downloading file %s" % remote)
_gdb_execute("remote get %s %s" % (remote, temp_file.name))
# This allows the deletion of temporary files on disk when the debugging
# session terminates.
self._downloaded_files.append(temp_file)
return temp_file.name
def _find_mapping_for_address(self, mappings, address):
"""Returns the list of all mappings of the file occupying the |address|
memory address.
"""
for file_mappings in mappings:
for mapping in file_mappings:
if address >= mapping.start and address <= mapping.end:
return file_mappings
return None
def _try_to_map(self, mapping):
remote_file = mapping[0].filename
if remote_file in self._done_mapping:
return False
self._done_mapping.add(remote_file)
self._rfc.open(remote_file)
signature = get_signature(self._rfc, self._elffile_module)
if signature is not None:
if signature in self._libraries:
self._associate_symbols(mapping, self._libraries[signature])
else:
# This library file is not known locally. Download it from the device or
# the cloud and put it in cache so, if it got symbols, we can see them.
local_file = os.path.join(self._remote_file_cache, signature)
if not os.path.exists(local_file):
tmp_output = self._download_file(signature, remote_file)
shutil.move(tmp_output, local_file)
self._associate_symbols(mapping, local_file)
return True
return False
def _map_symbols_on_current_thread(self, mapped_files):
"""Updates the symbols for the current thread using files from mapped_files.
"""
frame = gdb.newest_frame()
while frame and frame.is_valid():
if frame.name() is None:
m = self._find_mapping_for_address(mapped_files, frame.pc())
if m is not None and self._try_to_map(m):
# Force gdb to recompute its frames.
_gdb_execute("info threads")
frame = gdb.newest_frame()
assert frame.is_valid()
if (frame.older() is not None and
frame.older().is_valid() and
frame.older().pc() != frame.pc()):
frame = frame.older()
else:
frame = None
def update_symbols(self, current_thread_only):
"""Updates the mapping between symbols as seen from GDB and local library
files.
If current_thread_only is True, only update symbols for the current thread.
"""
logging.info("Updating symbols")
mapped_files = _get_mapped_files()
# Map all symbols from native libraries packages with the APK.
for file_mappings in mapped_files:
filename = file_mappings[0].filename
if ((filename.startswith('/data/data/') or
filename.startswith('/data/app')) and
not filename.endswith('.apk') and
not filename.endswith('.dex')):
logging.info('Pre-mapping: %s' % file_mappings[0].filename)
self._try_to_map(file_mappings)
if current_thread_only:
self._map_symbols_on_current_thread(mapped_files)
else:
logging.info('Updating all threads\' symbols')
current_thread = gdb.selected_thread()
nb_threads = len(_gdb_execute("info threads").split("\n")) - 2
for i in xrange(nb_threads):
try:
_gdb_execute("thread %d" % (i + 1))
self._map_symbols_on_current_thread(mapped_files)
except gdb.error:
traceback.print_exc()
current_thread.switch()
def _get_device_application_pid(self, application):
"""Gets the PID of an application running on a device."""
output = subprocess.check_output([self._adb, 'shell', 'ps'])
for line in output.split('\n'):
elements = line.split()
if len(elements) > 0 and elements[-1] == application:
return elements[1]
return None
def start(self):
"""Starts a debugging session."""
gdbserver_pid = self._get_device_application_pid('gdbserver')
if gdbserver_pid is not None:
subprocess.check_call([self._adb, 'shell', 'kill', gdbserver_pid])
shell_pid = self._get_device_application_pid(self._package_name)
if shell_pid is None:
raise Exception('Unable to find a running mojo shell.')
subprocess.check_call([self._adb, 'forward', 'tcp:9999', 'tcp:9999'])
subprocess.Popen(
[self._adb, 'shell', 'gdbserver', '--attach', ':9999', shell_pid],
# os.setpgrp ensures signals passed to this file (such as SIGINT) are
# not propagated to child processes.
preexec_fn = os.setpgrp)
# Kill stray remote reader processes. See __del__ comment for more info.
remote_file_reader_pid = self._get_device_application_pid(
config.REMOTE_FILE_READER_DEVICE_PATH)
if remote_file_reader_pid is not None:
subprocess.check_call([self._adb, 'shell', 'kill',
remote_file_reader_pid])
self._remote_file_reader_process = subprocess.Popen(
[self._adb, 'shell', config.REMOTE_FILE_READER_DEVICE_PATH],
stdout=subprocess.PIPE, preexec_fn = os.setpgrp)
port = int(self._remote_file_reader_process.stdout.readline())
subprocess.check_call([self._adb, 'forward', 'tcp:10000', 'tcp:%d' % port])
self._rfc.connect()
_gdb_execute('target remote localhost:9999')
self.update_symbols(current_thread_only=False)
def on_stop(_):
self.update_symbols(current_thread_only=True)
gdb.events.stop.connect(on_stop)
gdb.events.exited.connect(self.stop)
# Register the update-symbols command.
UpdateSymbols(self)
class UpdateSymbols(gdb.Command):
"""Command to update symbols loaded into GDB.
GDB usage: update-symbols [all|current]
"""
_UPDATE_COMMAND = "update-symbols"
def __init__(self, session):
super(UpdateSymbols, self).__init__(self._UPDATE_COMMAND, gdb.COMMAND_STACK)
self._session = session
def invoke(self, arg, _unused_from_tty):
if arg == 'current':
self._session.update_symbols(current_thread_only=True)
else:
self._session.update_symbols(current_thread_only=False)
def complete(self, text, _unused_word):
if text == self._UPDATE_COMMAND:
return ('all', 'current')
elif text in self._UPDATE_COMMAND + ' all':
return ['all']
elif text in self._UPDATE_COMMAND + ' current':
return ['current']
else:
return []
| 2.015625
| 2
|
python/clock.py
|
h-nari/NetLCD
| 0
|
12779798
|
<filename>python/clock.py
import time
import netLcd
from sys import argv
from PIL import Image
from datetime import datetime
usage = '%s ip_addr'
if len(argv) < 2:
print(usage % argv[0])
exit(1)
nd = netLcd.NetLcd(argv[1])
nd.clear(obscure=1)
day0 = sec0 = -1
while True:
    now = datetime.now()
if day0 != now.day:
s = now.strftime("%Y/%m/%d(%a)")
nd.text(s, color='#8080ff',tx=20,ty=80,text_size=3)
day0 = now.day
if sec0 != now.second:
s = now.strftime("%H:%M:%S")
nd.text(s, color='green', tx=10, ty=120, text_size=6)
sec0 = now.second
time.sleep(0.1)
| 3.09375
| 3
|
lab2_part2.py
|
mariac-molina/KR
| 0
|
12779799
|
from agents import *
class Dirt(Thing):
pass
vac = VacuumEnvironment()
d1 = Dirt()
d2 = Dirt()
d3 = Dirt()
vac.add_thing(d1, [0,0])
vac.add_thing(d2, [0,1])
vac.add_thing(d3, [0,2])
| 2.046875
| 2
|
bot.py
|
0ceanlight/mcbeDiscordBot
| 5
|
12779800
|
<filename>bot.py<gh_stars>1-10
import datetime
import json
import logging
import aiohttp
import discord
from discord.ext import commands
extensions = [
"cogs.utils",
"cogs.admin",
"cogs.src",
# "cogs.trans",
"cogs.player",
"cogs.general",
# "cogs.webserver",
# "cogs.twitter",
"cogs.logs",
"cogs.errorhandler",
]
def get_prefix(bot, message):
"""A callable Prefix for our bot. This could be edited to allow per server prefixes."""
prefixes = ["steve ", "STEVE ", "/", "!", "@", "Steve "]
# Check to see if we are outside of a guild. e.g DM's etc.
# if not message.guild:
# Only allow ? to be used in DMs
# return '?'
# If we are in a guild, we allow for the user to mention us or use any of the prefixes in our list.
return commands.when_mentioned_or(*prefixes)(bot, message)
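# Sketch only (not part of this bot): per-server prefixes, as hinted at in the
# docstring above, could be kept in a dict keyed by guild id and consulted before
# falling back to the global list. PER_GUILD_PREFIXES and get_prefix_per_guild are
# hypothetical names introduced purely for this illustration.
PER_GUILD_PREFIXES = {}  # e.g. {guild_id: ["!", "steve "]}


def get_prefix_per_guild(bot, message):
    if message.guild and message.guild.id in PER_GUILD_PREFIXES:
        return commands.when_mentioned_or(*PER_GUILD_PREFIXES[message.guild.id])(bot, message)
    return get_prefix(bot, message)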
class BedrockBot(commands.Bot):
def __init__(self):
super().__init__(
command_prefix=get_prefix,
case_insensitive=True,
allowed_mentions=discord.AllowedMentions(
everyone=False, users=True, roles=False
),
intents=discord.Intents.all(),
)
self.logger = logging.getLogger("discord")
self.messageBlacklist = []
self.session = aiohttp.ClientSession()
with open("custom_commands.json", "r") as f:
self.custom_commands = json.load(f)
with open("config.json", "r") as f:
self.config = json.load(f)
config = self.config
async def on_ready(self):
self.uptime = datetime.datetime.utcnow()
game = discord.Game("Mining away")
await self.change_presence(activity=game)
with open("blacklist.json", "r") as f:
try:
self.blacklist = json.load(f)
except json.decoder.JSONDecodeError:
self.blacklist = []
with open("runs_blacklist.json", "r") as f:
try:
self.runs_blacklist = json.load(f)
except json.decoder.JSONDecodeError:
self.runs_blacklist = {"videos": [], "players": []}
for extension in extensions:
self.load_extension(extension)
self.logger.warning(
f"Online: {self.user} (ID: {self.user.id}) (Discord.py: {discord.__version__})"
)
async def on_message(self, message):
if message.author.bot or message.author.id in self.blacklist:
return
await self.process_commands(message)
try:
command = message.content.split()[0]
except IndexError:
pass
try:
if command in self.custom_commands:
await message.channel.send(self.custom_commands[command])
return
except:
return
def run(self):
super().run(self.config["token"], reconnect=True)
| 2.734375
| 3
|
krcg/config.py
|
smeea/krcg
| 6
|
12779801
|
"""Configuration"""
#: static KRCG server
KRCG_STATIC_SERVER = "https://static.krcg.org"
SUPPORTED_LANGUAGES = ["fr", "es"]
VEKN_TWDA_URL = "http://www.vekn.fr/decks/twd.htm"
#: aliases to match players abbreviations and typos in the TWDA
ALIASES = {
# parsing will consider the legacy double dash "--" as a comment mark
"bang nakh": "Bang Nakh — Tiger's Claws",
# common HTML decoding failures
"alia, god=92s messenger": "Alia, God's Messenger",
"pentex=99 subversion": "Pentex™ Subversion",
# traditions
"the first tradition": "First Tradition: The Masquerade",
"first tradition": "First Tradition: The Masquerade",
"1st tradition": "First Tradition: The Masquerade",
"the second tradition": "Second Tradition: Domain",
"second tradition": "Second Tradition: Domain",
"2nd trad": "Second Tradition: Domain",
"2nd tradition": "Second Tradition: Domain",
"2nd tradition: domain": "Second Tradition: Domain",
"the third tradition": "Third Tradition: Progeny",
"third tradition": "Third Tradition: Progeny",
"3rd tradition": "Third Tradition: Progeny",
"the fourth tradition": "Fourth Tradition: The Accounting",
"fourth tradition": "Fourth Tradition: The Accounting",
"4th tradition": "Fourth Tradition: The Accounting",
"4th tradition: accounting": "Fourth Tradition: The Accounting",
"the fifth tradition": "Fifth Tradition: Hospitality",
"fifth tradition": "Fifth Tradition: Hospitality",
"5th tradition": "Fifth Tradition: Hospitality",
"the sixth tradition": "Sixth Tradition: Destruction",
"sixth tradition": "Sixth Tradition: Destruction",
"6th tradition": "Sixth Tradition: Destruction",
# hunting grounds
"academic hg": "Academic Hunting Ground",
"amusement park hg": "Amusement Park Hunting Ground",
"asylum hg": "Asylum Hunting Ground",
"base hg": "Base Hunting Ground",
"campground hg": "Campground Hunting Ground",
"corporate hg": "Corporate Hunting Ground",
"fetish club hg": "Fetish Club Hunting Ground",
"fetish hg": "Fetish Club Hunting Ground",
"institution hg": "Institution Hunting Ground",
"jungle hg": "Jungle Hunting Ground",
"library hg": "Library Hunting Ground",
"morgue hg": "Morgue Hunting Ground",
"palace hg": "Palace Hunting Ground",
"park hg": "Park Hunting Ground",
"poacher's hg": "Poacher's Hunting Ground",
"political hg": "Political Hunting Ground",
"port hg": "Port Hunting Ground",
"shanty Town hg": "Shanty Town Hunting Ground",
"slum hg": "Slum Hunting Ground",
"society hg": "Society Hunting Ground",
"temple hg": "Temple Hunting Ground",
"underworld hg": "Underworld Hunting Ground",
"university hg": "University Hunting Ground",
"uptown hg": "Uptown Hunting Ground",
"warzone hg": "Warzone Hunting Ground",
"zoo hg": "Zoo Hunting Ground",
"academic h.g.": "Academic Hunting Ground",
"amusement park h.g.": "Amusement Park Hunting Ground",
"asylum h.g.": "Asylum Hunting Ground",
"base h.g.": "Base Hunting Ground",
"campground h.g.": "Campground Hunting Ground",
"corporate h.g.": "Corporate Hunting Ground",
"fetish club h.g.": "Fetish Club Hunting Ground",
"institution h.g.": "Institution Hunting Ground",
"jungle h.g.": "Jungle Hunting Ground",
"library h.g.": "Library Hunting Ground",
"morgue h.g.": "Morgue Hunting Ground",
"palace h.g.": "Palace Hunting Ground",
"park h.g.": "Park Hunting Ground",
"poacher's h.g.": "Poacher's Hunting Ground",
"political h.g.": "Political Hunting Ground",
"port h.g.": "Port Hunting Ground",
"shanty Town h.g.": "Shanty Town Hunting Ground",
"slum h.g.": "Slum Hunting Ground",
"society h.g.": "Society Hunting Ground",
"temple h.g.": "Temple Hunting Ground",
"underworld h.g.": "Underworld Hunting Ground",
"university h.g.": "University Hunting Ground",
"uptown h.g.": "Uptown Hunting Ground",
"warzone h.g.": "Warzone Hunting Ground",
"zoo h.g.": "Zoo Hunting Ground",
"acad. hg": "Academic Hunting Ground",
"univ. hg": "University Hunting Ground",
"univ hg": "University Hunting Ground",
# powerbases
"pb: barranquilla": "Powerbase: Barranquilla",
"pb: berlin": "Powerbase: Berlin",
"pb: cape verde": "Powerbase: Cape Verde",
"pb: chicago": "Powerbase: Chicago",
"pb: los angeles": "Powerbase: Los Angeles",
"pb: luanda": "Powerbase: Luanda",
"pb: madrid": "Powerbase: Madrid",
"pb:madrid": "Powerbase: Madrid",
"pb: mexico city": "Powerbase: Mexico City",
"pb: montreal": "Powerbase: Montreal",
"pb:montreal": "Powerbase: Montreal",
"pb: new york": "Powerbase: New York",
"pb: rome": "Powerbase: Rome",
"pb: savannah": "Powerbase: Savannah",
"pb: tshwane": "Powerbase: Tshwane",
"pb: washington, d.c.": "Powerbase: Washington, D.C.",
"pb: zurich": "Powerbase: Zürich",
"pb barranquilla": "Powerbase: Barranquilla",
"pb berlin": "Powerbase: Berlin",
"pb cape verde": "Powerbase: Cape Verde",
"pb chicago": "Powerbase: Chicago",
"pb los angeles": "Powerbase: Los Angeles",
"pb luanda": "Powerbase: Luanda",
"pb madrid": "Powerbase: Madrid",
"pb mexico city": "Powerbase: Mexico City",
"pb montreal": "Powerbase: Montreal",
"pb new york": "Powerbase: New York",
"pb rome": "Powerbase: Rome",
"pb savannah": "Powerbase: Savannah",
"pb tshwane": "Powerbase: Tshwane",
"pb washington, d.c.": "Powerbase: Washington, D.C.",
"pb zurich": "Powerbase: Zürich",
"powerbase zurich": "Powerbase: Zürich",
# punctuation
"behind you": "Behind You!",
"psyche": "Psyche!",
# too short to be registered as alternate name automatically
"call": "Call, The",
"coven": "Coven, The",
"rack": "Rack, The",
"talaq": "Talaq, The Immortal",
# known abbreviations
"antediluvian a.": "Antediluvian Awakening",
"anthelios, the": "Anthelios, The Red Star",
"archon inv.": "Archon Investigation",
"carlton": "<NAME>",
"carver's meat packing": "Carver's Meat Packing and Storage",
"con ag": "Conservative Agitation",
"direct": "Direct Intervention",
"delaying": "Delaying Tactics",
"dreams": "Dreams of the Sphinx",
"dreams of the s.": "Dreams of the Sphinx",
"effective": "Effective Management",
"elysium": "Elysium: The Arboretum",
"elysium: versailles": "Elysium: The Palace of Versailles",
"entice": "Enticement",
"felix fix": 'Felix "Fix" Hessian (Wraith)',
"forced": "Forced Awakening",
"forced aw": "Forced Awakening",
"foreshadowing": "Foreshadowing Destruction",
"golconda": "Golconda: Inner Peace",
"govern": "Govern the Unaligned",
"gtu": "Govern the Unaligned",
"heidelberg": "Heidelberg Castle, Germany",
"heidelburg": "Heidelberg Castle, Germany",
"info highway": "Information Highway",
"infohighway": "Information Highway",
"info hwy": "Information Highway",
"js simmons": "J. S. Simmons, Esq.",
"js simmons esq": "J. S. Simmons, Esq.",
"krc": "Kine Resources Contested",
"krcg": "KRCG News Radio",
"krcg news": "KRCG News Radio",
"laptop": "Laptop Computer",
"laptops": "Laptop Computer",
"laptop comp": "Laptop Computer",
"laptop comp.": "Laptop Computer",
"legal manip": "Legal Manipulations",
"london eve star": "London Evening Star, Tabloid Newspaper",
"london evening star, tabloid": "London Evening Star, Tabloid Newspaper",
"malk. dementia": "Malkavian Dementia",
"masquer": "Masquer (Wraith)",
"mister winthrop": "Mr. Winthrop",
"molotov": "Molotov Cocktail",
"ohoyo hopoksia": "Ohoyo Hopoksia (Bastet)",
"owl": "Owl Companion",
"patagia: flaps": "Patagia: Flaps Allowing Limited Flight",
"pentex": "Pentex(TM) Subversion",
"pto": "Protect Thine Own",
"pulse": "Pulse of the Canaille",
"storage": "Storage Annex",
"sudden": "Sudden Reversal",
"telepathic misdir.": "Telepathic Misdirection",
"true love's kiss": "True Love's Face",
"temptation of g.p.": "Temptation of Greater Power",
"ventrue hq": "Ventrue Headquarters",
"voter cap": "Voter Captivation",
"wake with ef": "Wake with Evening's Freshness",
"wake with e.f": "Wake with Evening's Freshness",
"wake wef": "Wake with Evening's Freshness",
"wwef": "Wake with Evening's Freshness",
"wake...": "Wake with Evening's Freshness",
"wake w/eve. freshness": "Wake with Evening's Freshness",
"wake w/evening...": "Wake with Evening's Freshness",
"wake": "Wake with Evening's Freshness",
"wakes": "Wake with Evening's Freshness",
"wakeys": "Wake with Evening's Freshness",
"waste man op": "Waste Management Operation",
"wmrh": "WMRH Talk Radio",
"wwstick": "Weighted Walking Stick",
# misspellings not fixed by difflib
"2th tradition": "Second Tradition: Domain",
"ancient influnse": "Ancient Influence",
"blodd doll": "Blood Doll",
"carver's meat packing plant": "Carver's Meat Packing and Storage",
"deflekion": "deflection",
"denys": "Deny",
"divine intervention": "Direct Intervention",
"dogde": "Dodge",
"dominat skill": "Dominate",
"dominate: skillcard": "Dominate",
"eagle sight": "Eagle's Sight",
"sprit touch": "Spirit's Touch",
"guard dog": "Guard Dogs",
"golgonda": "Golconda: Inner Peace",
"info superhighway": "Information Highway",
"j.<NAME>": "<NAME>, Esq.",
"judgement": "Judgment: Camarilla Segregation",
"krcg newspaper": "KRCG News Radio",
"krcg radio station": "KRCG News Radio",
"lost in the crowd": "Lost in Crowds",
"lost n the crowds": "Lost in Crowds",
"milicent smith: vampire hunter": "Millic<NAME>, Puritan Vampire Hunter",
"obfuscate skill": "Obfuscate",
"ps: istanbul": "Praxis Seizure: Istanbul",
"rejuvenation": "Rejuvenate",
"rumour mill tabloid": "The Rumor Mill, Tabloid Newspaper",
"rumor mill, the": "The Rumor Mill, Tabloid Newspaper",
"soul gems": "Soul Gem of Etrius",
"tomb of rameses the cheesemonger": "Tomb of Rameses III",
"truth of a 1000 lies": "Truth of a Thousand Lies",
"veil of legions": "Veil the Legions",
# 🥚
"enkil cock": "enkil cog",
"parity shit": "Parity Shift",
"heart of cheating": "Heart of Nizchetus",
}
#: type order for deck display
TYPE_ORDER = [
"Master",
"Conviction",
"Action",
"Action/Combat",
"Action/Reaction",
"Ally",
"Equipment",
"Political Action",
"Retainer",
"Power",
"Action Modifier",
"Action Modifier/Combat",
"Action Modifier/Reaction",
"Reaction",
"Combat",
"Combat/Reaction",
"Event",
]
#: some decks in the TWDA do not respect the rules when it comes to deck size
TWDA_CHECK_DECK_FAILS = {
"2k9avangarda", # 59 cards listed
"2k8glqmich", # 91 cards listed
"2k8sanfranqual", # 59 cards listed
"2k8pwbsla2", # 91 cards listed
"2k6faceaface", # 91 cards listed
"2k4virolaxboston", # 91 cards listed
"2k4edith", # 91 cards listed
"2k4pariscup", # 11 crypt cards listed
"2k3nycanarch", # 91 cards listed
"ckgc2k", # 91 cards listed
"saveface2k1", # 91 cards listed
"genconuk2k1-treasure", # 91 cards listed
"jd32000", # 91 cards listed
"dog", # 100 cards listed
"matt-alamut", # 91 cards listed
"stevewampler", # 59 cards listed
}
| 1.71875
| 2
|
setup.py
|
sergief/norma43parser
| 0
|
12779802
|
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="norma43parser",
version="1.1.2",
license="MIT",
author="<NAME>",
author_email="<EMAIL>",
description="Parser for Bank Account information files formatted in Norma 43",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/sergief/norma43parser",
packages=setuptools.find_packages(),
keywords=["norma43", "parser", "bank", "account", "n43", "csb"],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
],
python_requires=">=3.6",
)
| 1.515625
| 2
|
tests/integration/call_run_within_script_with_autokeras_test.py
|
bhack/cloud
| 0
|
12779803
|
"""
Search for a good model for the
[MNIST](https://keras.io/datasets/#mnist-database-of-handwritten-digits) dataset.
"""
import argparse
import os
import autokeras as ak
import tensorflow_cloud as tfc
from tensorflow.keras.datasets import mnist
parser = argparse.ArgumentParser(description="Model save path arguments.")
parser.add_argument("--path", required=True, type=str, help="Keras model save path")
args = parser.parse_args()
tfc.run(
chief_config=tfc.COMMON_MACHINE_CONFIGS["V100_1X"],
docker_base_image="haifengjin/autokeras:1.0.3",
)
# Prepare the dataset.
(x_train, y_train), (x_test, y_test) = mnist.load_data()
print(x_train.shape) # (60000, 28, 28)
print(y_train.shape) # (60000,)
print(y_train[:3]) # array([7, 2, 1], dtype=uint8)
# Initialize the ImageClassifier.
clf = ak.ImageClassifier(max_trials=2)
# Search for the best model.
clf.fit(x_train, y_train, epochs=10)
# Evaluate on the testing data.
print("Accuracy: {accuracy}".format(accuracy=clf.evaluate(x_test, y_test)[1]))
clf.export_model().save(os.path.join(args.path, "model.h5"))
| 2.8125
| 3
|
resourses/courses_resource.py
|
maxazure/papers
| 0
|
12779804
|
from flask_restful import Resource, reqparse, request
from flask_restful import fields, marshal_with, marshal
from sqlalchemy.exc import IntegrityError
from sqlalchemy import or_, and_, text
from flask_jwt_extended import jwt_required
from models.course import Course
from app import db
from utils.util import max_res
from helpers.courses_resource_helper import *
class CoursesResource(Resource):
@jwt_required
def get(self, course_id=None):
if course_id:
course = Course.find_by_id(course_id)
return max_res(marshal(course, course_fields))
else:
conditions = []
args = course_query_parser.parse_args()
page = args['page']
per_page = args['pagesize']
if args['orderby'] not in sortable_fields:
                return max_res('', code=500, errmsg='invalid sort field')
sort = args['orderby']
if args['desc']>0:
sort = args['orderby'] + ' desc'
conditions = make_conditions(conditions,args)
            # More query conditions can be added here, for example:
            # if args['name'] is not None:
            #     conditions.append(Course.name.like('%'+args['name']+'%'))
            if not conditions:
pagination = Course.query.order_by(text(sort)).paginate(page, per_page, error_out=False)
else:
pagination = Course.query.filter(*conditions).order_by(text(sort)).paginate(page, per_page, error_out=False)
paginate = {
'total':pagination.total,
'pageSize': pagination.per_page,
'current': pagination.page
}
print(pagination.items)
return max_res(marshal({
'pagination': paginate,
'list': [marshal(u, course_fields) for u in pagination.items]
}, course_list_fields))
@jwt_required
def post(self):
args = course_post_parser.parse_args()
course = Course(**args)
try:
course.add()
except IntegrityError:
            return max_res('', code=401, errmsg='duplicate name')
return max_res(marshal(course, course_fields))
def put(self, course_id=None):
course = Course.find_by_id(course_id)
args = course_update_parser.parse_args()
course = update_all_fields(args, course)
        # More fields to update can be added here, for example:
        # if args['name']:
        #     course.name = args['name']
        #
db.session.commit()
try:
course.update()
except Exception as e:
return max_res('',500, 'Failed to modify.')
return max_res(marshal(course, course_fields))
def delete(self, course_id=None):
course = Course.find_by_id(course_id)
try:
course.delete()
except Exception as e:
return max_res('',500, 'The record has already deleted.')
return max_res('The course has been deleted.')
| 2.296875
| 2
|
aasaan/ashramvisit/models.py
|
deepakkt/aasaan
| 0
|
12779805
|
<filename>aasaan/ashramvisit/models.py<gh_stars>0
from django.db import models
from contacts.models import Center, Contact, IndividualContactRoleZone, Zone
from smart_selects.db_fields import GroupedForeignKey
class AshramVisit(models.Model):
Center = GroupedForeignKey(Center, 'zone')
arrival_date = models.DateTimeField("Arrival Date & Time")
departure_time = models.TimeField("Departure Time")
participant_count = models.IntegerField()
lunch = models.BooleanField()
dinner = models.BooleanField()
contact_person = models.CharField(max_length=100)
mobile_no = models.CharField("Mobile Number", max_length=15)
def __str__(self):
return "%s - %s" %(self.Center, self.participant_count)
| 2.15625
| 2
|
others/montecarlo.py
|
1lch2/PythonExercise
| 1
|
12779806
|
import random
import math
import numpy as np
import matplotlib.pyplot as plt
# Calculating Pi using Monte Carlo algorithm.
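# Why this works: (x, y) is drawn uniformly from the unit square, so the probability
# that x**2 + y**2 <= 1 equals the area of the quarter circle divided by the area of
# the square, i.e. (pi/4) / 1. Hence inside/total approximates pi/4, and
# 4 * inside / total approximates pi.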
def montecarlo_pi(times:int):
inside = 0
total = times
for i in range(times):
x_i = random.random()
y_i = random.random()
delta = x_i ** 2 + y_i **2 - 1
if delta <= 0:
inside += 1
approx_pi = 4 * inside / total
print('\nRandom test: ' + str(times))
print('Approximation of pi is:{:.8f}'.format(approx_pi))
return approx_pi
if __name__ == '__main__':
numlist = [100, 500, 1000, 5000, 10000, 50000, 100000, 500000, 1000000, 5000000, 10000000, 30000000, 50000000, 75000000, 100000000]
x_list = list(np.log10(numlist))
pi_ = []
for times in numlist:
pi_.append(montecarlo_pi(times))
plt.figure()
plt.plot([min(x_list), max(x_list)], [math.pi, math.pi], color='red', label='true value')
plt.plot(x_list, pi_, 'b.-', label='approximation')
plt.legend()
plt.xlabel('log10(n)')
plt.ylabel('pi')
my_y_ticks = np.arange(3, 3.4, 0.02)
plt.yticks(my_y_ticks)
plt.ylim((min(pi_)-0.1, max(pi_)+0.1))
plt.show()
| 3.59375
| 4
|
Week5/ps6/experiments.py
|
AustinKladke/6.00.1x-MIT-CS-Python
| 0
|
12779807
|
<reponame>AustinKladke/6.00.1x-MIT-CS-Python<filename>Week5/ps6/experiments.py
# -*- coding: utf-8 -*-
"""
Created on Wed Mar 10 19:37:20 2021
@author: akladke
"""
import string
# letter_lst = []
# num_lst = []
# count_lower = 1
# for i in string.ascii_lowercase:
# print(ord(i))
# letter_lst.append(i)
# num_lst.append(count_lower)
# count_lower += 1
# print(letter_lst)
# print(num_lst)
original_dict = {}
count_lower = 1
for i in string.ascii_lowercase:
original_dict[i] = count_lower
original_dict[i] = ord(i)
count_lower += 1
print(original_dict)
original_dict_2 = {}
count_upper = 1
for i in string.ascii_uppercase:
original_dict_2[i] = count_upper
original_dict_2[i] = ord(i)
count_upper += 1
print(original_dict_2)
# Practice shifting the dictionary values by a fixed amount.
# Each letter needs to be shifted by the chosen amount, wrapping back to the
# start of the alphabet (ord 97 for lowercase) when it runs past the end.
# chr() then converts the shifted number back into the right letter, so we
# end up with, for example, {A: 'C', B: 'D'}, etc.
# ord range for lowercase letters => 97 - 122
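# Illustration (not part of the original exercise): the wrap-around shift described
# above can also be written with modular arithmetic instead of the manual 97/122
# boundary checks. caesar_shift_letter is a hypothetical helper used only here.
def caesar_shift_letter(letter, shift):
    if letter.islower():
        return chr((ord(letter) - ord('a') + shift) % 26 + ord('a'))
    if letter.isupper():
        return chr((ord(letter) - ord('A') + shift) % 26 + ord('A'))
    return letter  # non-letters pass through unchanged
# e.g. caesar_shift_letter('z', 3) == 'c' and caesar_shift_letter('Y', 3) == 'B'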
shift = 3
for i in original_dict:
if original_dict[i] + shift > 122:
x = (122 - original_dict[i])
x2 = abs(shift - x)
original_dict[i] = chr(97 + x2 - 1)
else:
original_dict[i] += shift
original_dict[i] = chr(original_dict[i])
print(original_dict)
#for i in original_dict:
#original_dict[i] = chr(original_dict[i])
#print(original_dict)
# for i in original_dict:
# original_dict[i] = original_dict[i].upper()
# original_dict[i] = original_dict.pop(i.upper())
# print(original_dict)
# ord range for uppercase letters => 65 - 90
shift = 3
for i in original_dict_2:
if original_dict_2[i] + shift > 90:
x = (90 - original_dict_2[i])
x2 = abs(shift - x)
original_dict_2[i] = chr(65 + x2 - 1)
else:
original_dict_2[i] += shift
original_dict_2[i] = chr(original_dict_2[i])
print(original_dict_2)
final_combined_dict = {}
final_combined_dict.update(original_dict)
final_combined_dict.update(original_dict_2)
print(final_combined_dict)
# Use cipher to translate word
word = 'abc jkl. Ok 123!'
new_dict = final_combined_dict
new_word = ""
for i in word:
try:
new_word += new_dict[i]
except:
new_word += i
print(new_word)
| 3.609375
| 4
|
service/settings/production.py
|
Mystopia/fantastic-doodle
| 3
|
12779808
|
<gh_stars>1-10
from service.settings.base import *
SECRET_KEY = os.getenv('SECRET_KEY')
DEBUG = False
ALLOWED_HOSTS = [h.strip() for h in os.getenv('ALLOWED_HOSTS', '').split(',') if h]
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
# Security: SSL/HTTPS
# https://docs.djangoproject.com/en/dev/topics/security/#ssl-https
## Set SECURE_SSL_REDIRECT to True, so that requests over HTTP are redirected to HTTPS.
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
SECURE_SSL_REDIRECT = True
## Use ‘secure’ cookies.
SESSION_COOKIE_SECURE = True
CSRF_COOKIE_SECURE = True
## Use HTTP Strict Transport Security (HSTS)
SECURE_HSTS_SECONDS = 3600
SECURE_HSTS_INCLUDE_SUBDOMAINS = True
| 1.671875
| 2
|
CRI_WeeklyMaps/__init__.py
|
adambreznicky/smudge_python
| 1
|
12779809
|
__file__ = '__init__.py'
__date__ = '6/18/2015'
__author__ = 'ABREZNIC'
| 1.125
| 1
|
notebooks/old/lupton_rgb.py
|
train-your-deblender/cutout-evaluation
| 0
|
12779810
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
#
# TODO: Remove this when https://github.com/parejkoj/astropy/tree/luptonRGB
# is in Astropy.
"""
Combine 3 images to produce a properly-scaled RGB image following Lupton et al. (2004).
For details, see : http://adsabs.harvard.edu/abs/2004PASP..116..133L
The three images must be aligned and have the same pixel scale and size.
Example usage:
imageR = np.random.random((100,100))
imageG = np.random.random((100,100))
imageB = np.random.random((100,100))
image = lupton_rgb.makeRGB(imageR, imageG, imageB, fileName='randoms.png')
lupton_rgb.displayRGB(image)
"""
import numpy as np
try:
import scipy.misc
HAVE_SCIPY_MISC = True
except ImportError:
HAVE_SCIPY_MISC = False
# from lsst.afw.display.displayLib import replaceSaturatedPixels, getZScale
def compute_intensity(imageR, imageG=None, imageB=None):
"""
Return a naive total intensity from the red, blue, and green intensities.
Parameters
----------
imageR : `~numpy.ndarray`
Intensity of image to be mapped to red; or total intensity if imageG and
imageB are None.
imageG : `~numpy.ndarray`
Intensity of image to be mapped to green; or None.
imageB : `~numpy.ndarray`
Intensity of image to be mapped to blue; or None.
"""
if imageG is None or imageB is None:
assert imageG is None and imageB is None, \
"Please specify either a single image or red, green, and blue images"
return imageR
intensity = (imageR + imageG + imageB)/3.0
# Repack into whatever type was passed to us
return np.array(intensity, dtype=imageR.dtype)
def zscale(image, nSamples=1000, contrast=0.25):
"""
TBD: replace with newly added astropy.zscale function.
This emulates ds9's zscale feature. Returns the suggested minimum and
maximum values to display.
Parameters
----------
image : `~numpy.ndarray`
The image to compute the scaling on.
nSamples : int
How many samples to take when building the histogram.
contrast : float
???
"""
    stride = image.size // nSamples
samples = image.flatten()[::stride]
samples.sort()
chop_size = int(0.10*len(samples))
subset = samples[chop_size:-chop_size]
i_midpoint = int(len(subset)/2)
I_mid = subset[i_midpoint]
fit = np.polyfit(np.arange(len(subset)) - i_midpoint, subset, 1)
# fit = [ slope, intercept]
z1 = I_mid + fit[0]/contrast * (1-i_midpoint)/1.0
z2 = I_mid + fit[0]/contrast * (len(subset)-i_midpoint)/1.0
return z1, z2
class Mapping(object):
"""Baseclass to map red, blue, green intensities into uint8 values"""
def __init__(self, minimum=None, image=None):
"""
Create a mapping
Parameters
----------
minimum : float or sequence(3)
Intensity that should be mapped to black (a scalar or array for R, G, B).
image : `~numpy.ndarray`
The image to be used to calculate the mapping.
If provided, it is also used as the default for makeRgbImage().
"""
self._uint8Max = float(np.iinfo(np.uint8).max)
try:
len(minimum)
except:
minimum = 3*[minimum]
assert len(minimum) == 3, "Please provide 1 or 3 values for minimum"
self.minimum = minimum
self._image = image
def makeRgbImage(self, imageR=None, imageG=None, imageB=None,
xSize=None, ySize=None, rescaleFactor=None):
"""
Convert 3 arrays, imageR, imageG, and imageB into a numpy RGB image.
Parameters
----------
imageR : `~numpy.ndarray`
Image to map to red (if None, use the image passed to the constructor).
imageG : `~numpy.ndarray`
Image to map to green (if None, use imageR).
imageB : `~numpy.ndarray`
Image to map to blue (if None, use imageR).
xSize : int
Desired width of RGB image (or None). If ySize is None, preserve
aspect ratio.
ySize : int
Desired height of RGB image (or None).
rescaleFactor : float
Make size of output image rescaleFactor*size of the input image.
Cannot be specified if xSize or ySize are given.
"""
if imageR is None:
if self._image is None:
raise RuntimeError("You must provide an image or pass one to the constructor")
imageR = self._image
if imageG is None:
imageG = imageR
if imageB is None:
imageB = imageR
if xSize is not None or ySize is not None:
assert rescaleFactor is None, "You may not specify a size and rescaleFactor"
h, w = imageR.shape
if ySize is None:
ySize = int(xSize*h/float(w) + 0.5)
elif xSize is None:
xSize = int(ySize*w/float(h) + 0.5)
# need to cast to int when passing tuple to imresize.
size = (int(ySize), int(xSize)) # n.b. y, x order for scipy
elif rescaleFactor is not None:
            size = float(rescaleFactor)  # a float is interpreted as a percentage
else:
size = None
if size is not None:
if not HAVE_SCIPY_MISC:
raise RuntimeError("Unable to rescale as scipy.misc is unavailable.")
imageR = scipy.misc.imresize(imageR, size, interp='bilinear', mode='F')
imageG = scipy.misc.imresize(imageG, size, interp='bilinear', mode='F')
imageB = scipy.misc.imresize(imageB, size, interp='bilinear', mode='F')
return np.dstack(self._convertImagesToUint8(imageR, imageG, imageB)).astype(np.uint8)
def intensity(self, imageR, imageG, imageB):
"""
Return the total intensity from the red, blue, and green intensities.
This is a naive computation, and may be overridden by subclasses.
"""
return compute_intensity(imageR, imageG, imageB)
def mapIntensityToUint8(self, I):
"""
Return an array which, when multiplied by an image, returns that image
mapped to the range of a uint8, [0, 255] (but not converted to uint8).
The intensity is assumed to have had minimum subtracted (as that can be
done per-band).
"""
with np.errstate(invalid='ignore', divide='ignore'): # n.b. np.where can't and doesn't short-circuit
return np.where(I <= 0, 0, np.where(I < self._uint8Max, I, self._uint8Max))
def _convertImagesToUint8(self, imageR, imageG, imageB):
"""Use the mapping to convert images imageR, imageG, and imageB to a triplet of uint8 images"""
imageR = imageR - self.minimum[0] # n.b. makes copy
imageG = imageG - self.minimum[1]
imageB = imageB - self.minimum[2]
fac = self.mapIntensityToUint8(self.intensity(imageR, imageG, imageB))
imageRGB = [imageR, imageG, imageB]
for c in imageRGB:
c *= fac
c[c < 0] = 0 # individual bands can still be < 0, even if fac isn't
pixmax = self._uint8Max
r0, g0, b0 = imageRGB # copies -- could work row by row to minimise memory usage
with np.errstate(invalid='ignore', divide='ignore'): # n.b. np.where can't and doesn't short-circuit
for i, c in enumerate(imageRGB):
c = np.where(r0 > g0,
np.where(r0 > b0,
np.where(r0 >= pixmax, c*pixmax/r0, c),
np.where(b0 >= pixmax, c*pixmax/b0, c)),
np.where(g0 > b0,
np.where(g0 >= pixmax, c*pixmax/g0, c),
np.where(b0 >= pixmax, c*pixmax/b0, c))).astype(np.uint8)
c[c > pixmax] = pixmax
imageRGB[i] = c
return imageRGB
class LinearMapping(Mapping):
"""A linear map map of red, blue, green intensities into uint8 values"""
def __init__(self, minimum=None, maximum=None, image=None):
"""
A linear stretch from [minimum, maximum].
If one or both are omitted use image min and/or max to set them.
Parameters
----------
minimum : float
Intensity that should be mapped to black (a scalar or array for R, G, B).
maximum : float
Intensity that should be mapped to white (a scalar).
"""
if minimum is None or maximum is None:
assert image is not None, "You must provide an image if you don't set both minimum and maximum"
if minimum is None:
minimum = image.min()
if maximum is None:
maximum = image.max()
Mapping.__init__(self, minimum=minimum, image=image)
self.maximum = maximum
if maximum is None:
self._range = None
else:
assert maximum - minimum != 0, "minimum and maximum values must not be equal"
self._range = float(maximum - minimum)
def mapIntensityToUint8(self, I):
with np.errstate(invalid='ignore', divide='ignore'): # n.b. np.where can't and doesn't short-circuit
return np.where(I <= 0, 0,
np.where(I >= self._range, self._uint8Max/I, self._uint8Max/self._range))
class ZScaleMapping(LinearMapping):
"""
A mapping for a linear stretch chosen by the zscale algorithm.
(preserving colours independent of brightness)
x = (I - minimum)/range
"""
def __init__(self, image, nSamples=1000, contrast=0.25):
"""
A linear stretch from [z1, z2] chosen by the zscale algorithm.
Parameters
----------
nSamples : int
The number of samples to use to estimate the zscale parameters.
contrast : float
            The contrast parameter passed on to the zscale algorithm.
"""
z1, z2 = zscale(image, nSamples, contrast)
LinearMapping.__init__(self, z1, z2, image)
class AsinhMapping(Mapping):
"""
A mapping for an asinh stretch (preserving colours independent of brightness)
x = asinh(Q (I - minimum)/range)/Q
This reduces to a linear stretch if Q == 0
See http://adsabs.harvard.edu/abs/2004PASP..116..133L
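    (For small Q this follows from asinh(Q u)/Q -> u as Q -> 0, so the mapping
    approaches the linear stretch x = (I - minimum)/range.)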
"""
def __init__(self, minimum, dataRange, Q=8):
"""
asinh stretch from minimum to minimum + dataRange, scaled by Q, via:
x = asinh(Q (I - minimum)/dataRange)/Q
Parameters
----------
minimum : float
Intensity that should be mapped to black (a scalar or array for R, G, B).
dataRange : float
minimum+dataRange defines the white level of the image.
Q : float
The asinh softening parameter.
"""
Mapping.__init__(self, minimum)
epsilon = 1.0/2**23 # 32bit floating point machine epsilon; sys.float_info.epsilon is 64bit
if abs(Q) < epsilon:
Q = 0.1
else:
Qmax = 1e10
if Q > Qmax:
Q = Qmax
if False:
self._slope = self._uint8Max/Q # gradient at origin is self._slope
else:
frac = 0.1 # gradient estimated using frac*range is _slope
self._slope = frac*self._uint8Max/np.arcsinh(frac*Q)
self._soften = Q/float(dataRange)
def mapIntensityToUint8(self, I):
with np.errstate(invalid='ignore', divide='ignore'): # n.b. np.where can't and doesn't short-circuit
return np.where(I <= 0, 0, np.arcsinh(I*self._soften)*self._slope/I)
class AsinhZScaleMapping(AsinhMapping):
"""
A mapping for an asinh stretch, estimating the linear stretch by zscale.
x = asinh(Q (I - z1)/(z2 - z1))/Q
See AsinhMapping
"""
def __init__(self, image1, image2=None, image3=None, Q=8, pedestal=None):
"""
Create an asinh mapping from an image, setting the linear part of the
stretch using zscale.
Parameters
----------
image1 : `~numpy.ndarray`
The image to analyse,
# or a list of 3 images to be converted to an intensity image.
image2 : `~numpy.ndarray`
the second image to analyse (must be specified with image3).
image3 : `~numpy.ndarray`
the third image to analyse (must be specified with image2).
Q : float
The asinh softening parameter.
pedestal : float or sequence(3)
The value, or array of 3 values, to subtract from the images; or None.
pedestal, if not None, is removed from the images when calculating
the zscale stretch, and added back into Mapping.minimum.
"""
if image2 is None or image3 is None:
assert image2 is None and image3 is None, "Please specify either a single image or three images."
image = [image1]
else:
image = [image1, image2, image3]
if pedestal is not None:
try:
assert len(pedestal) in (1, 3,), "Please provide 1 or 3 pedestals."
except TypeError:
pedestal = 3*[pedestal]
image = list(image) # needs to be mutable
for i, im in enumerate(image):
if pedestal[i] != 0.0:
image[i] = im - pedestal[i] # n.b. a copy
else:
pedestal = len(image)*[0.0]
image = compute_intensity(*image)
zscale = ZScaleMapping(image)
dataRange = zscale.maximum - zscale.minimum[0] # zscale.minimum is always a triple
minimum = zscale.minimum
for i, level in enumerate(pedestal):
minimum[i] += level
AsinhMapping.__init__(self, minimum, dataRange, Q)
self._image = image
def makeRGB(imageR, imageG=None, imageB=None, minimum=0, dataRange=5, Q=8,
saturatedBorderWidth=0, saturatedPixelValue=None,
xSize=None, ySize=None, rescaleFactor=None,
fileName=None):
"""
Make an RGB color image from 3 images using an asinh stretch.
Parameters
----------
imageR : `~numpy.ndarray`
Image to map to red (if None, use the image passed to the constructor).
imageG : `~numpy.ndarray`
Image to map to green (if None, use imageR).
imageB : `~numpy.ndarray`
Image to map to blue (if None, use imageR).
minimum : float
Intensity that should be mapped to black (a scalar or array for R, G, B).
dataRange : float
minimum+dataRange defines the white level of the image.
Q : float
The asinh softening parameter.
saturatedBorderWidth : int
If saturatedBorderWidth is non-zero, replace saturated pixels with saturatedPixelValue.
Note that replacing saturated pixels requires that the input images be MaskedImages.
saturatedPixelValue : float
Value to replace saturated pixels with.
xSize : int
Desired width of RGB image (or None). If ySize is None, preserve aspect ratio.
ySize : int
Desired height of RGB image (or None).
rescaleFactor : float
Make size of output image rescaleFactor*size of the input image.
Cannot be specified if xSize or ySize are given.
"""
if imageG is None:
imageG = imageR
if imageB is None:
imageB = imageR
if saturatedBorderWidth:
if saturatedPixelValue is None:
raise ValueError("saturatedPixelValue must be set if saturatedBorderWidth is set")
msg = "Cannot do this until we extract replaceSaturatedPixels out of afw/display/saturated.cc"
raise NotImplementedError(msg)
# replaceSaturatedPixels(imageR, imageG, imageB, saturatedBorderWidth, saturatedPixelValue)
asinhMap = AsinhMapping(minimum, dataRange, Q)
rgb = asinhMap.makeRgbImage(imageR, imageG, imageB,
xSize=xSize, ySize=ySize, rescaleFactor=rescaleFactor)
if fileName:
writeRGB(fileName, rgb)
return rgb
def displayRGB(rgb, show=True, title=None):
"""
Display an rgb image using matplotlib.
Parameters
----------
rgb : `~numpy.ndarray`
The RGB image to display
show : bool
If true, call plt.show()
title : str
Title to use for the displayed image.
"""
import matplotlib.pyplot as plt
plt.imshow(rgb, interpolation='nearest', origin="lower")
if title:
plt.title(title)
if show:
plt.show()
return plt
def writeRGB(fileName, rgbImage):
"""
Write an RGB image to disk.
Most versions of matplotlib support png and pdf (although the eps/pdf/svg
writers may be buggy, possibly due an interaction with useTeX=True in the
matplotlib settings).
If your matplotlib bundles pil/pillow you should also be able to write jpeg
and tiff files.
Parameters
----------
fileName : str
The output file. The extension defines the format, and must be
supported by matplotlib.imsave().
rgbImage : `~numpy.ndarray`
The RGB image to save.
"""
import matplotlib.image
matplotlib.image.imsave(fileName, rgbImage)
| 2.3125
| 2
|
mendeley/models/files.py
|
providedh/mendeley-python-sdk
| 0
|
12779811
|
import os
import re
from mendeley.response import SessionResponseObject
class File(SessionResponseObject):
"""
A file attached to a document.
.. attribute:: id
.. attribute:: size
.. attribute:: file_name
.. attribute:: mime_type
.. attribute:: filehash
.. attribute:: download_url
"""
content_type = 'application/vnd.mendeley-file.1+json'
    filename_regex = re.compile(r'filename="(\S+)"')
@property
def download_url(self):
"""
the URL at which the file can be downloaded. This is only valid for a short time, so should not be cached.
"""
file_url = '/files/%s' % self.id
rsp = self.session.get(file_url, allow_redirects=False)
return rsp.headers['location']
def document(self, view=None):
"""
:param view: document view to return.
:return: a :class:`UserDocument <mendeley.models.documents.UserDocument>` or
:class:`CatalogDocument <mendeley.models.catalog.CatalogDocument>`, depending on which the document is
attached to.
"""
if 'document_id' in self.json:
return self.session.documents.get_lazy(self.json['document_id'], view=view)
elif 'catalog_id' in self.json:
return self.session.catalog.get_lazy(self.json['catalog_id'], view=view)
else:
return None
def download(self, directory):
"""
Downloads the file.
:param directory: the directory to download the file to. This must exist.
:return: the path to the downloaded file.
"""
rsp = self.session.get('/files/%s' % self.id, stream=True)
filename = self.filename_regex.search(rsp.headers['content-disposition']).group(1)
path = os.path.join(directory, filename)
with open(path, 'wb') as f:
for block in rsp.iter_content(1024):
if not block:
break
f.write(block)
return path
def delete(self):
"""
Deletes the file.
"""
self.session.delete('/files/%s' % self.id)
@classmethod
def fields(cls):
return ['id', 'size', 'file_name', 'mime_type', 'filehash']
| 2.734375
| 3
|
python/snpx/snpx_tf/arch/resnet.py
|
ahmedezzat85/SNPX_ML
| 0
|
12779812
|
import tensorflow as tf
from . tf_net import TFNet
class Resnet(TFNet):
"""
"""
def __init__(self, data, data_format, num_classes, is_train=True):
dtype = data.dtype.base_dtype
super(Resnet, self).__init__(dtype, data_format, train=is_train)
self.net_out = tf.identity(data, name='data')
self.num_classes = num_classes
def _resnet_block(self, filters, kernel, stride=(1,1), act_fn='relu', conv_1x1=0, name=None):
""" """
data = self.net_out
shortcut = data
bn_out = self.batch_norm(data, act_fn, name=name+'_bn')
if conv_1x1:
shortcut = self.convolution(bn_out, filters, (1,1), stride, pad='same', act_fn='',
no_bias=True, name=name+'_1x1_conv')
net_out = self.convolution(bn_out, filters, kernel, stride, act_fn=act_fn, add_bn=True,
name=name+'_conv1')
net_out = self.convolution(net_out, filters, kernel, (1,1), act_fn='', no_bias=True,
name=name+'_conv2')
self.net_out = net_out + shortcut
def _resnet_unit(self, num_blocks, filters, kernel, stride=1, act_fn='relu', name=None):
""" """
strides = (stride, stride)
self._resnet_block(filters, kernel, strides, act_fn, conv_1x1=1, name=name+'_block0')
for i in range(1, num_blocks):
self._resnet_block(filters, kernel, (1,1), act_fn, name=name+'_block'+str(i))
def __call__(self, num_stages=3, num_blocks=3, filters=[16, 32, 64], strides=[1,2,2]):
""" """
self.net_out = self.convolution(self.net_out, filters[0], (3,3), (1,1), act_fn='',
no_bias=True, name='Conv0')
for k in range(num_stages):
self._resnet_unit(num_blocks, filters[k], (3,3), strides[k], name='stage'+str(k))
net_out = self.pooling(self.net_out, 'avg', (8,8), name="global_pool")
net_out = self.dropout(net_out, 0.5)
net_out = self.flatten(net_out)
net_out = self.Softmax(net_out, self.num_classes)
return net_out
def snpx_net_create(num_classes, input_data, data_format="NHWC", is_training=True):
""" """
net = Resnet(input_data, data_format, num_classes, is_training)
net_out = net(num_stages=3, num_blocks=3, filters=[16, 32, 64], strides=[1,2,2])
return net_out
| 2.78125
| 3
|
multilingual_t5/r_bn_en/__init__.py
|
sumanthd17/mt5
| 0
|
12779813
|
"""r_bn_en dataset."""
from .r_bn_en import RBnEn
| 0.980469
| 1
|
host/workers.py
|
mjsamuel/Homebridge-MagicHome-Sync
| 2
|
12779814
|
<filename>host/workers.py
import logging, threading, time
from PIL import ImageGrab
class StoppableThread(threading.Thread):
def __init__(self, *args, **kwargs):
super(StoppableThread, self).__init__(*args, **kwargs)
self._stop_event = threading.Event()
def stop(self):
self._stop_event.set()
def stopped(self):
return self._stop_event.is_set()
def sync_screen(light, polling_interval):
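    # Repeatedly grab the screen, downsample it to rx x ry pixels, average the red,
    # green and blue channels over the whole frame, and send the resulting colour to
    # the light whenever it differs from the previously sent colour.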
# Resolution of scaled down image
rx = 64
ry = 36
total_pixels = rx * ry
prev_colour = None
while threading.current_thread().stopped() is False:
img = ImageGrab.grab()
img = img.resize((rx, ry))
red = green = blue = 0
for y in range(0, img.size[1]):
for x in range(0, img.size[0]):
pixel = img.getpixel((x,y))
red = red + pixel[0]
green = green + pixel[1]
blue = blue + pixel[2]
red = int(red / total_pixels)
green = int(green / total_pixels)
blue = int(blue / total_pixels)
colour = (red, green, blue)
if colour != prev_colour:
light.setRgb(colour)
prev_colour = colour
logging.info("RGB %3d %3d %3d Sent" % colour)
else:
logging.info("Same colour not sent")
time.sleep(polling_interval)
light.turnOff()
| 2.765625
| 3
|
ex093.py
|
Gustavo-Dev-Web/python
| 0
|
12779815
|
<reponame>Gustavo-Dev-Web/python
dados = {}
gols = []
dados['Nome'] = str(input('Athlete name: '))
partidas = int(input('How many matches did the player play: '))
for c in range(0, partidas):
    gols.append(int(input(f'How many goals did player {dados["Nome"]} score in match {c}: ')))
dados['Gols'] = gols[:]
dados['Total'] = sum(gols)
print('--'*30)
print(dados)
print('--'*30)
for k, v in dados.items():
    print(f'{k}: {v}')
print('--'*30)
print(f'Player {dados["Nome"]} played {partidas} matches')
for c in range(0, partidas):
    print(f' => In match {c}, he scored {dados["Gols"][c]} goals')
print(f'That was a total of {dados["Total"]} goals')
| 3.59375
| 4
|
src/yolo_preparing.py
|
vikasg603/Vehicle-Front-Rear-Detection-for-License-Plate-Detection-Enhancement
| 29
|
12779816
|
'''
This file is used to delete the images without labels
(Delete the images without corresponding txt file)
AND THEN
Generate the train.txt and test.txt for YOLO
'''
from os.path import splitext, isfile, join, basename
from os import remove
from glob import glob, iglob
# root path
dir = '/home/shaoheng/Documents/cars_label_FRNet/cars/foryolo'
'''
First part
'''
def clean_data(dir):
# define the dir path which includes all the images and labels
img_lst = glob(dir+'/*.jpg') # modify to png if needed
for img in img_lst:
split_former = splitext(img)[0]
if isfile(split_former + '.txt') is False:
remove(img)
            print('remove', img)
'''
second part, source:https://github.com/ManivannanMurugavel/YOLO-Annotation-Tool/blob/master/process.py
'''
def training_data_sumup(dir):
# Percentage of images to be used for the test set
percentage_test = 0
# Create and/or truncate train.txt and test.txt
file_train = open('train.txt', 'w')
file_test = open('test.txt', 'w')
# Populate train.txt and test.txt
counter = 1
if percentage_test:
index_test = round(100 / percentage_test)
for pathAndFilename in iglob(join(dir, "*.jpg")):
title, ext = splitext(basename(pathAndFilename))
if percentage_test and counter == index_test:
counter = 1
file_test.write(dir + "/" + title + '.jpg' + "\n")
else:
file_train.write(dir + "/" + title + '.jpg' + "\n")
counter = counter + 1
# clean_data(dir)
training_data_sumup(dir)
| 3.15625
| 3
|
splendor_sim/src/action/reserve_card_action.py
|
markbrockettrobson/SplendorBots
| 1
|
12779817
|
import copy
import typing
import splendor_sim.interfaces.action.i_action as i_action
import splendor_sim.interfaces.card.i_card as i_card
import splendor_sim.interfaces.coin.i_coin_type as i_coin_type
import splendor_sim.interfaces.game_state.i_game_state as i_game_state
import splendor_sim.interfaces.player.i_player as i_player
class ReserveCardAction(i_action.IAction):
def __init__(
self,
valid_coin_type_set: typing.Set[i_coin_type.ICoinType],
current_player: i_player.IPlayer,
coins: typing.Dict[i_coin_type.ICoinType, int],
card: i_card.ICard,
):
self._validate_input(valid_coin_type_set, coins)
self._card = card
self._coin_dictionary = copy.copy(coins)
self._current_player = current_player
def validate(self, game_state: i_game_state.IGameState) -> bool:
number_of_reserved_cards = (
self._current_player.get_card_inventory().get_number_of_reserved_cards()
)
max_number_of_reserved_cards = (
self._current_player.get_card_inventory().get_max_number_of_reserved_cards()
)
if number_of_reserved_cards < max_number_of_reserved_cards:
if self._card in game_state.get_card_reserve().get_cards_for_sale():
if game_state.get_coin_reserve().has_minimum(self._coin_dictionary):
return True
return False
def execute(self, game_state: i_game_state.IGameState) -> None:
if not self.validate(game_state):
raise ValueError("invalid action")
game_state.get_coin_reserve().remove_coins(self._coin_dictionary)
game_state.get_card_reserve().remove_card(self._card)
self._current_player.get_coin_inventory().add_coins(self._coin_dictionary)
self._current_player.get_card_inventory().add_card_to_reserved(self._card)
@staticmethod
def _validate_input(
valid_coin_type_set: typing.Set[i_coin_type.ICoinType],
coins: typing.Dict[i_coin_type.ICoinType, int],
):
if len(coins.keys()) != 1:
raise ValueError("can only take 1 type of coin")
for coin_type, number_of_coins in coins.items():
if number_of_coins != 1:
raise ValueError("can only take one coin")
if coin_type not in valid_coin_type_set:
raise ValueError("invalid coin type")
| 2.390625
| 2
|
RecoVertex/BeamSpotProducer/scripts/copyFiles.py
|
nistefan/cmssw
| 0
|
12779818
|
#!/usr/bin/env python
import sys, os
from CommonMethods import *
def main():
if len(sys.argv) < 3:
error = "Usage: copyFiles.py fromDir destDir (optional filter)"
exit(error)
fromDir = sys.argv[1]
    print(fromDir)
if (fromDir[len(fromDir)-1] != '/'):
fromDir += '/'
destDir = sys.argv[2] + "/"
filter = ""
if len(sys.argv) > 3:
filter = sys.argv[3]
fileList = ls(fromDir,filter)
copiedFiles = cp(fromDir,destDir,fileList)
if len(copiedFiles) != len(fileList):
error = "ERROR: I couldn't copy all files from " + fromDir
exit(error)
if __name__ == "__main__":
main()
| 3.28125
| 3
|
spawn_npc.py
|
Matrix-King-Studio/HydraDeepQNet
| 0
|
12779819
|
<gh_stars>0
import logging
import random
import carla
def spawn_npc(client, world, blueprint_library, safe=True, number_of_vehicles=30, number_of_walkers=0):
actor_list = []
try:
blueprints = blueprint_library.filter('vehicle.*')
if safe:
blueprints = [x for x in blueprints if int(x.get_attribute('number_of_wheels')) == 4]
blueprints = [x for x in blueprints if not x.id.endswith('isetta')]
blueprints = [x for x in blueprints if not x.id.endswith('carlacola')]
spawn_points = world.get_map().get_spawn_points()
number_of_spawn_points = len(spawn_points)
if number_of_vehicles < number_of_spawn_points:
random.shuffle(spawn_points)
elif number_of_vehicles > number_of_spawn_points:
msg = 'requested %d vehicles, but could only find %d spawn points'
logging.warning(msg, number_of_vehicles, number_of_spawn_points)
number_of_vehicles = number_of_spawn_points
# @todo cannot import these directly.
SpawnActor = carla.command.SpawnActor
SetAutopilot = carla.command.SetAutopilot
FutureActor = carla.command.FutureActor
batch = []
for n, transform in enumerate(spawn_points):
if n >= number_of_vehicles:
break
blueprint = random.choice(blueprints)
if blueprint.has_attribute('color'):
color = random.choice(blueprint.get_attribute('color').recommended_values)
blueprint.set_attribute('color', color)
blueprint.set_attribute('role_name', 'autopilot')
batch.append(SpawnActor(blueprint, transform).then(SetAutopilot(FutureActor, True)))
for response in client.apply_batch_sync(batch):
if response.error:
logging.error(response.error)
else:
actor_list.append(response.actor_id)
print('spawned %d vehicles, press Ctrl+C to exit.' % len(actor_list))
except Exception as e:
print(f"spawn_npc Error: {e}")
print('\ndestroying %d actors' % len(actor_list))
client.apply_batch([carla.command.DestroyActor(x) for x in actor_list])
| 2.46875
| 2
|
tools/tensorflow/cnn/alexnet/unpickle.py
|
feifeibear/dlbench
| 181
|
12779820
|
<gh_stars>100-1000
import cPickle
import numpy as np
import tensorflow as tf
PATH = './cifar-10-batches-py'
TARGETPATH = '/home/comp/csshshi/tensorflow/cifar-10-batches-py'
TEST_FILES = ['test_batch']
FILES = ['data_batch_1', 'data_batch_2', 'data_batch_3', 'data_batch_4', 'data_batch_5']
TRAIN_COUNT = 50000
EVAL_COUNT = 10000
IMAGE_SIZE = 32
NUM_CLASSES = 10
unpickled = {}
def unpickle(file):
dict = unpickled.get(file)
if dict:
return dict
fo = open(file, 'rb')
dict = cPickle.load(fo)
fo.close()
unpickled[file] = dict
return dict
def get_next_batch(batch_size, step, is_test=False):
files = FILES
if is_test:
files = TEST_FILES
file_index = step % len(FILES)
filename = files[file_index]
filename = '%s/%s'%(PATH, filename)
dict = unpickle(filename)
data_index = step/len(files) * batch_size
images = dict['data'][data_index:data_index+batch_size]
labels = dict['labels'][data_index:data_index+batch_size]
reshaped_images = [np.reshape(image, (IMAGE_SIZE, IMAGE_SIZE, 3)) for image in images]
return reshaped_images, labels
| 2.25
| 2
|
src/commands/huificator.py
|
slimsevernake/osbb-bot
| 9
|
12779821
|
import re
HUEVELS = {
'у': 'хую',
'У': 'хую',
'е': 'хуе',
'Е': 'хуе',
'ё': 'хуё',
'Ё': 'хуё',
'а': 'хуя',
'А': 'хуя',
'о': 'хуё',
'О': 'хуё',
'э': 'хуе',
'Э': 'хуе',
'я': 'хуя',
'Я': 'хуя',
'и': 'хуи',
'И': 'хуи',
'ы': 'хуы',
'Ы': 'хуы',
'ю': 'хую',
'Ю': 'хую'
}
PUNCT_MARKS = [',', '.', ';', ':']
def count_syllabiles(word):
count = 0
for letter in word:
if letter in HUEVELS:
count += 1
return count
def get_last_letter(word):
if word == '':
return word
last_letter = word[-1]
if last_letter in PUNCT_MARKS:
return get_last_letter(word[:-1])
return last_letter
def first_vowel(word):
res = re.search("[уеёыаоэяию]", word, re.IGNORECASE)
if res:
return res.start(), res.group()
return -1, ''
def huificator(word):
num_syl = count_syllabiles(word)
last_letter = get_last_letter(word)
if num_syl == 0:
return word
if num_syl == 1:
if last_letter in HUEVELS:
return word
pos, vow = first_vowel(word)
if pos == -1:
return word
repl = HUEVELS[vow].upper() if len(word) >= 2 and word[:2].isupper() else HUEVELS[vow]
result = repl + word[pos+1:]
if word.isupper():
result = result.upper()
elif word[:1].isupper():
result = result[:1].upper() + result[1:]
return result
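# Hypothetical smoke test (not part of the original module): print the substitution
# for a few sample words.
if __name__ == "__main__":
    for sample in ("привет", "мир", "да"):
        print(sample, "->", huificator(sample))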
| 3.484375
| 3
|
workshop_sections/extras/lstm_text_classification/trainer/task.py
|
CharleyGuo/tensorflow-workshop
| 691
|
12779822
|
import model
import tensorflow as tf
import utils
def train(target,
num_param_servers,
is_chief,
lstm_size=64,
input_filenames=None,
sentence_length=128,
vocab_size=2**15,
learning_rate=0.01,
output_dir=None,
batch_size=1024,
embedding_size=128,
num_epochs=2):
graph = tf.Graph()
with graph.as_default():
sentences, scores = model.get_inputs(
input_filenames, batch_size, num_epochs, sentence_length)
with tf.device(tf.train.replica_device_setter()):
lstm = model.BasicRegressionLSTM(
sentences,
scores,
num_param_servers,
vocab_size,
learning_rate,
embedding_size,
lstm_size
)
tf.contrib.learn.train(
graph,
output_dir,
lstm.train_op,
lstm.loss,
global_step_tensor=lstm.global_step,
supervisor_is_chief=is_chief,
supervisor_master=target
)
if __name__ == "__main__":
parser = utils.base_parser()
parser.add_argument(
'--learning-rate',
type=float,
default=0.01
)
utils.dispatch(
train,
**parser.parse_args().__dict__
)
| 2.515625
| 3
|
commhelp.py
|
Myselfminer/N
| 2
|
12779823
|
<gh_stars>1-10
##def get(what):
## if what=="q":
## what=what.strip("?help ")
## site=what
## else:
## a=open("commreg.temp","r")
## a.readlines()
## result=[]
## for i in a:
## result.append(a[i+site*5]+":"+a[i+site*5])
## return result
def get():
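    # Read commreg.temp and return its contents with each stored line stripped of
    # its trailing newline and re-joined one entry per line.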
a=open("commreg.temp","r")
b=a.readlines()
new=[]
for i in b:
result=i.strip("\n")
new.append(result)
newer=""
for i in new:
newer=newer+i+"\n"
    return newer
| 2.9375
| 3
|
orig/dos2unix.py
|
benhoyt/pas2go
| 33
|
12779824
|
<filename>orig/dos2unix.py
import os
for path in os.listdir('.'):
if not path.endswith('.PAS'):
continue
with open(path, 'rb') as f:
dos_text = f.read()
unix_text = dos_text.replace(b'\r\n', b'\n').replace(b'\t', b' ')
with open(path, 'wb') as f:
f.write(unix_text)
print(f'{path:12} {len(dos_text):5} -> {len(unix_text):5}')
| 2.421875
| 2
|
klaxer/api.py
|
klaxer/klaxer
| 2
|
12779825
|
"""The main Klaxer server"""
import logging
import json
import hug
from falcon import HTTP_400, HTTP_500
from klaxer.rules import Rules
from klaxer.errors import AuthorizationError, NoRouteFoundError, ServiceNotDefinedError
from klaxer.lib import classify, enrich, filtered, route, send, validate
from klaxer.models import Alert
from klaxer.users import create_user, add_message, bootstrap, api_key_authentication, is_existing_user
CURRENT_FILTERS = []
RULES = Rules()
@hug.post('/alert/{service_name}/{token}')
def incoming(service_name: hug.types.text, token: hug.types.text, response, debug=False, body=None):
"""An incoming alert. The core API method"""
try:
validate(service_name, token)
alert = Alert.from_service(service_name, body)
alert = classify(alert, RULES.get_classification_rules(service_name))
# Filter based on rules (e.g. junk an alert if a string is in the body or if it came from a CI bot).
if filtered(alert, RULES.get_exclusion_rules(service_name)):
return
        # Filter based on user interactions (e.g. bail if we've snoozed this notification type).
if filtered(alert, CURRENT_FILTERS):
return
        # Enrich based on custom rules (e.g. all alerts with 'keepalive' have '@deborah' appended so Deborah gets an extra level of notification priority).
alert = enrich(alert, RULES.get_enrichment_rules(service_name))
# Determine where the message goes
alert = route(alert, RULES.get_routing_rules(service_name))
# Present relevant debug info without actually sending the Alert
if debug:
return alert.to_dict()
        # The target channel gets queried for the most recent message. If it's identical, perform rollup. Otherwise, post the alert.
send(alert)
return {"status": "ok"}
except (AuthorizationError, NoRouteFoundError, ServiceNotDefinedError) as error:
logging.exception('Failed to serve an alert response')
response.status = HTTP_500
return {"status": error.message}
@hug.post('/user/register')
def register(response, body=None):
"""Register for Klaxer and get a key in return."""
if not body:
response.status = HTTP_400
return {"status": "No request body provided"}
email = body.get('email')
name = body.get('name')
if not email or not name:
response.status = HTTP_400
return {"status": "Please provide a valid name and email."}
if is_existing_user(email):
response.status = HTTP_400
return {"status": f"Email {email} is already registered"}
user = create_user(name=name, email=email)
return {
'id': user.id,
'api_key': user.api_key
}
@hug.get('/user/me', requires=api_key_authentication)
def profile(user: hug.directives.user, response, body=None):
"""If authenticated, give the user back their profile information."""
return user.to_dict()
@hug.startup()
def startup(api):
"""Bootstrap the database when the API starts."""
bootstrap()
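# Illustrative only (added): with the API served locally (for example via
# `hug -f klaxer/api.py`), an alert could be posted roughly like below. The
# service name, token and body are made-up placeholders, not values that
# Klaxer defines.
#   import requests
#   requests.post('http://localhost:8000/alert/sensu/EXAMPLE_TOKEN',
#                 json={'text': 'disk usage at 95%'}, params={'debug': 'true'})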
| 2.109375
| 2
|
test_djangocms_blog/settings.py
|
marshalc/djangocms-blog
| 0
|
12779826
|
"""
Django settings for test_djangocms_blog project.
Generated by 'django-admin startproject' using Django 3.0.5.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
from tempfile import mkdtemp
# From blog tests, but why?
def gettext(s):
return s
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '<KEY>'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['*']
# Application definition
PRE_DJANGO_APPS = [
'test_djangocms_blog',
'djangocms_admin_style',
]
DJANGO_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
DJANGOCMS_APPS = [
'django.contrib.sites',
'cms',
'menus',
'treebeard',
]
DJANGOCMS_BLOG_APPS = [
'djangocms_blog', # Local repo version, not a pip install!
'filer',
'parler',
'meta',
'easy_thumbnails',
'django.contrib.sitemaps',
'djangocms_text_ckeditor',
'taggit',
'taggit_autosuggest',
# 'aldryn_apphooks_config',
# 'aldryn_search',
'djangocms_video',
'sortedm2m',
]
INTERNAL_TEST_APPS = [
'test_djangocms_blog.media_app',
]
INSTALLED_APPS = PRE_DJANGO_APPS + DJANGO_APPS + DJANGOCMS_APPS + DJANGOCMS_BLOG_APPS + INTERNAL_TEST_APPS
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'test_djangocms_blog.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
MIGRATION_MODULES = {}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
AUTH_USER_MODEL = 'test_djangocms_blog.CustomUser'
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
# LANGUAGE_CODE = 'en-us'
LANGUAGE_CODE = 'en'
LANGUAGES = (
('en', gettext('English')),
('fr', gettext('French')),
('it', gettext('Italiano')),
)
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
CMS_LANGUAGES = {
1: [
{
'code': 'en',
'name': gettext('English'),
'public': True,
},
{
'code': 'it',
'name': gettext('Italiano'),
'public': True,
},
{
'code': 'fr',
'name': gettext('French'),
'public': True,
},
],
2: [
{
'code': 'en',
'name': gettext('English'),
'public': True,
},
],
'default': {
'hide_untranslated': False,
},
}
PARLER_LANGUAGES = {
1: (
{'code': 'en'},
{'code': 'it'},
{'code': 'fr'},
),
2: (
{'code': 'en'},
),
'default': {
'fallbacks': ['en'],
'hide_untranslated': False,
}
}
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
# App specific config
CMS_TEMPLATES = (
('blog.html', 'Blog template'),
)
META_SITE_PROTOCOL = 'http'
META_USE_SITES = True
META_SITE_DOMAIN = 'example.com'
META_USE_OG_PROPERTIES = True
META_USE_TWITTER_PROPERTIES = True
META_USE_GOOGLEPLUS_PROPERTIES = True
THUMBNAIL_PROCESSORS = (
'easy_thumbnails.processors.colorspace',
'easy_thumbnails.processors.autocrop',
'filer.thumbnail_processors.scale_and_crop_with_subject_location',
'easy_thumbnails.processors.filters',
)
FILE_UPLOAD_TEMP_DIR = mkdtemp()
SITE_ID = 1
HAYSTACK_CONNECTIONS = {
'default': {}
}
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
}
}
BLOG_AUTO_SETUP = False
try:
import cmsplugin_filer_image.migrations_django # pragma: no cover # NOQA
MIGRATION_MODULES['cmsplugin_filer_image'] = 'cmsplugin_filer_image.migrations_django'
except ImportError:
pass
try:
import knocker # pragma: no cover # NOQA
INSTALLED_APPS.append('knocker')
INSTALLED_APPS.append('channels')
INSTALLED_APPS.append('djangocms_blog.liveblog', )
CHANNEL_LAYERS = {
'default': {
'BACKEND': 'asgiref.inmemory.ChannelLayer',
'ROUTING': 'test_djangocms_blog.routing.channel_routing',
},
}
except ImportError:
pass
| 1.914063
| 2
|
test/http2_test/http2_test_server.py
|
farcaller/grpc
| 0
|
12779827
|
# Copyright 2016, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""HTTP2 Test Server"""
import argparse
import logging
import twisted
import twisted.internet
import twisted.internet.endpoints
import twisted.internet.reactor
import http2_base_server
import test_goaway
import test_max_streams
import test_ping
import test_rst_after_data
import test_rst_after_header
import test_rst_during_data
_TEST_CASE_MAPPING = {
'rst_after_header': test_rst_after_header.TestcaseRstStreamAfterHeader,
'rst_after_data': test_rst_after_data.TestcaseRstStreamAfterData,
'rst_during_data': test_rst_during_data.TestcaseRstStreamDuringData,
'goaway': test_goaway.TestcaseGoaway,
'ping': test_ping.TestcasePing,
'max_streams': test_max_streams.TestcaseSettingsMaxStreams,
}
class H2Factory(twisted.internet.protocol.Factory):
def __init__(self, testcase):
logging.info('Creating H2Factory for new connection.')
self._num_streams = 0
self._testcase = testcase
def buildProtocol(self, addr):
self._num_streams += 1
logging.info('New Connection: %d' % self._num_streams)
if not _TEST_CASE_MAPPING.has_key(self._testcase):
logging.error('Unknown test case: %s' % self._testcase)
assert(0)
else:
t = _TEST_CASE_MAPPING[self._testcase]
if self._testcase == 'goaway':
return t(self._num_streams).get_base_server()
else:
return t().get_base_server()
if __name__ == '__main__':
logging.basicConfig(
format='%(levelname) -10s %(asctime)s %(module)s:%(lineno)s | %(message)s',
level=logging.INFO)
parser = argparse.ArgumentParser()
parser.add_argument('--test_case', choices=sorted(_TEST_CASE_MAPPING.keys()),
help='test case to run', required=True)
parser.add_argument('--port', type=int, default=8080,
help='port to run the server (default: 8080)')
args = parser.parse_args()
logging.info('Running test case %s on port %d' % (args.test_case, args.port))
endpoint = twisted.internet.endpoints.TCP4ServerEndpoint(
twisted.internet.reactor, args.port, backlog=128)
endpoint.listen(H2Factory(args.test_case))
twisted.internet.reactor.run()
| 1.34375
| 1
|
bqskit/ir/opt/instantiaters/qfactor.py
|
jkalloor3/bqskit
| 0
|
12779828
|
"""This module implements the QFactor class."""
from __future__ import annotations
import logging
from typing import Any
from typing import TYPE_CHECKING
import numpy as np
import numpy.typing as npt
from bqskitrs import QFactorInstantiatorNative
from bqskit.ir.opt.instantiater import Instantiater
from bqskit.qis.state.state import StateVector
from bqskit.qis.unitary import LocallyOptimizableUnitary
from bqskit.qis.unitary.unitarymatrix import UnitaryMatrix
if TYPE_CHECKING:
from bqskit.ir.circuit import Circuit
_logger = logging.getLogger(__name__)
class QFactor(QFactorInstantiatorNative, Instantiater):
"""The QFactor circuit instantiater."""
def __new__(cls, **kwargs: dict[str, Any]) -> Any:
if 'cost_fn_gen' in kwargs:
del kwargs['cost_fn_gen']
return super().__new__(cls, **kwargs)
def instantiate(
self,
circuit: Circuit,
target: UnitaryMatrix | StateVector,
x0: npt.NDArray[np.float64],
) -> npt.NDArray[np.float64]:
"""Instantiate `circuit`, see Instantiater for more info."""
return super().instantiate(circuit, target, x0)
@staticmethod
def is_capable(circuit: Circuit) -> bool:
"""Return true if the circuit can be instantiated."""
return all(
isinstance(gate, LocallyOptimizableUnitary)
for gate in circuit.gate_set
)
@staticmethod
def get_violation_report(circuit: Circuit) -> str:
"""
Return a message explaining why `circuit` cannot be instantiated.
Args:
circuit (Circuit): Generate a report for this circuit.
Raises:
ValueError: If `circuit` can be instantiated with this
instantiater.
"""
invalid_gates = {
gate
for gate in circuit.gate_set
if not isinstance(gate, LocallyOptimizableUnitary)
}
if len(invalid_gates) == 0:
raise ValueError('Circuit can be instantiated.')
return (
'Cannot instantiate circuit with qfactor'
' because the following gates are not locally optimizable: %s.'
% ', '.join(str(g) for g in invalid_gates)
)
@staticmethod
def get_method_name() -> str:
"""Return the name of this method."""
return 'qfactor'
| 2.453125
| 2
|
backtest/indicators.py
|
tacchang001/Store_OANDA_data_locally
| 0
|
12779829
|
<reponame>tacchang001/Store_OANDA_data_locally
# https://note.mu/addwis/n/n2c2dc09af892
import numpy as np
import pandas as pd
from scipy.signal import lfilter, lfilter_zi
from numba import jit
_OPEN = 'o'
_HIGH = 'h'
_LOW = 'l'
_CLOSE = 'c'
# Build OHLC bars from the data in df, resampled to the timeframe given by tf
def TF_ohlc(df, tf):
x = df.resample(tf).ohlc()
O = x[_OPEN][_OPEN]
H = x[_HIGH][_HIGH]
L = x[_LOW][_LOW]
C = x[_CLOSE][_CLOSE]
ret = pd.DataFrame({_OPEN: O, _HIGH: H, _LOW: L, _CLOSE: C},
columns=[_OPEN, _HIGH, _LOW, _CLOSE])
return ret.dropna()
# Append Median, Typical and Weighted price columns to the data in df
def ext_ohlc(df):
O = df[_OPEN]
H = df[_HIGH]
L = df[_LOW]
C = df[_CLOSE]
ext = pd.DataFrame({'Median': (H + L) / 2,
'Typical': (H + L + C) / 3,
'Weighted': (H + L + C * 2) / 4},
columns=['Median', 'Typical', 'Weighted'])
return pd.concat([df, ext], axis=1)
# Shift function
def shift(x, n=1):
return np.concatenate((np.zeros(n), x[:-n]))
# SMA on Array
@jit
def SMAonArray(x, ma_period):
x[np.isnan(x)] = 0
y = np.empty_like(x)
y[:ma_period - 1] = np.nan
y[ma_period - 1] = np.sum(x[:ma_period])
for i in range(ma_period, len(x)):
y[i] = y[i - 1] + x[i] - x[i - ma_period]
return y / ma_period
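# Small worked example (added): for x = 1..10 and ma_period = 5 the first four
# outputs are NaN and the last is (6+7+8+9+10)/5 = 8.0.
#   >>> SMAonArray(np.arange(1.0, 11.0), 5)[-1]
#   8.0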
# EMA on Array
@jit
def EMAonArray(x, alpha):
x[np.isnan(x)] = 0
y = np.empty_like(x)
y[0] = x[0]
for i in range(1, len(x)):
y[i] = alpha * x[i] + (1 - alpha) * y[i - 1]
return y
# Adaptive EMA on Array
@jit
def AEMAonArray(x, alpha):
x[np.isnan(x)] = 0
alpha[np.isnan(alpha)] = 0
y = np.empty_like(x)
y[0] = x[0]
for i in range(1, len(x)):
y[i] = alpha[i] * x[i] + (1 - alpha[i]) * y[i - 1]
return y
# Generic moving average on Array
def MAonArray(a, ma_period, ma_method):
if ma_method == 'SMA':
y = SMAonArray(a, ma_period)
elif ma_method == 'EMA':
y = EMAonArray(a, 2 / (ma_period + 1))
elif ma_method == 'SMMA':
y = EMAonArray(a, 1 / ma_period)
elif ma_method == 'LWMA':
h = np.arange(ma_period, 0, -1) * 2 / ma_period / (ma_period + 1)
y = lfilter(h, 1, a)
y[:ma_period - 1] = np.nan
return y
# Generic moving average on Series
def MAonSeries(s, ma_period, ma_method):
return pd.Series(MAonArray(s.values, ma_period, ma_method), index=s.index)
# iMA() function
def iMA(df, ma_period, ma_shift=0, ma_method='SMA', applied_price=_CLOSE):
return MAonSeries(df[applied_price], ma_period, ma_method).shift(ma_shift)
# iATR() function
def iATR(df, ma_period, ma_method='SMA'):
TR = np.max(np.vstack((df[_HIGH].values, shift(df[_CLOSE].values))).T, axis=1) \
- np.min(np.vstack((df[_LOW].values, shift(df[_CLOSE].values))).T, axis=1)
return pd.Series(MAonArray(TR, ma_period, ma_method), index=df.index)
# iDEMA() function
@jit
def iDEMA(df, ma_period, ma_shift=0, applied_price=_CLOSE):
alpha = 2 / (ma_period + 1)
a1 = 2 * (alpha - 1)
a2 = (1 - alpha) ** 2
b0 = alpha * (2 - alpha)
b1 = 2 * alpha * (alpha - 1)
x = df[applied_price].values
y = np.empty_like(x)
y[0] = x[0]
y[1] = b0 * x[1] + b1 * x[0] - a1 * y[0] - a2 * y[0]
for i in range(2, len(x)):
y[i] = b0 * x[i] + b1 * x[i - 1] - a1 * y[i - 1] - a2 * y[i - 2]
return pd.Series(y, index=df.index).shift(ma_shift)
# iTEMA() function
@jit
def iTEMA(df, ma_period, ma_shift=0, applied_price=_CLOSE):
alpha = 2 / (ma_period + 1)
a1 = 3 * (alpha - 1)
a2 = 3 * (1 - alpha) ** 2
a3 = (alpha - 1) ** 3
b0 = 3 * alpha * (1 - alpha) + alpha ** 3
b1 = 3 * alpha * (alpha - 2) * (1 - alpha)
b2 = 3 * alpha * (1 - alpha) ** 2
x = df[applied_price].values
y = np.empty_like(x)
y[0] = x[0]
y[1] = b0 * x[1] + b1 * x[0] + b2 * x[0] - a1 * y[0] - a2 * y[0] - a3 * y[0]
y[2] = b0 * x[2] + b1 * x[1] + b2 * x[0] - a1 * y[1] - a2 * y[0] - a3 * y[0]
for i in range(3, len(x)):
y[i] = b0 * x[i] + b1 * x[i - 1] + b2 * x[i - 2] - a1 * y[i - 1] - a2 * y[i - 2] - a3 * y[i - 3]
return pd.Series(y, index=df.index).shift(ma_shift)
# iMomentum() function
@jit
def iMomentum(df, mom_period, applied_price=_CLOSE):
x = df[applied_price].values
y = np.empty_like(x)
y[:mom_period] = np.nan
for i in range(mom_period, len(x)):
y[i] = x[i] / x[i - mom_period] * 100
return pd.Series(y, index=df.index)
# iRSI() function
def iRSI(df, ma_period, applied_price=_CLOSE):
diff = df[applied_price].diff()
positive = MAonSeries(diff.clip_lower(0), ma_period, 'SMMA')
negative = MAonSeries(diff.clip_upper(0), ma_period, 'SMMA')
return 100 - 100 / (1 - positive / negative)
# iStdDev() function
def iStdDev(df, ma_period, ma_shift=0, applied_price=_CLOSE):
return df[applied_price].rolling(ma_period).std(ddof=0).shift(ma_shift)
# iAO() function
def iAO(df):
return MAonSeries(df['Median'], 5, 'SMA') - MAonSeries(df['Median'], 34, 'SMA')
# iAC() function
def iAC(df):
AO = iAO(df).values
return pd.Series(AO - SMAonArray(AO, 5), index=df.index)
# iBearsPower() function
def iBearsPower(df, ma_period):
return df[_LOW] - MAonSeries(df[_CLOSE], ma_period, 'EMA')
# iBullsPower() function
def iBullsPower(df, ma_period):
return df[_HIGH] - MAonSeries(df[_CLOSE], ma_period, 'EMA')
# iCCI() function
@jit
def iCCI(df, ma_period, applied_price='Typical'):
price = df[applied_price].values
SP = SMAonArray(price, ma_period)
M = price - SP
D = np.zeros(len(M))
for i in range(len(D)):
for j in range(ma_period):
D[i] += abs(price[i - j] - SP[i])
D *= 0.015 / ma_period
return pd.Series(M / D, index=df.index)
# iDeMarker() function
def iDeMarker(df, ma_period):
DeMax = df[_HIGH].diff().clip_lower(0).values
DeMin = -df[_LOW].diff().clip_upper(0).values
SDeMax = SMAonArray(DeMax, ma_period)
SDeMin = SMAonArray(DeMin, ma_period)
return pd.Series(SDeMax / (SDeMax + SDeMin), index=df.index)
# iEnvelopes() function
def iEnvelopes(df, ma_period, deviation, ma_shift=0, ma_method='SMA', applied_price=_CLOSE):
price = df[applied_price]
MA = MAonSeries(price, ma_period, ma_method).shift(ma_shift)
Upper = MA * (1 + deviation / 100)
Lower = MA * (1 - deviation / 100)
return pd.DataFrame({'Upper': Upper, 'Lower': Lower},
columns=['Upper', 'Lower'])
# iMACD() function
def iMACD(df, fast_period, slow_period, signal_period, applied_price=_CLOSE):
price = df[applied_price].values
Main = MAonArray(price, fast_period, 'EMA') - MAonArray(price, slow_period, 'EMA')
Signal = SMAonArray(Main, signal_period)
return pd.DataFrame({'Main': Main, 'Signal': Signal},
columns=['Main', 'Signal'], index=df.index)
# iOsMA() function
def iOsMA(df, fast_period, slow_period, signal_period, applied_price=_CLOSE):
MACD = iMACD(df, fast_period, slow_period, signal_period, applied_price)
return MACD['Main'] - MACD['Signal']
# iTriX() function
def iTriX(df, ma_period, applied_price=_CLOSE):
EMA1 = MAonSeries(df[applied_price], ma_period, 'EMA')
EMA2 = MAonSeries(EMA1, ma_period, 'EMA')
EMA3 = MAonSeries(EMA2, ma_period, 'EMA')
return EMA3.diff() / EMA3.shift()
# iAMA() function
def iAMA(df, ma_period, fast_period, slow_period, ma_shift=0, applied_price=_CLOSE):
price = df[applied_price]
Signal = price.diff(ma_period).abs()
Noise = price.diff().abs().rolling(ma_period).sum()
ER = Signal.values / Noise.values
FastSC = 2 / (fast_period + 1)
SlowSC = 2 / (slow_period + 1)
SSC = ER * (FastSC - SlowSC) + SlowSC
AMA = AEMAonArray(price.values, SSC * SSC)
return pd.Series(AMA, index=df.index).shift(ma_shift)
# iFrAMA() function
def iFrAMA(df, ma_period, ma_shift=0, applied_price=_CLOSE):
price = df[applied_price]
H = df[_HIGH]
L = df[_LOW]
N1 = (H.rolling(ma_period).max() - L.rolling(ma_period).min()) / ma_period
N2 = (H.shift(ma_period).rolling(ma_period).max() - L.shift(ma_period).rolling(ma_period).min()) / ma_period
N3 = (H.rolling(2 * ma_period).max() - L.rolling(2 * ma_period).min()) / (2 * ma_period)
D = (np.log(N1.values + N2.values) - np.log(N3.values)) / np.log(2)
A = np.exp(-4.6 * (D - 1))
FRAMA = AEMAonArray(price.values, A)
return pd.Series(FRAMA, index=df.index).shift(ma_shift)
# iRVI() function
def iRVI(df, ma_period):
CO = df[_CLOSE].values - df[_OPEN].values
HL = df[_HIGH].values - df[_LOW].values
MA = lfilter([1 / 6, 1 / 3, 1 / 3, 1 / 6], 1, CO)
RA = lfilter([1 / 6, 1 / 3, 1 / 3, 1 / 6], 1, HL)
Main = SMAonArray(MA, ma_period) / SMAonArray(RA, ma_period)
Signal = lfilter([1 / 6, 1 / 3, 1 / 3, 1 / 6], 1, Main)
return pd.DataFrame({'Main': Main, 'Signal': Signal},
columns=['Main', 'Signal'], index=df.index)
# iWPR() function
def iWPR(df, period):
Max = df[_HIGH].rolling(period).max()
Min = df[_LOW].rolling(period).min()
return (df[_CLOSE] - Max) / (Max - Min) * 100
# iVIDyA() function
def iVIDyA(df, cmo_period, ma_period, ma_shift=0, applied_price=_CLOSE):
price = df[applied_price]
UpSum = price.diff().clip_lower(0).rolling(cmo_period).sum()
DnSum = -price.diff().clip_upper(0).rolling(cmo_period).sum()
CMO = np.abs((UpSum - DnSum) / (UpSum + DnSum)).values
VIDYA = AEMAonArray(price.values, 2 / (ma_period + 1) * CMO)
return pd.Series(VIDYA, index=df.index).shift(ma_shift)
# iBands() function
def iBands(df, bands_period, deviation, bands_shift=0, applied_price=_CLOSE):
price = df[applied_price].shift(bands_shift)
Base = price.rolling(bands_period).mean()
sigma = price.rolling(bands_period).std(ddof=0)
Upper = Base + sigma * deviation
Lower = Base - sigma * deviation
return pd.DataFrame({'Base': Base, 'Upper': Upper, 'Lower': Lower},
columns=['Base', 'Upper', 'Lower'])
# iStochastic() function
def iStochastic(df, Kperiod, Dperiod, slowing, ma_method='SMA', price_field='LOWHIGH'):
if price_field == 'LOWHIGH':
high = df[_HIGH]
low = df[_LOW]
elif price_field == 'CLOSECLOSE':
high = low = df[_CLOSE]
Hline = high.rolling(Kperiod).max().values
Lline = low.rolling(Kperiod).min().values
close = df[_CLOSE].values
sumlow = SMAonArray(close - Lline, slowing)
sumhigh = SMAonArray(Hline - Lline, slowing)
Main = sumlow / sumhigh * 100
Signal = MAonArray(Main, Dperiod, ma_method)
return pd.DataFrame({'Main': Main, 'Signal': Signal},
columns=['Main', 'Signal'], index=df.index)
# iHLBand() function
def iHLBand(df, band_period, band_shift=0, price_field='LOWHIGH'):
if price_field == 'LOWHIGH':
high = df[_HIGH]
low = df[_LOW]
elif price_field == 'CLOSECLOSE':
high = low = df[_CLOSE]
Upper = high.rolling(band_period).max().shift(band_shift)
Lower = low.rolling(band_period).min().shift(band_shift)
return pd.DataFrame({'Upper': Upper, 'Lower': Lower},
columns=['Upper', 'Lower'])
# iAlligator() function
def iAlligator(df, jaw_period, jaw_shift, teeth_period, teeth_shift,
lips_period, lips_shift, ma_method='SMMA', applied_price='Median'):
price = df[applied_price]
Jaw = MAonSeries(price, jaw_period, ma_method).shift(jaw_shift)
Teeth = MAonSeries(price, teeth_period, ma_method).shift(teeth_shift)
Lips = MAonSeries(price, lips_period, ma_method).shift(lips_shift)
return pd.DataFrame({'Jaw': Jaw, 'Teeth': Teeth, 'Lips': Lips},
columns=['Jaw', 'Teeth', 'Lips'])
# iGator() function
def iGator(df, jaw_period, jaw_shift, teeth_period, teeth_shift,
lips_period, lips_shift, ma_method='SMMA', applied_price='Median'):
AG = iAlligator(df, jaw_period, jaw_shift, teeth_period, teeth_shift,
lips_period, lips_shift, ma_method, applied_price)
Upper = (AG['Jaw'] - AG['Teeth']).abs()
Lower = -(AG['Teeth'] - AG['Lips']).abs()
return pd.DataFrame({'Upper': Upper, 'Lower': Lower},
columns=['Upper', 'Lower'])
# iADX() function
def iADX(df, adx_period):
dP = df[_HIGH].diff().clip_lower(0).values
dM = -df[_LOW].diff().clip_upper(0).values
dM[dP > dM] = 0
dP[dP < dM] = 0
dP[0] = dP[1]
dM[0] = dM[1]
TR = np.max(np.vstack((df[_HIGH].values, shift(df[_CLOSE].values))).T, axis=1) \
- np.min(np.vstack((df[_LOW].values, shift(df[_CLOSE].values))).T, axis=1)
PlusDI = 100 * MAonArray(dP / TR, adx_period, 'EMA')
MinusDI = 100 * MAonArray(dM / TR, adx_period, 'EMA')
Main = MAonArray(100 * np.abs(PlusDI - MinusDI) / (PlusDI + MinusDI), adx_period, 'EMA')
return pd.DataFrame({'Main': Main, 'PlusDI': PlusDI, 'MinusDI': MinusDI},
columns=['Main', 'PlusDI', 'MinusDI'], index=df.index)
# iADXWilder() function
def iADXWilder(df, adx_period):
dP = df[_HIGH].diff().clip_lower(0).values
dM = -df[_LOW].diff().clip_upper(0).values
dM[dP > dM] = 0
dP[dP < dM] = 0
dP[0] = dP[1]
dM[0] = dM[1]
ATR = iATR(df, adx_period, 'SMMA').values
PlusDI = 100 * MAonArray(dP, adx_period, 'SMMA') / ATR
MinusDI = 100 * MAonArray(dM, adx_period, 'SMMA') / ATR
Main = MAonArray(100 * np.abs(PlusDI - MinusDI) / (PlusDI + MinusDI), adx_period, 'SMMA')
return pd.DataFrame({'Main': Main, 'PlusDI': PlusDI, 'MinusDI': MinusDI},
columns=['Main', 'PlusDI', 'MinusDI'], index=df.index)
# iSAR() function
@jit
def iSAR(df, step, maximum):
dir_long = True
ACC = step
SAR = df[_CLOSE].values.copy()
High = df[_HIGH].values
Low = df[_LOW].values
Ep1 = High[0]
for i in range(1, len(SAR)):
if dir_long == True:
Ep1 = max(Ep1, High[i - 1])
SAR[i] = SAR[i - 1] + ACC * (Ep1 - SAR[i - 1])
if High[i] > Ep1: ACC = min(ACC + step, maximum)
if SAR[i] > Low[i]:
dir_long = False
SAR[i] = Ep1
ACC = step
Ep1 = Low[i]
else:
Ep1 = min(Ep1, Low[i - 1])
SAR[i] = SAR[i - 1] + ACC * (Ep1 - SAR[i - 1])
if Low[i] < Ep1: ACC = min(ACC + step, maximum)
if SAR[i] < High[i]:
dir_long = True
SAR[i] = Ep1
ACC = step
Ep1 = High[i]
return pd.Series(SAR, index=df.index)
# iIchimoku() function
def iIchimoku(df, tenkan_period, kijun_period, senkoub_period):
high = df[_HIGH]
low = df[_LOW]
Tenkan = (high.rolling(tenkan_period).max() + low.rolling(tenkan_period).min()) / 2
Kijun = (high.rolling(kijun_period).max() + low.rolling(kijun_period).min()) / 2
SenkouA = (Tenkan + Kijun).shift(kijun_period) / 2
SenkouB = (high.rolling(senkoub_period).max() + low.rolling(senkoub_period).min()).shift(kijun_period) / 2
Chikou = df[_CLOSE].shift(-kijun_period)
return pd.DataFrame({'Tenkan': Tenkan, 'Kijun': Kijun, 'SenkouA': SenkouA,
'SenkouB': SenkouB, 'Chikou': Chikou},
columns=['Tenkan', 'Kijun', 'SenkouA', 'SenkouB', 'Chikou'],
index=df.index)
# Tests for each function
if __name__ == '__main__':
file = 'USDJPY16385.txt'
ohlc = pd.read_csv(file, index_col='Time', parse_dates=True)
ohlc_ext = ext_ohlc(ohlc)
# x = iMA(ohlc_ext, 14, ma_method='SMA')
# x = iMA(ohlc_ext, 14, ma_method='EMA')
# x = iMA(ohlc_ext, 14, ma_method='SMMA', applied_price='Median')
# x = iMA(ohlc_ext, 14, ma_method='LWMA', applied_price='Typical')
# x = iATR(ohlc_ext, 14)
# x = iDEMA(ohlc_ext, 14, applied_price=_CLOSE)
# x = iTEMA(ohlc_ext, 14, applied_price=_CLOSE)
# x = iMomentum(ohlc_ext, 14)
# x = iRSI(ohlc_ext, 14)
# x = iStdDev(ohlc_ext, 14, ma_shift=3, applied_price='Weighted')
# x = iAO(ohlc_ext)
# x = iAC(ohlc_ext)
# x = iBearsPower(ohlc_ext, 13)
# x = iBullsPower(ohlc_ext, 13)
# x = iCCI(ohlc_ext, 14)
# x = iDeMarker(ohlc_ext, 14)
# x = iEnvelopes(ohlc_ext, 10, 1)
# x = iMACD(ohlc_ext, 12, 26, 9)
# x = iOsMA(ohlc_ext, 12, 26, 9)
# x = iTriX(ohlc_ext, 14)
# x = iAMA(ohlc_ext, 15, 2, 30)
# x = iFrAMA(ohlc_ext, 14)
# x = iRVI(ohlc_ext, 10)
# x = iWPR(ohlc_ext, 14)
# x = iVIDyA(ohlc_ext, 15, 12)
# x = iBands(ohlc_ext, 20, 2, bands_shift=5)
# x = iStochastic(ohlc_ext, 10, 3, 5, ma_method='SMA', price_field='LOWHIGH')
# x = iHLBand(ohlc, 20)
# x = iAlligator(ohlc_ext, 13, 8, 8, 5, 5, 3)
# x = iGator(ohlc_ext, 13, 8, 8, 5, 5, 3)
# x = iADX(ohlc_ext, 14)
# x = iADXWilder(ohlc_ext, 14)
# x = iSAR(ohlc_ext, 0.02, 0.2)
x = iIchimoku(ohlc_ext, 9, 26, 52)
# dif = ohlc['Ind0'] - x
# dif0 = ohlc['Ind0'] - x['Main']
# dif1 = ohlc['Ind1'] - x['Signal']
# dif1 = ohlc['Ind1'] - x['PlusDI']
# dif2 = ohlc['Ind2'] - x['MinusDI']
dif0 = ohlc['Ind0'] - x['Tenkan']
dif1 = ohlc['Ind1'] - x['Kijun']
dif2 = ohlc['Ind2'] - x['SenkouA']
dif3 = ohlc['Ind3'] - x['SenkouB']
dif4 = ohlc['Ind4'] - x['Chikou']
| 2.296875
| 2
|
model_loader.py
|
donikv/IlluminationBase
| 0
|
12779830
|
import tensorflow as tf
def create_pb_model(pb_path, sz, bs):
def load_graph(frozen_graph_filename):
# We load the protobuf file from the disk and parse it to retrieve the
# unserialized graph_def
with tf.compat.v1.gfile.GFile(frozen_graph_filename, "rb") as f:
graph_def = tf.compat.v1.GraphDef()
graph_def.ParseFromString(f.read())
# Then, we can use again a convenient built-in function to import a graph_def into the
# current default Graph
with tf.Graph().as_default() as graph:
tf.import_graph_def(graph_def,input_map=None,return_elements=None,name="prefix",op_dict=None,producer_op_list=None)
return graph, graph_def
def wrap_frozen_graph(graph_def, inputs, outputs):
def _imports_graph_def():
tf.compat.v1.import_graph_def(graph_def, name="")
wrapped_import = tf.compat.v1.wrap_function(_imports_graph_def, [])
import_graph = wrapped_import.graph
return wrapped_import.prune(
tf.nest.map_structure(import_graph.as_graph_element, inputs),
tf.nest.map_structure(import_graph.as_graph_element, outputs))
graph, graph_def = load_graph(pb_path)
model_fn = wrap_frozen_graph(graph_def, 'data:0', 'prob:0')
lam = tf.keras.layers.Lambda(model_fn)
inpt = tf.keras.layers.Input(sz, batch_size=bs)
out = lam(inpt)
model = tf.keras.models.Model(inpt, out)
return model
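# Illustrative usage (added): the file name, input size and batch size below are
# placeholders, and 'data:0' / 'prob:0' above must match the tensor names inside
# the frozen graph being loaded.
#   model = create_pb_model('frozen_model.pb', sz=(224, 224, 3), bs=1)
#   probs = model.predict(tf.zeros((1, 224, 224, 3)))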
| 2.453125
| 2
|
yc102/201.py
|
c-yan/yukicoder
| 0
|
12779831
|
SA, PA, XA = input().split()
SB, PB, XB = input().split()
PA = int(PA)
PB = int(PB)
if PA > PB:
print(SA)
elif PA < PB:
print(SB)
elif PA == PB:
print(-1)
| 3.3125
| 3
|
Aula 12/ex040P.py
|
alaanlimaa/Python_CVM1-2-3
| 0
|
12779832
|
'''Program that reads two of a student's test scores, computes the average,
and prints a message at the end according to the average achieved.'''
p1 = float(input('Nota da P1: '))
p2 = float(input('Nota da P2: '))
media = (p1 + p2) / 2
if media < 5.0:
print('Sua média foi {:.1f}, REPROVADO!'.format(media))
elif media < 7.0:
    print('Sua média foi {:.1f}, RECUPERAÇÃO'.format(media))
else:
    print('Sua média foi {:.1f}, APROVADO'.format(media))
| 4
| 4
|
trade.py
|
pjwarner/m-oney-an
| 0
|
12779833
|
import dbConnection as data
import calcCost as cost
from decimal import *
import getMtGoxRequest as req
class trade:
def __init__(self):
print 'Initialized...'
self.info = req.get_res('0/info.php', {})
self.priceInfo = req.get_res('1/BTCUSD/public/ticker', {})
def buy(self):
"""
Current implementation of this method buys ALL the bitcoins you can at market price
TODO: Add a way to tell it how many you want to buy for hedging purposes
"""
print 'Initiating a buy...'
cashpool = int(self.info['Wallets']['USD']['Balance']['value_int'])
sellPrice = int(self.priceInfo['return']['sell']['value_int'])
btcpool = Decimal(cashpool / sellPrice).quantize(Decimal('1.0'), rounding=ROUND_DOWN)
print 'Buying:', btcpool, 'btc'
#Actual Trade Code
# trade = {'amount': 10, 'Currency':'USD'}
# tmp = req.get_res('0/buyBTC.php', trade)
# print tmp
print 'Removing MtGoxCost...'
tmp = cost.calcCost(int(btcpool))
actual = tmp.removeCost()
print 'ActualInPool:', actual
return actual
def sell(self):
"""
Current implementation of this method sells ALL your bitcoins at the market price.
"""
print 'Initiating a sell...'
btcpool = int(self.info['Wallets']['BTC']['Balance']['value_int'])
sellPrice = int(self.priceInfo['return']['buy']['value_int'])
print 'Selling:', btcpool, 'btc'
print 'Removing MtGoxCost...'
tmp = cost.calcCost(btcpool * sellPrice)
actual = tmp.removeCost() * Decimal('.00001')
print 'ActualInPool:', actual
return actual
| 2.875
| 3
|
pointnet/dataset.py
|
shnhrtkyk/semantic3dnet
| 0
|
12779834
|
<filename>pointnet/dataset.py
import numpy as np
import laspy
import os
from scipy.spatial import KDTree
from sklearn.preprocessing import normalize
import logging
from p2v import voxelize
from p2v_pyntcloud import voxelization
import torch
class Dataset():
ATTR_EXLUSION_LIST = ['X', 'Y', 'Z', 'raw_classification', 'Classification',
'flag_byte', 'scan_angle_rank', 'user_data',
'pt_src_id', 'gps_time']
ATTR_EXTRA_LIST = ['num_returns', 'return_num']
def __init__(self, file, load=True, undersampling=False, normalize=False, shuffle=False):
self.file = file
self._features = self._xyz = self._classes = self._names = None
self.xmax = self.xmin = self.ymax = self.ymin = None
self._header = None
self.undersampling = undersampling
self.normalize = normalize
self.shuffle = shuffle
if load:
self.load_data()
def load_data(self):
print(self.file)
file_h = laspy.file.File(self.file, mode='r')
self._xyz = np.vstack([file_h.x, file_h.y, file_h.z]).transpose()
self._classes = file_h.classification
self.index_for_train = np.array(range(len(self._classes)))
self.length = len(self.index_for_train)
self._classes = self._classes - 1
# self._classes = np.where(self._classes == 1, 0, self._classes)
# self._classes = np.where(self._classes == 6, 0, self._classes)
# self._classes = np.where(self._classes > 1, 1, self._classes)
lbl = np.unique(self._classes[self._classes >= 0])
print(lbl)
points = file_h.points['point']
attr_names = [a for a in points.dtype.names] + Dataset.ATTR_EXTRA_LIST
self._features = np.array([getattr(file_h, name) for name in attr_names
if name not in Dataset.ATTR_EXLUSION_LIST]).transpose()
self._names = [name for name in attr_names if name not in Dataset.ATTR_EXLUSION_LIST]
        # undersample: trim the building samples to match the number of ground samples
if (self.undersampling == True):
ind_of_ground = np.where(self._classes == 0)[0]
# class_g = self._classes[ind_of_ground]
# points_g = self._xyz[ind_of_ground]
ind_of_build = np.where(self._classes == 1)[0]
print(ind_of_ground)
ind_of_build = ind_of_build[:len(ind_of_ground)]
# print(ind_of_build)
self.index_for_train = ind_of_ground
            self.index_for_train = np.insert(self.index_for_train, 0, ind_of_build)  # np.insert returns a new array
# print(self.index_for_train.shape)
self.length = len(self.index_for_train)
if (self.shuffle == True): np.random.shuffle(self.index_for_train)
print(self.index_for_train)
# class_b = self._classes[ind_of_build]
self.xmin = file_h.header.min[0]
self.ymin = file_h.header.min[1]
self.xmax = file_h.header.max[0]
self.ymax = file_h.header.max[1]
self._header = file_h.header
file_h.close()
def statistics(self):
stats = {'absolute': {},
'relative': {}}
for i in range(np.max(self.labels)):
count = np.count_nonzero(self.labels == i)
stats['absolute'][i] = count
stats['relative'][i] = count / len(self)
return stats
@property
def labels(self):
if self._xyz is None:
self.load_data()
ret_val = self._classes
return ret_val
@property
def names(self):
return self._names
@property
def points_and_features(self):
if self._xyz is None:
self.load_data()
ret_val = np.hstack((self._xyz, self._features))
if self.normalize:
normalize(ret_val)
return ret_val
@property
def filename(self):
return os.path.basename(self.file)
def points_and_features_f(self):
return self.points_and_features
def labels_f(self):
return self.labels
def unload(self):
self._features = self._xyz = self._classes = self._names = None
self.xmax = self.xmin = self.ymax = self.ymin = None
self._header = None
def get_label_unique_count(self):
return len(np.unique(self._classes))
def get_feature_count(self):
return self._features.shape[1]
def __len__(self):
return self.labels.shape[0]
# get voxel data
def getBatch(self, start_idx, batch_size, idx_randomizer=None):
if idx_randomizer is not None:
idx_range = idx_randomizer[start_idx:start_idx + batch_size]
else:
idx_range = range(start_idx, start_idx + batch_size)
        data = self.points_and_features[idx_range]
        labels = self.labels[idx_range]
        return data, labels
def getVoxelBatch(self, start_idx, batch_size, idx_randomizer=None):
if idx_randomizer is not None:
idx_range = idx_randomizer[start_idx:start_idx + batch_size]
else:
idx_range = range(start_idx, start_idx + batch_size)
        data = self.points_and_features[idx_range]
        labels = self.labels[idx_range]
        return data, labels
def save_with_new_classes(self, outFile, new_classes):
inFile = laspy.file.File(self.file)
outFile = laspy.file.File(outFile, mode='w', header=inFile.header)
outFile.points = inFile.points
outFile.Classification = new_classes[0]
outFile.close()
@staticmethod
def Save(path, points_and_features, names=None, labels=None, new_classes=None, probs=None):
hdr = laspy.header.Header()
outfile = laspy.file.File(path, mode="w", header=hdr)
if new_classes is not None:
outfile.define_new_dimension(name="estim_class", data_type=5, description="estimated class")
if labels is not None and new_classes is not None:
outfile.define_new_dimension(name="class_correct", data_type=5,
description="correctness of estimated class")
if probs is not None:
for classid in range(probs.shape[1]):
outfile.define_new_dimension(name="prob_class%02d" % classid, data_type=9,
description="p of estimated class %02d" % classid)
allx = points_and_features[:, 0]
ally = points_and_features[:, 1]
allz = points_and_features[:, 2]
xmin = np.floor(np.min(allx))
ymin = np.floor(np.min(ally))
zmin = np.floor(np.min(allz))
outfile.header.offset = [xmin, ymin, zmin]
outfile.header.scale = [0.001, 0.001, 0.001]
outfile.x = allx
outfile.y = ally
outfile.z = allz
for featid in range(points_and_features.shape[1] - 3):
try:
data = points_and_features[:, 3 + featid]
if names[featid] in ['num_returns', 'return_num']: # hack to treat int-values
data = data.astype('int8')
setattr(outfile, names[featid], data)
except Exception as e:
logging.warning("Could not save attribute %s to file %s: \n%s" % (names[featid], path, e))
# raise
if probs is not None:
for classid in range(probs.shape[1]):
setattr(outfile, "prob_class%02d" % classid, probs[:, classid])
if labels is not None:
outfile.classification = labels
if new_classes is not None:
outfile.estim_class = new_classes
if labels is not None and new_classes is not None:
outfile.class_correct = np.equal(labels, new_classes) * -1 + 6 # so that equal =5 --> green (veg)
# and not equal =6 --> red (building)
outfile.close()
class ChunkedDataset(Dataset):
def __init__(self, chunk_size, overlap, *args, **kwargs):
super(ChunkedDataset, self).__init__(*args, **kwargs)
self.chunk_size = chunk_size
self.overlap = overlap
self.curr_chunk = 0
self.num_cols = (self.xmax - self.xmin) // (self.chunk_size - self.overlap) + 1
self.num_rows = (self.ymax - self.ymin) // (self.chunk_size - self.overlap) + 1
def idx_to_lims(self, idx):
if idx >= self.num_cols * self.num_rows:
return None
row_idx = idx // self.num_cols
col_idx = idx % self.num_cols
return [self.xmin + (self.chunk_size - self.overlap) * col_idx,
self.xmin + (self.chunk_size - self.overlap) * (col_idx + 1) + self.overlap,
self.ymin + (self.chunk_size - self.overlap) * row_idx,
self.ymin + (self.chunk_size - self.overlap) * (row_idx + 1) + self.overlap,
]
def getNextChunk(self):
lims = self.idx_to_lims(self.curr_chunk)
if not lims: # no more chunks
return None, None
idxes = self._xyz[:, 0] >= lims[0]
idxes &= self._xyz[:, 0] < lims[1]
idxes &= self._xyz[:, 1] >= lims[2]
idxes &= self._xyz[:, 1] < lims[3]
self.curr_chunk += 1
return self.points_and_features[idxes, :], self.labels[idxes]
@staticmethod
def chunkStatistics(labels, max):
stats = {'absolute': {},
'relative': {}}
for i in range(max):
count = np.count_nonzero(labels == i)
stats['absolute'][i] = count
stats['relative'][i] = count / len(labels)
return stats
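# Rough usage sketch (added): the file name and chunk parameters are placeholder
# values; chunk_size and overlap follow the coordinate units of the LAS file.
#   ds = ChunkedDataset(chunk_size=50.0, overlap=5.0, file='tile.las')
#   while True:
#       pts, lbls = ds.getNextChunk()
#       if pts is None:
#           break
#       ...  # process one chunk of points and labels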
class kNNBatchDataset(Dataset):
def __init__(self, *args, **kwargs):
super(kNNBatchDataset, self).__init__(*args, **kwargs)
self.tree = None
self.buildKD()
self.center_idx = 0
def buildKD(self):
logging.info(" -- Building kD-Tree with %d points..." % len(self))
self.tree = KDTree(self._xyz[:, :2], leafsize=100) # build only on x/y
logging.info(" --- kD-Tree built.")
def getBatches(self, batch_size=1):
centers = []
for i in range(batch_size):
if self.currIdx >= self.num_batches:
break
centers.append([self.xmin + self.spacing / 2 + (self.currIdx // self.num_rows) * self.spacing,
self.ymin + self.spacing / 2 + (self.currIdx % self.num_rows) * self.spacing])
self.currIdx += 1
print(centers)
if centers:
_, idx = self.tree.query(centers, k=self.k)
return self.points_and_features[idx, :], self.labels[idx]
else:
return None, None
def getBatches_Point(self, batch_size=1, num_point= 1024, num_grid=32):
        batch_labels = torch.zeros([batch_size, 1], dtype=torch.float)  # batch_size x 1
        batch_points = torch.zeros([batch_size, 3, num_point], dtype=torch.float)  # batch_size x 3 x num_point
        batch_voxels = []
for i in range(batch_size):
points = []
voxels = []
tmp_index = self.center_idx
if (self.shuffle == True):
tmp_index = self.index_for_train[self.center_idx]
_, idx = self.tree.query(self.points_and_features[tmp_index, :2], k=num_point)
label = self.labels[tmp_index]
# print(label)
batch_labels[i] = torch.from_numpy(np.array(label).astype(np.float32))
point_knn = np.full((num_point, 3), 1)
point_knn[:, :] = self.points_and_features[idx, :3]
batch_points[i, :, :] = torch.from_numpy(point_knn.astype(np.float32).T)
self.center_idx += 1
# for j in range(len(num_point)):
# point_knn = np.full((num_point[j], 3), 1)
# point_knn[:, :] = self.points_and_features[idx[:num_point[j]], :3]
# # point_knn = torch.from_numpy(np.array(point_knn).astype(np.float32))
# # print(point_knn.shape)
# batch_voxels[j, i, :, :, :, :] = torch.from_numpy(np.array(voxelization(point_knn,
# vox_size=num_grid)).astype(np.float32)) # resolution * batchsize * ch * grid * grid * grid
# points.append(point_knn)
# print(point_knn)
# print(point_knn.shape)
# print(batch_voxels.shape)
# batch_points.append(points) # batchsize * resolution * num.of points * xyz
# batch_points = torch.from_numpy(np.array(batch_points).astype(np.float32)) # batchsize * resolution * num.of points * xyz
# for i in range(batch_size):
# batch_voxels.append(voxels)
# batch_voxels =np.array(batch_voxels)# resolution * batchsize grid * grid * grid
# batch_labels = torch.from_numpy(np.array(batch_labels).astype(np.float32)) # batch size * 1
return batch_points, batch_labels
# append for inference KUDO
def getBatchsWithIdx(self, batch_size=1):
centers = []
for i in range(batch_size):
if self.currIdx >= self.num_batches:
break
centers.append([self.xmin + self.spacing / 2 + (self.currIdx // self.num_rows) * self.spacing,
self.ymin + self.spacing / 2 + (self.currIdx % self.num_rows) * self.spacing])
self.currIdx += 1
# print(centers)
if centers:
_, idx = self.tree.query(centers, k=self.k)
return self.points_and_features[idx, :], self.labels[idx], np.array(idx)
else:
return None, None, None
def getBatchByIdx(self, batch_idx):
centers = [[self.xmin + self.spacing / 2 + (batch_idx // self.num_rows) * self.spacing,
self.ymin + self.spacing / 2 + (batch_idx % self.num_rows) * self.spacing]]
_, idx = self.tree.query(centers, k=self.k)
return self.points_and_features[idx, :], self.labels[idx]
if __name__ == '__main__':
d = kNNBatchDataset(file="../data/test.las",
undersampling=False, shuffle=False)
for idx_range in range(d.length):
voxels, labels = d.getBatches_Point(batch_size=12, num_point=1024, num_grid=32)
print(str(voxels.size()) + " , " + str(labels) + " , " + str(d.center_idx))
d.center_idx += 1
| 2.375
| 2
|
migrations/versions/bcb167cb67ef_create_pitch_table.py
|
John-Kimani/Blue_Chip_Pitch_App
| 0
|
12779835
|
<reponame>John-Kimani/Blue_Chip_Pitch_App
"""Create pitch table
Revision ID: bcb167cb67ef
Revises: <KEY>
Create Date: 2022-03-07 11:38:54.009444
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'bcb167cb67ef'
down_revision = '<KEY>'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('pitch', sa.Column('body', sa.String(length=140), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('pitch', 'body')
# ### end Alembic commands ###
| 1.609375
| 2
|
cfn_policy_validator/parsers/resource/sqs.py
|
awslabs/aws-cloudformation-iam-policy-validator
| 41
|
12779836
|
"""
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
SPDX-License-Identifier: MIT-0
"""
from urllib.parse import urlparse
from cfn_policy_validator.application_error import ApplicationError
from cfn_policy_validator.parsers.output import Policy, Resource
class SqsQueuePolicyParser:
""" AWS::SQS::QueuePolicy
"""
def __init__(self):
self.queue_policies = []
def parse(self, _, resource):
evaluated_resource = resource.eval(sqs_queue_policy_schema)
properties = evaluated_resource['Properties']
queue_urls = properties['Queues']
policy_document = properties['PolicyDocument']
for queue in queue_urls:
parsed_url = urlparse(queue)
try:
queue_name = parsed_url.path.split('/')[2]
except IndexError:
raise ApplicationError(f'Invalid queue URL. Unable to parse name from URL. Invalid value: "{queue}"')
policy = Policy('QueuePolicy', policy_document)
resource = Resource(queue_name, 'AWS::SQS::Queue', policy)
self.queue_policies.append(resource)
def get_policies(self):
return self.queue_policies
sqs_queue_policy_schema = {
'type': 'object',
'properties': {
'Properties': {
'type': 'object',
'properties': {
'PolicyDocument': {
'type': 'object'
},
'Queues': {
'type': 'array',
'minItems': 1,
'items': {
'type': 'string'
}
}
},
'required': ['PolicyDocument', 'Queues']
}
},
'required': ['Properties']
}
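# Worked example (added, illustrative URL): the same queue-name extraction that
# parse() performs above.
#   >>> urlparse('https://sqs.us-east-1.amazonaws.com/123456789012/my-queue').path.split('/')[2]
#   'my-queue'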
| 2.46875
| 2
|
examples/wmc-1.py
|
gfrances/PySDD
| 34
|
12779837
|
<reponame>gfrances/PySDD
#!/usr/bin/env python3
from pathlib import Path
import math
from pysdd.sdd import SddManager, Vtree, WmcManager
here = Path(__file__).parent
def main():
# Start from a given CNF and VTREE file
vtree = Vtree.from_file(bytes(here / "input" / "simple.vtree"))
sdd = SddManager.from_vtree(vtree)
print(f"Created an SDD with {sdd.var_count()} variables")
root = sdd.read_cnf_file(bytes(here / "input" / "simple.cnf"))
# For DNF functions use `read_dnf_file`
# If the vtree is not given, you can also use 'from_cnf_file`
# Model Counting
wmc = root.wmc(log_mode=True)
w = wmc.propagate()
print(f"Model count: {int(math.exp(w))}")
# Weighted Model Counting
lits = [None] + [sdd.literal(i) for i in range(1, sdd.var_count() + 1)]
# Positive literal weight
wmc.set_literal_weight(lits[1], math.log(0.5))
# Negative literal weight
wmc.set_literal_weight(-lits[1], math.log(0.5))
w = wmc.propagate()
print(f"Weighted model count: {math.exp(w)}")
# Visualize SDD and VTREE
print("saving sdd and vtree ... ", end="")
with open(here / "output" / "sdd.dot", "w") as out:
print(sdd.dot(), file=out)
with open(here / "output" / "vtree.dot", "w") as out:
print(vtree.dot(), file=out)
print("done")
if __name__ == "__main__":
main()
| 2.265625
| 2
|
pype/ftrack/actions/action_create_folders.py
|
tws0002/pype
| 0
|
12779838
|
<reponame>tws0002/pype
import os
import sys
import logging
import argparse
import re
from pype.vendor import ftrack_api
from pype.ftrack import BaseAction
from avalon import lib as avalonlib
from pype.ftrack.lib.io_nonsingleton import DbConnector
from pypeapp import config, Anatomy
class CreateFolders(BaseAction):
'''Custom action.'''
#: Action identifier.
identifier = 'create.folders'
#: Action label.
label = 'Create Folders'
#: Action Icon.
icon = '{}/ftrack/action_icons/CreateFolders.svg'.format(
os.environ.get('PYPE_STATICS_SERVER', '')
)
db = DbConnector()
def discover(self, session, entities, event):
''' Validation '''
if len(entities) != 1:
return False
not_allowed = ['assetversion', 'project']
if entities[0].entity_type.lower() in not_allowed:
return False
return True
def interface(self, session, entities, event):
if event['data'].get('values', {}):
return
entity = entities[0]
without_interface = True
for child in entity['children']:
if child['object_type']['name'].lower() != 'task':
without_interface = False
break
self.without_interface = without_interface
if without_interface:
return
title = 'Create folders'
entity_name = entity['name']
msg = (
'<h2>Do you want create folders also'
' for all children of "{}"?</h2>'
)
if entity.entity_type.lower() == 'project':
entity_name = entity['full_name']
msg = msg.replace(' also', '')
msg += '<h3>(Project root won\'t be created if not checked)</h3>'
items = []
item_msg = {
'type': 'label',
'value': msg.format(entity_name)
}
item_label = {
'type': 'label',
            'value': 'With all children entities'
}
item = {
'name': 'children_included',
'type': 'boolean',
'value': False
}
items.append(item_msg)
items.append(item_label)
items.append(item)
if len(items) == 0:
return {
'success': False,
                'message': 'Didn\'t find any running jobs'
}
else:
return {
'items': items,
'title': title
}
def launch(self, session, entities, event):
'''Callback method for custom action.'''
with_childrens = True
if self.without_interface is False:
if 'values' not in event['data']:
return
with_childrens = event['data']['values']['children_included']
entity = entities[0]
if entity.entity_type.lower() == 'project':
proj = entity
else:
proj = entity['project']
project_name = proj['full_name']
project_code = proj['name']
if entity.entity_type.lower() == 'project' and with_childrens == False:
return {
'success': True,
'message': 'Nothing was created'
}
data = {
"root": os.environ["AVALON_PROJECTS"],
"project": {
"name": project_name,
"code": project_code
}
}
all_entities = []
all_entities.append(entity)
if with_childrens:
all_entities = self.get_notask_children(entity)
av_project = None
try:
self.db.install()
self.db.Session['AVALON_PROJECT'] = project_name
av_project = self.db.find_one({'type': 'project'})
template_work = av_project['config']['template']['work']
template_publish = av_project['config']['template']['publish']
self.db.uninstall()
except Exception:
templates = Anatomy().templates
template_work = templates["avalon"]["work"]
template_publish = templates["avalon"]["publish"]
collected_paths = []
presets = config.get_presets()['tools']['sw_folders']
for entity in all_entities:
if entity.entity_type.lower() == 'project':
continue
ent_data = data.copy()
asset_name = entity['name']
ent_data['asset'] = asset_name
parents = entity['link']
hierarchy_names = [p['name'] for p in parents[1:-1]]
hierarchy = ''
if hierarchy_names:
hierarchy = os.path.sep.join(hierarchy_names)
ent_data['hierarchy'] = hierarchy
tasks_created = False
if entity['children']:
for child in entity['children']:
if child['object_type']['name'].lower() != 'task':
continue
tasks_created = True
task_type_name = child['type']['name'].lower()
task_data = ent_data.copy()
task_data['task'] = child['name']
possible_apps = presets.get(task_type_name, [])
template_work_created = False
template_publish_created = False
apps = []
for app in possible_apps:
try:
app_data = avalonlib.get_application(app)
app_dir = app_data['application_dir']
except ValueError:
app_dir = app
apps.append(app_dir)
                    # Template work
if '{app}' in template_work:
for app in apps:
template_work_created = True
app_data = task_data.copy()
app_data['app'] = app
collected_paths.append(
self.compute_template(
template_work, app_data
)
)
if template_work_created is False:
collected_paths.append(
self.compute_template(template_work, task_data)
)
# Template publish
if '{app}' in template_publish:
for app in apps:
template_publish_created = True
app_data = task_data.copy()
app_data['app'] = app
collected_paths.append(
self.compute_template(
template_publish, app_data, True
)
)
if template_publish_created is False:
collected_paths.append(
self.compute_template(
template_publish, task_data, True
)
)
if not tasks_created:
# create path for entity
collected_paths.append(
self.compute_template(template_work, ent_data)
)
collected_paths.append(
self.compute_template(template_publish, ent_data)
)
if len(collected_paths) > 0:
self.log.info('Creating folders:')
for path in set(collected_paths):
self.log.info(path)
if not os.path.exists(path):
os.makedirs(path)
return {
'success': True,
'message': 'Created Folders Successfully!'
}
def get_notask_children(self, entity):
output = []
if entity.get('object_type', {}).get(
'name', entity.entity_type
).lower() == 'task':
return output
else:
output.append(entity)
if entity['children']:
for child in entity['children']:
output.extend(self.get_notask_children(child))
return output
def template_format(self, template, data):
partial_data = PartialDict(data)
# remove subdict items from string (like 'project[name]')
subdict = PartialDict()
count = 1
store_pattern = 5*'_'+'{:0>3}'
        regex_pattern = r"\{\w*\[[^\}]*\]\}"
        matches = re.findall(regex_pattern, template)
for match in matches:
key = store_pattern.format(count)
subdict[key] = match
template = template.replace(match, '{'+key+'}')
count += 1
        # fill in the keys, taking optional keys into account
solved = self._solve_with_optional(template, partial_data)
# try to solve subdict and replace them back to string
for k, v in subdict.items():
try:
v = v.format_map(data)
except (KeyError, TypeError):
pass
subdict[k] = v
return solved.format_map(subdict)
def _solve_with_optional(self, template, data):
# Remove optional missing keys
pattern = re.compile(r"(<.*?[^{0]*>)[^0-9]*?")
invalid_optionals = []
for group in pattern.findall(template):
try:
group.format(**data)
except KeyError:
invalid_optionals.append(group)
for group in invalid_optionals:
template = template.replace(group, "")
solved = template.format_map(data)
        # resolve remaining optional groups in a second pass after formatting
for catch in re.compile(r"(<.*?[^{0]*>)[^0-9]*?").findall(solved):
if "{" in catch:
# remove all optional
solved = solved.replace(catch, "")
else:
# Remove optional symbols
solved = solved.replace(catch, catch[1:-1])
return solved
def compute_template(self, str, data, task=False):
first_result = self.template_format(str, data)
if first_result == first_result.split('{')[0]:
return os.path.normpath(first_result)
if task:
return os.path.normpath(first_result.split('{')[0])
index = first_result.index('{')
        regex = r'\{\w*[^\}]*\}'
match = re.findall(regex, first_result[index:])[0]
without_missing = str.split(match)[0].split('}')
output_items = []
for part in without_missing:
if '{' in part:
output_items.append(part + '}')
return os.path.normpath(
self.template_format(''.join(output_items), data)
)
class PartialDict(dict):
def __getitem__(self, item):
out = super().__getitem__(item)
if isinstance(out, dict):
return '{'+item+'}'
return out
def __missing__(self, key):
return '{'+key+'}'
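# Behaviour sketch (added): PartialDict leaves unknown keys as literal
# placeholders, so a partially-filled template survives str.format_map().
#   >>> '{asset}_{task}'.format_map(PartialDict({'asset': 'sh010'}))
#   'sh010_{task}'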
def register(session, plugins_presets={}):
    '''Register plugin. Called when used as a plugin.'''
CreateFolders(session, plugins_presets).register()
def main(arguments=None):
'''Set up logging and register action.'''
if arguments is None:
arguments = []
parser = argparse.ArgumentParser()
# Allow setting of logging level from arguments.
loggingLevels = {}
for level in (
logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
logging.ERROR, logging.CRITICAL
):
loggingLevels[logging.getLevelName(level).lower()] = level
parser.add_argument(
'-v', '--verbosity',
help='Set the logging output verbosity.',
choices=loggingLevels.keys(),
default='info'
)
namespace = parser.parse_args(arguments)
# Set up basic logging
logging.basicConfig(level=loggingLevels[namespace.verbosity])
session = ftrack_api.Session()
register(session)
# Wait for events
logging.info(
'Registered actions and listening for events. Use Ctrl-C to abort.'
)
session.event_hub.wait()
if __name__ == '__main__':
raise SystemExit(main(sys.argv[1:]))
| 1.96875
| 2
|
extractor/open163.py
|
pwh19920920/spiders
| 390
|
12779839
|
<filename>extractor/open163.py
# pylint: disable=W0123
import re
import requests
def get(url: str) -> dict:
"""
videos
"""
data = {}
data["videos"] = []
headers = {
"User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36"
}
re_url = r'mid:(.*?),.*?mp4SdUrlOrign:(.*?),.*?mp4HdUrlOrign:(.*?),.*?mp4ShdUrlOrign:(.*?),'
rep = requests.get(url, headers=headers, timeout=10)
items = re.findall(re_url, rep.text)
for item in items:
        # iterate in reverse to take the highest available quality first
for video_url in item[::-1]: # type: str
# print(url)
if "http" in video_url:
video_url = eval(video_url).replace("\\u002F", "/")
data["videos"].append(video_url)
break
return data
if __name__ == "__main__":
url = "http://open.163.com/newview/movie/free?pid=M8LI1JCE6&mid=M8LI3BQ60"
print(get(url))
| 2.765625
| 3
|
Lab02_final_Group2_05/PenguinPiC.py
|
evantancy/ece4078-team2-05
| 0
|
12779840
|
<reponame>evantancy/ece4078-team2-05
import numpy as np
import requests
import cv2
class PenguinPi:
def __init__(self, ip="localhost"):
"""
Args:
ip (str, optional): IP address. Defaults to "localhost".
"""
self.ip = ip
self.port = 40000
def set_velocity(self, lvel: int, rvel: int, time=0) -> None:
"""
Args:
lvel (int): Target left wheel speed
rvel (int): Target right wheel speed
time (int, optional): Defaults to 0.
"""
success = False
while not success:
if time == 0:
r = requests.get(
f"http://{self.ip}:{self.port}/robot/set/velocity?value="
+ str(lvel)
+ ","
+ str(rvel)
)
success = r.ok
else:
assert time > 0, "Time must be positive."
assert time < 20, "Time must be less than network timeout (20s)."
r = requests.get(
"http://"
+ self.ip
+ ":"
+ str(self.port)
+ "/robot/set/velocity?value="
+ str(lvel)
+ ","
+ str(rvel)
+ "&time="
+ str(time)
)
success = r.ok
def get_image(self) -> np.ndarray:
"""
Returns:
np.ndarray: OpenCV image object. Check using type(img)
"""
success = False
while not success:
try:
r = requests.get(f"http://{self.ip}:{self.port}/camera/get")
img = cv2.imdecode(np.frombuffer(r.content, np.uint8), cv2.IMREAD_COLOR)
success = r.ok
except (
requests.exceptions.ConnectTimeout,
requests.exceptions.ConnectionError,
requests.exceptions.ReadTimeout,
):
print("PenguinPiC: Image retrieval timed out.")
img = np.zeros((240, 320, 3), dtype=np.uint8)
return img
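# Hedged usage sketch (not part of the original module); it assumes a PenguinPi
# simulator or robot is reachable at the default address used above.
if __name__ == "__main__":
    ppi = PenguinPi(ip="localhost")
    ppi.set_velocity(20, 20, time=1)  # drive both wheels forward for ~1 s
    frame = ppi.get_image()           # BGR numpy array; zeros (240x320x3) on timeout
    print(frame.shape)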
| 3.09375
| 3
|
temporal_model.py
|
rashidhaffadi/EGT
| 0
|
12779841
|
import torch
import torch.nn as nn
import torch.nn.functional as F
from utils import *
class SequenceModel(nn.Module):
"""docstring for SequenceModel"""
def __init__(self, input_size, hidden_size, n_layers, **kwargs):
super(SequenceModel, self).__init__()
self.rnn = nn.LSTM(input_size, hidden_size, n_layers, **kwargs)
self.linear = nn.Sequential(linear(hidden_size, hidden_size, True, 0.5),
linear(hidden_size, hidden_size, True, 0.5))
self.fc = nn.Linear(hidden_size, 128)
# load
def forward(self, x):
x, _ = self.rnn(x)
x = x.mean(1)
x = self.linear(x)
return self.fc(x)
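# Hedged shape check (not in the original file). It assumes `linear` from utils
# builds a hidden_size -> hidden_size block, so the model maps
# (batch, seq_len, input_size) -> (batch, 128) when batch_first=True.
if __name__ == "__main__":
    model = SequenceModel(input_size=64, hidden_size=256, n_layers=2, batch_first=True)
    dummy = torch.randn(8, 30, 64)  # (batch, seq_len, features)
    print(model(dummy).shape)       # expected: torch.Size([8, 128])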
| 2.890625
| 3
|
mollie/api/resources/payments.py
|
wegroupwolves/async-mollie-api-python
| 0
|
12779842
|
from ..error import IdentifierError
from ..objects.payment import Payment
from .base import Base
class Payments(Base):
RESOURCE_ID_PREFIX = "tr_"
def get_resource_object(self, result):
return Payment(result, self)
async def get(self, payment_id, **params):
if not payment_id or not payment_id.startswith(self.RESOURCE_ID_PREFIX):
raise IdentifierError(
"Invalid payment ID: '{id}'. A payment ID should start with '{prefix}'.".format(
id=payment_id, prefix=self.RESOURCE_ID_PREFIX
)
)
result = await super(Payments, self).get(payment_id, **params)
return result
async def delete(self, payment_id, data=None):
"""Cancel payment and return the payment object.
Deleting a payment causes the payment status to change to canceled.
The updated payment object is returned.
"""
if not payment_id or not payment_id.startswith(self.RESOURCE_ID_PREFIX):
raise IdentifierError(
"Invalid payment ID: '{id}'. A payment ID should start with '{prefix}'.".format(
id=payment_id, prefix=self.RESOURCE_ID_PREFIX
)
)
result = await super(Payments, self).delete(payment_id, data)
return self.get_resource_object(result)
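# Hedged usage sketch (comments only, not part of this module): the client/Base
# wiring is assumed and the payment ID is a placeholder.
#
#   payments = Payments(client)                        # hypothetical client object
#   payment = await payments.get("tr_7UhSN1zuXS")      # fetch a payment
#   canceled = await payments.delete("tr_7UhSN1zuXS")  # cancel it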
| 2.515625
| 3
|
train.py
|
SuriyaNitt/deepdrive
| 0
|
12779843
|
import networks.tflearn.deepdrivenet as ddn #threeD_conv_net
import ml_utils.calc_optical_flow as cop #calc_opticalflow
import ml_utils.handle_data as hd
import os
import numpy as np
import tflearn
import tensorflow as tf
debug = 0
calc_flow = 0
load_model_on_start = 0
depth = 16
batchSize = 16
modelBatchSize = 2
rows = 224
cols = 224
colorSpace = 3
tflearn.init_graph(num_cores=8, gpu_memory_fraction=0.8)
h5FilePath = '/home/suriya/Documents/hard_disk/DeepDrive'
h5FileName = 'train_0001.h5'
h5File = os.path.join(h5FilePath, h5FileName)
video = hd.read_h5_key_value(h5File, 'images')
video = hd.swap_axes(video, 1, 3)
targets = hd.read_h5_key_value(h5File, 'targets')
targetSpeed = targets[:, 2]
targetSpeed = targetSpeed.reshape((targetSpeed.shape[0],1))
targetSteering = targets[:, 4]
targetSteering = targetSteering.reshape((targetSteering.shape[0], 1))
myTargets = np.append(targetSpeed, targetSteering, axis=1)
if debug:
    print(video.shape)
    print(targets.shape)
    print(targets[0])
    print(myTargets.shape)
if calc_flow:
cop.calc_opticalflow(video, video.shape[0])
myNet = ddn.threeD_conv_net_single_stream(depth, rows, cols, modelBatchSize)
model = tflearn.DNN(myNet, checkpoint_path='./model_resnet',
max_checkpoints=10, tensorboard_verbose=3, tensorboard_dir='./tflearn_logs')
if load_model_on_start:
model.load('./model_resnet/model1')
testX = np.ndarray((0, cols, rows, colorSpace), dtype='float32')
if debug:
    print(testX.shape)
for i in range(batchSize):
testX = np.append(testX, video[i:i+depth, :cols, :rows, :], axis=0)
testX = testX.reshape((batchSize, depth, cols, rows, colorSpace))
if debug:
    print(testX.shape)
testY = myTargets[depth-1:depth-1+batchSize, :]
testY = np.array(testY)
testY = testY.reshape((testY.shape[0], 2))
train_count = 1
if debug:
train_count = 1
while train_count < ((targetSpeed.shape[0]-depth+1) / batchSize): # 970 / 32
trainX = np.ndarray((0, cols, rows, colorSpace), dtype='float32')
for i in range(batchSize):
trainX = np.append(trainX, video[i+(train_count)*batchSize:i+(train_count)*batchSize+depth, :cols, :rows, :], axis=0)
trainX = trainX.reshape(batchSize, depth, rows, cols, colorSpace)
trainY = myTargets[depth-1+(train_count)*batchSize:depth-1+(train_count+1)*batchSize, :]
trainY = np.array(trainY)
    trainY = trainY.reshape((trainY.shape[0], 2))
    if debug:
        print(trainX.shape)
        print(trainY.shape)
        print(testX.shape)
        print(testY.shape)
# Training the neural net
with tf.device('/gpu:0'):
model.fit(trainX, trainY, n_epoch=10, validation_set=(testX, testY), show_metric=True, batch_size=modelBatchSize, run_id='resnet')
model.save('./model_resnet/model1')
train_count += 1
| 2.09375
| 2
|
toad/stats_test.py
|
Padfoot-ted/toad
| 1
|
12779844
|
import pytest
import numpy as np
import pandas as pd
from .stats import IV, WOE, gini, gini_cond, entropy_cond, quality, _IV, VIF
np.random.seed(1)
feature = np.random.rand(500)
target = np.random.randint(2, size = 500)
A = np.random.randint(100, size = 500)
B = np.random.randint(100, size = 500)
mask = np.random.randint(8, size = 500)
df = pd.DataFrame({
'feature': feature,
'target': target,
'A': A,
'B': B,
})
def test_woe():
value = WOE(0.2, 0.3)
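    # note (added): the expected value below is just ln(0.2 / 0.3) ≈ -0.405465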
assert value == -0.4054651081081643
def test_iv_priv():
value, _ = _IV(df['feature'], df['target'])
assert value == 0.010385942643745403
def test_iv():
value = IV(df['feature'], df['target'], n_bins = 10, method = 'dt')
assert value == 0.2735917707743619
def test_iv_return_sub():
_, sub = IV(mask, df['target'], return_sub = True, n_bins = 10, method = 'dt')
assert len(sub) == 8
assert sub[4] == 0.006449386778057019
def test_iv_frame():
res = IV(df, 'target', n_bins = 10, method = 'chi')
assert res.loc[0, 'A'] == 0.226363832867123
def test_gini():
value = gini(df['target'])
assert value == 0.499352
def test_gini_cond():
value = gini_cond(df['feature'], df['target'])
assert value == 0.4970162601626016
def test_entropy_cond():
value = entropy_cond(df['feature'], df['target'])
assert value == 0.6924990371522171
def test_quality():
result = quality(df, 'target')
assert result.loc['feature', 'iv'] == 0.2735917707743619
assert result.loc['A', 'gini'] == 0.49284164671885444
assert result.loc['B', 'entropy'] == 0.6924956879070063
assert result.loc['feature', 'unique'] == 500
def test_quality_iv_only():
result = quality(df, 'target', iv_only = True)
assert np.isnan(result.loc['feature', 'gini'])
def test_quality_object_type_array_with_nan():
feature = np.array([np.nan, 'A', 'B', 'C', 'D', 'E', 'F', 'G'], dtype = 'O')[mask]
df = pd.DataFrame({
'feature': feature,
'target': target,
})
result = quality(df)
assert result.loc['feature', 'iv'] == 0.016379338180530334
def test_vif():
vif = VIF(df)
assert vif['A'] == 2.969336442640111
| 1.984375
| 2
|
plugins/dlab_deployment/dlab_deployment/infrastructure/command_executor.py
|
mediapills/dlab
| 0
|
12779845
|
# *****************************************************************************
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# *****************************************************************************
import abc
from contextlib import contextmanager
import os
from shutil import copyfile
import subprocess
from time import sleep
from scp import SCPClient
import paramiko
import six
from dlab_core.domain.helper import break_after
@six.add_metaclass(abc.ABCMeta)
class BaseCommandExecutor(object):
@abc.abstractmethod
def run(self, command):
"""Run cli command
:type command: str
:param command: cli command
"""
raise NotImplementedError
@abc.abstractmethod
def sudo(self, command):
"""Run cli sudo command
:type command: str
:param command: cli command
"""
raise NotImplementedError
@abc.abstractmethod
def cd(self, path):
"""Change work directory to path
:type path: str
:param path: directory location
"""
raise NotImplementedError
@abc.abstractmethod
def put(self, local_path, remote_path):
"""Copy file
:type local_path: str
:param local_path: path to local object
:type remote_path: str
:param remote_path: path to remote object
"""
raise NotImplementedError
class LocalCommandExecutor(BaseCommandExecutor):
def run(self, command): # pragma: no cover
"""Run cli command
:type command: str
:param command: cli command
:rtype: str
:return execution result
"""
lines = []
process = subprocess.Popen(
command, shell=True, universal_newlines=True,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
while process.poll() is None:
line = process.stdout.readline()
lines.append(line)
# TODO: Add logging
return ' '.join(lines)
def sudo(self, command):
"""Run cli sudo command
:type command: str
:param command: cli command
:rtype: str
:return execution result
"""
raise NotImplementedError
@contextmanager
def cd(self, path):
"""Change work directory to path
:type path: str
:param path: directory location
"""
current_dir = os.getcwd()
try:
os.chdir(path)
yield
finally:
os.chdir(current_dir)
def put(self, local_path, remote_path):
"""Copy file
:type local_path: str
:param local_path: path to local object
:type remote_path: str
:param remote_path: path to remote object
"""
copyfile(local_path, remote_path)
class ParamikoCommandExecutor(BaseCommandExecutor):
def __init__(self, host, name, identity_file):
"""
:type host: str
:param host: ip address or host name
:type name: str
:param name: user name
        :type identity_file: str
:param identity_file: path to file
"""
self.current_dir = None
self._connection = None
self.host = host
self.name = name
self.identity_file = identity_file
@property
def connection(self):
"""Return paramiko connection"""
return self._connection or self.init_connection()
@break_after(180)
def init_connection(self):
"""Init connection"""
connection = paramiko.SSHClient()
connection.set_missing_host_key_policy(
paramiko.AutoAddPolicy())
while True:
try:
connection.connect(self.host, username=self.name,
key_filename=self.identity_file)
connection.exec_command('ls')
return connection
except Exception:
sleep(10)
@property
def current_dir(self):
"""Default directory"""
return self._current_dir
@current_dir.setter
def current_dir(self, val):
"""Set default directory
:type val: str
:param val: new directory
"""
self._current_dir = val
def run(self, command):
"""Run cli command
:type command: str
:param command: cli command
:rtype: str
:return execution result
"""
if self.current_dir:
command = 'cd {}; {}'.format(self.current_dir, command)
stdin, stdout, stderr = self.connection.exec_command(command)
return stdout.read().decode('ascii').strip("\n")
def sudo(self, command):
"""Run sudo cli command
:type command: str
:param command: cli command
:rtype: str
:return execution result
"""
command = 'sudo {}'.format(command)
return self.run(command)
@contextmanager
def cd(self, path):
try:
self.current_dir = path
yield
finally:
self.current_dir = None
def put(self, local_path, remote_path):
"""Copy file
:type local_path: str
:param local_path: path to local object
:type remote_path: str
:param remote_path: path to remote object
"""
scp = SCPClient(self.connection.get_transport())
scp.put(local_path, recursive=True, remote_path=remote_path)
scp.close()
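# Hedged usage sketch (not part of the original plugin); the remote host, user
# and key path below are placeholders.
if __name__ == '__main__':
    local = LocalCommandExecutor()
    with local.cd('/tmp'):
        print(local.run('ls -la'))
    # Remote variant (requires a reachable host and a valid key):
    # remote = ParamikoCommandExecutor('10.0.0.5', 'ubuntu', '~/.ssh/id_rsa')
    # print(remote.sudo('whoami'))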
| 1.84375
| 2
|
local-runner-en/replay-dump.py
|
leloykun/russian-ai-cup
| 0
|
12779846
|
<filename>local-runner-en/replay-dump.py
#pylint: disable=missing-docstring, invalid-name
import urllib
import re
import sys
import contextlib
import os
import errno
import subprocess
import zipfile
import json
RUN_PLAYER_RE = re.compile(r'''<span\s+class\s*=\s*["']?run-player["']?\s*(.*)>''')
TOKEN_RE = re.compile(r'''.*data-token\s*=\s*["']?([^'"\s]+)['"]?''')
CONTENT_LENGTH_RE = re.compile(r'Content-Length:\s*(\d+)')
CHUNK = 1 * 1024 * 1024
MAP_NAME_RE = re.compile(r'maps/([^.]+)\.map')
def readPage(url):
with contextlib.closing(urllib.urlopen(url)) as target:
headers = target.info().headers
for header in headers:
try:
size = int(CONTENT_LENGTH_RE.search(header).group(1))
except AttributeError:
continue
else:
break
else:
size = None
readBytes = 0
if size is not None and size >= CHUNK:
print 'Downloading: 00.0%',
while readBytes < size:
chunk = target.read(CHUNK)
readBytes += len(chunk)
print '\rDownloading: %4.1f%%' % (100.0 * readBytes / size),
yield chunk
else:
print 'Downloading: ',
while True:
chunk = target.read(CHUNK)
if not chunk:
break
readBytes += len(chunk)
print '\rDownloading: %0.1fM' % (readBytes / 1024.0 / 1024),
yield chunk
print '\rDownloading: done%s' % (' ' * 10)
def getReplayAddress(gameUrl):
with contextlib.closing(urllib.urlopen(gameUrl)) as gamePage:
game = gamePage.read()
runPlayerSpan = RUN_PLAYER_RE.search(game).group(1)
token = TOKEN_RE.search(runPlayerSpan).group(1)
return 'http://russianaicup.ru/boombox/data/games/%s?offset=0' % token
def downloadReplay(replayUrl, targetPath):
with open(targetPath, 'w') as out:
for chunk in readPage(replayUrl):
out.write(chunk)
def ensureReplay(gameUrl):
targetFile = os.path.join('games', gameUrl.split('/')[-1] + '.json')
if os.path.exists(targetFile):
print 'File already downloaded'
return targetFile
replayUrl = getReplayAddress(gameUrl)
try:
downloadReplay(replayUrl, targetFile)
print 'Downloaded to: %s' % targetFile
except KeyboardInterrupt:
print 'Interrupted, removing partial file'
os.remove(targetFile)
return None
return targetFile
def ensureGameUrl(gameUrl):
try:
gameNumber = int(gameUrl.strip())
except ValueError:
return gameUrl
return 'http://russianaicup.ru/game/view/%d' % gameNumber
def checkMap(replayFile):
with open(replayFile) as inp:
data = json.loads(inp.readline())
try:
mapName = data['mapName']
except KeyError:
# no maps this year, skip all the fuss
return
if mapName.endswith('.map'):
mapName = mapName[:-4]
knownMaps = set()
with zipfile.ZipFile('local-runner.jar') as localRunner:
for name in localRunner.namelist():
if MAP_NAME_RE.match(name):
knownMaps.add(MAP_NAME_RE.match(name).group(1))
if mapName not in knownMaps:
print 'unknown map'
unpackMap(mapName, data)
TILE_NAMES = 'EMPTY VERTICAL HORIZONTAL LEFT_TOP_CORNER RIGHT_TOP_CORNER LEFT_BOTTOM_CORNER RIGHT_BOTTOM_CORNER LEFT_HEADED_T RIGHT_HEADED_T TOP_HEADED_T BOTTOM_HEADED_T CROSSROADS'.split()
TILE_CHARS = u'\u2588 \u2551 \u2550 \u2554 \u2557 \u255a \u255d \u2563 \u2560 \u2569 \u2566 \u256c'.split()
def unpackMap(mapName, data):
with open('%s.map' % mapName, 'w') as mapFile:
mapFile.write('%s %s\n' % (data['width'], data['height']))
for y in xrange(int(data['height'])):
for x in xrange(int(data['width'])):
tile = data['tilesXY'][x][y]
char = TILE_CHARS[TILE_NAMES.index(tile)]
mapFile.write(char.encode('utf8'))
mapFile.write('\n')
mapFile.write('%s\n' % len(data['waypoints']))
for wx, wy in data['waypoints']:
mapFile.write('%s %s\n' % (wx, wy))
mapFile.write('%s\n' % data['startingDirection'])
def main(gameUrl):
os.chdir(os.path.dirname(os.path.abspath(__file__)))
try:
os.makedirs('games')
except OSError as ex:
if ex.errno != errno.EEXIST:
raise
replayFile = ensureReplay(gameUrl)
if not replayFile:
sys.exit('Cannot run replay')
props = []
with open('local-runner-replay.properties', 'r') as inp:
for line in inp:
if line.startswith('replay-file='):
props.append('replay-file=%s\n' % replayFile.replace('\\', '/'))
else:
props.append(line)
with open('local-runner-replay.properties', 'w') as out:
out.write(''.join(props))
checkMap(replayFile)
subprocess.check_call(['java', '-jar', "local-runner.jar", 'local-runner-replay.properties'], shell=False)
if __name__ == '__main__':
if len(sys.argv) != 2:
sys.exit('Usage: %s game-url-or-number' % sys.argv[0])
main(ensureGameUrl(sys.argv[1]))
| 2.4375
| 2
|
group_admin/files/group-admin-monthly-report.py
|
hpfilho/FlickrTasks
| 1
|
12779847
|
<reponame>hpfilho/FlickrTasks
#!/usr/bin/python3
# This generates a report with photos that need to be removed
# from or kept in a group. It is useful for groups based on cameras,
# lenses or anything EXIF related.
#
# Author: <NAME>
# Date : Jan 01, 2018
#++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
####################################################
# !!!DO NOT MODIFY THIS FILE!!! #
# Implement the procedures in file procs.py #
# Include the rules in file group_data.py #
####################################################
import flickrapi
import api_credentials
import json
import time
import group_data
import procs
#===== CONSTANTS =================================#
api_key = api_credentials.api_key
api_secret = api_credentials.api_secret
user_id = api_credentials.user_id
group_url = group_data.group_url
group_alias = group_data.group_alias
lens_models = group_data.lens_models
focal_lengths = group_data.focal_lengths
# Flickr api access
flickr = flickrapi.FlickrAPI(api_key, api_secret, format='parsed-json')
#===== MAIN CODE ==============================================================#
# get group id and name from group url
group_id = flickr.urls.lookupGroup(api_key=api_key, url=group_data.group_url)['group']['id']
group_name = flickr.groups.getInfo(group_id=group_id)['group']['name']['_content']
# get photos from group pool
pool = flickr.groups.pools.getPhotos(api_key=api_key, group_id=group_id)
total_of_photos = int(pool['photos']['total'])
number_of_pages = int(pool['photos']['pages'])
photos_per_page = int(pool['photos']['perpage'])
# set output files names
report_file_name = '/home/pi/flickr_tasks/group_admin/{0}/{1}.photos.admin.monthly.txt'.format(group_alias, group_name).replace(' ','_')
remove_file_name = '/home/pi/flickr_tasks/group_admin/{0}/remove-photos.py'.format(group_alias)
review_file_name = '/home/pi/flickr_tasks/group_admin/{0}/review-photos.txt'.format(group_alias)
html_file_name = '/home/pi/github/hpfilho.github.io/reports/{0}.monthly.html'.format(group_name).replace(' ','_')
# create and add header to report file
procs.addReportHeader(report_file_name, html_file_name, group_name, total_of_photos)
# create remove script
procs.createRemoveScript(remove_file_name)
# iterate over each pool page
for page_number in range(1, number_of_pages+1):
pool = flickr.groups.pools.getPhotos(api_key=api_key, group_id=group_id, page=page_number)
photos_per_page = len(pool['photos']['photo'])
# add header to photos page
procs.addPageHeader(report_file_name, html_file_name, page_number, number_of_pages, photos_per_page)
# iterate over each photo in page
for photo_number in range(photos_per_page):
# add photo to report with action to be performed
# add also to remove script in case should be removed
procs.addPhoto(report_file_name, html_file_name, remove_file_name, pool, page_number, photo_number)
# add page footer
procs.addPageFooter(report_file_name, html_file_name)
procs.addLastRemoveRunProcedure(remove_file_name, group_id)
##### MEMBERS #####
# get members from group
members = flickr.groups.members.getList(api_key=api_key, group_id=group_id)
total_of_members = int(members['members']['total'])
number_of_pages_ = int(members['members']['pages'])
members_per_page = int(members['members']['perpage'])
# set output files names
current_members_file_name = '/home/pi/flickr_tasks/group_admin/{0}/{1}.members.current.txt'.format(group_alias, group_name).replace(' ','_')
new_members_file_name = '/home/pi/flickr_tasks/group_admin/{0}/{1}.members.new.txt'.format(group_alias, group_name).replace(' ','_')
# read the current members file
try:
last_members = open(current_members_file_name, "r")
last_members_list = last_members.readlines()
last_members.close()
except (IOError, OSError):
last_members_list = []
# create the list for current members and open the file for writing
current_members_list = []
current_members_file = open(current_members_file_name, "w")
# iterate over each members page
for page_number in range(1, number_of_pages_+1):
members = flickr.groups.members.getList(api_key=api_key, group_id=group_id, page=page_number, per_page=members_per_page)
# iterate over each member in page
for member_number in range(members_per_page):
try:
member_name = members['members']['member'][member_number]['username']
# add member to list and write to file
current_members_list.append(member_name)
current_members_file.write("{0}\n".format(member_name))
except:
pass
# close the current members file
current_members_file.close()
# open the new members file for writing
new_members = open(new_members_file_name, "w")
# strip the newline character from last members list
last_members_list = [m.strip("\n") for m in last_members_list]
# for each current member,
# if member is not in the last members list
# add it to the new members file
for member in current_members_list:
if member not in last_members_list:
new_members.write("{0}\n".format(member))
# close the new members file
new_members.close()
| 2.40625
| 2
|
HackerRank/Mathematics/Fundamentals/Even_Odd_Query.py
|
AdityaChirravuri/CompetitiveProgramming
| 1
|
12779848
|
<reponame>AdityaChirravuri/CompetitiveProgramming<filename>HackerRank/Mathematics/Fundamentals/Even_Odd_Query.py
#!/bin/python3
import os
import sys
# Complete the solve function below.
def solve(arr, queries):
result = []
for i,j in queries:
if(i<len(arr) and arr[i]==0 and i!=j):
result.append('Odd')
else:
if(arr[i-1]%2==0):
result.append('Even')
else:
result.append('Odd')
return result
if __name__ == '__main__':
fptr = open(os.environ['OUTPUT_PATH'], 'w')
arr_count = int(input())
arr = list(map(int, input().rstrip().split()))
q = int(input())
queries = []
for _ in range(q):
queries.append(list(map(int, input().rstrip().split())))
result = solve(arr, queries)
fptr.write('\n'.join(result))
fptr.write('\n')
fptr.close()
| 4.0625
| 4
|
acq4/modules/MultiPatch/pipetteTemplate_pyqt5.py
|
aleonlein/acq4
| 1
|
12779849
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'acq4/modules/MultiPatch/pipetteTemplate.ui'
#
# Created by: PyQt5 UI code generator 5.8.2
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_PipetteControl(object):
def setupUi(self, PipetteControl):
PipetteControl.setObjectName("PipetteControl")
PipetteControl.resize(333, 75)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(PipetteControl.sizePolicy().hasHeightForWidth())
PipetteControl.setSizePolicy(sizePolicy)
self.gridLayout = QtWidgets.QGridLayout(PipetteControl)
self.gridLayout.setContentsMargins(0, 0, 0, 0)
self.gridLayout.setSpacing(3)
self.gridLayout.setObjectName("gridLayout")
self.targetBtn = QtWidgets.QPushButton(PipetteControl)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.targetBtn.sizePolicy().hasHeightForWidth())
self.targetBtn.setSizePolicy(sizePolicy)
        self.targetBtn.setMaximumSize(QtCore.QSize(40, 16777215))
self.targetBtn.setObjectName("targetBtn")
self.gridLayout.addWidget(self.targetBtn, 1, 4, 1, 1)
self.stateCombo = QtWidgets.QComboBox(PipetteControl)
self.stateCombo.setObjectName("stateCombo")
self.stateCombo.addItem("")
self.stateCombo.addItem("")
self.stateCombo.addItem("")
self.stateCombo.addItem("")
self.stateCombo.addItem("")
self.stateCombo.addItem("")
self.stateCombo.addItem("")
self.stateCombo.addItem("")
self.gridLayout.addWidget(self.stateCombo, 0, 3, 1, 2)
self.plotLayoutWidget = QtWidgets.QWidget(PipetteControl)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.plotLayoutWidget.sizePolicy().hasHeightForWidth())
self.plotLayoutWidget.setSizePolicy(sizePolicy)
self.plotLayoutWidget.setObjectName("plotLayoutWidget")
self.plotLayout = QtWidgets.QHBoxLayout(self.plotLayoutWidget)
self.plotLayout.setContentsMargins(0, 0, 0, 0)
self.plotLayout.setSpacing(0)
self.plotLayout.setObjectName("plotLayout")
self.gridLayout.addWidget(self.plotLayoutWidget, 0, 5, 4, 1)
self.selectBtn = QtWidgets.QPushButton(PipetteControl)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.selectBtn.sizePolicy().hasHeightForWidth())
self.selectBtn.setSizePolicy(sizePolicy)
        self.selectBtn.setMaximumSize(QtCore.QSize(30, 16777215))
        font = QtGui.QFont()
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
self.selectBtn.setFont(font)
self.selectBtn.setCheckable(True)
self.selectBtn.setObjectName("selectBtn")
self.gridLayout.addWidget(self.selectBtn, 0, 0, 4, 1)
self.tipBtn = QtWidgets.QPushButton(PipetteControl)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.tipBtn.sizePolicy().hasHeightForWidth())
self.tipBtn.setSizePolicy(sizePolicy)
        self.tipBtn.setMaximumSize(QtCore.QSize(40, 16777215))
self.tipBtn.setObjectName("tipBtn")
self.gridLayout.addWidget(self.tipBtn, 2, 4, 1, 1)
self.soloBtn = QtWidgets.QPushButton(PipetteControl)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.soloBtn.sizePolicy().hasHeightForWidth())
self.soloBtn.setSizePolicy(sizePolicy)
        self.soloBtn.setMaximumSize(QtCore.QSize(30, 16777215))
self.soloBtn.setCheckable(True)
self.soloBtn.setObjectName("soloBtn")
self.gridLayout.addWidget(self.soloBtn, 2, 3, 1, 1)
self.lockBtn = QtWidgets.QPushButton(PipetteControl)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.lockBtn.sizePolicy().hasHeightForWidth())
self.lockBtn.setSizePolicy(sizePolicy)
        self.lockBtn.setMaximumSize(QtCore.QSize(30, 16777215))
self.lockBtn.setCheckable(True)
self.lockBtn.setObjectName("lockBtn")
self.gridLayout.addWidget(self.lockBtn, 1, 3, 1, 1)
self.retranslateUi(PipetteControl)
        QtCore.QMetaObject.connectSlotsByName(PipetteControl)
def retranslateUi(self, PipetteControl):
        _translate = QtCore.QCoreApplication.translate
PipetteControl.setWindowTitle(_translate("PipetteControl", "Form"))
self.targetBtn.setText(_translate("PipetteControl", "target"))
self.stateCombo.setItemText(0, _translate("PipetteControl", "out"))
self.stateCombo.setItemText(1, _translate("PipetteControl", "bath"))
self.stateCombo.setItemText(2, _translate("PipetteControl", "approach"))
self.stateCombo.setItemText(3, _translate("PipetteControl", "seal"))
self.stateCombo.setItemText(4, _translate("PipetteControl", "attached"))
self.stateCombo.setItemText(5, _translate("PipetteControl", "break in"))
self.stateCombo.setItemText(6, _translate("PipetteControl", "whole cell"))
self.stateCombo.setItemText(7, _translate("PipetteControl", "outside-out"))
self.selectBtn.setText(_translate("PipetteControl", "1"))
self.tipBtn.setText(_translate("PipetteControl", "tip"))
self.soloBtn.setText(_translate("PipetteControl", "Solo"))
self.lockBtn.setText(_translate("PipetteControl", "Lock"))
| 1.6875
| 2
|
qihui/data_processing/decode_preprocessing.py
|
Nelsonvon/transformers
| 0
|
12779850
|
import sys
import string
import datetime
import logging
from nltk.tokenize import word_tokenize
from transformers import BertTokenizer
logger = logging.getLogger(__name__)
def decode_preprocessing(dataset, output_file, tokenizer, max_len, mode):
with open(dataset, 'r') as fin:
input_lines = fin.readlines()
starttime = datetime.datetime.now()
if mode == 'sent_line':
lines = []
for line in input_lines:
tok_line = word_tokenize(line.strip())
lines += [token + ' O\n' for token in tok_line] + ['\n']
else:
lines = input_lines
subword_len_counter = 0
last_punc_buffer = ""
output = ""
for line in lines:
line_copy = line
line = line.rstrip()
if not line:
# print(line)
output += line + '\n'
last_punc_buffer = ""
subword_len_counter = 0
continue
token = line.split()[0]
current_subwords_len = len(tokenizer.tokenize(token))
# Token contains strange control characters like \x96 or \x95
# Just filter out the complete line
if current_subwords_len == 0:
continue
if all(char in string.punctuation for char in token) and line.split()[1] == 'O':
last_punc_buffer = ""
else:
last_punc_buffer += line_copy
if (subword_len_counter + current_subwords_len) > max_len:
# print("")
output += '\n'
# print(last_punc_buffer.rstrip())
output += last_punc_buffer.rstrip() + '\n'
subword_len_counter = len(last_punc_buffer.split('\n'))
last_punc_buffer = ""
continue
subword_len_counter += current_subwords_len
# print(line)
output += line + '\n'
endtime = datetime.datetime.now()
duration = (endtime-starttime).total_seconds()
logger.info(duration)
with open(output_file, 'w') as fout:
fout.write(output)
return
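# Hedged usage sketch (not part of the original module); file names and the
# model name are placeholders, and 'sent_line' means one raw sentence per line.
if __name__ == "__main__":
    bert_tokenizer = BertTokenizer.from_pretrained("bert-base-cased")
    decode_preprocessing(
        dataset="input.txt",             # placeholder input path
        output_file="preprocessed.txt",  # placeholder output path
        tokenizer=bert_tokenizer,
        max_len=128,
        mode="sent_line",
    )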
| 2.890625
| 3
|