| hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 248 | max_stars_repo_name stringlengths 5 125 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 248 | max_issues_repo_name stringlengths 5 125 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 248 | max_forks_repo_name stringlengths 5 125 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 5 2.06M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.03M | alphanum_fraction float64 0 1 | count_classes int64 0 1.6M | score_classes float64 0 1 | count_generators int64 0 651k | score_generators float64 0 1 | count_decorators int64 0 990k | score_decorators float64 0 1 | count_async_functions int64 0 235k | score_async_functions float64 0 1 | count_documentation int64 0 1.04M | score_documentation float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
c43704aafbacbc4c468d75623400e2f129cb8ef2 | 6,544 | py | Python | panku/lambdaCollect.py | mccartney/panku-gdzie-jestes | 50a677170162c5255a24eacdbf8062ad876bee3f | ["MIT"] | null | null | null | panku/lambdaCollect.py | mccartney/panku-gdzie-jestes | 50a677170162c5255a24eacdbf8062ad876bee3f | ["MIT"] | null | null | null | panku/lambdaCollect.py | mccartney/panku-gdzie-jestes | 50a677170162c5255a24eacdbf8062ad876bee3f | ["MIT"] | null | null | null |
#!/usr/bin/python
import requests
import boto3
import time
import geopy.distance
import xml.etree.ElementTree as ET
import itertools
import sys
import pickle
S3_BUCKET = "panku-gdzie-jestes-latest-storage"
class LatestPositionStorage(object):
def __init__(self, service):
self.objectName = "%s.latest" % service
def getLatestPositionsForService(self):
s3 = boto3.resource('s3')
try:
obj = s3.Object(S3_BUCKET, self.objectName)
ret = pickle.loads(obj.get()['Body'].read())
print("Read %d positions from S3" % len(ret))
return ret
except:
print("Unexpected error:", sys.exc_info())
return {}
def saveLatestPositionsForService(self, positions):
s3 = boto3.resource('s3')
print("Saving %d positions to S3" % (len(positions)))
pickle_byte_obj = pickle.dumps(positions)
s3.Object(S3_BUCKET, self.objectName).put(Body=pickle_byte_obj)
class Service(object):
def getSecretName(self):
pass
def getCredentials(self):
secret_name = self.getSecretName()
region_name = "eu-west-1"
session = boto3.session.Session()
client = session.client(service_name='secretsmanager', region_name=region_name)
get_secret_value_response = client.get_secret_value(SecretId=secret_name)
return get_secret_value_response["SecretString"].replace('"', '').replace("{","").replace("}", "").split(":")
def identifierPerRegistration(self, registration):
pass
def serviceId(self):
# TODO: reverse it, let subclasses override this, and implement `identifierPerRegistration` here in the superclass
return self.identifierPerRegistration("").strip()
def getLatestPositions(self):
return LatestPositionStorage(self.serviceId()).getLatestPositionsForService()
def saveLatestPositions(self, positions):
return LatestPositionStorage(self.serviceId()).saveLatestPositionsForService(positions)
def saveLocations(self, cars):
now = int(time.time())
latestPositions = self.getLatestPositions()
newPositions = latestPositions.copy()
table = boto3.resource('dynamodb', region_name='eu-west-1').Table('cars')
dynamodb = boto3.client('dynamodb', region_name='eu-west-1')
for (registration, position) in cars:
key = self.identifierPerRegistration(registration)
latestPosition = latestPositions.get(key)
shouldAdd = True
existedBefore = latestPosition is not None
if existedBefore:
prevPosition = (latestPosition['long'], latestPosition['lat'])
currentPosition = (position['lng'], position['lat'])
distance = geopy.distance.vincenty(prevPosition, currentPosition).km
if distance < 0.1:
shouldAdd = False
if shouldAdd:
print("%s moved" % key)
if existedBefore:
r = table.put_item(Item = {'carId' : key, 'date' : now-1,'long': prevPosition[0], 'lat': prevPosition[1]})
r = table.put_item(Item = {'carId' : key, 'date' : now, 'long': "%8.6f" % position['lng'], 'lat': "%8.6f" % position['lat']})
newPositions[key] = {'long': "%8.6f" % position['lng'], 'lat': "%8.6f" % position['lat']}
self.saveLatestPositions(newPositions)
def getAndSaveLocations(self):
self.saveLocations(self.getLocations())
class Panek(Service):
def getSecretName(self):
return "panek/login"
def identifierPerRegistration(self, registration):
return "PANEK " + registration
def getLocations(self):
s = requests.Session()
s.headers.update({"User-Agent":"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.13; rv:64.0) Gecko/20100101 Firefox/64.0",
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
"Accept-Language": "en-GB,en;q=0.7,en-US;q=0.3",
"Upgrade-Insecure-Requests": "1",
"DNT": "1",
})
s.get("https://panel.panekcs.pl/security/login")
username, password = self.getCredentials()
r = s.post(url = "https://panel.panekcs.pl/security/login", data={"UserName": username, "Password": password})
assert r.status_code == 200
r = s.post(url = "https://panel.panekcs.pl/Home/GetLocations", data = {})
assert r.status_code == 200
locations = r.json()
# Under Vehicles: [u'Category', u'FuelRange', u'Ids', u'Coordinates', u'RegistrationNumber', u'Fuel']
count = len(locations['Vehicles']['Ids'])
coordinates = locations['Vehicles']['Coordinates']
registrations = locations['Vehicles']['RegistrationNumber']
return zip(registrations, coordinates)
class Veturilo(Service):
def identifierPerRegistration(self, registration):
return "VETURILO " + registration
def getLocations(self):
s = requests.Session()
s.headers.update({"User-Agent":"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.13; rv:64.0) Gecko/20100101 Firefox/64.0",
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
"Accept-Language": "en-GB,en;q=0.7,en-US;q=0.3",
"Upgrade-Insecure-Requests": "1",
"DNT": "1",
})
r = s.get("https://nextbike.net/maps/nextbike-official.xml?city=372,210,475")
assert r.status_code == 200
root = ET.fromstring(r.content)
ret = []
for place in root.findall(".//place"):
for bike in place:
ret.append((bike.get("number"), {"lng" : float(place.get("lng")), "lat": float(place.get("lat"))}))
return ret
class Traficar(Service):
def identifierPerRegistration(self, registration):
return "TRAFICAR " + registration
def getLocations(self):
s = requests.Session()
s.headers.update({"User-Agent":"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.13; rv:64.0) Gecko/20100101 Firefox/64.0",
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
"Accept-Language": "en-GB,en;q=0.7,en-US;q=0.3",
"Upgrade-Insecure-Requests": "1",
"DNT": "1",
})
r = s.get("https://api.traficar.pl/eaw-rest-api/car?shapeId=2")
data = r.json()
return [(car['regNumber'], {"lng": car['longitude'], "lat":car['latitude']}) for car in data['cars']]
def lambda_handler(event, context):
services = [Traficar, Veturilo, Panek]
for service in services:
print("==== Service %s" % service)
service().getAndSaveLocations()
return "OK"
| 38.046512 | 137 | 0.636461 | 6,134 | 0.937347 | 0 | 0 | 0 | 0 | 0 | 0 | 1,850 | 0.282702 |
c4372286ca07457197e0279205b6dabde1342c8d | 1,412 | py | Python | data/migrations/0039_2_data_update_questionnaires_vmsettings.py | Duke-GCB/bespin-api | cea5c20fb2ff592adabe6ebb7ca934939aa11a34 | ["MIT"] | null | null | null | data/migrations/0039_2_data_update_questionnaires_vmsettings.py | Duke-GCB/bespin-api | cea5c20fb2ff592adabe6ebb7ca934939aa11a34 | ["MIT"] | 137 | 2016-12-09T18:59:45.000Z | 2021-06-10T18:55:47.000Z | data/migrations/0039_2_data_update_questionnaires_vmsettings.py | Duke-GCB/bespin-api | cea5c20fb2ff592adabe6ebb7ca934939aa11a34 | ["MIT"] | 3 | 2017-11-14T16:05:58.000Z | 2018-12-28T18:07:43.000Z |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2017-12-08 18:42
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
def update_questionnaires(apps, schema_editor):
"""
Forward migration function to normalize settings into VMSettings and CloudSettings models
:param apps: Django apps
:param schema_editor: unused
:return: None
"""
VMSettings = apps.get_model("data", "VMSettings")
CloudSettings = apps.get_model("data", "CloudSettings")
JobQuestionnaire = apps.get_model("data", "JobQuestionnaire")
Job = apps.get_model("data", "Job")
for q in JobQuestionnaire.objects.all():
# Create a cloud settings object with the VM project from the questionnaire.
# Object initially just has the project name as its name
cloud_settings, _ = CloudSettings.objects.get_or_create(name=q.vm_project.name, vm_project=q.vm_project)
vm_settings, _ = VMSettings.objects.get_or_create(name=q.vm_project.name, cloud_settings=cloud_settings)
q.vm_settings = vm_settings
q.save()
class Migration(migrations.Migration):
dependencies = [
('data', '0039_1_schema_add_questionnare_vmsettings'),
]
operations = [
# Populate VMSettings and CloudSettings objects from JobQuestionnaire
migrations.RunPython(update_questionnaires),
]
| 36.205128 | 112 | 0.71813 | 287 | 0.203258 | 0 | 0 | 0 | 0 | 0 | 0 | 581 | 0.411473 |
c438178586df87a3168fc1363cc17cdd53b3728e | 4,872 | py | Python | app/models.py | maxnovais/Flapy_Blog | e543faa4c8f99ef3a2cdb1470de507d9cfb330bf | ["Apache-2.0"] | null | null | null | app/models.py | maxnovais/Flapy_Blog | e543faa4c8f99ef3a2cdb1470de507d9cfb330bf | ["Apache-2.0"] | null | null | null | app/models.py | maxnovais/Flapy_Blog | e543faa4c8f99ef3a2cdb1470de507d9cfb330bf | ["Apache-2.0"] | null | null | null |
from datetime import datetime
from . import db
from config import COMMENTS_INITIAL_ENABLED
from flask.ext.security import UserMixin, RoleMixin
from markdown import markdown
import bleach
# Define models
roles_users = db.Table(
'roles_users',
db.Column('user_id', db.Integer(), db.ForeignKey('user.id')),
db.Column('role_id', db.Integer(), db.ForeignKey('role.id')))
class Role(db.Model, RoleMixin):
id = db.Column(db.Integer(), primary_key=True)
name = db.Column(db.String(80), unique=True)
description = db.Column(db.String(255))
def __repr__(self):
return '<Role %r>' % self.name
class User(db.Model, UserMixin):
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String(255), unique=True)
password = db.Column(db.String(255))
first_name = db.Column(db.String(255))
last_name = db.Column(db.String(255))
about = db.Column(db.Text)
about_html = db.Column(db.Text)
location = db.Column(db.String(255))
active = db.Column(db.Boolean())
confirmed_at = db.Column(db.DateTime())
roles = db.relationship('Role', secondary=roles_users,
backref=db.backref('users', lazy='dynamic'))
last_login_at = db.Column(db.DateTime())
current_login_at = db.Column(db.DateTime())
last_login_ip = db.Column(db.String(40))
current_login_ip = db.Column(db.String(40))
login_count = db.Column(db.Integer())
objects = db.relationship('Object', backref='author', lazy='dynamic')
def __repr__(self):
return '<User %r>' % self.email
@staticmethod
def on_changed_body(target, value, oldvalue, initiator):
allowed_tags = ['a', 'abbr', 'acronym', 'b', 'blockquote', 'code',
'em', 'i', 'li', 'ol', 'pre', 'strong', 'ul',
'h1', 'h2', 'h3', 'h4', 'h5', 'hr', 'p']
target.about_html = bleach.linkify(bleach.clean(
markdown(value, output_format='html'),
tags=allowed_tags, strip=True))
db.event.listen(User.about, 'set', User.on_changed_body)
objects_tags = db.Table(
'object_tags',
db.Column('object_id', db.Integer, db.ForeignKey('object.id')),
db.Column('tag_id', db.Integer, db.ForeignKey('tag.id')))
class Tag(db.Model):
id = db.Column(db.Integer(), primary_key=True)
name = db.Column(db.String(80), unique=True)
created_on = db.Column(db.DateTime, index=True, default=datetime.now)
def __init__(self, name):
self.name = name
def __repr__(self):
return '<Tag %r>' % self.name
class Object(db.Model):
id = db.Column(db.Integer(), primary_key=True)
object_type = db.Column(db.String(30))
title = db.Column(db.String(100), unique=True)
slug_title = db.Column(db.String(255), unique=True)
headline = db.Column(db.String(255))
body = db.Column(db.Text)
body_html = db.Column(db.Text)
created_on = db.Column(db.DateTime, index=True, default=datetime.now)
last_update = db.Column(db.DateTime, index=True)
enabled = db.Column(db.Boolean, default=True)
author_id = db.Column(db.Integer, db.ForeignKey('user.id'))
comments = db.relationship('Comment', backref='object', lazy='dynamic')
tags = db.relationship('Tag', secondary=objects_tags,
backref=db.backref('object', lazy='dynamic'))
@staticmethod
def on_changed_body(target, value, oldvalue, initiator):
allowed_tags = ['a', 'abbr', 'acronym', 'b', 'blockquote', 'code',
'em', 'i', 'li', 'ol', 'pre', 'strong', 'ul',
'h1', 'h2', 'h3', 'h4', 'h5', 'hr', 'p']
target.body_html = bleach.linkify(bleach.clean(
markdown(value, output_format='html'),
tags=allowed_tags, strip=True))
def __repr__(self):
return '<Page %r, Tags %r>' % (self.title, self.tags)
db.event.listen(Object.body, 'set', Object.on_changed_body)
class Comment(db.Model):
id = db.Column(db.Integer(), primary_key=True)
name = db.Column(db.String(255))
email = db.Column(db.String(255))
publish_email = db.Column(db.Boolean)
body = db.Column(db.Text)
body_html = db.Column(db.Text)
created_on = db.Column(db.DateTime, index=True, default=datetime.now)
enabled = db.Column(db.Boolean, default=COMMENTS_INITIAL_ENABLED)
object_id = db.Column(db.Integer, db.ForeignKey('object.id'))
def __repr__(self):
return '<Comment %r>' % (self.name)
@staticmethod
def on_changed_body(target, value, oldvalue, initiator):
allowed_tags = ['a', 'b', 'blockquote', 'code', 'strong', 'i']
target.body_html = bleach.linkify(bleach.clean(
markdown(value, output_format='html'),
tags=allowed_tags, strip=True))
db.event.listen(Comment.body, 'set', Comment.on_changed_body)
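# A minimal, hypothetical sketch (not part of the original module): the same
# markdown -> sanitized-HTML step used by the on_changed_body listeners above,
# shown on its own; bleach and markdown are already imported at the top of this file.
if __name__ == "__main__":
    demo_tags = ['a', 'b', 'blockquote', 'code', 'strong', 'i']
    raw = "**bold** text with a <script>alert(1)</script> tag"
    rendered = bleach.linkify(bleach.clean(
        markdown(raw, output_format='html'),
        tags=demo_tags, strip=True))
    print(rendered)  # markup outside the whitelist (including the script tag) is stripped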
| 36.909091 | 77 | 0.632389 | 4,118 | 0.845238 | 0 | 0 | 1,199 | 0.2461 | 0 | 0 | 560 | 0.114943 |
c43aafbe58eb02eba9cd936508eecb607d118824 | 751 | py | Python | 8.1-triple-step.py | rithvikp1998/ctci | 52068e94449e61aef6bac9646a7863260acc7a05 | ["MIT"] | null | null | null | 8.1-triple-step.py | rithvikp1998/ctci | 52068e94449e61aef6bac9646a7863260acc7a05 | ["MIT"] | null | null | null | 8.1-triple-step.py | rithvikp1998/ctci | 52068e94449e61aef6bac9646a7863260acc7a05 | ["MIT"] | null | null | null |
'''
If the child is currently on the nth step,
then there are three possibilities as to how
it reached there:
1. Reached the (n-3)th step and hopped 3 steps at once
2. Reached the (n-2)th step and hopped 2 steps at once
3. Reached the (n-1)th step and hopped 1 step at once
The total number of possibilities is the sum of these 3
'''
def count_possibilities(n, store):
if store[n]!=0:
return
count_possibilities(n-1, store)
count_possibilities(n-2, store)
count_possibilities(n-3, store)
store[n]=store[n-1]+store[n-2]+store[n-3]
n=int(input())
store=[0 for i in range(n+1)] # Stores the number of possibilities for every i <= n
store[0]=0
store[1]=1
store[2]=2
store[3]=4
count_possibilities(n, store)
print(store[n])
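# A hypothetical sketch (not part of the original solution): the same recurrence
# computed iteratively, as a quick sanity check of the base values above.
def count_possibilities_iterative(n):
    ways = [0, 1, 2, 4] + [0] * max(0, n - 3)
    for i in range(4, n + 1):
        ways[i] = ways[i - 1] + ways[i - 2] + ways[i - 3]
    return ways[n]

assert count_possibilities_iterative(4) == 7    # 4 + 2 + 1
assert count_possibilities_iterative(5) == 13   # 7 + 4 + 2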
| 25.896552 | 79 | 0.701731 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 384 | 0.511318 |
c43e6b9c823f200efcc9e2b9380194f0c4a67a27 | 9,604 | py | Python | terrain_relative_navigation/peak_extractor_algorithm.py | rschwa6308/Landmark-Based-TRN | 5d712221138ec6250ed5bd19caed49810f17014e | ["Apache-2.0"] | null | null | null | terrain_relative_navigation/peak_extractor_algorithm.py | rschwa6308/Landmark-Based-TRN | 5d712221138ec6250ed5bd19caed49810f17014e | ["Apache-2.0"] | null | null | null | terrain_relative_navigation/peak_extractor_algorithm.py | rschwa6308/Landmark-Based-TRN | 5d712221138ec6250ed5bd19caed49810f17014e | ["Apache-2.0"] | null | null | null |
# -*- coding: utf-8 -*-
"""
/***************************************************************************
PeakExtractor
A QGIS plugin
This plugin procedurally extracts morphological peaks from a given DEM.
Generated by Plugin Builder: http://g-sherman.github.io/Qgis-Plugin-Builder/
-------------------
begin : 2021-03-10
copyright : (C) 2021 by NASA JPL
email : russells@jpl.nasa.gov
***************************************************************************/
"""
__author__ = "NASA JPL"
__date__ = "2021-03-10"
__copyright__ = "(C) 2021 by NASA JPL"
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = "$Format:%H$"
from qgis.PyQt.QtCore import QCoreApplication
from qgis.core import (QgsProcessing,
QgsProcessingAlgorithm,
QgsProcessingParameterRasterLayer,
QgsProcessingParameterNumber,
QgsProcessingParameterFeatureSink,
QgsFields,
QgsWkbTypes)
import processing
# import grass.script as grass
import math
def round_up_to_odd(x: float) -> int:
"""round the given float up to the nearest odd integer"""
n = math.ceil(x)
return n + (1 - n%2)
class PeakExtractorAlgorithm(QgsProcessingAlgorithm):
"""
Processing algorithm that procedurally extracts morphological peaks
from a given DEM. It follows the standard QGIS Plugin Builder
template, so apart from the methods implemented below there is no
need for additional work.
All Processing algorithms should extend the QgsProcessingAlgorithm
class.
"""
# Constants used to refer to parameters and outputs. They will be
# used when calling the algorithm from another algorithm, or when
# calling from the QGIS console.
INPUT = "INPUT"
ANALYSIS_WINDOW_SIZE = "ANALYSIS_WINDOW_SIZE"
PEAK_SPACING = "PEAK_SPACING"
OUTPUT = "OUTPUT"
def initAlgorithm(self, config):
"""
Here we define the inputs and output of the algorithm, along
with some other properties.
"""
# Elevation Map
self.addParameter(
QgsProcessingParameterRasterLayer(
self.INPUT,
self.tr("DEM"),
)
)
self.addParameter(
QgsProcessingParameterNumber(
self.ANALYSIS_WINDOW_SIZE,
self.tr("Size of analysis window, meters"),
QgsProcessingParameterNumber.Double,
defaultValue=500.0
),
)
self.addParameter(
QgsProcessingParameterNumber(
self.PEAK_SPACING,
self.tr("Minimum distance between distinct peaks, meters"),
QgsProcessingParameterNumber.Double,
defaultValue=100.0
)
)
self.addParameter(
QgsProcessingParameterFeatureSink(
self.OUTPUT,
self.tr("Extracted Peaks")
)
)
def processAlgorithm(self, parameters, context, feedback):
"""
Here is where the processing itself takes place.
"""
dem = self.parameterAsRasterLayer(parameters, self.INPUT, context)
x_size, y_size = dem.rasterUnitsPerPixelX(), dem.rasterUnitsPerPixelY()
window_size_meters = self.parameterAsDouble(parameters, self.ANALYSIS_WINDOW_SIZE, context)
window_size_pixels = round_up_to_odd(window_size_meters / x_size) # grass requires a "center" pixel
feedback.pushInfo(f"Using analysis window of size {window_size_pixels}px")
if window_size_pixels <= 1:
raise ValueError(f"Analysis window must be >1px per side (given DEM has pixel size {x_size}m x {y_size}m)")
dem_size = dem.width() * dem.height()
if window_size_pixels >= 50 and dem_size >= 10**6:
feedback.pushInfo("WARNING: large raster + large analysis window can be extremely slow. Consider downsampling the DEM first.")
feedback.pushInfo("Classifying terrain. . .")
morpho_param_layer_name = processing.run(
"grass7:r.param.scale",
{
"input": parameters[self.INPUT],
"size": window_size_pixels,
'method' : 9, # 'feature'
"output": QgsProcessing.TEMPORARY_OUTPUT,
# --- defaults ---
'-c' : False,
'GRASS_RASTER_FORMAT_META': '',
'GRASS_RASTER_FORMAT_OPT': '',
'GRASS_REGION_CELLSIZE_PARAMETER': 0,
'GRASS_REGION_PARAMETER': None,
'curvature_tolerance' : 0.0001,
'exponent' : 0,
'slope_tolerance': 1,
'zscale': 1
},
context=context, feedback=feedback, is_child_algorithm=True
)["output"]
if feedback.isCanceled(): return {}
feedback.pushInfo("Vectorizing. . .")
polygons_layer_name = processing.run(
"native:pixelstopolygons",
{
"INPUT_RASTER": morpho_param_layer_name,
"FIELD_NAME" : "VALUE",
"RASTER_BAND": 1,
"OUTPUT": QgsProcessing.TEMPORARY_OUTPUT
},
context=context, feedback=feedback, is_child_algorithm=True
)["OUTPUT"]
if feedback.isCanceled(): return {}
feedback.pushInfo("Extracting peak pixels. . .")
filtered_polygons_layer_name = processing.run(
"native:extractbyattribute",
{
"INPUT": polygons_layer_name,
"FIELD": "VALUE",
"OPERATOR": 0, # '='
"VALUE": 6, # peaks
"OUTPUT": QgsProcessing.TEMPORARY_OUTPUT
},
context=context, feedback=feedback, is_child_algorithm=True
)["OUTPUT"]
if feedback.isCanceled(): return {}
buffer_distance = self.parameterAsDouble(parameters, self.PEAK_SPACING, context) / 2.0
feedback.pushInfo("Buffering peaks. . .")
buffered_polygons_layer_name = processing.run(
"native:buffer",
{
"INPUT": filtered_polygons_layer_name,
"DISTANCE": buffer_distance,
"DISSOLVE": True,
"OUTPUT": QgsProcessing.TEMPORARY_OUTPUT
},
context=context, feedback=feedback, is_child_algorithm=True
)["OUTPUT"]
feedback.pushInfo("Dissolving peaks. . .")
dissolved_polygons_layer_name = processing.run(
"native:dissolve",
{
"INPUT": buffered_polygons_layer_name,
"OUTPUT": QgsProcessing.TEMPORARY_OUTPUT
},
context=context, feedback=feedback, is_child_algorithm=True
)["OUTPUT"]
if feedback.isCanceled(): return {}
feedback.pushInfo("Computing peak centers. . .")
centroids_layer_name = processing.run(
"native:centroids",
{
"INPUT": dissolved_polygons_layer_name,
"ALL_PARTS": True,
"OUTPUT": QgsProcessing.TEMPORARY_OUTPUT
},
context=context, feedback=feedback, is_child_algorithm=True
)["OUTPUT"]
centroids_layer = context.takeResultLayer(centroids_layer_name)
if feedback.isCanceled(): return {}
(sink, dest_id) = self.parameterAsSink(
parameters,
self.OUTPUT,
context,
QgsFields(), # no fields
QgsWkbTypes.Point,
dem.crs()
)
n = 0
for p in centroids_layer.getFeatures():
if feedback.isCanceled(): return {}
sink.addFeature(p)
n += 1
feedback.pushInfo(f"Number of peaks detected: {n}")
return {
self.OUTPUT: dest_id
}
def name(self):
"""
Returns the algorithm name, used for identifying the algorithm. This
string should be fixed for the algorithm, and must not be localised.
The name should be unique within each provider. Names should contain
lowercase alphanumeric characters only and no spaces or other
formatting characters.
"""
return "Extract Peaks"
def displayName(self):
"""
Returns the translated algorithm name, which should be used for any
user-visible display of the algorithm name.
"""
return self.tr(self.name())
def group(self):
"""
Returns the name of the group this algorithm belongs to. This string
should be localised.
"""
return self.tr(self.groupId())
def groupId(self):
"""
Returns the unique ID of the group this algorithm belongs to. This
string should be fixed for the algorithm, and must not be localised.
The group id should be unique within each provider. Group id should
contain lowercase alphanumeric characters only and no spaces or other
formatting characters.
"""
return ""
def tr(self, string):
return QCoreApplication.translate("Processing", string)
def createInstance(self):
return PeakExtractorAlgorithm()
| 32.890411 | 138 | 0.570596 | 8,233 | 0.857247 | 0 | 0 | 0 | 0 | 0 | 0 | 3,816 | 0.397334 |
c43f47ff2e792fe2c4acc6424f3c4c0fdde3ecb2 | 3,657 | py | Python | manila/tests/api/views/test_quota_class_sets.py | openstack/manila | 1ebae738c235c6f1874ac7b11307e0d5fb567dba | ["Apache-2.0"] | 159 | 2015-01-02T09:35:15.000Z | 2022-01-04T11:51:34.000Z | manila/tests/api/views/test_quota_class_sets.py | openstack/manila | 1ebae738c235c6f1874ac7b11307e0d5fb567dba | ["Apache-2.0"] | 5 | 2015-07-24T09:28:21.000Z | 2020-11-20T04:33:51.000Z | manila/tests/api/views/test_quota_class_sets.py | openstack/manila | 1ebae738c235c6f1874ac7b11307e0d5fb567dba | ["Apache-2.0"] | 128 | 2015-01-05T22:52:28.000Z | 2021-12-29T14:00:58.000Z |
# Copyright (c) 2017 Mirantis, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ddt
from manila.api.openstack import api_version_request as api_version
from manila.api.views import quota_class_sets
from manila import test
from manila.tests.api import fakes
@ddt.ddt
class ViewBuilderTestCase(test.TestCase):
def setUp(self):
super(ViewBuilderTestCase, self).setUp()
self.builder = quota_class_sets.ViewBuilder()
def test__collection_name(self):
self.assertEqual('quota_class_set', self.builder._collection_name)
@ddt.data(
("fake_quota_class", "2.40"), (None, "2.40"),
("fake_quota_class", "2.39"), (None, "2.39"),
("fake_quota_class", "2.53"), (None, "2.53"),
("fake_quota_class", "2.62"), (None, "2.62"),
)
@ddt.unpack
def test_detail_list_with_share_type(self, quota_class, microversion):
req = fakes.HTTPRequest.blank('/quota-sets', version=microversion)
quota_class_set = {
"shares": 13,
"gigabytes": 31,
"snapshots": 14,
"snapshot_gigabytes": 41,
"share_groups": 15,
"share_group_snapshots": 51,
"share_networks": 16,
}
expected = {self.builder._collection_name: {
"shares": quota_class_set["shares"],
"gigabytes": quota_class_set["gigabytes"],
"snapshots": quota_class_set["snapshots"],
"snapshot_gigabytes": quota_class_set["snapshot_gigabytes"],
"share_networks": quota_class_set["share_networks"],
}}
if quota_class:
expected[self.builder._collection_name]['id'] = quota_class
if (api_version.APIVersionRequest(microversion) >= (
api_version.APIVersionRequest("2.40"))):
expected[self.builder._collection_name][
"share_groups"] = quota_class_set["share_groups"]
expected[self.builder._collection_name][
"share_group_snapshots"] = quota_class_set[
"share_group_snapshots"]
if req.api_version_request >= api_version.APIVersionRequest("2.53"):
fake_share_replicas_value = 46
fake_replica_gigabytes_value = 100
expected[self.builder._collection_name]["share_replicas"] = (
fake_share_replicas_value)
expected[self.builder._collection_name][
"replica_gigabytes"] = fake_replica_gigabytes_value
quota_class_set['share_replicas'] = fake_share_replicas_value
quota_class_set['replica_gigabytes'] = fake_replica_gigabytes_value
if req.api_version_request >= api_version.APIVersionRequest("2.62"):
fake_per_share_gigabytes = 10
expected[self.builder._collection_name][
"per_share_gigabytes"] = fake_per_share_gigabytes
quota_class_set['per_share_gigabytes'] = fake_per_share_gigabytes
result = self.builder.detail_list(
req, quota_class_set, quota_class=quota_class)
self.assertEqual(expected, result)
| 41.089888 | 79 | 0.654908 | 2,826 | 0.772765 | 0 | 0 | 2,835 | 0.775226 | 0 | 0 | 1,212 | 0.331419 |
c4401d23a46ea6a328e85a6fd337eb170ed7e08b | 212 | py | Python | P1480.py | Muntaha-Islam0019/Leetcode-Solutions | 0bc56ce43a6d8ad10461b69078166a2a5b913e7f | ["MIT"] | null | null | null | P1480.py | Muntaha-Islam0019/Leetcode-Solutions | 0bc56ce43a6d8ad10461b69078166a2a5b913e7f | ["MIT"] | null | null | null | P1480.py | Muntaha-Islam0019/Leetcode-Solutions | 0bc56ce43a6d8ad10461b69078166a2a5b913e7f | ["MIT"] | null | null | null |
class Solution:
def runningSum(self, nums: List[int]) -> List[int]:
for index in range(1, len(nums)):
nums[index] = nums[index - 1] + nums[index]
return nums
| 26.5 | 55 | 0.514151 | 211 | 0.995283 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
c441a8d53ebaea6e35e7d68f0992cf2efeee375b | 2,429 | py | Python | tests/sequence_utils_test.py | rmcolq/genofunk | ffa031fb361fc736e839d0e36d36f8ed7ade30dc | ["MIT"] | 1 | 2021-01-09T23:25:02.000Z | 2021-01-09T23:25:02.000Z | tests/sequence_utils_test.py | rmcolq/genofunk | ffa031fb361fc736e839d0e36d36f8ed7ade30dc | ["MIT"] | null | null | null | tests/sequence_utils_test.py | rmcolq/genofunk | ffa031fb361fc736e839d0e36d36f8ed7ade30dc | ["MIT"] | null | null | null |
import os
import unittest
import json
import filecmp
from genofunk.sequence_utils import *
this_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
class TestSequenceUtils(unittest.TestCase):
def test_get_coordinates_from_json_simple_pairs(self):
json_value = {
"start": 30,
"end": 40,
"strand": 1
}
coordinates = get_coordinates_from_json(json_value, pairs=True)
expected = [[30, 40]]
self.assertEqual(expected, coordinates)
def test_get_coordinates_from_json_simple_no_pairs(self):
json_value = {
"start": 30,
"end": 40,
"strand": 1
}
coordinates = get_coordinates_from_json(json_value, pairs=False)
expected = [30, 40]
self.assertEqual(expected, coordinates)
def test_get_coordinates_from_json_join_pairs(self):
json_value = {
"join": [
{ "start": 0, "end": 11, "strand": 1 },
{ "start": 10, "end": 20, "strand": 1 }
]
}
coordinates = get_coordinates_from_json(json_value, pairs=True)
expected = [[0,11],[10,20]]
self.assertEqual(expected, coordinates)
def test_get_coordinates_from_json_join_no_pairs(self):
json_value = {
"join": [
{ "start": 0, "end": 11, "strand": 1 },
{ "start": 10, "end": 20, "strand": 1 }
]
}
coordinates = get_coordinates_from_json(json_value, pairs=False)
expected = [0,11,10,20]
self.assertEqual(expected, coordinates)
def test_is_open_reading_frame_wrong_start(self):
amino_acid_sequence = "NATIL*"
result = is_open_reading_frame(amino_acid_sequence)
self.assertFalse(result)
def test_is_open_reading_frame_wrong_end(self):
amino_acid_sequence = "MNATIL*S"
result = is_open_reading_frame(amino_acid_sequence)
self.assertFalse(result)
def test_is_open_reading_frame_stop_in_middle(self):
amino_acid_sequence = "MNATIL*S*"
result = is_open_reading_frame(amino_acid_sequence, allow_stop_codons_in_middle=False)
self.assertFalse(result)
def test_is_open_reading_frame_stop_in_middle_allowed(self):
amino_acid_sequence = "MNATIL*S*"
result = is_open_reading_frame(amino_acid_sequence, allow_stop_codons_in_middle=True)
self.assertTrue(result)
| 34.211268 | 94 | 0.645533 | 2,264 | 0.932071 | 0 | 0 | 0 | 0 | 0 | 0 | 172 | 0.070811 |
c442b7615909101f05f7c648d2d237c13e312b98 | 1,630 | py | Python | Modules/Biophotonics/python/iMC/msi/test/test_nrrdwriter.py | SVRTK/MITK | 52252d60e42702e292d188e30f6717fe50c23962 | ["BSD-3-Clause"] | 5 | 2015-02-05T10:58:41.000Z | 2019-04-17T15:04:07.000Z | Modules/Biophotonics/python/iMC/msi/test/test_nrrdwriter.py | wyyrepo/MITK | d0837f3d0d44f477b888ec498e9a2ed407e79f20 | ["BSD-3-Clause"] | 141 | 2015-03-03T06:52:01.000Z | 2020-12-10T07:28:14.000Z | Modules/Biophotonics/python/iMC/msi/test/test_nrrdwriter.py | wyyrepo/MITK | d0837f3d0d44f477b888ec498e9a2ed407e79f20 | ["BSD-3-Clause"] | 4 | 2015-02-19T06:48:13.000Z | 2020-06-19T16:20:25.000Z |
# -*- coding: utf-8 -*-
"""
Created on Thu Aug 13 09:52:47 2015
@author: wirkert
"""
import unittest
import os
import numpy as np
import msi.msimanipulations as msimani
from msi.io.nrrdreader import NrrdReader
from msi.io.nrrdwriter import NrrdWriter
from msi.test import helpers
class TestNrrdWriter(unittest.TestCase):
def setUp(self):
# setup file and the path where it shall be written to
self.msi = helpers.getFakeMsi()
self.fileUriToWrite = "testfile.nrrd"
def tearDown(self):
# remove the hopefully written file
os.remove(self.fileUriToWrite)
def test_imageWriterCreatesFile(self):
writer = NrrdWriter(self.msi)
writer.write(self.fileUriToWrite)
self.assertTrue(os.path.isfile(self.fileUriToWrite),
"file was written to disk")
def test_imageWriterCreatesCorrectFile(self):
writer = NrrdWriter(self.msi)
writer.write(self.fileUriToWrite)
reader = NrrdReader()
msi = reader.read(self.fileUriToWrite)
self.assertTrue(msi == helpers.getFakeMsi(),
"image correctly written and read")
def test_write_one_d_image_works(self):
writer = NrrdWriter(self.msi)
msimani.calculate_mean_spectrum(self.msi)
writer.write(self.fileUriToWrite)
reader = NrrdReader()
msi = reader.read(self.fileUriToWrite)
np.testing.assert_array_equal(msi.get_image(),
np.array([1, 2, 3, 4, 5]),
"1d image correctly written and read")
| 29.636364 | 76 | 0.633129 | 1,344 | 0.82454 | 0 | 0 | 0 | 0 | 0 | 0 | 285 | 0.174847 |
c4438dbc98a70b3fe8296d0282cdfe5e4623856b | 3,369 | py | Python | crossplatformshell/__init__.py | ryanpdwyer/crossplatformshell | d6239ae362cff42faffc85714f7a5e1b56dc6463 | ["MIT"] | null | null | null | crossplatformshell/__init__.py | ryanpdwyer/crossplatformshell | d6239ae362cff42faffc85714f7a5e1b56dc6463 | ["MIT"] | null | null | null | crossplatformshell/__init__.py | ryanpdwyer/crossplatformshell | d6239ae362cff42faffc85714f7a5e1b56dc6463 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
"""
============================
crossplatformshell
============================
"""
from __future__ import (print_function, division, absolute_import,
unicode_literals)
import pathlib
import io
import os
import shutil
import distutils.dir_util
import platform
# Use subprocess32 if available
try:
import subprocess32 as subprocess
except:
import subprocess as subprocess
def check_output(*args, **kwargs):
"""Subprocess check_output, but prints commands and output by default.
Also allows printing of error message for helpful debugging.
Use print_all=False to turn off all printing."""
print_all = kwargs.pop('print_all', None)
if print_all is not None:
print_in = print_all
print_out = print_all
else:
print_in = kwargs.pop('print_in', True)
print_out = kwargs.pop('print_out', True)
if print_in:
print('')
print(' '.join(args[0]))
try:
out_bytes = subprocess.check_output(*args, **kwargs)
out_lines = out_bytes.decode('utf-8').splitlines()
except subprocess.CalledProcessError as e:
# Wrap in try/except so that check_output can print
raise e
if print_out:
for line in out_lines:
print(line)
return out_lines
windows = platform.system() == 'Windows'
def find_git_cmd(windows):
git = 'git'
if windows:
try:
check_output([git, '--version'])
except subprocess.CalledProcessError:
try:
git = 'git.cmd'
check_output([git, '--version'])
except subprocess.CalledProcessError:
msg = "git does not appear to be on your path."
raise subprocess.CalledProcessError(msg)
return git
git = find_git_cmd(windows)
def new_path(path_string):
"""Return pathlib.Path, expanding '~' to a user's HOME directory"""
return pathlib.Path(os.path.expanduser(path_string))
def mkdir(*args):
"""Make directories for the specified paths."""
for arg in args:
os.mkdir(str(arg))
def remove(path):
"""Remove the specified path."""
os.remove(str(path))
def rmtree(path):
"""Recursively remove paths."""
shutil.rmtree(str(path))
def copy(src_path, dst_path):
shutil.copy(str(src_path), str(dst_path))
cp = copy
def copy_tree(src_path, dst_path):
"""Recursively copy all files and folders from src_path to dst_path"""
distutils.dir_util.copy_tree(str(src_path), str(dst_path))
cp_r = copy_tree
def rm(*args):
"""Delete files, if they exist.
Fail silently if a file doesn't exist."""
for path in args:
try:
os.remove(str(path))
except OSError:
pass
def rm_rf(*args):
"""Recursively delete directories, if they exist."""
for path in args:
try:
shutil.rmtree(str(path))
except OSError:
pass
def read_file(filename, encoding="utf-8"):
with io.open(str(filename), encoding=encoding) as f:
text = f.read()
return text
def write_file(filename, string, encoding="utf-8"):
with io.open(str(filename), 'w', encoding=encoding) as f:
f.write(string)
# Versioneer versioning
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
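# A hypothetical usage sketch (not part of the package): exercising the helpers
# defined above; the directory and file names are invented for illustration.
if __name__ == "__main__":
    demo_dir = new_path("~/cps_demo_example")   # '~' expands to the user's home directory
    rm_rf(demo_dir)                             # silently ignores a missing directory
    mkdir(demo_dir)
    write_file(demo_dir / "hello.txt", "hello\n")
    print(read_file(demo_dir / "hello.txt"))
    check_output([git, "--version"])            # echoes the command and its output
    rm_rf(demo_dir)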
| 23.234483 | 74 | 0.62808 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 933 | 0.276937 |
c444346fedeae3b1a36842a83b1d34e2d12fa382 | 28,857 | py | Python | collections/nemo_nlp/nemo_nlp/data/data_layers.py | Giuseppe5/NeMo | f946aca100c9a1bf22e6bd25fba9f80299722112 | ["Apache-2.0"] | 2 | 2020-05-12T05:16:10.000Z | 2021-12-01T02:30:45.000Z | collections/nemo_nlp/nemo_nlp/data/data_layers.py | Giuseppe5/NeMo | f946aca100c9a1bf22e6bd25fba9f80299722112 | ["Apache-2.0"] | 3 | 2020-11-13T17:45:41.000Z | 2022-03-12T00:28:59.000Z | collections/nemo_nlp/nemo_nlp/data/data_layers.py | Giuseppe5/NeMo | f946aca100c9a1bf22e6bd25fba9f80299722112 | ["Apache-2.0"] | null | null | null |
# Copyright (c) 2019 NVIDIA Corporation
# If you want to add your own data layer, you should put its name in
# __all__ so that it can be imported with 'from text_data_layers import *'
__all__ = ['TextDataLayer',
'BertSentenceClassificationDataLayer',
'BertJointIntentSlotDataLayer',
'BertJointIntentSlotInferDataLayer',
'LanguageModelingDataLayer',
'BertTokenClassificationDataLayer',
'BertTokenClassificationInferDataLayer',
'BertPretrainingDataLayer',
'BertPretrainingPreprocessedDataLayer',
'TranslationDataLayer',
'GlueDataLayerClassification',
'GlueDataLayerRegression']
# from abc import abstractmethod
import sys
import torch
from torch.utils import data as pt_data
import os
import h5py
import nemo
from nemo.backends.pytorch.nm import DataLayerNM
from nemo.core.neural_types import *
import random
import numpy as np
from .datasets import *
class TextDataLayer(DataLayerNM):
"""
Generic Text Data Layer NM which wraps PyTorch's dataset
Args:
dataset_type: type of dataset used for this datalayer
dataset_params (dict): all the params for the dataset
"""
def __init__(self, dataset_type, dataset_params, **kwargs):
super().__init__(**kwargs)
if isinstance(dataset_type, str):
dataset_type = getattr(sys.modules[__name__], dataset_type)
self._dataset = dataset_type(**dataset_params)
def __len__(self):
return len(self._dataset)
@property
def dataset(self):
return self._dataset
@property
def data_iterator(self):
return None
class BertSentenceClassificationDataLayer(TextDataLayer):
"""
Creates the data layer to use for the task of sentence classification
with pretrained model.
All the data processing is done in BertSentenceClassificationDataset.
Args:
dataset (BertSentenceClassificationDataset):
the dataset that needs to be converted to DataLayerNM
"""
@staticmethod
def create_ports():
output_ports = {
"input_ids": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"input_type_ids": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"input_mask": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"labels": NeuralType({
0: AxisType(BatchTag),
}),
}
return {}, output_ports
def __init__(self,
input_file,
tokenizer,
max_seq_length,
num_samples=-1,
shuffle=False,
batch_size=64,
dataset_type=BertSentenceClassificationDataset,
**kwargs):
kwargs['batch_size'] = batch_size
dataset_params = {'input_file': input_file,
'tokenizer': tokenizer,
'max_seq_length': max_seq_length,
'num_samples': num_samples,
'shuffle': shuffle}
super().__init__(dataset_type, dataset_params, **kwargs)
class BertJointIntentSlotDataLayer(TextDataLayer):
"""
Creates the data layer to use for the task of joint intent
and slot classification with pretrained model.
All the data processing is done in BertJointIntentSlotDataset.
input_mask: used to ignore some of the input tokens like paddings
loss_mask: used to mask and ignore tokens in the loss function
subtokens_mask: used to ignore the outputs of unwanted tokens in
the inference and evaluation like the start and end tokens
Args:
dataset (BertJointIntentSlotDataset):
the dataset that needs to be converted to DataLayerNM
"""
@staticmethod
def create_ports():
output_ports = {
"input_ids": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"input_type_ids": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"input_mask": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"loss_mask": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"subtokens_mask": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"intents": NeuralType({
0: AxisType(BatchTag),
}),
"slots": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
}
return {}, output_ports
def __init__(self,
input_file,
slot_file,
pad_label,
tokenizer,
max_seq_length,
num_samples=-1,
shuffle=False,
batch_size=64,
ignore_extra_tokens=False,
ignore_start_end=False,
dataset_type=BertJointIntentSlotDataset,
**kwargs):
kwargs['batch_size'] = batch_size
dataset_params = {'input_file': input_file,
'slot_file': slot_file,
'pad_label': pad_label,
'tokenizer': tokenizer,
'max_seq_length': max_seq_length,
'num_samples': num_samples,
'shuffle': shuffle,
'ignore_extra_tokens': ignore_extra_tokens,
'ignore_start_end': ignore_start_end}
super().__init__(dataset_type, dataset_params, **kwargs)
class BertJointIntentSlotInferDataLayer(TextDataLayer):
"""
Creates the data layer to use for the task of joint intent
and slot classification with pretrained model. This is for inference.
All the data processing is done in BertJointIntentSlotInferDataset.
input_mask: used to ignore some of the input tokens like paddings
loss_mask: used to mask and ignore tokens in the loss function
subtokens_mask: used to ignore the outputs of unwanted tokens in
the inference and evaluation like the start and end tokens
Args:
dataset (BertJointIntentSlotInferDataset):
the dataset that needs to be converted to DataLayerNM
"""
@staticmethod
def create_ports():
output_ports = {
"input_ids": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"input_type_ids": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"input_mask": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"loss_mask": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"subtokens_mask": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
}
return {}, output_ports
def __init__(self,
queries,
tokenizer,
max_seq_length,
batch_size=1,
dataset_type=BertJointIntentSlotInferDataset,
**kwargs):
kwargs['batch_size'] = batch_size
dataset_params = {'queries': queries,
'tokenizer': tokenizer,
'max_seq_length': max_seq_length}
super().__init__(dataset_type, dataset_params, **kwargs)
class LanguageModelingDataLayer(TextDataLayer):
"""
Data layer for standard language modeling task.
Args:
dataset (str): path to text document with data
tokenizer (TokenizerSpec): tokenizer
max_seq_length (int): maximum allowed length of the text segments
batch_step (int): how many tokens to skip between two successive
segments of text when constructing batches
"""
@staticmethod
def create_ports():
"""
input_ids: indices of tokens which constitute batches of text segments
input_mask: bool tensor with 0s in place of tokens to be masked
labels: indices of tokens which should be predicted from each of the
corresponding tokens in input_ids; for left-to-right language
modeling equals to input_ids shifted by 1 to the right
"""
input_ports = {}
output_ports = {
"input_ids":
NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"input_mask":
NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"labels":
NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
})
}
return input_ports, output_ports
def __init__(self,
dataset,
tokenizer,
max_seq_length,
batch_step=128,
dataset_type=LanguageModelingDataset,
**kwargs):
dataset_params = {'dataset': dataset,
'tokenizer': tokenizer,
'max_seq_length': max_seq_length,
'batch_step': batch_step}
super().__init__(dataset_type, dataset_params, **kwargs)
class BertTokenClassificationDataLayer(TextDataLayer):
@staticmethod
def create_ports():
input_ports = {}
output_ports = {
"input_ids": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"input_type_ids": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"input_mask": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"loss_mask": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"subtokens_mask": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"labels": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
})
}
return input_ports, output_ports
def __init__(self,
text_file,
label_file,
tokenizer,
max_seq_length,
pad_label='O',
label_ids=None,
num_samples=-1,
shuffle=False,
batch_size=64,
ignore_extra_tokens=False,
ignore_start_end=False,
use_cache=False,
dataset_type=BertTokenClassificationDataset,
**kwargs):
kwargs['batch_size'] = batch_size
dataset_params = {'text_file': text_file,
'label_file': label_file,
'max_seq_length': max_seq_length,
'tokenizer': tokenizer,
'num_samples': num_samples,
'shuffle': shuffle,
'pad_label': pad_label,
'label_ids': label_ids,
'ignore_extra_tokens': ignore_extra_tokens,
'ignore_start_end': ignore_start_end,
'use_cache': use_cache}
super().__init__(dataset_type, dataset_params, **kwargs)
class BertTokenClassificationInferDataLayer(TextDataLayer):
@staticmethod
def create_ports():
input_ports = {}
output_ports = {
"input_ids": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"input_type_ids": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"input_mask": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"loss_mask": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"subtokens_mask": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
})
}
return input_ports, output_ports
def __init__(self,
queries,
tokenizer,
max_seq_length,
batch_size=1,
dataset_type=BertTokenClassificationInferDataset,
**kwargs):
kwargs['batch_size'] = batch_size
dataset_params = {'queries': queries,
'tokenizer': tokenizer,
'max_seq_length': max_seq_length}
super().__init__(dataset_type, dataset_params, **kwargs)
class BertPretrainingDataLayer(TextDataLayer):
"""
Data layer for masked language modeling task.
Args:
tokenizer (TokenizerSpec): tokenizer
dataset (str): directory or a single file with dataset documents
max_seq_length (int): maximum allowed length of the text segments
mask_probability (float): probability of masking input sequence tokens
batch_size (int): batch size in segments
short_seq_prob (float): Probability of creating sequences which are
shorter than the maximum length.
Defaults to 0.1.
"""
@staticmethod
def create_ports():
"""
input_ids: indices of tokens which constitute batches of text segments
input_type_ids: indices of token types (e.g., sentences A & B in BERT)
input_mask: bool tensor with 0s in place of tokens to be masked
output_ids: indices of output tokens which should be predicted
output_mask: bool tensor with 0s in place of tokens to be excluded
from loss calculation
labels: indices of classes to be predicted from [CLS] token of text
segments (e.g, 0 or 1 in next sentence prediction task)
"""
input_ports = {}
output_ports = {
"input_ids": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"input_type_ids": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"input_mask": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"output_ids": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"output_mask": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"labels": NeuralType({0: AxisType(BatchTag)}),
}
return input_ports, output_ports
def __init__(self,
tokenizer,
dataset,
max_seq_length,
mask_probability,
short_seq_prob=0.1,
batch_size=64,
**kwargs):
kwargs['batch_size'] = batch_size
dataset_params = {'tokenizer': tokenizer,
'dataset': dataset,
'max_seq_length': max_seq_length,
'mask_probability': mask_probability,
'short_seq_prob': short_seq_prob}
super().__init__(BertPretrainingDataset, dataset_params, **kwargs)
class BertPretrainingPreprocessedDataLayer(DataLayerNM):
"""
Data layer for masked language modeling task.
Args:
tokenizer (TokenizerSpec): tokenizer
dataset (str): directory or a single file with dataset documents
max_seq_length (int): maximum allowed length of the text segments
mask_probability (float): probability of masking input sequence tokens
batch_size (int): batch size in segments
short_seq_prob (float): Probability of creating sequences which are
shorter than the maximum length.
Defaults to 0.1.
"""
@staticmethod
def create_ports():
"""
input_ids: indices of tokens which constitute batches of text segments
input_type_ids: indices of token types (e.g., sentences A & B in BERT)
input_mask: bool tensor with 0s in place of tokens to be masked
output_ids: indices of output tokens which should be predicted
output_mask: bool tensor with 0s in place of tokens to be excluded
from loss calculation
labels: indices of classes to be predicted from [CLS] token of text
segments (e.g, 0 or 1 in next sentence prediction task)
"""
input_ports = {}
output_ports = {
"input_ids": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"input_type_ids": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"input_mask": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"output_ids": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"output_mask": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"labels": NeuralType({0: AxisType(BatchTag)}),
}
return input_ports, output_ports
def __init__(self,
dataset,
max_pred_length,
batch_size=64,
training=True,
**kwargs):
if os.path.isdir(dataset):
self.files = [os.path.join(dataset, f)
for f in os.listdir(dataset)
if os.path.isfile(os.path.join(dataset, f))]
else:
self.files = [dataset]
self.files.sort()
self.num_files = len(self.files)
self.batch_size = batch_size
self.max_pred_length = max_pred_length
self.training = training
total_length = 0
for f in self.files:
fp = h5py.File(f, 'r')
total_length += len(fp['input_ids'])
fp.close()
self.total_length = total_length
super().__init__(**kwargs)
def _collate_fn(self, x):
num_components = len(x[0])
components = [[] for _ in range(num_components)]
batch_size = len(x)
for i in range(batch_size):
for j in range(num_components):
components[j].append(x[i][j])
src_ids, src_segment_ids, src_mask, tgt_ids, tgt_mask, sent_ids = \
[np.stack(x, axis=0) for x in components]
src_ids = torch.Tensor(src_ids).long().to(self._device)
src_segment_ids = torch.Tensor(src_segment_ids).long().to(self._device)
src_mask = torch.Tensor(src_mask).float().to(self._device)
tgt_ids = torch.Tensor(tgt_ids).long().to(self._device)
tgt_mask = torch.Tensor(tgt_mask).float().to(self._device)
sent_ids = torch.Tensor(sent_ids).long().to(self._device)
return src_ids, src_segment_ids, src_mask, tgt_ids, tgt_mask, sent_ids
def __len__(self):
return self.total_length
@property
def dataset(self):
return None
@property
def data_iterator(self):
while True:
if self.training:
random.shuffle(self.files)
for f_id in range(self.num_files):
data_file = self.files[f_id]
train_data = BertPretrainingPreprocessedDataset(
input_file=data_file,
max_pred_length=self.max_pred_length)
train_sampler = pt_data.RandomSampler(train_data)
train_dataloader = pt_data.DataLoader(
dataset=train_data,
batch_size=self.batch_size,
collate_fn=self._collate_fn,
shuffle=train_sampler is None,
sampler=train_sampler)
for x in train_dataloader:
yield x
class TranslationDataLayer(TextDataLayer):
"""
Data layer for neural machine translation from source (src) language to
target (tgt) language.
Args:
tokenizer_src (TokenizerSpec): source language tokenizer
tokenizer_tgt (TokenizerSpec): target language tokenizer
dataset_src (str): path to source data
dataset_tgt (str): path to target data
tokens_in_batch (int): maximum allowed number of tokens in batches,
batches will be constructed to minimize the use of <pad> tokens
clean (bool): whether to use parallel data cleaning such as removing
pairs with big difference in sentences length, removing pairs with
the same tokens in src and tgt, etc; useful for training data layer
and should not be used in evaluation data layer
"""
@staticmethod
def create_ports():
"""
src_ids: indices of tokens which correspond to source sentences
src_mask: bool tensor with 0s in place of source tokens to be masked
tgt_ids: indices of tokens which correspond to target sentences
tgt_mask: bool tensor with 0s in place of target tokens to be masked
labels: indices of tokens which should be predicted from each of the
corresponding target tokens in tgt_ids; for standard neural
machine translation equals to tgt_ids shifted by 1 to the right
sent_ids: indices of the sentences in a batch; important for
evaluation with external metrics, such as SacreBLEU
"""
input_ports = {}
output_ports = {
"src_ids": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"src_mask": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"tgt_ids": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"tgt_mask": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"labels": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"sent_ids": NeuralType({
0: AxisType(BatchTag)
})
}
return input_ports, output_ports
def __init__(self,
tokenizer_src,
tokenizer_tgt,
dataset_src,
dataset_tgt,
tokens_in_batch=1024,
clean=False,
dataset_type=TranslationDataset,
**kwargs):
dataset_params = {'tokenizer_src': tokenizer_src,
'tokenizer_tgt': tokenizer_tgt,
'dataset_src': dataset_src,
'dataset_tgt': dataset_tgt,
'tokens_in_batch': tokens_in_batch,
'clean': clean}
super().__init__(dataset_type, dataset_params, **kwargs)
if self._placement == nemo.core.DeviceType.AllGpu:
sampler = pt_data.distributed.DistributedSampler(self._dataset)
else:
sampler = None
self._dataloader = pt_data.DataLoader(dataset=self._dataset,
batch_size=1,
collate_fn=self._collate_fn,
shuffle=sampler is None,
sampler=sampler)
def _collate_fn(self, x):
src_ids, src_mask, tgt_ids, tgt_mask, labels, sent_ids = x[0]
src_ids = torch.Tensor(src_ids).long().to(self._device)
src_mask = torch.Tensor(src_mask).float().to(self._device)
tgt_ids = torch.Tensor(tgt_ids).long().to(self._device)
tgt_mask = torch.Tensor(tgt_mask).float().to(self._device)
labels = torch.Tensor(labels).long().to(self._device)
sent_ids = torch.Tensor(sent_ids).long().to(self._device)
return src_ids, src_mask, tgt_ids, tgt_mask, labels, sent_ids
@property
def dataset(self):
return None
@property
def data_iterator(self):
return self._dataloader
class GlueDataLayerClassification(TextDataLayer):
"""
Creates the data layer to use for the GLUE classification tasks,
more details here: https://gluebenchmark.com/tasks
All the data processing is done in GLUEDataset.
Args:
dataset_type (GLUEDataset):
the dataset that needs to be converted to DataLayerNM
"""
@staticmethod
def create_ports():
output_ports = {
"input_ids": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"input_type_ids": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"input_mask": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"labels": NeuralType({
0: AxisType(CategoricalTag),
}),
}
return {}, output_ports
def __init__(self,
data_dir,
tokenizer,
max_seq_length,
processor,
evaluate=False,
token_params={},
num_samples=-1,
shuffle=False,
batch_size=64,
dataset_type=GLUEDataset,
**kwargs):
kwargs['batch_size'] = batch_size
dataset_params = {'data_dir': data_dir,
'output_mode': 'classification',
'processor': processor,
'evaluate': evaluate,
'token_params': token_params,
'tokenizer': tokenizer,
'max_seq_length': max_seq_length}
super().__init__(dataset_type, dataset_params, **kwargs)
class GlueDataLayerRegression(TextDataLayer):
"""
Creates the data layer to use for the GLUE STS-B regression task,
more details here: https://gluebenchmark.com/tasks
All the data processing is done in GLUEDataset.
Args:
dataset_type (GLUEDataset):
the dataset that needs to be converted to DataLayerNM
"""
@staticmethod
def create_ports():
output_ports = {
"input_ids": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"input_type_ids": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"input_mask": NeuralType({
0: AxisType(BatchTag),
1: AxisType(TimeTag)
}),
"labels": NeuralType({
0: AxisType(RegressionTag),
}),
}
return {}, output_ports
def __init__(self,
data_dir,
tokenizer,
max_seq_length,
processor,
evaluate=False,
token_params={},
num_samples=-1,
shuffle=False,
batch_size=64,
dataset_type=GLUEDataset,
**kwargs):
kwargs['batch_size'] = batch_size
dataset_params = {'data_dir': data_dir,
'output_mode': 'regression',
'processor': processor,
'evaluate': evaluate,
'token_params': token_params,
'tokenizer': tokenizer,
'max_seq_length': max_seq_length}
super().__init__(dataset_type, dataset_params, **kwargs)
| 34.851449 | 79 | 0.537547 | 27,845 | 0.964931 | 901 | 0.031223 | 11,834 | 0.410091 | 0 | 0 | 8,924 | 0.309249 |
c446129e206d55ad3a8c2ed465762b2ddf662a3e | 12,208 | py | Python | h2o-py/h2o/automl/_base.py | vishalbelsare/h2o-3 | 9322fb0f4c0e2358449e339a434f607d524c69fa | ["Apache-2.0"] | null | null | null | h2o-py/h2o/automl/_base.py | vishalbelsare/h2o-3 | 9322fb0f4c0e2358449e339a434f607d524c69fa | ["Apache-2.0"] | 58 | 2021-10-01T12:43:37.000Z | 2021-12-08T22:58:43.000Z | h2o-py/h2o/automl/_base.py | vishalbelsare/h2o-3 | 9322fb0f4c0e2358449e339a434f607d524c69fa | ["Apache-2.0"] | null | null | null |
import h2o
from h2o.base import Keyed
from h2o.exceptions import H2OValueError
from h2o.job import H2OJob
from h2o.model import ModelBase
from h2o.utils.typechecks import assert_is_type, is_type
class H2OAutoMLBaseMixin:
def predict(self, test_data):
"""
Predict on a dataset.
:param H2OFrame test_data: Data on which to make predictions.
:returns: A new H2OFrame of predictions.
:examples:
>>> # Set up an H2OAutoML object
>>> aml = H2OAutoML(max_runtime_secs=30)
>>> # Launch an H2OAutoML run
>>> aml.train(y=y, training_frame=train)
>>> # Predict with top model from AutoML Leaderboard on a H2OFrame called 'test'
>>> aml.predict(test)
>>>
>>> # Get AutoML object by `project_name`
>>> get_aml = h2o.automl.get_automl(aml.project_name)
>>> # Predict with top model from AutoML Leaderboard on a H2OFrame called 'test'
>>> get_aml.predict(test)
"""
return self.leader.predict(test_data)
# ---------------------------------------------------------------------------
# Download POJO/MOJO with AutoML
# ---------------------------------------------------------------------------
def download_pojo(self, path="", get_genmodel_jar=False, genmodel_name=""):
"""
Download the POJO for the leader model in AutoML to the directory specified by path.
If path is an empty string, then dump the output to screen.
:param path: An absolute path to the directory where POJO should be saved.
:param get_genmodel_jar: if True, then also download h2o-genmodel.jar and store it in folder ``path``.
:param genmodel_name: Custom name of genmodel jar
:returns: name of the POJO file written.
"""
return h2o.download_pojo(self.leader, path, get_jar=get_genmodel_jar, jar_name=genmodel_name)
def download_mojo(self, path=".", get_genmodel_jar=False, genmodel_name=""):
"""
Download the leader model in AutoML in MOJO format.
:param path: the path where MOJO file should be saved.
:param get_genmodel_jar: if True, then also download h2o-genmodel.jar and store it in folder ``path``.
:param genmodel_name: Custom name of genmodel jar
:returns: name of the MOJO file written.
"""
return ModelBase.download_mojo(self.leader, path, get_genmodel_jar, genmodel_name)
@property
def project_name(self):
"""
        Retrieve a string indicating the project_name of this AutoML instance.
:return: a string containing the project_name
"""
pass
@property
def leader(self):
"""
Retrieve the top model from an H2OAutoML object
:return: an H2O model
:examples:
>>> # Set up an H2OAutoML object
>>> aml = H2OAutoML(max_runtime_secs=30)
>>> # Launch an AutoML run
>>> aml.train(y=y, training_frame=train)
>>> # Get the best model in the AutoML Leaderboard
>>> aml.leader
>>>
>>> # Get AutoML object by `project_name`
>>> get_aml = h2o.automl.get_automl(aml.project_name)
>>> # Get the best model in the AutoML Leaderboard
>>> get_aml.leader
"""
pass
@property
def leaderboard(self):
"""
Retrieve the leaderboard from an H2OAutoML object
:return: an H2OFrame with model ids in the first column and evaluation metric in the second column sorted
by the evaluation metric
:examples:
>>> # Set up an H2OAutoML object
>>> aml = H2OAutoML(max_runtime_secs=30)
>>> # Launch an AutoML run
>>> aml.train(y=y, training_frame=train)
>>> # Get the AutoML Leaderboard
>>> aml.leaderboard
>>>
>>> # Get AutoML object by `project_name`
>>> get_aml = h2o.automl.get_automl(aml.project_name)
>>> # Get the AutoML Leaderboard
>>> get_aml.leaderboard
"""
pass
@property
def training_info(self):
"""
Expose the name/value columns of `event_log` as a simple dictionary, for example `start_epoch`, `stop_epoch`, ...
See :func:`event_log` to obtain a description of those key/value pairs.
:return: a dictionary with event_log['name'] column as keys and event_log['value'] column as values.
"""
pass
@property
def event_log(self):
"""
Retrieve the backend event log from an H2OAutoML object
        :return: an H2OFrame with the detailed events that occurred during the AutoML training.
"""
pass
def get_leaderboard(self, extra_columns=None):
"""
Retrieve the leaderboard.
        Unlike the default leaderboard attached to the instance, this one can also return columns other than the metrics.
:param extra_columns: a string or a list of string specifying which optional columns should be added to the leaderboard. Defaults to None.
Currently supported extensions are:
- 'ALL': adds all columns below.
- 'training_time_ms': column providing the training time of each model in milliseconds (doesn't include the training of cross validation models).
            - 'predict_time_per_row_ms': column providing the average prediction time by the model for a single row.
- 'algo': column providing the algorithm name for each model.
:return: An H2OFrame representing the leaderboard.
:examples:
>>> aml = H2OAutoML(max_runtime_secs=30)
>>> aml.train(y=y, training_frame=train)
>>> lb_all = aml.get_leaderboard('ALL')
>>> lb_custom = aml.get_leaderboard(['predict_time_per_row_ms', 'training_time_ms'])
>>> lb_custom_sorted = lb_custom.sort(by='predict_time_per_row_ms')
"""
assert isinstance(self, Keyed)
return _fetch_leaderboard(self.key, extra_columns)
def get_best_model(self, algorithm=None, criterion=None):
"""
Get best model of a given family/algorithm for a given criterion from an AutoML object.
:param algorithm: One of "basemodel", "deeplearning", "drf", "gbm", "glm", "stackedensemble", "xgboost".
If None, pick the best model regardless of the algorithm.
:param criterion: Criterion can be one of the metrics reported in leaderboard. If set to None, the same ordering
as in the leaderboard will be used.
            Available criteria:
- Regression metrics: deviance, rmse, mse, mae, rmsle
- Binomial metrics: auc, logloss, aucpr, mean_per_class_error, rmse, mse
- Multinomial metrics: mean_per_class_error, logloss, rmse, mse
The following additional leaderboard information can be also used as a criterion:
- 'training_time_ms': column providing the training time of each model in milliseconds (doesn't include the training of cross validation models).
            - 'predict_time_per_row_ms': column providing the average prediction time by the model for a single row.
:return: An H2OModel or None if no model of a given family is present
:examples:
>>> # Set up an H2OAutoML object
>>> aml = H2OAutoML(max_runtime_secs=30)
>>> # Launch an AutoML run
>>> aml.train(y=y, training_frame=train)
>>> gbm = aml.get_best_model("gbm")
"""
from h2o.exceptions import H2OValueError
def _get_models(leaderboard):
return [m[0] for m in
leaderboard["model_id"].as_data_frame(use_pandas=False, header=False)]
higher_is_better = ["auc", "aucpr"]
assert_is_type(algorithm, None, str)
assert_is_type(criterion, None, str)
if criterion is not None:
criterion = criterion.lower()
if "deviance" == criterion:
criterion = "mean_residual_deviance"
if algorithm is not None:
if algorithm.lower() not in ("basemodel", "deeplearning", "drf", "gbm",
"glm", "stackedensemble", "xgboost"):
raise H2OValueError("Algorithm \"{}\" is not supported!".format(algorithm))
algorithm = algorithm.lower()
extra_cols = ["algo"]
if criterion in ("training_time_ms", "predict_time_per_row_ms"):
extra_cols.append(criterion)
leaderboard = h2o.automl.get_leaderboard(self, extra_columns=extra_cols)
leaderboard = leaderboard if algorithm is None else (
leaderboard[leaderboard["algo"].tolower() == algorithm, :] if algorithm != "basemodel"
else leaderboard[leaderboard["algo"].tolower() != "stackedensemble", :])
if leaderboard.nrow == 0:
return None
if criterion is None:
return h2o.get_model(leaderboard[0, "model_id"])
if criterion not in leaderboard.columns:
raise H2OValueError("Criterion \"{}\" is not present in the leaderboard!".format(criterion))
models_in_default_order = _get_models(leaderboard)
sorted_lb = leaderboard.sort(by=criterion, ascending=criterion not in higher_is_better)
selected_models = _get_models(sorted_lb[sorted_lb[criterion] == sorted_lb[0, criterion]])
picked_model = [model for model in models_in_default_order if model in selected_models][0]
return h2o.get_model(picked_model)
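    # An illustrative sketch of selecting by criterion (values are assumptions; any
    # leaderboard metric or the extra columns listed above can be used):
    #
    #     >>> best_xgb = aml.get_best_model(algorithm="xgboost", criterion="logloss")
    #     >>> fastest = aml.get_best_model(criterion="predict_time_per_row_ms")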
def _fetch_leaderboard(aml_id, extensions=None):
assert_is_type(extensions, None, str, [str])
extensions = ([] if extensions is None
else [extensions] if is_type(extensions, str)
else extensions)
resp = h2o.api("GET /99/Leaderboards/%s" % aml_id, data=dict(extensions=extensions))
dest_key = resp['project_name'].split('@', 1)[0]+"_custom_leaderboard"
return _fetch_table(resp['table'], key=dest_key, progress_bar=False)
def _fetch_table(table, key=None, progress_bar=True):
try:
# Intentionally mask the progress bar here since showing multiple progress bars is confusing to users.
# If any failure happens, revert back to user's original setting for progress and display the error message.
ori_progress_state = H2OJob.__PROGRESS_BAR__
H2OJob.__PROGRESS_BAR__ = progress_bar
# Parse leaderboard H2OTwoDimTable & return as an H2OFrame
fr = h2o.H2OFrame(table.cell_values, destination_frame=key, column_names=table.col_header, column_types=table.col_types)
return h2o.assign(fr[1:], key) # removing index and reassign id to ensure persistence on backend
finally:
H2OJob.__PROGRESS_BAR__ = ori_progress_state
def _fetch_state(aml_id, properties=None, verbosity=None):
state_json = h2o.api("GET /99/AutoML/%s" % aml_id, data=dict(verbosity=verbosity))
project_name = state_json["project_name"]
if project_name is None:
raise H2OValueError("No AutoML instance with id {}.".format(aml_id))
leaderboard_list = [key["name"] for key in state_json['leaderboard']['models']]
leader_id = leaderboard_list[0] if (leaderboard_list is not None and len(leaderboard_list) > 0) else None
should_fetch = lambda prop: properties is None or prop in properties
leader = None
if should_fetch('leader'):
leader = h2o.get_model(leader_id) if leader_id is not None else None
leaderboard = None
if should_fetch('leaderboard'):
leaderboard = _fetch_table(state_json['leaderboard_table'], key=project_name+"_leaderboard", progress_bar=False)
event_log = None
if should_fetch('event_log'):
event_log = _fetch_table(state_json['event_log_table'], key=project_name+"_eventlog", progress_bar=False)
return dict(
project_name=project_name,
json=state_json,
leader_id=leader_id,
leader=leader,
leaderboard=leaderboard,
event_log=event_log,
)
| 42.096552
| 173
| 0.63442
| 9,531
| 0.780718
| 0
| 0
| 2,236
| 0.183159
| 0
| 0
| 7,351
| 0.602146
|
c44725a87dd7a0e5d3208fe6f2ccd197531d2ad1
| 2,687
|
py
|
Python
|
Pistol.py
|
KRHS-GameProgramming-2014/survival-island
|
375b2710a2bc29551170b18638e2c00c6b2dc7c5
|
[
"BSD-3-Clause"
] | 1
|
2015-04-01T12:46:26.000Z
|
2015-04-01T12:46:26.000Z
|
Pistol.py
|
KRHS-GameProgramming-2014/survival-island
|
375b2710a2bc29551170b18638e2c00c6b2dc7c5
|
[
"BSD-3-Clause"
] | null | null | null |
Pistol.py
|
KRHS-GameProgramming-2014/survival-island
|
375b2710a2bc29551170b18638e2c00c6b2dc7c5
|
[
"BSD-3-Clause"
] | null | null | null |
import math,sys,pygame
class Pistol(pygame.sprite.Sprite):
def __init__(self,player):
self.facing = player.facing
if self.facing == "up":
self.image = pygame.image.load("rsc/Projectiles/gustu.png")
self.speed = [0, -5]
elif self.facing == "down":
self.image = pygame.image.load("rsc/Projectiles/gustd.png")
self.speed = [0, 5]
elif self.facing == "right":
self.image = pygame.image.load("rsc/Projectiles/gustr.png")
self.speed = [5, 0]
elif self.facing == "left":
self.image = pygame.image.load("rsc/Projectiles/gustl.png")
self.speed = [-5, 0]
self.rect = self.image.get_rect()
self.damage = 20
self.place(player.rect.center)
self.radius = 20
self.move()
self.living = True
def move(self):
self.rect = self.rect.move(self.speed)
    def collideWall(self, width, height):
        # Stop movement along the axis on which the projectile left the screen.
        if self.rect.left < 0 or self.rect.right > width:
            self.speed[0] = 0
            #print "hit xWall"
        if self.rect.top < 0 or self.rect.bottom > height:
            self.speed[1] = 0
def collidePistol(self, other):
if self != other:
if self.rect.right > other.rect.left and self.rect.left < other.rect.right:
if self.rect.bottom > other.rect.top and self.rect.top < other.rect.bottom:
if (self.radius + other.radius) > self.distance(other.rect.center):
self.living = False
def place(self, pt):
self.rect.center = pt
def update(self, width, height):
#self.speed = [self.speedx, self.speedy]
self.move()
def distance(self, pt):
x1 = self.rect.center[0]
y1 = self.rect.center[1]
x2 = pt[0]
y2 = pt[1]
return math.sqrt(((x2-x1)**2) + ((y2-y1)**2))
    def animate(self):
        # Frame-based animation: expects waitCount/maxWait, frame/maxFrame, changed
        # and the directional image lists (upImages, downImages, leftImages,
        # rightImages) to be set on the instance before it is called.
if self.waitCount < self.maxWait:
self.waitCount += 1
else:
self.waitCount = 0
self.facingChanged = True
if self.frame < self.maxFrame:
self.frame += 1
else:
self.frame = 0
if self.changed:
if self.facing == "up":
self.images = self.upImages
elif self.facing == "down":
self.images = self.downImages
elif self.facing == "right":
self.images = self.rightImages
elif self.facing == "left":
self.images = self.leftImages
self.image = self.images[self.frame]
| 33.17284
| 79
| 0.519166
| 2,662
| 0.990696
| 0
| 0
| 0
| 0
| 0
| 0
| 212
| 0.078898
|
c447c656ac034795409e4bb710eaaca13a84688c
| 3,388
|
py
|
Python
|
appdaemon/apps/common/common.py
|
Mithras/ha
|
d37f8673eed27a85f76c97ee3e924d2ddc033ee5
|
[
"MIT"
] | 3
|
2019-10-27T06:10:26.000Z
|
2020-07-21T01:27:11.000Z
|
appdaemon/apps/common/common.py
|
Mithras/ha
|
d37f8673eed27a85f76c97ee3e924d2ddc033ee5
|
[
"MIT"
] | null | null | null |
appdaemon/apps/common/common.py
|
Mithras/ha
|
d37f8673eed27a85f76c97ee3e924d2ddc033ee5
|
[
"MIT"
] | null | null | null |
import hassapi as hass
import csv
from collections import namedtuple
Profile = namedtuple(
"Profile", ["profile", "x_color", "y_color", "brightness"])
with open("/config/light_profiles.csv") as profiles_file:
profiles_reader = csv.reader(profiles_file)
next(profiles_reader)
LIGHT_PROFILES = [Profile(row[0], float(row[1]), float(
row[2]), int(row[3])) for row in profiles_reader]
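# The CSV layout is implied by the parsing above; a hypothetical
# /config/light_profiles.csv (header plus one row, values are assumptions) would be:
#
#   profile,x_color,y_color,brightness
#   bright,0.323,0.329,255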
class Common(hass.Hass):
async def initialize(self):
config = self.args["config"]
self.telegram_mithras = config["telegram_mithras"]
self.telegram_debug_chat = config["telegram_debug_chat"]
self.telegram_state_chat_mithras = config["telegram_state_chat_mithras"]
self.telegram_state_chat_diana = config["telegram_state_chat_diana"]
self.telegram_alarm_chat = config["telegram_alarm_chat"]
self.external_url = config["external_url"]
async def is_sleep_async(self):
return await self.get_state("input_boolean.sleep") == "on"
async def send_state_async(self, person: str, message: str, **kwargs):
if person == "person.mithras":
target = self.telegram_state_chat_mithras
elif person == "person.diana":
target = self.telegram_state_chat_diana
await self.call_service("telegram_bot/send_message",
target=[target],
message=message,
**kwargs)
async def send_alarm_async(self, message: str, **kwargs):
await self.call_service("telegram_bot/send_message",
target=[self.telegram_alarm_chat],
message=message,
**kwargs)
async def send_debug_async(self, message: str, **kwargs):
await self.call_service("telegram_bot/send_message",
target=[self.telegram_debug_chat],
message=message,
**kwargs)
async def turn_on_async(self, entity: str):
[domain, _] = entity.split(".")
await self.call_service(f"{domain}/turn_on",
entity_id=entity)
async def turn_off_async(self, entity: str):
[domain, _] = entity.split(".")
await self.call_service(f"{domain}/turn_off",
entity_id=entity)
async def light_turn_bright_async(self, light_group: str):
await self.light_turn_profile_async(light_group, "bright")
async def light_turn_dimmed_async(self, light_group: str):
await self.light_turn_profile_async(light_group, "dimmed")
async def light_turn_nightlight_async(self, light_group: str):
await self.light_turn_profile_async(light_group, "nightlight")
async def light_turn_profile_async(self, light_group: str, profile: str):
if profile == "off":
await self.turn_off_async(light_group)
else:
await self.call_service("light/turn_on",
entity_id=light_group,
profile=profile)
# TODO: test
async def light_flash(self, light_group: str, flash="short"):
await self.call_service("light/turn_on",
entity_id=light_group,
flash=flash)
| 41.317073
| 80
| 0.602715
| 2,978
| 0.878985
| 0
| 0
| 0
| 0
| 2,866
| 0.845927
| 477
| 0.140791
|
c448522cb4d655aac706a30087c1d285bd8f1d0f
| 3,133
|
py
|
Python
|
src/mongo_model.py
|
zxteloiv/curated-geokb-subsearcher
|
8f42dca4cb293ccf3baf25bb31ba9b6cd6a76c8d
|
[
"MIT"
] | null | null | null |
src/mongo_model.py
|
zxteloiv/curated-geokb-subsearcher
|
8f42dca4cb293ccf3baf25bb31ba9b6cd6a76c8d
|
[
"MIT"
] | null | null | null |
src/mongo_model.py
|
zxteloiv/curated-geokb-subsearcher
|
8f42dca4cb293ccf3baf25bb31ba9b6cd6a76c8d
|
[
"MIT"
] | null | null | null |
# coding: utf-8
from pymongo import MongoClient
import conf
class MongoQuery(object):
def __init__(self):
self._conn = MongoClient(conf.mongodb_conn_str)
self._db = self._conn.geokb
def query(self, grounded, limit=15, sort_keys=None):
col = self._db[grounded['from']]
        docs = col.find(grounded['where'],
                        limit=limit,
                        sort=([('popularity', -1)]
                              + [('_sys_ranks.%s' % x[0], -1)
                                 for x in sort_keys if x is not None])
                        )
if '*' in grounded['select']:
res = [dict((k, v) for k, v in doc.iteritems() if k != '_id') for doc in docs]
else:
res = []
for doc in docs:
selected = {}
for k in grounded['select']:
if k in doc:
selected[k] = doc[k]
res.append(selected)
return res
def coarse_query(self, grounded, limit=2000, sort_keys=None):
col = self._db[grounded['from']]
# docs = col.find(grounded['where'], limit=limit, sort=[('popularity', -1), ('_id', 1)])
docs = col.find(grounded['where'],
limit=limit,
sort=([('popularity', -1)]
+ [('_sys_ranks.%s' % x[0], -1) for x in sort_keys if x is not None])
)
return [dict((k, v) for k, v in doc.iteritems() if k != '_id') for doc in docs]
def project(self, docs, grounded, limit=15):
res = []
for doc in docs:
            if len(res) >= limit:
break
try:
score = doc['_rerank']['TimeRanker']
if score < 1:
continue
except KeyError:
pass
if '*' in grounded['select']:
doc = dict((k, v) if type(v) != type([]) else (k, self._merge_obj_array(v))
for k, v in doc.iteritems() if k != '_id')
doc['src'] = 'geokb'
doc['score'] = 2.0 # fixed high score for nginx blender, in another module
res.append(doc)
else:
selected = {}
for k in grounded['select']:
if type(doc[k]) == type([]):
selected[k] = self._merge_obj_array(doc[k])
else:
selected[k] = doc[k]
selected['_sys_ranks'] = doc['_sys_ranks']
selected['src'] = 'geokb'
selected['score'] = 2.0 # fixed high score for nginx blender, in another module
res.append(selected)
return res
@staticmethod
def _merge_obj_array(arr):
if len(arr) == 0 or type(arr) != type([]):
return arr
if type(arr[0]) != type(dict()):
return arr
        # [{u'推荐菜': u'AA'}, {u'推荐菜': u'BB'}, ...]
get_val_lst = lambda o: [v for _, v in o.iteritems()]
lst = []
for obj in arr:
lst += get_val_lst(obj)
return lst
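# An illustrative sketch of the expected query shape (collection and field names are
# assumptions, not taken from this repository):
#
#     >>> mq = MongoQuery()
#     >>> grounded = {'from': 'poi',
#     ...             'where': {'city': 'beijing'},
#     ...             'select': ['name', 'address']}
#     >>> results = mq.query(grounded, limit=5, sort_keys=[('rating',)])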
| 34.054348
| 99
| 0.452601
| 3,080
| 0.979332
| 0
| 0
| 405
| 0.128776
| 0
| 0
| 482
| 0.153259
|
c448639417746f765b5ac2d5c6459142e8c6a83b
| 8,809
|
py
|
Python
|
src/dcm/agent/plugins/builtin/configure_server.py
|
JPWKU/unix-agent
|
8f1278fc8c2768a8d4d54af642a881bace43652f
|
[
"Apache-2.0"
] | null | null | null |
src/dcm/agent/plugins/builtin/configure_server.py
|
JPWKU/unix-agent
|
8f1278fc8c2768a8d4d54af642a881bace43652f
|
[
"Apache-2.0"
] | 22
|
2015-09-15T20:52:34.000Z
|
2016-03-11T22:44:24.000Z
|
src/dcm/agent/plugins/builtin/configure_server.py
|
JPWKU/unix-agent
|
8f1278fc8c2768a8d4d54af642a881bace43652f
|
[
"Apache-2.0"
] | 3
|
2015-09-11T20:21:33.000Z
|
2016-09-30T08:30:19.000Z
|
#
# Copyright (C) 2014 Dell, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import configparser
import json
import logging
import os
import urllib.parse
import dcm.agent.exceptions as exceptions
import dcm.agent.logger as dcm_logger
import dcm.agent.plugins.api.base as plugin_base
import dcm.agent.plugins.api.exceptions as plugin_exceptions
import dcm.agent.plugins.api.utils as plugin_utils
import dcm.agent.utils as utils
_g_logger = logging.getLogger(__name__)
class ConfigureServer(plugin_base.Plugin):
protocol_arguments = {
"configType":
("Which configuration management software to use (chef or puppet)",
True, str, None),
"authId":
("", False, str, None),
"configurationData":
("", False, plugin_utils.base64type_convertor, None),
"encryptedConfigToken":
("", False, plugin_utils.base64type_convertor, None),
"encryptedAuthSecret":
("", False, plugin_utils.base64type_convertor, None),
"endpoint":
("", False, str, None),
"providerRegionId":
("", False, str, None),
"runAsUser":
("", False, str, None),
"storageDelegate":
("", False, str, None),
"storageEndpoint":
("", False, str, None),
"storageAccount":
("", False, str, None),
"scriptFiles":
("", False, list, None),
"storagePublicKey":
("", False, plugin_utils.base64type_convertor, None),
"storagePrivateKey":
("", False, plugin_utils.base64type_convertor, None),
"environmentId":
("", False, str, None),
"personalityFiles":
("", False, list, None),
"configClientName":
("", False, str, None),
"configCert":
("", False, plugin_utils.base64type_convertor, None),
"configKey":
("", False, plugin_utils.base64type_convertor, None),
"runListIds":
("", False, list, None),
"parameterList":
("", False, plugin_utils.base64type_convertor, None),
}
def __init__(self, conf, job_id, items_map, name, arguments):
super(ConfigureServer, self).__init__(
conf, job_id, items_map, name, arguments)
if not self.args.runAsUser:
self.args.runAsUser = self.conf.system_user
def configure_server_with_chef(self):
chef_dir = self.conf.get_temp_file("chefconf", isdir=True)
run_list_file_name = os.path.join(chef_dir, "runList.cfg")
token_file_path = self.conf.get_temp_file("token.pem")
try:
if self.args.encryptedAuthSecret:
token = self.args.encryptedAuthSecret
else:
token = "NULL"
authId = self.args.authId
if authId is None:
authId = "NULL"
endpoint = self.args.endpoint
if endpoint is None:
endpoint = "NULL"
environmentId = self.args.environmentId
if environmentId is None:
environmentId = "NULL"
chef_json = {"run_list": self.args.runListIds}
with open(run_list_file_name, "w") as fptr:
fptr.write(json.dumps(chef_json))
with open(token_file_path, "w") as fptr:
fptr.write(token)
fptr.write(os.linesep)
exe = self.conf.get_script_location(
"runConfigurationManagement-CHEF")
cmd_list = [exe,
self.args.runAsUser,
self.args.configClientName,
token_file_path,
run_list_file_name,
authId,
endpoint,
environmentId,
self.conf.configuration_management_chef_client_version]
return plugin_utils.run_command(self.conf, cmd_list)
finally:
plugin_utils.safe_delete(run_list_file_name)
plugin_utils.safe_delete(token_file_path)
def _edit_puppet_conf(self, template_path, new_location, endpoint):
parser = configparser.SafeConfigParser()
parser.read(template_path)
if not parser.has_section("agent"):
parser.add_section("agent")
parser.set("agent", "certname", self.args.configClientName)
parser.set("agent", "server", endpoint)
with open(new_location, "w") as fptr:
parser.write(fptr)
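    # For illustration, with an assumed configClientName of "node01.example.com" and
    # endpoint "puppet.example.com", the rewritten puppet.conf gains/updates:
    #
    #   [agent]
    #   certname = node01.example.com
    #   server = puppet.example.com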
def configure_server_with_puppet(self):
if self.args.endpoint is None:
raise exceptions.AgentOptionValueNotSetException("endpoint")
# XXX it will only work with the default port. There is no way for
# the user to configure anything else in the console
endpoint = urllib.parse.urlparse(self.args.endpoint).hostname
puppet_extras_base_path = os.path.join(self.conf.extra_base_path,
"puppetconf")
puppet_extras_bin = os.path.join(self.conf.extra_base_path,
"bin/puppet")
try:
utils.install_extras(
self.conf, package=self.conf.extra_package_name)
except exceptions.AgentExtrasNotInstalledException as ex:
_g_logger.exception("An error occurred trying to install puppet. "
"Exception message is %s" % str(ex))
raise
template_puppet_conf_path = os.path.join(puppet_extras_base_path,
"puppet.conf.template")
if not os.path.exists(template_puppet_conf_path):
raise exceptions.AgentExtrasNotInstalledException(
"The puppet.conf template did not install properly.")
if not os.path.exists(puppet_extras_bin):
raise exceptions.AgentExtrasNotInstalledException(
"The puppet binary did not install properly.")
puppet_conf_path = self.conf.get_temp_file("puppet.conf")
self._edit_puppet_conf(template_puppet_conf_path,
puppet_conf_path,
endpoint)
cert_file_path = self.conf.get_temp_file("cert.pem")
key_file_path = self.conf.get_temp_file("key.pem")
try:
with open(cert_file_path, "w") as fptr:
fptr.write(self.args.configCert)
with open(key_file_path, "w") as fptr:
fptr.write(self.args.configKey)
exe = self.conf.get_script_location(
"runConfigurationManagement-PUPPET")
cmd = [exe,
endpoint,
cert_file_path,
key_file_path,
self.args.configClientName,
self.conf.extra_base_path,
puppet_conf_path]
return plugin_utils.run_command(self.conf, cmd)
finally:
plugin_utils.safe_delete(cert_file_path)
plugin_utils.safe_delete(key_file_path)
plugin_utils.safe_delete(puppet_conf_path)
def run(self):
_g_logger.info("Running configuration management of type " +
self.args.configType)
if self.args.configType.upper() == "CHEF":
(stdout, stderr, rc) = self.configure_server_with_chef()
elif self.args.configType.upper() == "PUPPET":
(stdout, stderr, rc) = self.configure_server_with_puppet()
else:
raise plugin_exceptions.AgentPluginParameterBadValueException(
"configType", "CHEF or PUPPET")
if stderr:
dcm_logger.log_to_dcm_console_configuration_management_error(
stderr=stderr)
if stdout:
dcm_logger.log_to_dcm_console_configuration_management_output(
stdout=stdout)
if rc != 0:
return plugin_base.PluginReply(rc, message=stderr)
else:
return plugin_base.PluginReply(
rc, reply_type="string", reply_object=stdout)
def load_plugin(conf, job_id, items_map, name, arguments):
return ConfigureServer(conf, job_id, items_map, name, arguments)
| 38.635965
| 79
| 0.595187
| 7,700
| 0.874106
| 0
| 0
| 0
| 0
| 0
| 0
| 1,661
| 0.188557
|
c44957a976ba959e51bd70f903dcac90438fe807
| 17,184
|
py
|
Python
|
phy/plot/interact.py
|
ycanerol/phy
|
7a247f926dd5bf5d8ab95fe138e8f4a0db11b068
|
[
"BSD-3-Clause"
] | 118
|
2019-06-03T06:19:43.000Z
|
2022-03-25T00:05:26.000Z
|
phy/plot/interact.py
|
ycanerol/phy
|
7a247f926dd5bf5d8ab95fe138e8f4a0db11b068
|
[
"BSD-3-Clause"
] | 761
|
2015-01-08T11:17:41.000Z
|
2019-05-27T16:12:08.000Z
|
phy/plot/interact.py
|
ycanerol/phy
|
7a247f926dd5bf5d8ab95fe138e8f4a0db11b068
|
[
"BSD-3-Clause"
] | 70
|
2019-05-30T11:05:26.000Z
|
2022-03-30T11:51:23.000Z
|
# -*- coding: utf-8 -*-
"""Common layouts."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
import logging
import numpy as np
from phylib.utils import emit
from phylib.utils.geometry import get_non_overlapping_boxes, get_closest_box
from .base import BaseLayout
from .transform import Scale, Range, Subplot, Clip, NDC
from .utils import _get_texture, _in_polygon
from .visuals import LineVisual, PolygonVisual
logger = logging.getLogger(__name__)
#------------------------------------------------------------------------------
# Grid
#------------------------------------------------------------------------------
class Grid(BaseLayout):
"""Layout showing subplots arranged in a 2D grid.
Constructor
-----------
shape : tuple or str
Number of rows, cols in the grid.
shape_var : str
Name of the GLSL uniform variable that holds the shape, when it is variable.
box_var : str
Name of the GLSL variable with the box index.
has_clip : boolean
Whether subplots should be clipped.
Note
----
To be used in a grid, a visual must define `a_box_index` (by default) or another GLSL
variable specified in `box_var`.
"""
margin = .075
n_dims = 2
active_box = (0, 0)
_scaling = (1., 1.)
def __init__(self, shape=(1, 1), shape_var='u_grid_shape', box_var=None, has_clip=True):
super(Grid, self).__init__(box_var=box_var)
self.shape_var = shape_var
self._shape = shape
ms = 1 - self.margin
mc = 1 - self.margin
# Define the GPU transforms of the Grid layout.
# 1. Global scaling.
self.gpu_transforms.add(Scale(self._scaling, gpu_var='u_grid_scaling'))
# 2. Margin.
self.gpu_transforms.add(Scale((ms, ms)))
# 3. Clipping for the subplots.
if has_clip:
self.gpu_transforms.add(Clip([-mc, -mc, +mc, +mc]))
# 4. Subplots.
self.gpu_transforms.add(Subplot(
# The parameters of the subplots are callable as they can be changed dynamically.
shape=lambda: self._shape, index=lambda: self.active_box,
shape_gpu_var=self.shape_var, index_gpu_var=self.box_var))
def attach(self, canvas):
"""Attach the grid to a canvas."""
super(Grid, self).attach(canvas)
canvas.gpu_transforms += self.gpu_transforms
canvas.inserter.insert_vert(
"""
attribute vec2 {};
uniform vec2 {};
uniform vec2 u_grid_scaling;
""".format(self.box_var, self.shape_var),
'header', origin=self)
def add_boxes(self, canvas, shape=None):
"""Show subplot boxes."""
shape = shape or self.shape
assert isinstance(shape, tuple)
n, m = shape
n_boxes = n * m
a = 1 - .0001
pos = np.array([[-a, -a, +a, -a],
[+a, -a, +a, +a],
[+a, +a, -a, +a],
[-a, +a, -a, -a],
])
pos = np.tile(pos, (n_boxes, 1))
box_index = []
for i in range(n):
for j in range(m):
box_index.append([i, j])
box_index = np.vstack(box_index)
box_index = np.repeat(box_index, 8, axis=0)
boxes = LineVisual()
# We exclude this interact when adding the visual.
canvas.add_visual(boxes, clearable=False)
boxes.set_data(pos=pos)
boxes.set_box_index(box_index)
canvas.update()
def get_closest_box(self, pos):
"""Get the box index (i, j) closest to a given position in NDC coordinates."""
x, y = pos
rows, cols = self.shape
j = np.clip(int(cols * (1. + x) / 2.), 0, cols - 1)
i = np.clip(int(rows * (1. - y) / 2.), 0, rows - 1)
return i, j
def update_visual(self, visual):
"""Update a visual."""
super(Grid, self).update_visual(visual)
if self.shape_var in visual.program:
visual.program[self.shape_var] = self._shape
visual.program['u_grid_scaling'] = self._scaling
@property
def shape(self):
"""Return the grid shape."""
return self._shape
@shape.setter
def shape(self, value):
self._shape = value
self.update()
@property
def scaling(self):
"""Return the grid scaling."""
return self._scaling
@scaling.setter
def scaling(self, value):
self._scaling = value
self.update()
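# A minimal usage sketch for the Grid layout (the canvas is assumed to be a phy
# canvas object; values are illustrative):
#
#     >>> grid = Grid(shape=(2, 3))
#     >>> grid.attach(canvas)
#     >>> grid.add_boxes(canvas)
#     >>> grid.get_closest_box((0., 0.))   # -> (1, 1) for a 2x3 grid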
#------------------------------------------------------------------------------
# Boxed
#------------------------------------------------------------------------------
class Boxed(BaseLayout):
"""Layout showing plots in rectangles at arbitrary positions. Used by the waveform view.
The boxes are specified via their center positions and optional sizes, in which case
an iterative algorithm is used to find the largest box size that will not make them overlap.
Constructor
    -----------
box_pos : array-like (2D, shape[1] == 2)
Position of the centers of the boxes.
box_var : str
Name of the GLSL variable with the box index.
keep_aspect_ratio : boolean
Whether to keep the aspect ratio of the bounds.
Note
----
To be used in a boxed layout, a visual must define `a_box_index` (by default) or another GLSL
variable specified in `box_var`.
"""
margin = .1
n_dims = 1
active_box = 0
_box_scaling = (1., 1.)
_layout_scaling = (1., 1.)
_scaling_param_increment = 1.1
def __init__(self, box_pos=None, box_var=None, keep_aspect_ratio=False):
super(Boxed, self).__init__(box_var=box_var)
self._key_pressed = None
self.keep_aspect_ratio = keep_aspect_ratio
self.update_boxes(box_pos)
self.gpu_transforms.add(Range(
NDC, lambda: self.box_bounds[self.active_box],
from_gpu_var='vec4(-1, -1, 1, 1)', to_gpu_var='box_bounds'))
def attach(self, canvas):
"""Attach the boxed interact to a canvas."""
super(Boxed, self).attach(canvas)
canvas.gpu_transforms += self.gpu_transforms
canvas.inserter.insert_vert("""
#include "utils.glsl"
attribute float {};
uniform sampler2D u_box_pos;
uniform float n_boxes;
uniform vec2 u_box_size;
uniform vec2 u_layout_scaling;
""".format(self.box_var), 'header', origin=self)
canvas.inserter.insert_vert("""
// Fetch the box bounds for the current box (`box_var`).
vec2 box_pos = fetch_texture({}, u_box_pos, n_boxes).xy;
box_pos = (2 * box_pos - 1); // from [0, 1] (texture) to [-1, 1] (NDC)
box_pos = box_pos * u_layout_scaling;
vec4 box_bounds = vec4(box_pos - u_box_size, box_pos + u_box_size);
""".format(self.box_var), 'start', origin=self)
def update_visual(self, visual):
"""Update a visual."""
super(Boxed, self).update_visual(visual)
box_pos = _get_texture(self.box_pos, (0, 0), self.n_boxes, [-1, 1])
box_pos = box_pos.astype(np.float32)
if 'u_box_pos' in visual.program:
logger.log(5, "Update visual with interact Boxed.")
visual.program['u_box_pos'] = box_pos
visual.program['n_boxes'] = self.n_boxes
visual.program['u_box_size'] = np.array(self.box_size) * np.array(self._box_scaling)
visual.program['u_layout_scaling'] = self._layout_scaling
def update_boxes(self, box_pos):
"""Update the box positions and automatically-computed size."""
self.box_pos, self.box_size = get_non_overlapping_boxes(box_pos)
def add_boxes(self, canvas):
"""Show the boxes borders."""
n_boxes = len(self.box_pos)
a = 1 + .05
pos = np.array([[-a, -a, +a, -a],
[+a, -a, +a, +a],
[+a, +a, -a, +a],
[-a, +a, -a, -a],
])
pos = np.tile(pos, (n_boxes, 1))
boxes = LineVisual()
box_index = np.repeat(np.arange(n_boxes), 8)
canvas.add_visual(boxes, clearable=False)
boxes.set_data(pos=pos, color=(.5, .5, .5, 1))
boxes.set_box_index(box_index)
canvas.update()
# Change the box bounds, positions, or size
#--------------------------------------------------------------------------
@property
def n_boxes(self):
"""Total number of boxes."""
return len(self.box_pos)
@property
def box_bounds(self):
"""Bounds of the boxes."""
bs = np.array(self.box_size)
return np.c_[self.box_pos - bs, self.box_pos + bs]
def get_closest_box(self, pos):
"""Get the box closest to some position."""
return get_closest_box(pos, self.box_pos, self.box_size)
# Box scaling
#--------------------------------------------------------------------------
def _increment_box_scaling(self, cw=1., ch=1.):
self._box_scaling = (self._box_scaling[0] * cw, self._box_scaling[1] * ch)
self.update()
@property
def box_scaling(self):
return self._box_scaling
def expand_box_width(self):
return self._increment_box_scaling(cw=self._scaling_param_increment)
def shrink_box_width(self):
return self._increment_box_scaling(cw=1. / self._scaling_param_increment)
def expand_box_height(self):
return self._increment_box_scaling(ch=self._scaling_param_increment)
def shrink_box_height(self):
return self._increment_box_scaling(ch=1. / self._scaling_param_increment)
# Layout scaling
#--------------------------------------------------------------------------
def _increment_layout_scaling(self, cw=1., ch=1.):
self._layout_scaling = (self._layout_scaling[0] * cw, self._layout_scaling[1] * ch)
self.update()
@property
def layout_scaling(self):
return self._layout_scaling
def expand_layout_width(self):
return self._increment_layout_scaling(cw=self._scaling_param_increment)
def shrink_layout_width(self):
return self._increment_layout_scaling(cw=1. / self._scaling_param_increment)
def expand_layout_height(self):
return self._increment_layout_scaling(ch=self._scaling_param_increment)
def shrink_layout_height(self):
return self._increment_layout_scaling(ch=1. / self._scaling_param_increment)
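# A minimal usage sketch for Boxed (box centers are illustrative; the box size is
# computed by get_non_overlapping_boxes):
#
#     >>> box_pos = np.array([[-.5, -.5], [.5, .5]])
#     >>> boxed = Boxed(box_pos)
#     >>> boxed.n_boxes                     # -> 2
#     >>> boxed.get_closest_box((.4, .4))   # -> index of the nearest box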
class Stacked(Boxed):
"""Layout showing a number of subplots stacked vertically.
Parameters
----------
n_boxes : int
Number of boxes to stack vertically.
box_var : str
Name of the GLSL variable with the box index.
origin : str
top or bottom
Note
----
To be used in a boxed layout, a visual must define `a_box_index` (by default) or another GLSL
variable specified in `box_var`.
"""
margin = 0
_origin = 'bottom'
def __init__(self, n_boxes, box_var=None, origin=None):
self._origin = origin or self._origin
assert self._origin in ('top', 'bottom')
box_pos = self.get_box_pos(n_boxes)
super(Stacked, self).__init__(box_pos, box_var=box_var, keep_aspect_ratio=False)
@property
def n_boxes(self):
"""Number of boxes."""
return len(self.box_pos)
@n_boxes.setter
def n_boxes(self, n_boxes):
if n_boxes >= 1:
self.update_boxes(self.get_box_pos(n_boxes))
def get_box_pos(self, n_boxes):
"""Return the box bounds for a given number of stacked boxes."""
# Signal bounds.
b = np.zeros((n_boxes, 2))
b[:, 1] = np.linspace(-1, 1, n_boxes)
if self._origin == 'top':
b = b[::-1, :]
return b
@property
def origin(self):
"""Whether to show the channels from top to bottom (`top` option, the default), or from
bottom to top (`bottom`)."""
return self._origin
@origin.setter
def origin(self, value):
self._origin = value
self.update_boxes(self.get_box_pos(self.n_boxes))
self.update()
def attach(self, canvas):
"""Attach the stacked interact to a canvas."""
BaseLayout.attach(self, canvas)
canvas.gpu_transforms += self.gpu_transforms
canvas.inserter.insert_vert("""
#include "utils.glsl"
attribute float {};
uniform float n_boxes;
uniform bool u_top_origin;
uniform vec2 u_box_size;
""".format(self.box_var), 'header', origin=self)
canvas.inserter.insert_vert("""
float margin = .1 / n_boxes;
float a = 1 - 2. / n_boxes + margin;
float b = -1 + 2. / n_boxes - margin;
float u = (u_top_origin ? (n_boxes - 1. - {bv}) : {bv}) / max(1., n_boxes - 1.);
float y0 = -1 + u * (a + 1);
float y1 = b + u * (1 - b);
float ym = .5 * (y0 + y1);
float yh = u_box_size.y * (y1 - ym);
y0 = ym - yh;
y1 = ym + yh;
vec4 box_bounds = vec4(-1., y0, +1., y1);
""".format(bv=self.box_var), 'before_transforms', origin=self)
def update_visual(self, visual):
"""Update a visual."""
BaseLayout.update_visual(self, visual)
if 'n_boxes' in visual.program:
visual.program['n_boxes'] = self.n_boxes
visual.program['u_box_size'] = self._box_scaling
visual.program['u_top_origin'] = self._origin == 'top'
#------------------------------------------------------------------------------
# Interactive tools
#------------------------------------------------------------------------------
class Lasso(object):
"""Draw a polygon with the mouse and find the points that belong to the inside of the
polygon."""
def __init__(self):
self._points = []
self.canvas = None
self.visual = None
self.box = None
def add(self, pos):
"""Add a point to the polygon."""
x, y = pos.flat if isinstance(pos, np.ndarray) else pos
self._points.append((x, y))
logger.debug("Lasso has %d points.", len(self._points))
self.update_lasso_visual()
@property
def polygon(self):
"""Coordinates of the polygon vertices."""
l = self._points
# Close the polygon.
# l = l + l[0] if len(l) else l
out = np.array(l, dtype=np.float64)
out = np.reshape(out, (out.size // 2, 2))
assert out.ndim == 2
assert out.shape[1] == 2
return out
def clear(self):
"""Reset the lasso."""
self._points = []
self.box = None
self.update_lasso_visual()
@property
def count(self):
"""Number of vertices in the polygon."""
return len(self._points)
def in_polygon(self, pos):
"""Return which points belong to the polygon."""
return _in_polygon(pos, self.polygon)
def attach(self, canvas):
"""Attach the lasso to a canvas."""
canvas.attach_events(self)
self.canvas = canvas
self.create_lasso_visual()
def create_lasso_visual(self):
"""Create the lasso visual."""
self.visual = PolygonVisual()
self.canvas.add_visual(self.visual, clearable=False)
def update_lasso_visual(self):
"""Update the lasso visual with the current polygon."""
if not self.visual and self.count > 0:
return
# The following call updates a_box_index with the active box in BaseLayout.
self.visual.set_data(pos=self.polygon)
self.canvas.update()
def on_mouse_click(self, e):
"""Add a polygon point with ctrl+click."""
if 'Control' in e.modifiers:
if e.button == 'Left':
layout = getattr(self.canvas, 'layout', None)
if hasattr(layout, 'box_map'):
box, pos = layout.box_map(e.pos)
# Only update the box for the first click, so that the box containing
# the lasso is determined by the first click only.
if self.box is None:
self.box = box
# Avoid clicks outside the active box (box of the first click).
if box != self.box:
return
else: # pragma: no cover
pos = self.canvas.window_to_ndc(e.pos)
# Force the active box to be the box of the first click, not the box of the
# current click.
if layout:
layout.active_box = self.box
self.add(pos) # call update_lasso_visual
emit("lasso_updated", self.canvas, self.polygon)
else:
self.clear()
self.box = None
def __repr__(self):
return str(self.polygon)
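# A minimal sketch of the polygon membership test (no canvas attached; coordinates
# are illustrative):
#
#     >>> lasso = Lasso()
#     >>> for pt in [(0, 0), (1, 0), (1, 1), (0, 1)]:
#     ...     lasso.add(pt)
#     >>> lasso.in_polygon(np.array([[.5, .5], [2., 2.]]))   # inside/outside -> [True, False]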
| 33.694118
| 97
| 0.557437
| 16,076
| 0.935521
| 0
| 0
| 1,827
| 0.10632
| 0
| 0
| 6,447
| 0.375175
|
c44cda7d547bb9bf0fd8879defc0c14046119449
| 623
|
py
|
Python
|
AutocompleteHandler.py
|
codeforamerica/sheltraustin
|
a07ffd4b328a9d961347a85b49c95d8bf5ec1046
|
[
"BSD-3-Clause"
] | null | null | null |
AutocompleteHandler.py
|
codeforamerica/sheltraustin
|
a07ffd4b328a9d961347a85b49c95d8bf5ec1046
|
[
"BSD-3-Clause"
] | 1
|
2015-08-03T21:27:36.000Z
|
2015-08-03T21:27:36.000Z
|
AutocompleteHandler.py
|
codeforamerica/sheltraustin
|
a07ffd4b328a9d961347a85b49c95d8bf5ec1046
|
[
"BSD-3-Clause"
] | 1
|
2021-04-17T10:13:29.000Z
|
2021-04-17T10:13:29.000Z
|
import tornado.httpserver
import tornado.ioloop
import tornado.options
import tornado.web
import simplejson
from QueryHandler import QueryHandler
class AutocompleteHandler(tornado.web.RequestHandler):
@tornado.web.asynchronous
def get(self):
        if not self.request.arguments:
            self.render('index.html')
            return
        if 'address' not in self.request.arguments:
            self.render('index.html')
            return
address = self.request.arguments['address'][0]
data = {
'address': address
}
output = QueryHandler.get_addresses(data)
self.write(output)
self.flush()
self.finish()
| 23.074074
| 62
| 0.746388
| 474
| 0.760835
| 0
| 0
| 418
| 0.670947
| 0
| 0
| 51
| 0.081862
|
c44d0eafae3c92e64f9041228d582ce1a1b6ed30
| 1,869
|
py
|
Python
|
mirari/SV/migrations/0052_auto_20190428_1522.py
|
gcastellan0s/mirariapp
|
24a9db06d10f96c894d817ef7ccfeec2a25788b7
|
[
"MIT"
] | null | null | null |
mirari/SV/migrations/0052_auto_20190428_1522.py
|
gcastellan0s/mirariapp
|
24a9db06d10f96c894d817ef7ccfeec2a25788b7
|
[
"MIT"
] | 18
|
2019-12-27T19:58:20.000Z
|
2022-02-27T08:17:49.000Z
|
mirari/SV/migrations/0052_auto_20190428_1522.py
|
gcastellan0s/mirariapp
|
24a9db06d10f96c894d817ef7ccfeec2a25788b7
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.0.5 on 2019-04-28 20:22
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('SV', '0051_ticketproducts_offerprice'),
]
operations = [
migrations.AddField(
model_name='product',
name='bar_code',
field=models.CharField(blank=True, help_text='(sugerido)', max_length=250, null=True, verbose_name='Código de Barras '),
),
migrations.AddField(
model_name='product',
name='ieps',
field=models.BooleanField(default=True, help_text='Graba IEPS? (sugerido)', verbose_name='IEPS. '),
),
migrations.AddField(
model_name='product',
name='is_dynamic',
field=models.BooleanField(default=False, help_text='Este producto tiene precio variable? (sugerido)', verbose_name='Precio dinámico '),
),
migrations.AddField(
model_name='product',
name='is_favorite',
field=models.BooleanField(default=False, help_text='Se muestra siempre este producto? (sugerido)', verbose_name='Es favorito? '),
),
migrations.AddField(
model_name='product',
name='iva',
field=models.BooleanField(default=True, help_text='Graba IVA? (sugerido)', verbose_name='I.V.A. '),
),
migrations.AddField(
model_name='product',
name='price',
field=models.FloatField(default=0, help_text='Graba IVA? (sugerido)', verbose_name='Precio en esta sucursal '),
),
migrations.AlterField(
model_name='ticketproducts',
name='ticket',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='SV.Ticket'),
),
]
| 37.38
| 147
| 0.604066
| 1,745
| 0.932656
| 0
| 0
| 0
| 0
| 0
| 0
| 499
| 0.266702
|
c44d2937a78223f5c0f6b30adbd02a5949d5f2e6
| 3,339
|
py
|
Python
|
svl/compiler/plot_validators.py
|
timothyrenner/svl
|
a74c09c49f2e14046acd4b0eeb861f8fef6bca96
|
[
"MIT"
] | 8
|
2019-03-27T12:49:21.000Z
|
2020-10-10T11:16:25.000Z
|
svl/compiler/plot_validators.py
|
timothyrenner/svl
|
a74c09c49f2e14046acd4b0eeb861f8fef6bca96
|
[
"MIT"
] | 65
|
2018-08-26T14:48:45.000Z
|
2020-03-17T12:24:42.000Z
|
svl/compiler/plot_validators.py
|
timothyrenner/svl
|
a74c09c49f2e14046acd4b0eeb861f8fef6bca96
|
[
"MIT"
] | 1
|
2019-09-13T19:39:07.000Z
|
2019-09-13T19:39:07.000Z
|
from toolz import get
PLOT_VALIDATORS = [
(
{"line", "scatter", "bar"},
lambda x: ("x" not in x) or ("y" not in x),
"XY plot does not have X and Y.",
),
(
{"histogram"},
lambda x: ("step" in x) and ("bins" in x),
"Histogram cannot have STEP and BINS.",
),
(
{"line", "scatter", "bar"},
lambda x: ("agg" in x["x"]) and ("agg" in x["y"]),
"XY plot cannot have an aggregation on X and Y.",
),
(
{"histogram", "pie"},
lambda x: ("agg" in get("x", x, {}))
or ("agg" in get("y", x, {}))
or ("agg" in get("axis", x, {})),
"Histograms and pie charts cannot have aggregations.",
),
(
{"histogram", "pie"},
lambda x: ("temporal" in get("x", x, {}))
or ("temporal" in get("y", x, {}))
or ("temporal" in get("axis", x, {})),
"Histograms and pie charts cannot have temporal axes.",
),
(
{"histogram"},
lambda x: ("x" in x) and ("y" in x),
"Histograms can have X or Y, not both.",
),
(
{"histogram"},
lambda x: ("x" not in x) and ("y" not in x),
"Histograms must have an X or Y.",
),
({"pie"}, lambda x: "axis" not in x, "Pie charts must have an axis."),
(
{"line", "bar"}, # SORT is a no-op for scatter.
lambda x: ("sort" in x["x"]) and ("sort" in x["y"]),
"Cannot sort by two axes.",
),
(
{"pie"},
lambda x: (get("hole", x, 0.0) < 0) or (get("hole", x, 0.0) > 1),
"HOLE must be between zero and one.",
),
(
{"histogram"},
lambda x: get("step", x, 1) <= 0,
"STEP must be greater than zero.",
),
(
{"histogram"},
lambda x: get("bins", x, 1) <= 0,
"BINS must be greater than zero.",
),
(
{"histogram", "pie"},
lambda x: "color_by" in x,
"Histograms and pie charts cannot have COLOR BY.",
),
({"pie"}, lambda x: "split_by" in x, "Pie charts cannot have SPLIT BY."),
(
{"line", "scatter", "bar"},
lambda x: ("split_by" in x) and ("color_by" in x),
"Cannot have COLOR BY and SPLIT BY on same plot.",
),
(
{"line", "scatter", "bar"},
lambda x: (
# If we don't include this it can throw exceptions for other
# validators.
("x" in x)
and ("y" in x)
)
and (("agg" in x["x"]) or ("agg" in x["y"]))
and (("color_by" in x) and ("agg" not in x["color_by"])),
"If there's an aggregation on X or Y, COLOR BY must also aggregate.",
),
]
def validate_plot(svl_plot):
""" Validates the SVL plot.
Parameters
----------
svl_plot : dict
The SVL plot specifier.
Returns
-------
Tuple[bool, str]
A boolean indicating whether the plot is valid and a message
indicating that the plot is either valid or which validations it
failed.
"""
ok = True
failure_messages = []
for plots, validator, message in PLOT_VALIDATORS:
if (svl_plot["type"] in plots) and validator(svl_plot):
ok = False
failure_messages.append(message)
return ok, "\n".join(failure_messages)
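# An illustrative check (the plot dict below is a hand-written assumption of the SVL
# plot specifier shape, not produced by the parser):
#
#     >>> bad_histogram = {"type": "histogram", "x": {"field": "price"},
#     ...                  "step": 10, "bins": 25}
#     >>> validate_plot(bad_histogram)
#     (False, 'Histogram cannot have STEP and BINS.')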
| 29.289474
| 77
| 0.481881
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,590
| 0.47619
|
c44dbf415c5fb9329410760b9f9c0e517b6fdb6f
| 7,421
|
py
|
Python
|
decision_tree.py
|
cjbayron/ml-models
|
b3171c9a82fe5ecdcdc5abcdc20af7c18f9f8ec4
|
[
"MIT"
] | 1
|
2018-12-15T16:36:41.000Z
|
2018-12-15T16:36:41.000Z
|
decision_tree.py
|
cjbayron/ml-models
|
b3171c9a82fe5ecdcdc5abcdc20af7c18f9f8ec4
|
[
"MIT"
] | null | null | null |
decision_tree.py
|
cjbayron/ml-models
|
b3171c9a82fe5ecdcdc5abcdc20af7c18f9f8ec4
|
[
"MIT"
] | null | null | null |
'''
Building a Decision Tree using CART (from scratch)
Note: Code was tested only on dataset with numerical features.
Categorical features are not yet fully supported.
'''
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.metrics import classification_report
from scikitplot.metrics import plot_confusion_matrix
import common.utils as ut
# get data from:
# https://www.kaggle.com/c/otto-group-product-classification-challenge
TRN_DATA_PATH = 'datasets/otto-group-product-classification/train.csv'
NUM_SAMPLES = 5000
NUM_FEATS = 93
def visualize_data(feats, true_labels, preds):
'''Display labeled data and clustered data
'''
print("Visualizing data...")
red_feats = ut.reduce_to_2D_by_tsne(feats)
label2col_map = ['red', 'orange', 'yellow', 'green', 'blue',
'violet', 'brown', 'gray', 'pink']
label_list = np.unique(true_labels)
_, ax = plt.subplots(ncols=2, figsize=(10, 5))
graph_label_pair = zip(ax, [true_labels, preds])
for graph, labels in graph_label_pair:
for label in label_list:
# get samples with label == label
idxs = np.where(labels == label)
# get components
pc1, pc2 = red_feats['pc1'].values[idxs], red_feats['pc2'].values[idxs]
# scatter plot w/ color based on labels
graph.scatter(x=pc1, y=pc2, color=label2col_map[label-1],
alpha=0.5, label=label)
graph.set_xlabel('PC1')
graph.set_ylabel('PC2')
ax[0].set_title('Labeled Products')
ax[1].set_title('Predicted Labels')
for graph in ax:
graph.legend() # show legend
graph.grid(True) # show gridlines
plt.show()
def get_impurity(labels):
'''Calculate Gini impurity
'''
num_labels = float(len(labels))
imp = 0.0
_, cnts = np.unique(labels, return_counts=True)
for cnt in cnts:
cnt = float(cnt)
imp += float((cnt/num_labels)*(1-(cnt/num_labels)))
return imp
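# Worked example: for labels [1, 1, 2, 2, 2] the class fractions are 2/5 and 3/5,
# so the Gini impurity is (2/5)*(3/5) + (3/5)*(2/5) = 0.48 (up to floating point):
#
#     >>> get_impurity(np.array([1, 1, 2, 2, 2]))
#     0.48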
def get_best_split_along_column(data, labels, feat_idx, categorical=False):
'''Get best split using features in a single column
'''
feat_col = data[:, feat_idx]
splitter_pool = np.unique(feat_col) # get splitters
min_im = np.inf
left_idxs = []
right_idxs = []
splitter = None
for val in splitter_pool:
if categorical:
left_labels = labels[feat_col == val]
right_labels = labels[feat_col != val]
else:
left_labels = labels[feat_col >= val]
right_labels = labels[feat_col < val]
# if all data is placed on only one side
# then it is not a meaningful split so we skip
if len(left_labels) == len(data) or len(right_labels) == len(data):
continue
avg_im = len(left_labels) * get_impurity(left_labels) + \
len(right_labels) * get_impurity(right_labels)
if avg_im < min_im:
min_im = avg_im
left_idxs = (feat_col >= val)
right_idxs = (feat_col < val)
splitter = val
    # Normalize by the total number of samples to get the weighted average impurity.
    if splitter is not None:
        min_im /= float(len(labels))
return min_im, splitter, left_idxs, right_idxs
class TreeNode():
'''Node for a Decision Tree
'''
def __init__(self):
self.labels = None
self.left_node = None
self.right_node = None
self.is_leaf = False
self.categorical = False
self.splitter = None
def build_tree(self, feats, labels):
'''Build tree recursively
'''
self.labels = labels
best_gain = 0
best_left_idxs = []
best_right_idxs = []
best_splitter = None
cur_imp = get_impurity(labels)
for col in range(len(feats[0])):
# Note: we assume all features are numerical instead of categorical
imp, splitter, left_idxs, right_idxs = \
get_best_split_along_column(feats, labels, col,
categorical=False)
gain = cur_imp - imp
if gain > best_gain:
best_gain = gain
best_left_idxs = left_idxs
best_right_idxs = right_idxs
best_splitter = {'col': col, 'val': splitter}
self.splitter = best_splitter
if self.splitter is None:
self.is_leaf = True
else:
self.left_node = TreeNode()
self.right_node = TreeNode()
self.left_node.build_tree(feats[best_left_idxs], labels[best_left_idxs])
self.right_node.build_tree(feats[best_right_idxs], labels[best_right_idxs])
return
def classify(self, feats):
'''Classify sample according to built tree
'''
if self.is_leaf is False and self.splitter is None:
raise Exception("Decision tree not built!")
if self.is_leaf:
return np.random.choice(self.labels)
else:
val = self.splitter['val']
col = self.splitter['col']
if self.categorical:
if feats[col] == val:
label = self.left_node.classify(feats)
else:
label = self.right_node.classify(feats)
else:
if feats[col] >= val:
label = self.left_node.classify(feats)
else:
label = self.right_node.classify(feats)
return label
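# A tiny end-to-end sketch on toy data (arrays are illustrative):
#
#     >>> X = np.array([[1.0], [2.0], [10.0], [11.0]])
#     >>> y = np.array([0, 0, 1, 1])
#     >>> node = TreeNode()
#     >>> node.build_tree(X, y)
#     >>> node.classify(np.array([1.5]))    # splits at 10.0, so this -> 0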
def main():
'''Main
'''
global TRN_DATA_PATH, NUM_SAMPLES, NUM_FEATS
# no need to rescale for decision tree
feats, labels = ut.get_data_from_csv(TRN_DATA_PATH, rescale=False)
if NUM_SAMPLES < len(feats):
feats, labels = ut.sample(feats, labels, NUM_SAMPLES)
feats = feats.values
if NUM_FEATS < len(feats[0]):
idxs = np.random.choice(range(len(feats[0])), NUM_FEATS, replace=False)
feats = feats[:, idxs]
trn_feats, tst_feats, trn_labels, tst_labels = train_test_split(feats,
labels,
test_size=0.20,
stratify=labels)
# build tree
print("Building decision tree...")
decision_tree = TreeNode()
decision_tree.build_tree(trn_feats, trn_labels.values)
print("Done!")
print("Checking accuracy on training set...")
predictions = []
for sample in trn_feats:
result = decision_tree.classify(sample)
predictions.append(result)
    # sanity check only; accuracy on the training set should be close to 100%
print("Training Set Results:\n", classification_report(trn_labels, predictions))
print("Using tree to predict labels...")
predictions = []
for sample in tst_feats:
result = decision_tree.classify(sample)
predictions.append(result)
print("Test Set Results:\n", classification_report(tst_labels, predictions))
visualize_data(pd.DataFrame(tst_feats), tst_labels, predictions)
# display confusion matrix
print("Plotting confusion matrix...")
plot_confusion_matrix(tst_labels, predictions, normalize=True)
plt.show()
return 0
if __name__ == "__main__":
main()
| 31.849785
| 87
| 0.597898
| 2,277
| 0.306832
| 0
| 0
| 0
| 0
| 0
| 0
| 1,392
| 0.187576
|
c44e49588a5ae8bdc21c7e3ab11388f043afd9f0
| 8,816
|
py
|
Python
|
codegen/codegen/fblas_routine.py
|
spcl/fblas
|
96425fbdbaeab6f43997d839836b8224a04f3b53
|
[
"BSD-3-Clause"
] | 68
|
2019-02-07T21:30:21.000Z
|
2022-02-16T20:09:27.000Z
|
codegen/codegen/fblas_routine.py
|
spcl/fblas
|
96425fbdbaeab6f43997d839836b8224a04f3b53
|
[
"BSD-3-Clause"
] | 2
|
2019-03-15T17:49:03.000Z
|
2019-07-24T14:05:35.000Z
|
codegen/codegen/fblas_routine.py
|
spcl/fblas
|
96425fbdbaeab6f43997d839836b8224a04f3b53
|
[
"BSD-3-Clause"
] | 25
|
2019-03-15T03:00:15.000Z
|
2021-08-04T10:21:43.000Z
|
"""
FBlas Routine class: it is used to represent a routine definition, specified by the user via a JSON file.
It is used by the Host and Module Codegen (indicated by the _codegen member). Accordingly,
some class members may not apply to a given codegen.
"""
from codegen import fblas_types
from codegen import generator_definitions
class FBLASRoutine:
# name of the routine according to blas (without indication of the precision)
_blas_name = ""
# user name for the routine
_user_name = ""
# spatial parallelism (vectorization width)
_width = generator_definitions.DEFAULT_WIDTH
# data type used in routine
_type: fblas_types.RoutineType
_type_str: str
# if the routine has to use shift registers (e.g. double precision) or not
# and in case how big they should be
_uses_shift_registers = False
_size_shift_registers = 0
# The type of codegen:Host/Modules
_codegen = None
# inx/incy
_incx = 1
_incy = 1
# Level 2/3: tile sizes
_tile_n_size = generator_definitions.DEFAULT_TILE_SIZE
_tile_m_size = generator_definitions.DEFAULT_TILE_SIZE
# Matrix characteristics
_order = None
_diag = None
_transposeA = None
_transposeB = None
_side = None
_uplo = None
# input/output channels (useful for Module Codegen)
# these are instance member dictionaries "required_channel_name" -> "user_name"
_input_channels = None
_output_channels = None
# Tiles and element order (for level2/3 that works with matrices)
# The order is RowMajor if tiles/element are row streamed
# otherwise it is ColumnMajor
_tiles_A_order: fblas_types.FblasOrder = fblas_types.FblasOrder.FblasRowMajor
_elements_A_order: fblas_types.FblasOrder = fblas_types.FblasOrder.FblasRowMajor
    # Indicates whether or not this routine has a 2D computational tile (e.g. GEMM)
_has_2D_computational_tile = False
    # If so, these are the two vectorization widths
_width_x = 0
_width_y = 0
_tile_size = 0
_systolic = False
_vect_size = 0
def __init__(self, blas_name: str, user_name: str, type: fblas_types.RoutineType, platform: fblas_types.Platform, codegen: fblas_types.FblasCodegen):
self._blas_name = blas_name
self._user_name = user_name
self._type = type
self._type_str = fblas_types.ROUTINE_TYPE_TO_TYPE_STR[type]
self._platform = platform
self._codegen = codegen
self._width = generator_definitions.DEFAULT_WIDTH
# Declare all the instance variables
self._input_channels = {}
self._output_channels = {}
self._incx = 1
self._incy = 1
self._tile_n_size = generator_definitions.DEFAULT_TILE_SIZE
self._tile_m_size = generator_definitions.DEFAULT_TILE_SIZE
self._order = fblas_types.FblasOrder.FblasOrderUndef
self._diag = fblas_types.FblasDiag.FblasDiagUndef
self._transposeA = fblas_types.FblasTranspose.FblasTransUndef
self._transposeB = fblas_types.FblasTranspose.FblasTransUndef
self._side = fblas_types.FblasSide.FblasSideUndef
self._uplo = fblas_types.FblasUpLo.FblasUpLoUndef
if type == fblas_types.RoutineType.Double:
self._uses_shift_registers = True
self._size_shift_registers = fblas_types.SHIFT_REGISTER_SIZES[(type, platform)]
else:
self._uses_shift_registers = False
self._has_2D_computational_tile = False
        self._width_x = generator_definitions.DEFAULT_2D_CTILE_WIDTH
        self._width_y = generator_definitions.DEFAULT_2D_CTILE_WIDTH
self._tile_size = generator_definitions.DEFAULT_TILE_SIZE
self._systolic = False
self._vect_size = 4
def __str__(self):
return """Routine {} implements {} with type {}
Width: {} Incx: {} Incy: {}""".format(self._user_name, self._blas_name, self._type, self._width, self._incx, self._incy)
#Getter/setter
@property
def blas_name(self):
return self._blas_name
@property
def user_name(self):
return self._user_name
@property
def type(self):
return self._type
@property
def type_str(self):
return self._type_str
@property
def uses_shift_registers(self):
return self._uses_shift_registers
@uses_shift_registers.setter
def uses_shift_registers(self, value: bool):
#if the routine uses shift register, set the size
self._uses_shift_registers = value
if value:
self._size_shift_registers = fblas_types.SHIFT_REGISTER_SIZES[(self.type, self._platform)]
@property
def size_shift_registers(self):
return self._size_shift_registers
@property
def width(self):
return self._width
@width.setter
def width(self, width: int):
self._width = width
@property
def incx(self):
return self._incx
@incx.setter
def incx(self, incx: int):
self._incx = incx
@property
def incy(self):
return self._incy
@incy.setter
def incy(self, incy: int):
self._incy = incy
@property
def tile_n_size(self):
return self._tile_n_size
@tile_n_size.setter
def tile_n_size(self, tile_size: int):
self._tile_n_size = tile_size
@property
def tile_m_size(self):
return self._tile_m_size
@tile_m_size.setter
def tile_m_size(self, tile_size: int):
self._tile_m_size = tile_size
@property
def tile_size(self):
return self._tile_size
@tile_size.setter
def tile_size(self, tile_size: int):
self._tile_size = tile_size
@property
def order(self):
return self._order
@order.setter
def order(self, order: fblas_types.FblasOrder):
self._order = order
@property
def uplo(self):
return self._uplo
@uplo.setter
def uplo(self, uplo: fblas_types.FblasUpLo):
self._uplo = uplo
@property
def transposedA(self):
return self._transposeA
@transposedA.setter
def transposedA(self, trans: fblas_types.FblasTranspose):
self._transposeA = trans
@property
def transposedB(self):
return self._transposeB
@transposedB.setter
def transposedB(self, trans: fblas_types.FblasTranspose):
self._transposeB = trans
@property
def input_channels(self):
return self._input_channels
@property
def output_channels(self):
return self._output_channels
@property
def tiles_A_order(self):
return self._tiles_A_order
@tiles_A_order.setter
def tiles_A_order(self, order: fblas_types.FblasOrder):
self._tiles_A_order = order
@property
def elements_A_order(self):
return self._elements_A_order
@elements_A_order.setter
def elements_A_order(self, order : fblas_types.FblasOrder):
self._elements_A_order = order
@property
def has_2D_computational_tile(self):
return self._has_2D_computational_tile
@has_2D_computational_tile.setter
def has_2D_computational_tile(self, value: bool):
self._has_2D_computational_tile = value
@property
def width_x(self):
return self._width_x
@width_x.setter
def width_x(self, width: int):
self._width_x = width
@property
def width_y(self):
return self._width_y
@width_y.setter
def width_y(self, width: int):
self._width_y = width
@property
def systolic(self):
return self._systolic
@systolic.setter
def systolic(self, value: bool):
self._systolic = value
@property
def vect_size(self):
return self._vect_size
@vect_size.setter
def vect_size(self, value: int):
self._vect_size = value
def are_tiles_A_rowstreamed(self):
"""
:return: True if the tiles of A are rowstreamed
"""
return self._tiles_A_order == fblas_types.FblasOrder.FblasRowMajor
def are_elements_A_rowstreamed(self):
"""
:return: True if the elements of A are rowstreamed
"""
return self._elements_A_order == fblas_types.FblasOrder.FblasRowMajor
def add_input_channel(self, routine_channel_name, user_name):
'''
Add the channel to the dictionary of input channels
If already present, it will be overwritten
'''
self._input_channels[routine_channel_name] = user_name
def add_output_channel(self, routine_channel_name, user_name):
'''
        Add the channel to the dictionary of output channels
If already present, it will be overwritten
'''
self._output_channels[routine_channel_name] = user_name
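# Illustrative usage sketch (added for clarity; not part of the original module).
# RoutineType.Double is referenced above, but the Platform and FblasCodegen members
# used below are hypothetical placeholders whose real names depend on fblas_types.
def _example_routine():
    routine = FBLASRoutine(
        blas_name="dot",
        user_name="my_dot",
        type=fblas_types.RoutineType.Double,
        platform=fblas_types.Platform.Stratix10,       # hypothetical member name
        codegen=fblas_types.FblasCodegen.HostCodegen,  # hypothetical member name
    )
    routine.width = 16
    routine.tile_n_size = 1024
    routine.add_input_channel("x", "channel_in_vector_x")    # illustrative channel names
    routine.add_output_channel("res", "channel_out_scalar")  # illustrative channel names
    return routine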
| 27.810726
| 153
| 0.678993
| 8,484
| 0.962341
| 0
| 0
| 3,613
| 0.409823
| 0
| 0
| 1,614
| 0.183076
|
c450bd09aaf8a942b6ab2b24bef1d3ae022e6398
| 485
|
py
|
Python
|
lib/prefab/errors.py
|
lexsca/docker-prefab
|
bb34f1382e307346d0a0a95bde56861c28e56ec9
|
[
"MIT"
] | 1
|
2021-04-06T21:37:59.000Z
|
2021-04-06T21:37:59.000Z
|
lib/prefab/errors.py
|
lexsca/docker-prefab
|
bb34f1382e307346d0a0a95bde56861c28e56ec9
|
[
"MIT"
] | 1
|
2020-11-03T01:12:06.000Z
|
2020-11-03T03:55:28.000Z
|
lib/prefab/errors.py
|
lexsca/prefab
|
bb34f1382e307346d0a0a95bde56861c28e56ec9
|
[
"MIT"
] | null | null | null |
class PrefabError(Exception):
pass
class HashAlgorithmNotFound(PrefabError):
pass
class ImageAccessError(PrefabError):
pass
class ImageBuildError(PrefabError):
pass
class ImageNotFoundError(PrefabError):
pass
class ImagePushError(PrefabError):
pass
class ImageValidationError(PrefabError):
pass
class InvalidConfigError(PrefabError):
pass
class TargetCyclicError(PrefabError):
pass
class TargetNotFoundError(PrefabError):
pass
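# Example (added for illustration, not part of the original module): every specific
# error derives from PrefabError, so callers can handle any of them via the base class.
def _demo() -> None:
    try:
        raise ImageNotFoundError("image 'app:latest' was not found in the registry")
    except PrefabError as err:
        print(f"prefab operation failed: {err}")
if __name__ == "__main__":
    _demo()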
| 12.435897
| 41
| 0.756701
| 457
| 0.942268
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
c451451a751c9fd2575b893cf89c5f54e2fd8166
| 840
|
py
|
Python
|
test_hoyolab.py
|
c3kay/hoyolab-json-feed
|
43839194a253271c9c2fcbb564eb4b3e6179c01e
|
[
"Unlicense"
] | 1
|
2021-09-17T12:40:40.000Z
|
2021-09-17T12:40:40.000Z
|
test_hoyolab.py
|
c3kay/hoyolab-json-feed
|
43839194a253271c9c2fcbb564eb4b3e6179c01e
|
[
"Unlicense"
] | null | null | null |
test_hoyolab.py
|
c3kay/hoyolab-json-feed
|
43839194a253271c9c2fcbb564eb4b3e6179c01e
|
[
"Unlicense"
] | null | null | null |
from hoyolab import main
from os import environ
from os.path import exists
import atoma
def init_environ(d):
environ['HOYOLAB_JSON_PATH'] = '{}/hoyolab.json'.format(d)
environ['HOYOLAB_ATOM_PATH'] = '{}/hoyolab.xml'.format(d)
environ['HOYOLAB_JSON_URL'] = 'hoyolab.json'
environ['HOYOLAB_ATOM_URL'] = 'hoyolab.xml'
environ['HOYOLAB_ENTRIES'] = '1'
def test_feeds(tmpdir):
init_environ(tmpdir)
json_path = environ['HOYOLAB_JSON_PATH']
atom_path = environ['HOYOLAB_ATOM_PATH']
num_entries = int(environ['HOYOLAB_ENTRIES']) * 3
main()
assert exists(json_path)
assert exists(atom_path)
json_feed = atoma.parse_json_feed_file(json_path)
assert len(json_feed.items) == num_entries
atom_feed = atoma.parse_atom_file(atom_path)
assert len(atom_feed.entries) == num_entries
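# Added note (not part of the original test module): assuming pytest and atoma are
# installed, the test can be run with `pytest test_hoyolab.py -q`, or programmatically:
if __name__ == "__main__":
    import pytest
    raise SystemExit(pytest.main(["-q", __file__]))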
| 24.705882
| 62
| 0.713095
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 209
| 0.24881
|
c453263bed6e1f5fabe0695622acd9caad9d7447
| 194,112
|
py
|
Python
|
thirdweb/abi/nft_collection/__init__.py
|
princetonwong/python-sdk
|
f35181d97620e29d055498fca75f3702f3bb2449
|
[
"Apache-2.0"
] | 1
|
2022-02-18T16:59:12.000Z
|
2022-02-18T16:59:12.000Z
|
thirdweb/abi/nft_collection/__init__.py
|
princetonwong/python-sdk
|
f35181d97620e29d055498fca75f3702f3bb2449
|
[
"Apache-2.0"
] | null | null | null |
thirdweb/abi/nft_collection/__init__.py
|
princetonwong/python-sdk
|
f35181d97620e29d055498fca75f3702f3bb2449
|
[
"Apache-2.0"
] | null | null | null |
"""Generated wrapper for NFTCollection Solidity contract."""
# pylint: disable=too-many-arguments
import json
from typing import ( # pylint: disable=unused-import
Any,
List,
Optional,
Tuple,
Union,
)
from eth_utils import to_checksum_address
from mypy_extensions import TypedDict # pylint: disable=unused-import
from hexbytes import HexBytes
from thirdweb_web3 import Web3
from thirdweb_web3.contract import ContractFunction
from thirdweb_web3.datastructures import AttributeDict
from thirdweb_web3.providers.base import BaseProvider
from zero_ex.contract_wrappers.bases import ContractMethod, Validator
from zero_ex.contract_wrappers.tx_params import TxParams
# Try to import a custom validator class definition; if there isn't one,
# declare one that we can instantiate for the default argument to the
# constructor for NFTCollection below.
try:
# both mypy and pylint complain about what we're doing here, but this
# works just fine, so their messages have been disabled here.
from . import ( # type: ignore # pylint: disable=import-self
NFTCollectionValidator,
)
except ImportError:
class NFTCollectionValidator(Validator): # type: ignore
"""No-op input validator."""
try:
from .middleware import MIDDLEWARE # type: ignore
except ImportError:
pass
class DefaultAdminRoleMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the DEFAULT_ADMIN_ROLE method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address)
self._underlying_method = contract_function
def call(self, tx_params: Optional[TxParams] = None) -> Union[bytes, str]:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
tx_params = super().normalize_tx_params(tx_params)
returned = self._underlying_method().call(tx_params.as_dict())
        return HexBytes(returned)
def send_transaction(
self, tx_params: Optional[TxParams] = None
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method().transact(tx_params.as_dict())
def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
"""Construct calldata to be used as input to the method."""
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method().buildTransaction(tx_params.as_dict())
def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
"""Estimate gas consumption of method call."""
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method().estimateGas(tx_params.as_dict())
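# Illustrative sketch (added for clarity; not part of the generated wrapper): each
# *Method class wraps a single contract function. Assuming thirdweb_web3 mirrors the
# standard web3.py API and given a deployed contract address plus its ABI, a read of
# DEFAULT_ADMIN_ROLE could look like this; the variable names and the ABI source are
# assumptions.
def _example_read_default_admin_role(web3: Web3, contract_address: str, abi: list):
    contract = web3.eth.contract(
        address=to_checksum_address(contract_address), abi=abi
    )
    method = DefaultAdminRoleMethod(
        web3, contract_address, contract.functions.DEFAULT_ADMIN_ROLE
    )
    return method.call()  # issues an eth_call and returns the bytes32 role identifier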
class MinterRoleMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the MINTER_ROLE method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address)
self._underlying_method = contract_function
def call(self, tx_params: Optional[TxParams] = None) -> Union[bytes, str]:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
tx_params = super().normalize_tx_params(tx_params)
returned = self._underlying_method().call(tx_params.as_dict())
        return HexBytes(returned)
def send_transaction(
self, tx_params: Optional[TxParams] = None
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method().transact(tx_params.as_dict())
def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
"""Construct calldata to be used as input to the method."""
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method().buildTransaction(tx_params.as_dict())
def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
"""Estimate gas consumption of method call."""
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method().estimateGas(tx_params.as_dict())
class PauserRoleMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the PAUSER_ROLE method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address)
self._underlying_method = contract_function
def call(self, tx_params: Optional[TxParams] = None) -> Union[bytes, str]:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
tx_params = super().normalize_tx_params(tx_params)
returned = self._underlying_method().call(tx_params.as_dict())
        return HexBytes(returned)
def send_transaction(
self, tx_params: Optional[TxParams] = None
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method().transact(tx_params.as_dict())
def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
"""Construct calldata to be used as input to the method."""
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method().buildTransaction(tx_params.as_dict())
def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
"""Estimate gas consumption of method call."""
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method().estimateGas(tx_params.as_dict())
class TransferRoleMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the TRANSFER_ROLE method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address)
self._underlying_method = contract_function
def call(self, tx_params: Optional[TxParams] = None) -> Union[bytes, str]:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
tx_params = super().normalize_tx_params(tx_params)
returned = self._underlying_method().call(tx_params.as_dict())
        return HexBytes(returned)
def send_transaction(
self, tx_params: Optional[TxParams] = None
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method().transact(tx_params.as_dict())
def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
"""Construct calldata to be used as input to the method."""
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method().buildTransaction(tx_params.as_dict())
def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
"""Estimate gas consumption of method call."""
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method().estimateGas(tx_params.as_dict())
class ContractUri_Method(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the _contractURI method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address)
self._underlying_method = contract_function
def call(self, tx_params: Optional[TxParams] = None) -> str:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
tx_params = super().normalize_tx_params(tx_params)
returned = self._underlying_method().call(tx_params.as_dict())
return str(returned)
def send_transaction(
self, tx_params: Optional[TxParams] = None
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method().transact(tx_params.as_dict())
def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
"""Construct calldata to be used as input to the method."""
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method().buildTransaction(tx_params.as_dict())
def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
"""Estimate gas consumption of method call."""
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method().estimateGas(tx_params.as_dict())
class BalanceOfMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the balanceOf method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
validator: Validator = None,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address, validator)
self._underlying_method = contract_function
def validate_and_normalize_inputs(self, account: str, _id: int):
"""Validate the inputs to the balanceOf method."""
self.validator.assert_valid(
method_name="balanceOf",
parameter_name="account",
argument_value=account,
)
account = self.validate_and_checksum_address(account)
self.validator.assert_valid(
method_name="balanceOf",
parameter_name="id",
argument_value=_id,
)
# safeguard against fractional inputs
_id = int(_id)
return (account, _id)
def call(
self, account: str, _id: int, tx_params: Optional[TxParams] = None
) -> int:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
(account, _id) = self.validate_and_normalize_inputs(account, _id)
tx_params = super().normalize_tx_params(tx_params)
returned = self._underlying_method(account, _id).call(
tx_params.as_dict()
)
return int(returned)
def send_transaction(
self, account: str, _id: int, tx_params: Optional[TxParams] = None
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
(account, _id) = self.validate_and_normalize_inputs(account, _id)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(account, _id).transact(
tx_params.as_dict()
)
def build_transaction(
self, account: str, _id: int, tx_params: Optional[TxParams] = None
) -> dict:
"""Construct calldata to be used as input to the method."""
(account, _id) = self.validate_and_normalize_inputs(account, _id)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(account, _id).buildTransaction(
tx_params.as_dict()
)
def estimate_gas(
self, account: str, _id: int, tx_params: Optional[TxParams] = None
) -> int:
"""Estimate gas consumption of method call."""
(account, _id) = self.validate_and_normalize_inputs(account, _id)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(account, _id).estimateGas(
tx_params.as_dict()
)
class BalanceOfBatchMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the balanceOfBatch method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
validator: Validator = None,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address, validator)
self._underlying_method = contract_function
def validate_and_normalize_inputs(
self, accounts: List[str], ids: List[int]
):
"""Validate the inputs to the balanceOfBatch method."""
self.validator.assert_valid(
method_name="balanceOfBatch",
parameter_name="accounts",
argument_value=accounts,
)
self.validator.assert_valid(
method_name="balanceOfBatch",
parameter_name="ids",
argument_value=ids,
)
return (accounts, ids)
def call(
self,
accounts: List[str],
ids: List[int],
tx_params: Optional[TxParams] = None,
) -> List[int]:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
(accounts, ids) = self.validate_and_normalize_inputs(accounts, ids)
tx_params = super().normalize_tx_params(tx_params)
returned = self._underlying_method(accounts, ids).call(
tx_params.as_dict()
)
return [int(element) for element in returned]
def send_transaction(
self,
accounts: List[str],
ids: List[int],
tx_params: Optional[TxParams] = None,
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
(accounts, ids) = self.validate_and_normalize_inputs(accounts, ids)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(accounts, ids).transact(
tx_params.as_dict()
)
def build_transaction(
self,
accounts: List[str],
ids: List[int],
tx_params: Optional[TxParams] = None,
) -> dict:
"""Construct calldata to be used as input to the method."""
(accounts, ids) = self.validate_and_normalize_inputs(accounts, ids)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(accounts, ids).buildTransaction(
tx_params.as_dict()
)
def estimate_gas(
self,
accounts: List[str],
ids: List[int],
tx_params: Optional[TxParams] = None,
) -> int:
"""Estimate gas consumption of method call."""
(accounts, ids) = self.validate_and_normalize_inputs(accounts, ids)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(accounts, ids).estimateGas(
tx_params.as_dict()
)
class BurnMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the burn method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
validator: Validator = None,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address, validator)
self._underlying_method = contract_function
def validate_and_normalize_inputs(
self, account: str, _id: int, value: int
):
"""Validate the inputs to the burn method."""
self.validator.assert_valid(
method_name="burn",
parameter_name="account",
argument_value=account,
)
account = self.validate_and_checksum_address(account)
self.validator.assert_valid(
method_name="burn",
parameter_name="id",
argument_value=_id,
)
# safeguard against fractional inputs
_id = int(_id)
self.validator.assert_valid(
method_name="burn",
parameter_name="value",
argument_value=value,
)
# safeguard against fractional inputs
value = int(value)
return (account, _id, value)
def call(
self,
account: str,
_id: int,
value: int,
tx_params: Optional[TxParams] = None,
) -> None:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
(account, _id, value) = self.validate_and_normalize_inputs(
account, _id, value
)
tx_params = super().normalize_tx_params(tx_params)
self._underlying_method(account, _id, value).call(tx_params.as_dict())
def send_transaction(
self,
account: str,
_id: int,
value: int,
tx_params: Optional[TxParams] = None,
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
(account, _id, value) = self.validate_and_normalize_inputs(
account, _id, value
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(account, _id, value).transact(
tx_params.as_dict()
)
def build_transaction(
self,
account: str,
_id: int,
value: int,
tx_params: Optional[TxParams] = None,
) -> dict:
"""Construct calldata to be used as input to the method."""
(account, _id, value) = self.validate_and_normalize_inputs(
account, _id, value
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(account, _id, value).buildTransaction(
tx_params.as_dict()
)
def estimate_gas(
self,
account: str,
_id: int,
value: int,
tx_params: Optional[TxParams] = None,
) -> int:
"""Estimate gas consumption of method call."""
(account, _id, value) = self.validate_and_normalize_inputs(
account, _id, value
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(account, _id, value).estimateGas(
tx_params.as_dict()
)
class BurnBatchMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the burnBatch method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
validator: Validator = None,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address, validator)
self._underlying_method = contract_function
def validate_and_normalize_inputs(
self, account: str, ids: List[int], values: List[int]
):
"""Validate the inputs to the burnBatch method."""
self.validator.assert_valid(
method_name="burnBatch",
parameter_name="account",
argument_value=account,
)
account = self.validate_and_checksum_address(account)
self.validator.assert_valid(
method_name="burnBatch",
parameter_name="ids",
argument_value=ids,
)
self.validator.assert_valid(
method_name="burnBatch",
parameter_name="values",
argument_value=values,
)
return (account, ids, values)
def call(
self,
account: str,
ids: List[int],
values: List[int],
tx_params: Optional[TxParams] = None,
) -> None:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
(account, ids, values) = self.validate_and_normalize_inputs(
account, ids, values
)
tx_params = super().normalize_tx_params(tx_params)
self._underlying_method(account, ids, values).call(tx_params.as_dict())
def send_transaction(
self,
account: str,
ids: List[int],
values: List[int],
tx_params: Optional[TxParams] = None,
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
(account, ids, values) = self.validate_and_normalize_inputs(
account, ids, values
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(account, ids, values).transact(
tx_params.as_dict()
)
def build_transaction(
self,
account: str,
ids: List[int],
values: List[int],
tx_params: Optional[TxParams] = None,
) -> dict:
"""Construct calldata to be used as input to the method."""
(account, ids, values) = self.validate_and_normalize_inputs(
account, ids, values
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(account, ids, values).buildTransaction(
tx_params.as_dict()
)
def estimate_gas(
self,
account: str,
ids: List[int],
values: List[int],
tx_params: Optional[TxParams] = None,
) -> int:
"""Estimate gas consumption of method call."""
(account, ids, values) = self.validate_and_normalize_inputs(
account, ids, values
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(account, ids, values).estimateGas(
tx_params.as_dict()
)
class ContractUriMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the contractURI method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address)
self._underlying_method = contract_function
def call(self, tx_params: Optional[TxParams] = None) -> str:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
tx_params = super().normalize_tx_params(tx_params)
returned = self._underlying_method().call(tx_params.as_dict())
return str(returned)
def send_transaction(
self, tx_params: Optional[TxParams] = None
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method().transact(tx_params.as_dict())
def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
"""Construct calldata to be used as input to the method."""
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method().buildTransaction(tx_params.as_dict())
def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
"""Estimate gas consumption of method call."""
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method().estimateGas(tx_params.as_dict())
class CreateNativeTokensMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the createNativeTokens method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
validator: Validator = None,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address, validator)
self._underlying_method = contract_function
def validate_and_normalize_inputs(
self,
to: str,
nft_ur_is: List[str],
nft_supplies: List[int],
data: Union[bytes, str],
):
"""Validate the inputs to the createNativeTokens method."""
self.validator.assert_valid(
method_name="createNativeTokens",
parameter_name="to",
argument_value=to,
)
to = self.validate_and_checksum_address(to)
self.validator.assert_valid(
method_name="createNativeTokens",
parameter_name="_nftURIs",
argument_value=nft_ur_is,
)
self.validator.assert_valid(
method_name="createNativeTokens",
parameter_name="_nftSupplies",
argument_value=nft_supplies,
)
self.validator.assert_valid(
method_name="createNativeTokens",
parameter_name="data",
argument_value=data,
)
return (to, nft_ur_is, nft_supplies, data)
def call(
self,
to: str,
nft_ur_is: List[str],
nft_supplies: List[int],
data: Union[bytes, str],
tx_params: Optional[TxParams] = None,
) -> List[int]:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
(
to,
nft_ur_is,
nft_supplies,
data,
) = self.validate_and_normalize_inputs(
to, nft_ur_is, nft_supplies, data
)
tx_params = super().normalize_tx_params(tx_params)
returned = self._underlying_method(
to, nft_ur_is, nft_supplies, data
).call(tx_params.as_dict())
return [int(element) for element in returned]
def send_transaction(
self,
to: str,
nft_ur_is: List[str],
nft_supplies: List[int],
data: Union[bytes, str],
tx_params: Optional[TxParams] = None,
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
(
to,
nft_ur_is,
nft_supplies,
data,
) = self.validate_and_normalize_inputs(
to, nft_ur_is, nft_supplies, data
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(
to, nft_ur_is, nft_supplies, data
).transact(tx_params.as_dict())
def build_transaction(
self,
to: str,
nft_ur_is: List[str],
nft_supplies: List[int],
data: Union[bytes, str],
tx_params: Optional[TxParams] = None,
) -> dict:
"""Construct calldata to be used as input to the method."""
(
to,
nft_ur_is,
nft_supplies,
data,
) = self.validate_and_normalize_inputs(
to, nft_ur_is, nft_supplies, data
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(
to, nft_ur_is, nft_supplies, data
).buildTransaction(tx_params.as_dict())
def estimate_gas(
self,
to: str,
nft_ur_is: List[str],
nft_supplies: List[int],
data: Union[bytes, str],
tx_params: Optional[TxParams] = None,
) -> int:
"""Estimate gas consumption of method call."""
(
to,
nft_ur_is,
nft_supplies,
data,
) = self.validate_and_normalize_inputs(
to, nft_ur_is, nft_supplies, data
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(
to, nft_ur_is, nft_supplies, data
).estimateGas(tx_params.as_dict())
class CreatorMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the creator method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
validator: Validator = None,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address, validator)
self._underlying_method = contract_function
def validate_and_normalize_inputs(self, nft_id: int):
"""Validate the inputs to the creator method."""
self.validator.assert_valid(
method_name="creator",
parameter_name="_nftId",
argument_value=nft_id,
)
# safeguard against fractional inputs
nft_id = int(nft_id)
return nft_id
def call(self, nft_id: int, tx_params: Optional[TxParams] = None) -> str:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
(nft_id) = self.validate_and_normalize_inputs(nft_id)
tx_params = super().normalize_tx_params(tx_params)
returned = self._underlying_method(nft_id).call(tx_params.as_dict())
return str(returned)
def send_transaction(
self, nft_id: int, tx_params: Optional[TxParams] = None
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
(nft_id) = self.validate_and_normalize_inputs(nft_id)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(nft_id).transact(tx_params.as_dict())
def build_transaction(
self, nft_id: int, tx_params: Optional[TxParams] = None
) -> dict:
"""Construct calldata to be used as input to the method."""
(nft_id) = self.validate_and_normalize_inputs(nft_id)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(nft_id).buildTransaction(
tx_params.as_dict()
)
def estimate_gas(
self, nft_id: int, tx_params: Optional[TxParams] = None
) -> int:
"""Estimate gas consumption of method call."""
(nft_id) = self.validate_and_normalize_inputs(nft_id)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(nft_id).estimateGas(tx_params.as_dict())
class Erc20WrappedTokensMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the erc20WrappedTokens method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
validator: Validator = None,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address, validator)
self._underlying_method = contract_function
def validate_and_normalize_inputs(self, index_0: int):
"""Validate the inputs to the erc20WrappedTokens method."""
self.validator.assert_valid(
method_name="erc20WrappedTokens",
parameter_name="index_0",
argument_value=index_0,
)
# safeguard against fractional inputs
index_0 = int(index_0)
return index_0
def call(
self, index_0: int, tx_params: Optional[TxParams] = None
) -> Tuple[str, int, int]:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
(index_0) = self.validate_and_normalize_inputs(index_0)
tx_params = super().normalize_tx_params(tx_params)
returned = self._underlying_method(index_0).call(tx_params.as_dict())
return (
returned[0],
returned[1],
returned[2],
)
def send_transaction(
self, index_0: int, tx_params: Optional[TxParams] = None
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
(index_0) = self.validate_and_normalize_inputs(index_0)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(index_0).transact(tx_params.as_dict())
def build_transaction(
self, index_0: int, tx_params: Optional[TxParams] = None
) -> dict:
"""Construct calldata to be used as input to the method."""
(index_0) = self.validate_and_normalize_inputs(index_0)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(index_0).buildTransaction(
tx_params.as_dict()
)
def estimate_gas(
self, index_0: int, tx_params: Optional[TxParams] = None
) -> int:
"""Estimate gas consumption of method call."""
(index_0) = self.validate_and_normalize_inputs(index_0)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(index_0).estimateGas(
tx_params.as_dict()
)
class Erc721WrappedTokensMethod(
ContractMethod
): # pylint: disable=invalid-name
"""Various interfaces to the erc721WrappedTokens method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
validator: Validator = None,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address, validator)
self._underlying_method = contract_function
def validate_and_normalize_inputs(self, index_0: int):
"""Validate the inputs to the erc721WrappedTokens method."""
self.validator.assert_valid(
method_name="erc721WrappedTokens",
parameter_name="index_0",
argument_value=index_0,
)
# safeguard against fractional inputs
index_0 = int(index_0)
return index_0
def call(
self, index_0: int, tx_params: Optional[TxParams] = None
) -> Tuple[str, int]:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
(index_0) = self.validate_and_normalize_inputs(index_0)
tx_params = super().normalize_tx_params(tx_params)
returned = self._underlying_method(index_0).call(tx_params.as_dict())
return (
returned[0],
returned[1],
)
def send_transaction(
self, index_0: int, tx_params: Optional[TxParams] = None
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
(index_0) = self.validate_and_normalize_inputs(index_0)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(index_0).transact(tx_params.as_dict())
def build_transaction(
self, index_0: int, tx_params: Optional[TxParams] = None
) -> dict:
"""Construct calldata to be used as input to the method."""
(index_0) = self.validate_and_normalize_inputs(index_0)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(index_0).buildTransaction(
tx_params.as_dict()
)
def estimate_gas(
self, index_0: int, tx_params: Optional[TxParams] = None
) -> int:
"""Estimate gas consumption of method call."""
(index_0) = self.validate_and_normalize_inputs(index_0)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(index_0).estimateGas(
tx_params.as_dict()
)
class GetRoleAdminMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the getRoleAdmin method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
validator: Validator = None,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address, validator)
self._underlying_method = contract_function
def validate_and_normalize_inputs(self, role: Union[bytes, str]):
"""Validate the inputs to the getRoleAdmin method."""
self.validator.assert_valid(
method_name="getRoleAdmin",
parameter_name="role",
argument_value=role,
)
return role
def call(
self, role: Union[bytes, str], tx_params: Optional[TxParams] = None
) -> Union[bytes, str]:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
(role) = self.validate_and_normalize_inputs(role)
tx_params = super().normalize_tx_params(tx_params)
returned = self._underlying_method(role).call(tx_params.as_dict())
        return HexBytes(returned)
def send_transaction(
self, role: Union[bytes, str], tx_params: Optional[TxParams] = None
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
(role) = self.validate_and_normalize_inputs(role)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(role).transact(tx_params.as_dict())
def build_transaction(
self, role: Union[bytes, str], tx_params: Optional[TxParams] = None
) -> dict:
"""Construct calldata to be used as input to the method."""
(role) = self.validate_and_normalize_inputs(role)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(role).buildTransaction(
tx_params.as_dict()
)
def estimate_gas(
self, role: Union[bytes, str], tx_params: Optional[TxParams] = None
) -> int:
"""Estimate gas consumption of method call."""
(role) = self.validate_and_normalize_inputs(role)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(role).estimateGas(tx_params.as_dict())
class GetRoleMemberMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the getRoleMember method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
validator: Validator = None,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address, validator)
self._underlying_method = contract_function
def validate_and_normalize_inputs(
self, role: Union[bytes, str], index: int
):
"""Validate the inputs to the getRoleMember method."""
self.validator.assert_valid(
method_name="getRoleMember",
parameter_name="role",
argument_value=role,
)
self.validator.assert_valid(
method_name="getRoleMember",
parameter_name="index",
argument_value=index,
)
# safeguard against fractional inputs
index = int(index)
return (role, index)
def call(
self,
role: Union[bytes, str],
index: int,
tx_params: Optional[TxParams] = None,
) -> str:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
(role, index) = self.validate_and_normalize_inputs(role, index)
tx_params = super().normalize_tx_params(tx_params)
returned = self._underlying_method(role, index).call(
tx_params.as_dict()
)
return str(returned)
def send_transaction(
self,
role: Union[bytes, str],
index: int,
tx_params: Optional[TxParams] = None,
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
(role, index) = self.validate_and_normalize_inputs(role, index)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(role, index).transact(
tx_params.as_dict()
)
def build_transaction(
self,
role: Union[bytes, str],
index: int,
tx_params: Optional[TxParams] = None,
) -> dict:
"""Construct calldata to be used as input to the method."""
(role, index) = self.validate_and_normalize_inputs(role, index)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(role, index).buildTransaction(
tx_params.as_dict()
)
def estimate_gas(
self,
role: Union[bytes, str],
index: int,
tx_params: Optional[TxParams] = None,
) -> int:
"""Estimate gas consumption of method call."""
(role, index) = self.validate_and_normalize_inputs(role, index)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(role, index).estimateGas(
tx_params.as_dict()
)
class GetRoleMemberCountMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the getRoleMemberCount method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
validator: Validator = None,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address, validator)
self._underlying_method = contract_function
def validate_and_normalize_inputs(self, role: Union[bytes, str]):
"""Validate the inputs to the getRoleMemberCount method."""
self.validator.assert_valid(
method_name="getRoleMemberCount",
parameter_name="role",
argument_value=role,
)
return role
def call(
self, role: Union[bytes, str], tx_params: Optional[TxParams] = None
) -> int:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
(role) = self.validate_and_normalize_inputs(role)
tx_params = super().normalize_tx_params(tx_params)
returned = self._underlying_method(role).call(tx_params.as_dict())
return int(returned)
def send_transaction(
self, role: Union[bytes, str], tx_params: Optional[TxParams] = None
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
(role) = self.validate_and_normalize_inputs(role)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(role).transact(tx_params.as_dict())
def build_transaction(
self, role: Union[bytes, str], tx_params: Optional[TxParams] = None
) -> dict:
"""Construct calldata to be used as input to the method."""
(role) = self.validate_and_normalize_inputs(role)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(role).buildTransaction(
tx_params.as_dict()
)
def estimate_gas(
self, role: Union[bytes, str], tx_params: Optional[TxParams] = None
) -> int:
"""Estimate gas consumption of method call."""
(role) = self.validate_and_normalize_inputs(role)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(role).estimateGas(tx_params.as_dict())
class GrantRoleMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the grantRole method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
validator: Validator = None,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address, validator)
self._underlying_method = contract_function
def validate_and_normalize_inputs(
self, role: Union[bytes, str], account: str
):
"""Validate the inputs to the grantRole method."""
self.validator.assert_valid(
method_name="grantRole",
parameter_name="role",
argument_value=role,
)
self.validator.assert_valid(
method_name="grantRole",
parameter_name="account",
argument_value=account,
)
account = self.validate_and_checksum_address(account)
return (role, account)
def call(
self,
role: Union[bytes, str],
account: str,
tx_params: Optional[TxParams] = None,
) -> None:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
(role, account) = self.validate_and_normalize_inputs(role, account)
tx_params = super().normalize_tx_params(tx_params)
self._underlying_method(role, account).call(tx_params.as_dict())
def send_transaction(
self,
role: Union[bytes, str],
account: str,
tx_params: Optional[TxParams] = None,
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
(role, account) = self.validate_and_normalize_inputs(role, account)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(role, account).transact(
tx_params.as_dict()
)
def build_transaction(
self,
role: Union[bytes, str],
account: str,
tx_params: Optional[TxParams] = None,
) -> dict:
"""Construct calldata to be used as input to the method."""
(role, account) = self.validate_and_normalize_inputs(role, account)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(role, account).buildTransaction(
tx_params.as_dict()
)
def estimate_gas(
self,
role: Union[bytes, str],
account: str,
tx_params: Optional[TxParams] = None,
) -> int:
"""Estimate gas consumption of method call."""
(role, account) = self.validate_and_normalize_inputs(role, account)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(role, account).estimateGas(
tx_params.as_dict()
)
class HasRoleMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the hasRole method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
validator: Validator = None,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address, validator)
self._underlying_method = contract_function
def validate_and_normalize_inputs(
self, role: Union[bytes, str], account: str
):
"""Validate the inputs to the hasRole method."""
self.validator.assert_valid(
method_name="hasRole",
parameter_name="role",
argument_value=role,
)
self.validator.assert_valid(
method_name="hasRole",
parameter_name="account",
argument_value=account,
)
account = self.validate_and_checksum_address(account)
return (role, account)
def call(
self,
role: Union[bytes, str],
account: str,
tx_params: Optional[TxParams] = None,
) -> bool:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
(role, account) = self.validate_and_normalize_inputs(role, account)
tx_params = super().normalize_tx_params(tx_params)
returned = self._underlying_method(role, account).call(
tx_params.as_dict()
)
return bool(returned)
def send_transaction(
self,
role: Union[bytes, str],
account: str,
tx_params: Optional[TxParams] = None,
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
(role, account) = self.validate_and_normalize_inputs(role, account)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(role, account).transact(
tx_params.as_dict()
)
def build_transaction(
self,
role: Union[bytes, str],
account: str,
tx_params: Optional[TxParams] = None,
) -> dict:
"""Construct calldata to be used as input to the method."""
(role, account) = self.validate_and_normalize_inputs(role, account)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(role, account).buildTransaction(
tx_params.as_dict()
)
def estimate_gas(
self,
role: Union[bytes, str],
account: str,
tx_params: Optional[TxParams] = None,
) -> int:
"""Estimate gas consumption of method call."""
(role, account) = self.validate_and_normalize_inputs(role, account)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(role, account).estimateGas(
tx_params.as_dict()
)
class IsApprovedForAllMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the isApprovedForAll method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
validator: Validator = None,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address, validator)
self._underlying_method = contract_function
def validate_and_normalize_inputs(self, account: str, operator: str):
"""Validate the inputs to the isApprovedForAll method."""
self.validator.assert_valid(
method_name="isApprovedForAll",
parameter_name="account",
argument_value=account,
)
account = self.validate_and_checksum_address(account)
self.validator.assert_valid(
method_name="isApprovedForAll",
parameter_name="operator",
argument_value=operator,
)
operator = self.validate_and_checksum_address(operator)
return (account, operator)
def call(
self, account: str, operator: str, tx_params: Optional[TxParams] = None
) -> bool:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
(account, operator) = self.validate_and_normalize_inputs(
account, operator
)
tx_params = super().normalize_tx_params(tx_params)
returned = self._underlying_method(account, operator).call(
tx_params.as_dict()
)
return bool(returned)
def send_transaction(
self, account: str, operator: str, tx_params: Optional[TxParams] = None
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
(account, operator) = self.validate_and_normalize_inputs(
account, operator
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(account, operator).transact(
tx_params.as_dict()
)
def build_transaction(
self, account: str, operator: str, tx_params: Optional[TxParams] = None
) -> dict:
"""Construct calldata to be used as input to the method."""
(account, operator) = self.validate_and_normalize_inputs(
account, operator
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(account, operator).buildTransaction(
tx_params.as_dict()
)
def estimate_gas(
self, account: str, operator: str, tx_params: Optional[TxParams] = None
) -> int:
"""Estimate gas consumption of method call."""
(account, operator) = self.validate_and_normalize_inputs(
account, operator
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(account, operator).estimateGas(
tx_params.as_dict()
)
class IsTrustedForwarderMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the isTrustedForwarder method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
validator: Validator = None,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address, validator)
self._underlying_method = contract_function
def validate_and_normalize_inputs(self, forwarder: str):
"""Validate the inputs to the isTrustedForwarder method."""
self.validator.assert_valid(
method_name="isTrustedForwarder",
parameter_name="forwarder",
argument_value=forwarder,
)
forwarder = self.validate_and_checksum_address(forwarder)
return forwarder
def call(
self, forwarder: str, tx_params: Optional[TxParams] = None
) -> bool:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
(forwarder) = self.validate_and_normalize_inputs(forwarder)
tx_params = super().normalize_tx_params(tx_params)
returned = self._underlying_method(forwarder).call(tx_params.as_dict())
return bool(returned)
def send_transaction(
self, forwarder: str, tx_params: Optional[TxParams] = None
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
(forwarder) = self.validate_and_normalize_inputs(forwarder)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(forwarder).transact(tx_params.as_dict())
def build_transaction(
self, forwarder: str, tx_params: Optional[TxParams] = None
) -> dict:
"""Construct calldata to be used as input to the method."""
(forwarder) = self.validate_and_normalize_inputs(forwarder)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(forwarder).buildTransaction(
tx_params.as_dict()
)
def estimate_gas(
self, forwarder: str, tx_params: Optional[TxParams] = None
) -> int:
"""Estimate gas consumption of method call."""
(forwarder) = self.validate_and_normalize_inputs(forwarder)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(forwarder).estimateGas(
tx_params.as_dict()
)
class MintMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the mint method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
validator: Validator = None,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address, validator)
self._underlying_method = contract_function
def validate_and_normalize_inputs(
self, to: str, _id: int, amount: int, data: Union[bytes, str]
):
"""Validate the inputs to the mint method."""
self.validator.assert_valid(
method_name="mint",
parameter_name="to",
argument_value=to,
)
to = self.validate_and_checksum_address(to)
self.validator.assert_valid(
method_name="mint",
parameter_name="id",
argument_value=_id,
)
# safeguard against fractional inputs
_id = int(_id)
self.validator.assert_valid(
method_name="mint",
parameter_name="amount",
argument_value=amount,
)
# safeguard against fractional inputs
amount = int(amount)
self.validator.assert_valid(
method_name="mint",
parameter_name="data",
argument_value=data,
)
return (to, _id, amount, data)
def call(
self,
to: str,
_id: int,
amount: int,
data: Union[bytes, str],
tx_params: Optional[TxParams] = None,
) -> None:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
(to, _id, amount, data) = self.validate_and_normalize_inputs(
to, _id, amount, data
)
tx_params = super().normalize_tx_params(tx_params)
self._underlying_method(to, _id, amount, data).call(
tx_params.as_dict()
)
def send_transaction(
self,
to: str,
_id: int,
amount: int,
data: Union[bytes, str],
tx_params: Optional[TxParams] = None,
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
(to, _id, amount, data) = self.validate_and_normalize_inputs(
to, _id, amount, data
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(to, _id, amount, data).transact(
tx_params.as_dict()
)
def build_transaction(
self,
to: str,
_id: int,
amount: int,
data: Union[bytes, str],
tx_params: Optional[TxParams] = None,
) -> dict:
"""Construct calldata to be used as input to the method."""
(to, _id, amount, data) = self.validate_and_normalize_inputs(
to, _id, amount, data
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(to, _id, amount, data).buildTransaction(
tx_params.as_dict()
)
def estimate_gas(
self,
to: str,
_id: int,
amount: int,
data: Union[bytes, str],
tx_params: Optional[TxParams] = None,
) -> int:
"""Estimate gas consumption of method call."""
(to, _id, amount, data) = self.validate_and_normalize_inputs(
to, _id, amount, data
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(to, _id, amount, data).estimateGas(
tx_params.as_dict()
)
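# Hedged usage sketch (illustrative only, not part of the generated wrapper).
# The names `w3`, `contract_address`, `contract`, `validator`, `to`, and
# `token_id` below are assumptions: `w3` is a connected Web3 instance and
# `contract` is a web3 contract object built from the same ABI, so
# `contract.functions.mint` exists.
def _example_mint_usage(w3, contract_address, contract, validator, to, token_id):
    """Sketch: estimate gas for mint, then send the transaction."""
    mint = MintMethod(w3, contract_address, contract.functions.mint, validator)
    gas = mint.estimate_gas(to, token_id, 1, b"")          # dry-run gas estimate
    tx_hash = mint.send_transaction(to, token_id, 1, b"")  # eth_sendTransaction
    return gas, tx_hash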
class MintBatchMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the mintBatch method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
validator: Validator = None,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address, validator)
self._underlying_method = contract_function
def validate_and_normalize_inputs(
self,
to: str,
ids: List[int],
amounts: List[int],
data: Union[bytes, str],
):
"""Validate the inputs to the mintBatch method."""
self.validator.assert_valid(
method_name="mintBatch",
parameter_name="to",
argument_value=to,
)
to = self.validate_and_checksum_address(to)
self.validator.assert_valid(
method_name="mintBatch",
parameter_name="ids",
argument_value=ids,
)
self.validator.assert_valid(
method_name="mintBatch",
parameter_name="amounts",
argument_value=amounts,
)
self.validator.assert_valid(
method_name="mintBatch",
parameter_name="data",
argument_value=data,
)
return (to, ids, amounts, data)
def call(
self,
to: str,
ids: List[int],
amounts: List[int],
data: Union[bytes, str],
tx_params: Optional[TxParams] = None,
) -> None:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
(to, ids, amounts, data) = self.validate_and_normalize_inputs(
to, ids, amounts, data
)
tx_params = super().normalize_tx_params(tx_params)
self._underlying_method(to, ids, amounts, data).call(
tx_params.as_dict()
)
def send_transaction(
self,
to: str,
ids: List[int],
amounts: List[int],
data: Union[bytes, str],
tx_params: Optional[TxParams] = None,
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
(to, ids, amounts, data) = self.validate_and_normalize_inputs(
to, ids, amounts, data
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(to, ids, amounts, data).transact(
tx_params.as_dict()
)
def build_transaction(
self,
to: str,
ids: List[int],
amounts: List[int],
data: Union[bytes, str],
tx_params: Optional[TxParams] = None,
) -> dict:
"""Construct calldata to be used as input to the method."""
(to, ids, amounts, data) = self.validate_and_normalize_inputs(
to, ids, amounts, data
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(
to, ids, amounts, data
).buildTransaction(tx_params.as_dict())
def estimate_gas(
self,
to: str,
ids: List[int],
amounts: List[int],
data: Union[bytes, str],
tx_params: Optional[TxParams] = None,
) -> int:
"""Estimate gas consumption of method call."""
(to, ids, amounts, data) = self.validate_and_normalize_inputs(
to, ids, amounts, data
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(to, ids, amounts, data).estimateGas(
tx_params.as_dict()
)
class MulticallMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the multicall method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
validator: Validator = None,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address, validator)
self._underlying_method = contract_function
def validate_and_normalize_inputs(self, data: List[Union[bytes, str]]):
"""Validate the inputs to the multicall method."""
self.validator.assert_valid(
method_name="multicall",
parameter_name="data",
argument_value=data,
)
return data
def call(
self,
data: List[Union[bytes, str]],
tx_params: Optional[TxParams] = None,
) -> List[Union[bytes, str]]:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
(data) = self.validate_and_normalize_inputs(data)
tx_params = super().normalize_tx_params(tx_params)
returned = self._underlying_method(data).call(tx_params.as_dict())
        # typing.Union is not callable; coerce each returned element to bytes
        return [bytes(element) for element in returned]
def send_transaction(
self,
data: List[Union[bytes, str]],
tx_params: Optional[TxParams] = None,
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
(data) = self.validate_and_normalize_inputs(data)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(data).transact(tx_params.as_dict())
def build_transaction(
self,
data: List[Union[bytes, str]],
tx_params: Optional[TxParams] = None,
) -> dict:
"""Construct calldata to be used as input to the method."""
(data) = self.validate_and_normalize_inputs(data)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(data).buildTransaction(
tx_params.as_dict()
)
def estimate_gas(
self,
data: List[Union[bytes, str]],
tx_params: Optional[TxParams] = None,
) -> int:
"""Estimate gas consumption of method call."""
(data) = self.validate_and_normalize_inputs(data)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(data).estimateGas(tx_params.as_dict())
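# Hedged sketch (illustrative only): `build_transaction` returns an unsigned
# transaction dict, useful when signing offline with a local key instead of an
# unlocked node account. `w3`, `contract_address`, `contract`, `validator`,
# and `encoded_calls` are assumptions; `encoded_calls` would be a list of
# ABI-encoded call payloads (bytes).
def _example_multicall_build(w3, contract_address, contract, validator, encoded_calls):
    """Sketch: construct calldata for multicall without broadcasting it."""
    multicall = MulticallMethod(
        w3, contract_address, contract.functions.multicall, validator
    )
    unsigned_tx = multicall.build_transaction(encoded_calls)
    return unsigned_tx  # caller can sign and send this dict itself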
class NextTokenIdMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the nextTokenId method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address)
self._underlying_method = contract_function
def call(self, tx_params: Optional[TxParams] = None) -> int:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
tx_params = super().normalize_tx_params(tx_params)
returned = self._underlying_method().call(tx_params.as_dict())
return int(returned)
def send_transaction(
self, tx_params: Optional[TxParams] = None
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method().transact(tx_params.as_dict())
def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
"""Construct calldata to be used as input to the method."""
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method().buildTransaction(tx_params.as_dict())
def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
"""Estimate gas consumption of method call."""
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method().estimateGas(tx_params.as_dict())
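# Hedged sketch (illustrative only): parameterless read-only wrappers such as
# NextTokenIdMethod take no validator and are typically driven through `call`,
# which executes an eth_call and costs no gas. `w3`, `contract_address`, and
# `contract` are assumptions for illustration.
def _example_next_token_id(w3, contract_address, contract):
    """Sketch: read the next token id without sending a transaction."""
    next_id = NextTokenIdMethod(
        w3, contract_address, contract.functions.nextTokenId
    )
    return next_id.call()  # plain int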
class OnErc1155BatchReceivedMethod(
ContractMethod
): # pylint: disable=invalid-name
"""Various interfaces to the onERC1155BatchReceived method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
validator: Validator = None,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address, validator)
self._underlying_method = contract_function
def validate_and_normalize_inputs(
self,
index_0: str,
index_1: str,
index_2: List[int],
index_3: List[int],
index_4: Union[bytes, str],
):
"""Validate the inputs to the onERC1155BatchReceived method."""
self.validator.assert_valid(
method_name="onERC1155BatchReceived",
parameter_name="index_0",
argument_value=index_0,
)
index_0 = self.validate_and_checksum_address(index_0)
self.validator.assert_valid(
method_name="onERC1155BatchReceived",
parameter_name="index_1",
argument_value=index_1,
)
index_1 = self.validate_and_checksum_address(index_1)
self.validator.assert_valid(
method_name="onERC1155BatchReceived",
parameter_name="index_2",
argument_value=index_2,
)
self.validator.assert_valid(
method_name="onERC1155BatchReceived",
parameter_name="index_3",
argument_value=index_3,
)
self.validator.assert_valid(
method_name="onERC1155BatchReceived",
parameter_name="index_4",
argument_value=index_4,
)
return (index_0, index_1, index_2, index_3, index_4)
def call(
self,
index_0: str,
index_1: str,
index_2: List[int],
index_3: List[int],
index_4: Union[bytes, str],
tx_params: Optional[TxParams] = None,
) -> Union[bytes, str]:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
(
index_0,
index_1,
index_2,
index_3,
index_4,
) = self.validate_and_normalize_inputs(
index_0, index_1, index_2, index_3, index_4
)
tx_params = super().normalize_tx_params(tx_params)
returned = self._underlying_method(
index_0, index_1, index_2, index_3, index_4
).call(tx_params.as_dict())
        # typing.Union is not callable; coerce the returned selector to bytes
        return bytes(returned)
def send_transaction(
self,
index_0: str,
index_1: str,
index_2: List[int],
index_3: List[int],
index_4: Union[bytes, str],
tx_params: Optional[TxParams] = None,
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
(
index_0,
index_1,
index_2,
index_3,
index_4,
) = self.validate_and_normalize_inputs(
index_0, index_1, index_2, index_3, index_4
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(
index_0, index_1, index_2, index_3, index_4
).transact(tx_params.as_dict())
def build_transaction(
self,
index_0: str,
index_1: str,
index_2: List[int],
index_3: List[int],
index_4: Union[bytes, str],
tx_params: Optional[TxParams] = None,
) -> dict:
"""Construct calldata to be used as input to the method."""
(
index_0,
index_1,
index_2,
index_3,
index_4,
) = self.validate_and_normalize_inputs(
index_0, index_1, index_2, index_3, index_4
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(
index_0, index_1, index_2, index_3, index_4
).buildTransaction(tx_params.as_dict())
def estimate_gas(
self,
index_0: str,
index_1: str,
index_2: List[int],
index_3: List[int],
index_4: Union[bytes, str],
tx_params: Optional[TxParams] = None,
) -> int:
"""Estimate gas consumption of method call."""
(
index_0,
index_1,
index_2,
index_3,
index_4,
) = self.validate_and_normalize_inputs(
index_0, index_1, index_2, index_3, index_4
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(
index_0, index_1, index_2, index_3, index_4
).estimateGas(tx_params.as_dict())
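# Note: the receiver-hook wrappers (onERC1155BatchReceived above,
# onERC1155Received and onERC721Received below) mirror callbacks that are
# normally invoked by token contracts themselves during safe transfers;
# calling them directly through these wrappers is mainly useful for checking
# the selector values they return.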
class OnErc1155ReceivedMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the onERC1155Received method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
validator: Validator = None,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address, validator)
self._underlying_method = contract_function
def validate_and_normalize_inputs(
self,
index_0: str,
index_1: str,
index_2: int,
index_3: int,
index_4: Union[bytes, str],
):
"""Validate the inputs to the onERC1155Received method."""
self.validator.assert_valid(
method_name="onERC1155Received",
parameter_name="index_0",
argument_value=index_0,
)
index_0 = self.validate_and_checksum_address(index_0)
self.validator.assert_valid(
method_name="onERC1155Received",
parameter_name="index_1",
argument_value=index_1,
)
index_1 = self.validate_and_checksum_address(index_1)
self.validator.assert_valid(
method_name="onERC1155Received",
parameter_name="index_2",
argument_value=index_2,
)
# safeguard against fractional inputs
index_2 = int(index_2)
self.validator.assert_valid(
method_name="onERC1155Received",
parameter_name="index_3",
argument_value=index_3,
)
# safeguard against fractional inputs
index_3 = int(index_3)
self.validator.assert_valid(
method_name="onERC1155Received",
parameter_name="index_4",
argument_value=index_4,
)
return (index_0, index_1, index_2, index_3, index_4)
def call(
self,
index_0: str,
index_1: str,
index_2: int,
index_3: int,
index_4: Union[bytes, str],
tx_params: Optional[TxParams] = None,
) -> Union[bytes, str]:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
(
index_0,
index_1,
index_2,
index_3,
index_4,
) = self.validate_and_normalize_inputs(
index_0, index_1, index_2, index_3, index_4
)
tx_params = super().normalize_tx_params(tx_params)
returned = self._underlying_method(
index_0, index_1, index_2, index_3, index_4
).call(tx_params.as_dict())
        # typing.Union is not callable; coerce the returned selector to bytes
        return bytes(returned)
def send_transaction(
self,
index_0: str,
index_1: str,
index_2: int,
index_3: int,
index_4: Union[bytes, str],
tx_params: Optional[TxParams] = None,
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
(
index_0,
index_1,
index_2,
index_3,
index_4,
) = self.validate_and_normalize_inputs(
index_0, index_1, index_2, index_3, index_4
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(
index_0, index_1, index_2, index_3, index_4
).transact(tx_params.as_dict())
def build_transaction(
self,
index_0: str,
index_1: str,
index_2: int,
index_3: int,
index_4: Union[bytes, str],
tx_params: Optional[TxParams] = None,
) -> dict:
"""Construct calldata to be used as input to the method."""
(
index_0,
index_1,
index_2,
index_3,
index_4,
) = self.validate_and_normalize_inputs(
index_0, index_1, index_2, index_3, index_4
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(
index_0, index_1, index_2, index_3, index_4
).buildTransaction(tx_params.as_dict())
def estimate_gas(
self,
index_0: str,
index_1: str,
index_2: int,
index_3: int,
index_4: Union[bytes, str],
tx_params: Optional[TxParams] = None,
) -> int:
"""Estimate gas consumption of method call."""
(
index_0,
index_1,
index_2,
index_3,
index_4,
) = self.validate_and_normalize_inputs(
index_0, index_1, index_2, index_3, index_4
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(
index_0, index_1, index_2, index_3, index_4
).estimateGas(tx_params.as_dict())
class OnErc721ReceivedMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the onERC721Received method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
validator: Validator = None,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address, validator)
self._underlying_method = contract_function
def validate_and_normalize_inputs(
self,
index_0: str,
index_1: str,
index_2: int,
index_3: Union[bytes, str],
):
"""Validate the inputs to the onERC721Received method."""
self.validator.assert_valid(
method_name="onERC721Received",
parameter_name="index_0",
argument_value=index_0,
)
index_0 = self.validate_and_checksum_address(index_0)
self.validator.assert_valid(
method_name="onERC721Received",
parameter_name="index_1",
argument_value=index_1,
)
index_1 = self.validate_and_checksum_address(index_1)
self.validator.assert_valid(
method_name="onERC721Received",
parameter_name="index_2",
argument_value=index_2,
)
# safeguard against fractional inputs
index_2 = int(index_2)
self.validator.assert_valid(
method_name="onERC721Received",
parameter_name="index_3",
argument_value=index_3,
)
return (index_0, index_1, index_2, index_3)
def call(
self,
index_0: str,
index_1: str,
index_2: int,
index_3: Union[bytes, str],
tx_params: Optional[TxParams] = None,
) -> Union[bytes, str]:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
(
index_0,
index_1,
index_2,
index_3,
) = self.validate_and_normalize_inputs(
index_0, index_1, index_2, index_3
)
tx_params = super().normalize_tx_params(tx_params)
returned = self._underlying_method(
index_0, index_1, index_2, index_3
).call(tx_params.as_dict())
        # typing.Union is not callable; coerce the returned selector to bytes
        return bytes(returned)
def send_transaction(
self,
index_0: str,
index_1: str,
index_2: int,
index_3: Union[bytes, str],
tx_params: Optional[TxParams] = None,
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
(
index_0,
index_1,
index_2,
index_3,
) = self.validate_and_normalize_inputs(
index_0, index_1, index_2, index_3
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(
index_0, index_1, index_2, index_3
).transact(tx_params.as_dict())
def build_transaction(
self,
index_0: str,
index_1: str,
index_2: int,
index_3: Union[bytes, str],
tx_params: Optional[TxParams] = None,
) -> dict:
"""Construct calldata to be used as input to the method."""
(
index_0,
index_1,
index_2,
index_3,
) = self.validate_and_normalize_inputs(
index_0, index_1, index_2, index_3
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(
index_0, index_1, index_2, index_3
).buildTransaction(tx_params.as_dict())
def estimate_gas(
self,
index_0: str,
index_1: str,
index_2: int,
index_3: Union[bytes, str],
tx_params: Optional[TxParams] = None,
) -> int:
"""Estimate gas consumption of method call."""
(
index_0,
index_1,
index_2,
index_3,
) = self.validate_and_normalize_inputs(
index_0, index_1, index_2, index_3
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(
index_0, index_1, index_2, index_3
).estimateGas(tx_params.as_dict())
class PauseMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the pause method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address)
self._underlying_method = contract_function
def call(self, tx_params: Optional[TxParams] = None) -> None:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
tx_params = super().normalize_tx_params(tx_params)
self._underlying_method().call(tx_params.as_dict())
def send_transaction(
self, tx_params: Optional[TxParams] = None
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method().transact(tx_params.as_dict())
def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
"""Construct calldata to be used as input to the method."""
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method().buildTransaction(tx_params.as_dict())
def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
"""Estimate gas consumption of method call."""
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method().estimateGas(tx_params.as_dict())
class PausedMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the paused method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address)
self._underlying_method = contract_function
def call(self, tx_params: Optional[TxParams] = None) -> bool:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
tx_params = super().normalize_tx_params(tx_params)
returned = self._underlying_method().call(tx_params.as_dict())
return bool(returned)
def send_transaction(
self, tx_params: Optional[TxParams] = None
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method().transact(tx_params.as_dict())
def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
"""Construct calldata to be used as input to the method."""
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method().buildTransaction(tx_params.as_dict())
def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
"""Estimate gas consumption of method call."""
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method().estimateGas(tx_params.as_dict())
class RedeemErc20Method(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the redeemERC20 method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
validator: Validator = None,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address, validator)
self._underlying_method = contract_function
def validate_and_normalize_inputs(self, nft_id: int, amount: int):
"""Validate the inputs to the redeemERC20 method."""
self.validator.assert_valid(
method_name="redeemERC20",
parameter_name="_nftId",
argument_value=nft_id,
)
# safeguard against fractional inputs
nft_id = int(nft_id)
self.validator.assert_valid(
method_name="redeemERC20",
parameter_name="_amount",
argument_value=amount,
)
# safeguard against fractional inputs
amount = int(amount)
return (nft_id, amount)
def call(
self, nft_id: int, amount: int, tx_params: Optional[TxParams] = None
) -> None:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
(nft_id, amount) = self.validate_and_normalize_inputs(nft_id, amount)
tx_params = super().normalize_tx_params(tx_params)
self._underlying_method(nft_id, amount).call(tx_params.as_dict())
def send_transaction(
self, nft_id: int, amount: int, tx_params: Optional[TxParams] = None
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
(nft_id, amount) = self.validate_and_normalize_inputs(nft_id, amount)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(nft_id, amount).transact(
tx_params.as_dict()
)
def build_transaction(
self, nft_id: int, amount: int, tx_params: Optional[TxParams] = None
) -> dict:
"""Construct calldata to be used as input to the method."""
(nft_id, amount) = self.validate_and_normalize_inputs(nft_id, amount)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(nft_id, amount).buildTransaction(
tx_params.as_dict()
)
def estimate_gas(
self, nft_id: int, amount: int, tx_params: Optional[TxParams] = None
) -> int:
"""Estimate gas consumption of method call."""
(nft_id, amount) = self.validate_and_normalize_inputs(nft_id, amount)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(nft_id, amount).estimateGas(
tx_params.as_dict()
)
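# Hedged sketch (illustrative only): the validate-and-normalize step coerces
# numeric inputs with int(), so fractional values are truncated rather than
# rejected; amounts should already be expressed in the token's base units.
# `w3`, `contract_address`, `contract`, `validator`, `nft_id`, and
# `amount_base_units` are assumptions for illustration.
def _example_redeem_erc20(w3, contract_address, contract, validator, nft_id, amount_base_units):
    """Sketch: redeem the ERC20 backing a wrapped NFT."""
    redeem = RedeemErc20Method(
        w3, contract_address, contract.functions.redeemERC20, validator
    )
    return redeem.send_transaction(nft_id, amount_base_units)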
class RedeemErc721Method(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the redeemERC721 method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
validator: Validator = None,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address, validator)
self._underlying_method = contract_function
def validate_and_normalize_inputs(self, nft_id: int):
"""Validate the inputs to the redeemERC721 method."""
self.validator.assert_valid(
method_name="redeemERC721",
parameter_name="_nftId",
argument_value=nft_id,
)
# safeguard against fractional inputs
nft_id = int(nft_id)
return nft_id
def call(self, nft_id: int, tx_params: Optional[TxParams] = None) -> None:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
(nft_id) = self.validate_and_normalize_inputs(nft_id)
tx_params = super().normalize_tx_params(tx_params)
self._underlying_method(nft_id).call(tx_params.as_dict())
def send_transaction(
self, nft_id: int, tx_params: Optional[TxParams] = None
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
(nft_id) = self.validate_and_normalize_inputs(nft_id)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(nft_id).transact(tx_params.as_dict())
def build_transaction(
self, nft_id: int, tx_params: Optional[TxParams] = None
) -> dict:
"""Construct calldata to be used as input to the method."""
(nft_id) = self.validate_and_normalize_inputs(nft_id)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(nft_id).buildTransaction(
tx_params.as_dict()
)
def estimate_gas(
self, nft_id: int, tx_params: Optional[TxParams] = None
) -> int:
"""Estimate gas consumption of method call."""
(nft_id) = self.validate_and_normalize_inputs(nft_id)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(nft_id).estimateGas(tx_params.as_dict())
class RenounceRoleMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the renounceRole method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
validator: Validator = None,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address, validator)
self._underlying_method = contract_function
def validate_and_normalize_inputs(
self, role: Union[bytes, str], account: str
):
"""Validate the inputs to the renounceRole method."""
self.validator.assert_valid(
method_name="renounceRole",
parameter_name="role",
argument_value=role,
)
self.validator.assert_valid(
method_name="renounceRole",
parameter_name="account",
argument_value=account,
)
account = self.validate_and_checksum_address(account)
return (role, account)
def call(
self,
role: Union[bytes, str],
account: str,
tx_params: Optional[TxParams] = None,
) -> None:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
(role, account) = self.validate_and_normalize_inputs(role, account)
tx_params = super().normalize_tx_params(tx_params)
self._underlying_method(role, account).call(tx_params.as_dict())
def send_transaction(
self,
role: Union[bytes, str],
account: str,
tx_params: Optional[TxParams] = None,
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
(role, account) = self.validate_and_normalize_inputs(role, account)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(role, account).transact(
tx_params.as_dict()
)
def build_transaction(
self,
role: Union[bytes, str],
account: str,
tx_params: Optional[TxParams] = None,
) -> dict:
"""Construct calldata to be used as input to the method."""
(role, account) = self.validate_and_normalize_inputs(role, account)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(role, account).buildTransaction(
tx_params.as_dict()
)
def estimate_gas(
self,
role: Union[bytes, str],
account: str,
tx_params: Optional[TxParams] = None,
) -> int:
"""Estimate gas consumption of method call."""
(role, account) = self.validate_and_normalize_inputs(role, account)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(role, account).estimateGas(
tx_params.as_dict()
)
class RevokeRoleMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the revokeRole method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
validator: Validator = None,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address, validator)
self._underlying_method = contract_function
def validate_and_normalize_inputs(
self, role: Union[bytes, str], account: str
):
"""Validate the inputs to the revokeRole method."""
self.validator.assert_valid(
method_name="revokeRole",
parameter_name="role",
argument_value=role,
)
self.validator.assert_valid(
method_name="revokeRole",
parameter_name="account",
argument_value=account,
)
account = self.validate_and_checksum_address(account)
return (role, account)
def call(
self,
role: Union[bytes, str],
account: str,
tx_params: Optional[TxParams] = None,
) -> None:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
(role, account) = self.validate_and_normalize_inputs(role, account)
tx_params = super().normalize_tx_params(tx_params)
self._underlying_method(role, account).call(tx_params.as_dict())
def send_transaction(
self,
role: Union[bytes, str],
account: str,
tx_params: Optional[TxParams] = None,
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
(role, account) = self.validate_and_normalize_inputs(role, account)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(role, account).transact(
tx_params.as_dict()
)
def build_transaction(
self,
role: Union[bytes, str],
account: str,
tx_params: Optional[TxParams] = None,
) -> dict:
"""Construct calldata to be used as input to the method."""
(role, account) = self.validate_and_normalize_inputs(role, account)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(role, account).buildTransaction(
tx_params.as_dict()
)
def estimate_gas(
self,
role: Union[bytes, str],
account: str,
tx_params: Optional[TxParams] = None,
) -> int:
"""Estimate gas consumption of method call."""
(role, account) = self.validate_and_normalize_inputs(role, account)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(role, account).estimateGas(
tx_params.as_dict()
)
class RoyaltyBpsMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the royaltyBps method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address)
self._underlying_method = contract_function
def call(self, tx_params: Optional[TxParams] = None) -> int:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
tx_params = super().normalize_tx_params(tx_params)
returned = self._underlying_method().call(tx_params.as_dict())
return int(returned)
def send_transaction(
self, tx_params: Optional[TxParams] = None
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method().transact(tx_params.as_dict())
def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
"""Construct calldata to be used as input to the method."""
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method().buildTransaction(tx_params.as_dict())
def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
"""Estimate gas consumption of method call."""
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method().estimateGas(tx_params.as_dict())
class RoyaltyInfoMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the royaltyInfo method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
validator: Validator = None,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address, validator)
self._underlying_method = contract_function
def validate_and_normalize_inputs(self, index_0: int, sale_price: int):
"""Validate the inputs to the royaltyInfo method."""
self.validator.assert_valid(
method_name="royaltyInfo",
parameter_name="index_0",
argument_value=index_0,
)
# safeguard against fractional inputs
index_0 = int(index_0)
self.validator.assert_valid(
method_name="royaltyInfo",
parameter_name="salePrice",
argument_value=sale_price,
)
# safeguard against fractional inputs
sale_price = int(sale_price)
return (index_0, sale_price)
def call(
self,
index_0: int,
sale_price: int,
tx_params: Optional[TxParams] = None,
) -> Tuple[str, int]:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
(index_0, sale_price) = self.validate_and_normalize_inputs(
index_0, sale_price
)
tx_params = super().normalize_tx_params(tx_params)
returned = self._underlying_method(index_0, sale_price).call(
tx_params.as_dict()
)
return (
returned[0],
returned[1],
)
def send_transaction(
self,
index_0: int,
sale_price: int,
tx_params: Optional[TxParams] = None,
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
(index_0, sale_price) = self.validate_and_normalize_inputs(
index_0, sale_price
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(index_0, sale_price).transact(
tx_params.as_dict()
)
def build_transaction(
self,
index_0: int,
sale_price: int,
tx_params: Optional[TxParams] = None,
) -> dict:
"""Construct calldata to be used as input to the method."""
(index_0, sale_price) = self.validate_and_normalize_inputs(
index_0, sale_price
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(index_0, sale_price).buildTransaction(
tx_params.as_dict()
)
def estimate_gas(
self,
index_0: int,
sale_price: int,
tx_params: Optional[TxParams] = None,
) -> int:
"""Estimate gas consumption of method call."""
(index_0, sale_price) = self.validate_and_normalize_inputs(
index_0, sale_price
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(index_0, sale_price).estimateGas(
tx_params.as_dict()
)
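# Hedged sketch (illustrative only): RoyaltyInfoMethod.call returns a
# (receiver, royalty_amount) tuple, mirroring ERC-2981's royaltyInfo.
# `w3`, `contract_address`, `contract`, `validator`, `token_id`, and the
# example sale price are assumptions for illustration.
def _example_royalty_info(w3, contract_address, contract, validator, token_id):
    """Sketch: compute the royalty payout for a hypothetical 1-ether sale."""
    royalty_info = RoyaltyInfoMethod(
        w3, contract_address, contract.functions.royaltyInfo, validator
    )
    receiver, royalty_amount = royalty_info.call(token_id, 10**18)
    return receiver, royalty_amount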
class SafeBatchTransferFromMethod(
ContractMethod
): # pylint: disable=invalid-name
"""Various interfaces to the safeBatchTransferFrom method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
validator: Validator = None,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address, validator)
self._underlying_method = contract_function
def validate_and_normalize_inputs(
self,
_from: str,
to: str,
ids: List[int],
amounts: List[int],
data: Union[bytes, str],
):
"""Validate the inputs to the safeBatchTransferFrom method."""
self.validator.assert_valid(
method_name="safeBatchTransferFrom",
parameter_name="from",
argument_value=_from,
)
_from = self.validate_and_checksum_address(_from)
self.validator.assert_valid(
method_name="safeBatchTransferFrom",
parameter_name="to",
argument_value=to,
)
to = self.validate_and_checksum_address(to)
self.validator.assert_valid(
method_name="safeBatchTransferFrom",
parameter_name="ids",
argument_value=ids,
)
self.validator.assert_valid(
method_name="safeBatchTransferFrom",
parameter_name="amounts",
argument_value=amounts,
)
self.validator.assert_valid(
method_name="safeBatchTransferFrom",
parameter_name="data",
argument_value=data,
)
return (_from, to, ids, amounts, data)
def call(
self,
_from: str,
to: str,
ids: List[int],
amounts: List[int],
data: Union[bytes, str],
tx_params: Optional[TxParams] = None,
) -> None:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
(_from, to, ids, amounts, data) = self.validate_and_normalize_inputs(
_from, to, ids, amounts, data
)
tx_params = super().normalize_tx_params(tx_params)
self._underlying_method(_from, to, ids, amounts, data).call(
tx_params.as_dict()
)
def send_transaction(
self,
_from: str,
to: str,
ids: List[int],
amounts: List[int],
data: Union[bytes, str],
tx_params: Optional[TxParams] = None,
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
(_from, to, ids, amounts, data) = self.validate_and_normalize_inputs(
_from, to, ids, amounts, data
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(_from, to, ids, amounts, data).transact(
tx_params.as_dict()
)
def build_transaction(
self,
_from: str,
to: str,
ids: List[int],
amounts: List[int],
data: Union[bytes, str],
tx_params: Optional[TxParams] = None,
) -> dict:
"""Construct calldata to be used as input to the method."""
(_from, to, ids, amounts, data) = self.validate_and_normalize_inputs(
_from, to, ids, amounts, data
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(
_from, to, ids, amounts, data
).buildTransaction(tx_params.as_dict())
def estimate_gas(
self,
_from: str,
to: str,
ids: List[int],
amounts: List[int],
data: Union[bytes, str],
tx_params: Optional[TxParams] = None,
) -> int:
"""Estimate gas consumption of method call."""
(_from, to, ids, amounts, data) = self.validate_and_normalize_inputs(
_from, to, ids, amounts, data
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(
_from, to, ids, amounts, data
).estimateGas(tx_params.as_dict())
class SafeTransferFromMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the safeTransferFrom method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
validator: Validator = None,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address, validator)
self._underlying_method = contract_function
def validate_and_normalize_inputs(
self,
_from: str,
to: str,
_id: int,
amount: int,
data: Union[bytes, str],
):
"""Validate the inputs to the safeTransferFrom method."""
self.validator.assert_valid(
method_name="safeTransferFrom",
parameter_name="from",
argument_value=_from,
)
_from = self.validate_and_checksum_address(_from)
self.validator.assert_valid(
method_name="safeTransferFrom",
parameter_name="to",
argument_value=to,
)
to = self.validate_and_checksum_address(to)
self.validator.assert_valid(
method_name="safeTransferFrom",
parameter_name="id",
argument_value=_id,
)
# safeguard against fractional inputs
_id = int(_id)
self.validator.assert_valid(
method_name="safeTransferFrom",
parameter_name="amount",
argument_value=amount,
)
# safeguard against fractional inputs
amount = int(amount)
self.validator.assert_valid(
method_name="safeTransferFrom",
parameter_name="data",
argument_value=data,
)
return (_from, to, _id, amount, data)
def call(
self,
_from: str,
to: str,
_id: int,
amount: int,
data: Union[bytes, str],
tx_params: Optional[TxParams] = None,
) -> None:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
(_from, to, _id, amount, data) = self.validate_and_normalize_inputs(
_from, to, _id, amount, data
)
tx_params = super().normalize_tx_params(tx_params)
self._underlying_method(_from, to, _id, amount, data).call(
tx_params.as_dict()
)
def send_transaction(
self,
_from: str,
to: str,
_id: int,
amount: int,
data: Union[bytes, str],
tx_params: Optional[TxParams] = None,
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
(_from, to, _id, amount, data) = self.validate_and_normalize_inputs(
_from, to, _id, amount, data
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(_from, to, _id, amount, data).transact(
tx_params.as_dict()
)
def build_transaction(
self,
_from: str,
to: str,
_id: int,
amount: int,
data: Union[bytes, str],
tx_params: Optional[TxParams] = None,
) -> dict:
"""Construct calldata to be used as input to the method."""
(_from, to, _id, amount, data) = self.validate_and_normalize_inputs(
_from, to, _id, amount, data
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(
_from, to, _id, amount, data
).buildTransaction(tx_params.as_dict())
def estimate_gas(
self,
_from: str,
to: str,
_id: int,
amount: int,
data: Union[bytes, str],
tx_params: Optional[TxParams] = None,
) -> int:
"""Estimate gas consumption of method call."""
(_from, to, _id, amount, data) = self.validate_and_normalize_inputs(
_from, to, _id, amount, data
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(
_from, to, _id, amount, data
).estimateGas(tx_params.as_dict())
class SetApprovalForAllMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the setApprovalForAll method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
validator: Validator = None,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address, validator)
self._underlying_method = contract_function
def validate_and_normalize_inputs(self, operator: str, approved: bool):
"""Validate the inputs to the setApprovalForAll method."""
self.validator.assert_valid(
method_name="setApprovalForAll",
parameter_name="operator",
argument_value=operator,
)
operator = self.validate_and_checksum_address(operator)
self.validator.assert_valid(
method_name="setApprovalForAll",
parameter_name="approved",
argument_value=approved,
)
return (operator, approved)
def call(
self,
operator: str,
approved: bool,
tx_params: Optional[TxParams] = None,
) -> None:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
(operator, approved) = self.validate_and_normalize_inputs(
operator, approved
)
tx_params = super().normalize_tx_params(tx_params)
self._underlying_method(operator, approved).call(tx_params.as_dict())
def send_transaction(
self,
operator: str,
approved: bool,
tx_params: Optional[TxParams] = None,
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
(operator, approved) = self.validate_and_normalize_inputs(
operator, approved
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(operator, approved).transact(
tx_params.as_dict()
)
def build_transaction(
self,
operator: str,
approved: bool,
tx_params: Optional[TxParams] = None,
) -> dict:
"""Construct calldata to be used as input to the method."""
(operator, approved) = self.validate_and_normalize_inputs(
operator, approved
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(operator, approved).buildTransaction(
tx_params.as_dict()
)
def estimate_gas(
self,
operator: str,
approved: bool,
tx_params: Optional[TxParams] = None,
) -> int:
"""Estimate gas consumption of method call."""
(operator, approved) = self.validate_and_normalize_inputs(
operator, approved
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(operator, approved).estimateGas(
tx_params.as_dict()
)
class SetContractUriMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the setContractURI method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
validator: Validator = None,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address, validator)
self._underlying_method = contract_function
def validate_and_normalize_inputs(self, uri: str):
"""Validate the inputs to the setContractURI method."""
self.validator.assert_valid(
method_name="setContractURI",
parameter_name="_URI",
argument_value=uri,
)
return uri
def call(self, uri: str, tx_params: Optional[TxParams] = None) -> None:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
(uri) = self.validate_and_normalize_inputs(uri)
tx_params = super().normalize_tx_params(tx_params)
self._underlying_method(uri).call(tx_params.as_dict())
def send_transaction(
self, uri: str, tx_params: Optional[TxParams] = None
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
(uri) = self.validate_and_normalize_inputs(uri)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(uri).transact(tx_params.as_dict())
def build_transaction(
self, uri: str, tx_params: Optional[TxParams] = None
) -> dict:
"""Construct calldata to be used as input to the method."""
(uri) = self.validate_and_normalize_inputs(uri)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(uri).buildTransaction(
tx_params.as_dict()
)
def estimate_gas(
self, uri: str, tx_params: Optional[TxParams] = None
) -> int:
"""Estimate gas consumption of method call."""
(uri) = self.validate_and_normalize_inputs(uri)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(uri).estimateGas(tx_params.as_dict())
class SetRestrictedTransferMethod(
ContractMethod
): # pylint: disable=invalid-name
"""Various interfaces to the setRestrictedTransfer method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
validator: Validator = None,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address, validator)
self._underlying_method = contract_function
def validate_and_normalize_inputs(self, restricted_transfer: bool):
"""Validate the inputs to the setRestrictedTransfer method."""
self.validator.assert_valid(
method_name="setRestrictedTransfer",
parameter_name="_restrictedTransfer",
argument_value=restricted_transfer,
)
return restricted_transfer
def call(
self, restricted_transfer: bool, tx_params: Optional[TxParams] = None
) -> None:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
(restricted_transfer) = self.validate_and_normalize_inputs(
restricted_transfer
)
tx_params = super().normalize_tx_params(tx_params)
self._underlying_method(restricted_transfer).call(tx_params.as_dict())
def send_transaction(
self, restricted_transfer: bool, tx_params: Optional[TxParams] = None
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
(restricted_transfer) = self.validate_and_normalize_inputs(
restricted_transfer
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(restricted_transfer).transact(
tx_params.as_dict()
)
def build_transaction(
self, restricted_transfer: bool, tx_params: Optional[TxParams] = None
) -> dict:
"""Construct calldata to be used as input to the method."""
(restricted_transfer) = self.validate_and_normalize_inputs(
restricted_transfer
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(restricted_transfer).buildTransaction(
tx_params.as_dict()
)
def estimate_gas(
self, restricted_transfer: bool, tx_params: Optional[TxParams] = None
) -> int:
"""Estimate gas consumption of method call."""
(restricted_transfer) = self.validate_and_normalize_inputs(
restricted_transfer
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(restricted_transfer).estimateGas(
tx_params.as_dict()
)
class SetRoyaltyBpsMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the setRoyaltyBps method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
validator: Validator = None,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address, validator)
self._underlying_method = contract_function
def validate_and_normalize_inputs(self, royalty_bps: int):
"""Validate the inputs to the setRoyaltyBps method."""
self.validator.assert_valid(
method_name="setRoyaltyBps",
parameter_name="_royaltyBps",
argument_value=royalty_bps,
)
# safeguard against fractional inputs
royalty_bps = int(royalty_bps)
return royalty_bps
def call(
self, royalty_bps: int, tx_params: Optional[TxParams] = None
) -> None:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
(royalty_bps) = self.validate_and_normalize_inputs(royalty_bps)
tx_params = super().normalize_tx_params(tx_params)
self._underlying_method(royalty_bps).call(tx_params.as_dict())
def send_transaction(
self, royalty_bps: int, tx_params: Optional[TxParams] = None
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
(royalty_bps) = self.validate_and_normalize_inputs(royalty_bps)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(royalty_bps).transact(
tx_params.as_dict()
)
def build_transaction(
self, royalty_bps: int, tx_params: Optional[TxParams] = None
) -> dict:
"""Construct calldata to be used as input to the method."""
(royalty_bps) = self.validate_and_normalize_inputs(royalty_bps)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(royalty_bps).buildTransaction(
tx_params.as_dict()
)
def estimate_gas(
self, royalty_bps: int, tx_params: Optional[TxParams] = None
) -> int:
"""Estimate gas consumption of method call."""
(royalty_bps) = self.validate_and_normalize_inputs(royalty_bps)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(royalty_bps).estimateGas(
tx_params.as_dict()
)
class SupportsInterfaceMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the supportsInterface method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
validator: Validator = None,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address, validator)
self._underlying_method = contract_function
def validate_and_normalize_inputs(self, interface_id: Union[bytes, str]):
"""Validate the inputs to the supportsInterface method."""
self.validator.assert_valid(
method_name="supportsInterface",
parameter_name="interfaceId",
argument_value=interface_id,
)
return interface_id
def call(
self,
interface_id: Union[bytes, str],
tx_params: Optional[TxParams] = None,
) -> bool:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
(interface_id) = self.validate_and_normalize_inputs(interface_id)
tx_params = super().normalize_tx_params(tx_params)
returned = self._underlying_method(interface_id).call(
tx_params.as_dict()
)
return bool(returned)
def send_transaction(
self,
interface_id: Union[bytes, str],
tx_params: Optional[TxParams] = None,
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
(interface_id) = self.validate_and_normalize_inputs(interface_id)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(interface_id).transact(
tx_params.as_dict()
)
def build_transaction(
self,
interface_id: Union[bytes, str],
tx_params: Optional[TxParams] = None,
) -> dict:
"""Construct calldata to be used as input to the method."""
(interface_id) = self.validate_and_normalize_inputs(interface_id)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(interface_id).buildTransaction(
tx_params.as_dict()
)
def estimate_gas(
self,
interface_id: Union[bytes, str],
tx_params: Optional[TxParams] = None,
) -> int:
"""Estimate gas consumption of method call."""
(interface_id) = self.validate_and_normalize_inputs(interface_id)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(interface_id).estimateGas(
tx_params.as_dict()
)
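# Hedged sketch (illustrative only): supportsInterface follows ERC-165 and
# takes a 4-byte interface id. The id below (0xd9b67a26, commonly cited as the
# ERC-1155 interface id) and the names `w3`, `contract_address`, `contract`,
# and `validator` are assumptions for illustration.
def _example_supports_erc1155(w3, contract_address, contract, validator):
    """Sketch: check whether the contract reports ERC-1155 support."""
    supports = SupportsInterfaceMethod(
        w3, contract_address, contract.functions.supportsInterface, validator
    )
    return supports.call(bytes.fromhex("d9b67a26"))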
class TokenStateMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the tokenState method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
validator: Validator = None,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address, validator)
self._underlying_method = contract_function
def validate_and_normalize_inputs(self, index_0: int):
"""Validate the inputs to the tokenState method."""
self.validator.assert_valid(
method_name="tokenState",
parameter_name="index_0",
argument_value=index_0,
)
# safeguard against fractional inputs
index_0 = int(index_0)
return index_0
def call(
self, index_0: int, tx_params: Optional[TxParams] = None
) -> Tuple[str, str, int]:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
(index_0) = self.validate_and_normalize_inputs(index_0)
tx_params = super().normalize_tx_params(tx_params)
returned = self._underlying_method(index_0).call(tx_params.as_dict())
return (
returned[0],
returned[1],
returned[2],
)
def send_transaction(
self, index_0: int, tx_params: Optional[TxParams] = None
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
(index_0) = self.validate_and_normalize_inputs(index_0)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(index_0).transact(tx_params.as_dict())
def build_transaction(
self, index_0: int, tx_params: Optional[TxParams] = None
) -> dict:
"""Construct calldata to be used as input to the method."""
(index_0) = self.validate_and_normalize_inputs(index_0)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(index_0).buildTransaction(
tx_params.as_dict()
)
def estimate_gas(
self, index_0: int, tx_params: Optional[TxParams] = None
) -> int:
"""Estimate gas consumption of method call."""
(index_0) = self.validate_and_normalize_inputs(index_0)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(index_0).estimateGas(
tx_params.as_dict()
)
class TokenUriMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the tokenURI method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
validator: Validator = None,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address, validator)
self._underlying_method = contract_function
def validate_and_normalize_inputs(self, nft_id: int):
"""Validate the inputs to the tokenURI method."""
self.validator.assert_valid(
method_name="tokenURI",
parameter_name="_nftId",
argument_value=nft_id,
)
# safeguard against fractional inputs
nft_id = int(nft_id)
return nft_id
def call(self, nft_id: int, tx_params: Optional[TxParams] = None) -> str:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
(nft_id) = self.validate_and_normalize_inputs(nft_id)
tx_params = super().normalize_tx_params(tx_params)
returned = self._underlying_method(nft_id).call(tx_params.as_dict())
return str(returned)
def send_transaction(
self, nft_id: int, tx_params: Optional[TxParams] = None
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
(nft_id) = self.validate_and_normalize_inputs(nft_id)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(nft_id).transact(tx_params.as_dict())
def build_transaction(
self, nft_id: int, tx_params: Optional[TxParams] = None
) -> dict:
"""Construct calldata to be used as input to the method."""
(nft_id) = self.validate_and_normalize_inputs(nft_id)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(nft_id).buildTransaction(
tx_params.as_dict()
)
def estimate_gas(
self, nft_id: int, tx_params: Optional[TxParams] = None
) -> int:
"""Estimate gas consumption of method call."""
(nft_id) = self.validate_and_normalize_inputs(nft_id)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(nft_id).estimateGas(tx_params.as_dict())
class TotalSupplyMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the totalSupply method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
validator: Validator = None,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address, validator)
self._underlying_method = contract_function
def validate_and_normalize_inputs(self, _id: int):
"""Validate the inputs to the totalSupply method."""
self.validator.assert_valid(
method_name="totalSupply",
parameter_name="id",
argument_value=_id,
)
# safeguard against fractional inputs
_id = int(_id)
return _id
def call(self, _id: int, tx_params: Optional[TxParams] = None) -> int:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
(_id) = self.validate_and_normalize_inputs(_id)
tx_params = super().normalize_tx_params(tx_params)
returned = self._underlying_method(_id).call(tx_params.as_dict())
return int(returned)
def send_transaction(
self, _id: int, tx_params: Optional[TxParams] = None
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
(_id) = self.validate_and_normalize_inputs(_id)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(_id).transact(tx_params.as_dict())
def build_transaction(
self, _id: int, tx_params: Optional[TxParams] = None
) -> dict:
"""Construct calldata to be used as input to the method."""
(_id) = self.validate_and_normalize_inputs(_id)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(_id).buildTransaction(
tx_params.as_dict()
)
def estimate_gas(
self, _id: int, tx_params: Optional[TxParams] = None
) -> int:
"""Estimate gas consumption of method call."""
(_id) = self.validate_and_normalize_inputs(_id)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(_id).estimateGas(tx_params.as_dict())
class TransfersRestrictedMethod(
ContractMethod
): # pylint: disable=invalid-name
"""Various interfaces to the transfersRestricted method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address)
self._underlying_method = contract_function
def call(self, tx_params: Optional[TxParams] = None) -> bool:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
tx_params = super().normalize_tx_params(tx_params)
returned = self._underlying_method().call(tx_params.as_dict())
return bool(returned)
def send_transaction(
self, tx_params: Optional[TxParams] = None
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method().transact(tx_params.as_dict())
def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
"""Construct calldata to be used as input to the method."""
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method().buildTransaction(tx_params.as_dict())
def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
"""Estimate gas consumption of method call."""
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method().estimateGas(tx_params.as_dict())
class UnpauseMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the unpause method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address)
self._underlying_method = contract_function
def call(self, tx_params: Optional[TxParams] = None) -> None:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
tx_params = super().normalize_tx_params(tx_params)
self._underlying_method().call(tx_params.as_dict())
def send_transaction(
self, tx_params: Optional[TxParams] = None
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method().transact(tx_params.as_dict())
def build_transaction(self, tx_params: Optional[TxParams] = None) -> dict:
"""Construct calldata to be used as input to the method."""
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method().buildTransaction(tx_params.as_dict())
def estimate_gas(self, tx_params: Optional[TxParams] = None) -> int:
"""Estimate gas consumption of method call."""
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method().estimateGas(tx_params.as_dict())
class UriMethod(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the uri method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
validator: Validator = None,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address, validator)
self._underlying_method = contract_function
def validate_and_normalize_inputs(self, nft_id: int):
"""Validate the inputs to the uri method."""
self.validator.assert_valid(
method_name="uri",
parameter_name="_nftId",
argument_value=nft_id,
)
# safeguard against fractional inputs
nft_id = int(nft_id)
return nft_id
def call(self, nft_id: int, tx_params: Optional[TxParams] = None) -> str:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
(nft_id) = self.validate_and_normalize_inputs(nft_id)
tx_params = super().normalize_tx_params(tx_params)
returned = self._underlying_method(nft_id).call(tx_params.as_dict())
return str(returned)
def send_transaction(
self, nft_id: int, tx_params: Optional[TxParams] = None
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
(nft_id) = self.validate_and_normalize_inputs(nft_id)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(nft_id).transact(tx_params.as_dict())
def build_transaction(
self, nft_id: int, tx_params: Optional[TxParams] = None
) -> dict:
"""Construct calldata to be used as input to the method."""
(nft_id) = self.validate_and_normalize_inputs(nft_id)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(nft_id).buildTransaction(
tx_params.as_dict()
)
def estimate_gas(
self, nft_id: int, tx_params: Optional[TxParams] = None
) -> int:
"""Estimate gas consumption of method call."""
(nft_id) = self.validate_and_normalize_inputs(nft_id)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(nft_id).estimateGas(tx_params.as_dict())
class WrapErc20Method(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the wrapERC20 method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
validator: Validator = None,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address, validator)
self._underlying_method = contract_function
def validate_and_normalize_inputs(
self,
token_contract: str,
token_amount: int,
num_of_nfts_to_mint: int,
nft_uri: str,
):
"""Validate the inputs to the wrapERC20 method."""
self.validator.assert_valid(
method_name="wrapERC20",
parameter_name="_tokenContract",
argument_value=token_contract,
)
token_contract = self.validate_and_checksum_address(token_contract)
self.validator.assert_valid(
method_name="wrapERC20",
parameter_name="_tokenAmount",
argument_value=token_amount,
)
# safeguard against fractional inputs
token_amount = int(token_amount)
self.validator.assert_valid(
method_name="wrapERC20",
parameter_name="_numOfNftsToMint",
argument_value=num_of_nfts_to_mint,
)
# safeguard against fractional inputs
num_of_nfts_to_mint = int(num_of_nfts_to_mint)
self.validator.assert_valid(
method_name="wrapERC20",
parameter_name="_nftURI",
argument_value=nft_uri,
)
return (token_contract, token_amount, num_of_nfts_to_mint, nft_uri)
def call(
self,
token_contract: str,
token_amount: int,
num_of_nfts_to_mint: int,
nft_uri: str,
tx_params: Optional[TxParams] = None,
) -> None:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
(
token_contract,
token_amount,
num_of_nfts_to_mint,
nft_uri,
) = self.validate_and_normalize_inputs(
token_contract, token_amount, num_of_nfts_to_mint, nft_uri
)
tx_params = super().normalize_tx_params(tx_params)
self._underlying_method(
token_contract, token_amount, num_of_nfts_to_mint, nft_uri
).call(tx_params.as_dict())
def send_transaction(
self,
token_contract: str,
token_amount: int,
num_of_nfts_to_mint: int,
nft_uri: str,
tx_params: Optional[TxParams] = None,
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
(
token_contract,
token_amount,
num_of_nfts_to_mint,
nft_uri,
) = self.validate_and_normalize_inputs(
token_contract, token_amount, num_of_nfts_to_mint, nft_uri
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(
token_contract, token_amount, num_of_nfts_to_mint, nft_uri
).transact(tx_params.as_dict())
def build_transaction(
self,
token_contract: str,
token_amount: int,
num_of_nfts_to_mint: int,
nft_uri: str,
tx_params: Optional[TxParams] = None,
) -> dict:
"""Construct calldata to be used as input to the method."""
(
token_contract,
token_amount,
num_of_nfts_to_mint,
nft_uri,
) = self.validate_and_normalize_inputs(
token_contract, token_amount, num_of_nfts_to_mint, nft_uri
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(
token_contract, token_amount, num_of_nfts_to_mint, nft_uri
).buildTransaction(tx_params.as_dict())
def estimate_gas(
self,
token_contract: str,
token_amount: int,
num_of_nfts_to_mint: int,
nft_uri: str,
tx_params: Optional[TxParams] = None,
) -> int:
"""Estimate gas consumption of method call."""
(
token_contract,
token_amount,
num_of_nfts_to_mint,
nft_uri,
) = self.validate_and_normalize_inputs(
token_contract, token_amount, num_of_nfts_to_mint, nft_uri
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(
token_contract, token_amount, num_of_nfts_to_mint, nft_uri
).estimateGas(tx_params.as_dict())
class WrapErc721Method(ContractMethod): # pylint: disable=invalid-name
"""Various interfaces to the wrapERC721 method."""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
contract_function: ContractFunction,
validator: Validator = None,
):
"""Persist instance data."""
super().__init__(web3_or_provider, contract_address, validator)
self._underlying_method = contract_function
def validate_and_normalize_inputs(
self, nft_contract: str, token_id: int, nft_uri: str
):
"""Validate the inputs to the wrapERC721 method."""
self.validator.assert_valid(
method_name="wrapERC721",
parameter_name="_nftContract",
argument_value=nft_contract,
)
nft_contract = self.validate_and_checksum_address(nft_contract)
self.validator.assert_valid(
method_name="wrapERC721",
parameter_name="_tokenId",
argument_value=token_id,
)
# safeguard against fractional inputs
token_id = int(token_id)
self.validator.assert_valid(
method_name="wrapERC721",
parameter_name="_nftURI",
argument_value=nft_uri,
)
return (nft_contract, token_id, nft_uri)
def call(
self,
nft_contract: str,
token_id: int,
nft_uri: str,
tx_params: Optional[TxParams] = None,
) -> None:
"""Execute underlying contract method via eth_call.
:param tx_params: transaction parameters
:returns: the return value of the underlying method.
"""
(nft_contract, token_id, nft_uri) = self.validate_and_normalize_inputs(
nft_contract, token_id, nft_uri
)
tx_params = super().normalize_tx_params(tx_params)
self._underlying_method(nft_contract, token_id, nft_uri).call(
tx_params.as_dict()
)
def send_transaction(
self,
nft_contract: str,
token_id: int,
nft_uri: str,
tx_params: Optional[TxParams] = None,
) -> Union[HexBytes, bytes]:
"""Execute underlying contract method via eth_sendTransaction.
:param tx_params: transaction parameters
"""
(nft_contract, token_id, nft_uri) = self.validate_and_normalize_inputs(
nft_contract, token_id, nft_uri
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(
nft_contract, token_id, nft_uri
).transact(tx_params.as_dict())
def build_transaction(
self,
nft_contract: str,
token_id: int,
nft_uri: str,
tx_params: Optional[TxParams] = None,
) -> dict:
"""Construct calldata to be used as input to the method."""
(nft_contract, token_id, nft_uri) = self.validate_and_normalize_inputs(
nft_contract, token_id, nft_uri
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(
nft_contract, token_id, nft_uri
).buildTransaction(tx_params.as_dict())
def estimate_gas(
self,
nft_contract: str,
token_id: int,
nft_uri: str,
tx_params: Optional[TxParams] = None,
) -> int:
"""Estimate gas consumption of method call."""
(nft_contract, token_id, nft_uri) = self.validate_and_normalize_inputs(
nft_contract, token_id, nft_uri
)
tx_params = super().normalize_tx_params(tx_params)
return self._underlying_method(
nft_contract, token_id, nft_uri
).estimateGas(tx_params.as_dict())
# pylint: disable=too-many-public-methods,too-many-instance-attributes
class NFTCollection:
"""Wrapper class for NFTCollection Solidity contract.
All method parameters of type `bytes`:code: should be encoded as UTF-8,
which can be accomplished via `str.encode("utf_8")`:code:.
"""
default_admin_role: DefaultAdminRoleMethod
"""Constructor-initialized instance of
:class:`DefaultAdminRoleMethod`.
"""
minter_role: MinterRoleMethod
"""Constructor-initialized instance of
:class:`MinterRoleMethod`.
"""
pauser_role: PauserRoleMethod
"""Constructor-initialized instance of
:class:`PauserRoleMethod`.
"""
transfer_role: TransferRoleMethod
"""Constructor-initialized instance of
:class:`TransferRoleMethod`.
"""
contract_uri_: ContractUri_Method
"""Constructor-initialized instance of
:class:`ContractUri_Method`.
"""
balance_of: BalanceOfMethod
"""Constructor-initialized instance of
:class:`BalanceOfMethod`.
"""
balance_of_batch: BalanceOfBatchMethod
"""Constructor-initialized instance of
:class:`BalanceOfBatchMethod`.
"""
burn: BurnMethod
"""Constructor-initialized instance of
:class:`BurnMethod`.
"""
burn_batch: BurnBatchMethod
"""Constructor-initialized instance of
:class:`BurnBatchMethod`.
"""
contract_uri: ContractUriMethod
"""Constructor-initialized instance of
:class:`ContractUriMethod`.
"""
create_native_tokens: CreateNativeTokensMethod
"""Constructor-initialized instance of
:class:`CreateNativeTokensMethod`.
"""
creator: CreatorMethod
"""Constructor-initialized instance of
:class:`CreatorMethod`.
"""
erc20_wrapped_tokens: Erc20WrappedTokensMethod
"""Constructor-initialized instance of
:class:`Erc20WrappedTokensMethod`.
"""
erc721_wrapped_tokens: Erc721WrappedTokensMethod
"""Constructor-initialized instance of
:class:`Erc721WrappedTokensMethod`.
"""
get_role_admin: GetRoleAdminMethod
"""Constructor-initialized instance of
:class:`GetRoleAdminMethod`.
"""
get_role_member: GetRoleMemberMethod
"""Constructor-initialized instance of
:class:`GetRoleMemberMethod`.
"""
get_role_member_count: GetRoleMemberCountMethod
"""Constructor-initialized instance of
:class:`GetRoleMemberCountMethod`.
"""
grant_role: GrantRoleMethod
"""Constructor-initialized instance of
:class:`GrantRoleMethod`.
"""
has_role: HasRoleMethod
"""Constructor-initialized instance of
:class:`HasRoleMethod`.
"""
is_approved_for_all: IsApprovedForAllMethod
"""Constructor-initialized instance of
:class:`IsApprovedForAllMethod`.
"""
is_trusted_forwarder: IsTrustedForwarderMethod
"""Constructor-initialized instance of
:class:`IsTrustedForwarderMethod`.
"""
mint: MintMethod
"""Constructor-initialized instance of
:class:`MintMethod`.
"""
mint_batch: MintBatchMethod
"""Constructor-initialized instance of
:class:`MintBatchMethod`.
"""
multicall: MulticallMethod
"""Constructor-initialized instance of
:class:`MulticallMethod`.
"""
next_token_id: NextTokenIdMethod
"""Constructor-initialized instance of
:class:`NextTokenIdMethod`.
"""
on_erc1155_batch_received: OnErc1155BatchReceivedMethod
"""Constructor-initialized instance of
:class:`OnErc1155BatchReceivedMethod`.
"""
on_erc1155_received: OnErc1155ReceivedMethod
"""Constructor-initialized instance of
:class:`OnErc1155ReceivedMethod`.
"""
on_erc721_received: OnErc721ReceivedMethod
"""Constructor-initialized instance of
:class:`OnErc721ReceivedMethod`.
"""
pause: PauseMethod
"""Constructor-initialized instance of
:class:`PauseMethod`.
"""
paused: PausedMethod
"""Constructor-initialized instance of
:class:`PausedMethod`.
"""
redeem_erc20: RedeemErc20Method
"""Constructor-initialized instance of
:class:`RedeemErc20Method`.
"""
redeem_erc721: RedeemErc721Method
"""Constructor-initialized instance of
:class:`RedeemErc721Method`.
"""
renounce_role: RenounceRoleMethod
"""Constructor-initialized instance of
:class:`RenounceRoleMethod`.
"""
revoke_role: RevokeRoleMethod
"""Constructor-initialized instance of
:class:`RevokeRoleMethod`.
"""
royalty_bps: RoyaltyBpsMethod
"""Constructor-initialized instance of
:class:`RoyaltyBpsMethod`.
"""
royalty_info: RoyaltyInfoMethod
"""Constructor-initialized instance of
:class:`RoyaltyInfoMethod`.
"""
safe_batch_transfer_from: SafeBatchTransferFromMethod
"""Constructor-initialized instance of
:class:`SafeBatchTransferFromMethod`.
"""
safe_transfer_from: SafeTransferFromMethod
"""Constructor-initialized instance of
:class:`SafeTransferFromMethod`.
"""
set_approval_for_all: SetApprovalForAllMethod
"""Constructor-initialized instance of
:class:`SetApprovalForAllMethod`.
"""
set_contract_uri: SetContractUriMethod
"""Constructor-initialized instance of
:class:`SetContractUriMethod`.
"""
set_restricted_transfer: SetRestrictedTransferMethod
"""Constructor-initialized instance of
:class:`SetRestrictedTransferMethod`.
"""
set_royalty_bps: SetRoyaltyBpsMethod
"""Constructor-initialized instance of
:class:`SetRoyaltyBpsMethod`.
"""
supports_interface: SupportsInterfaceMethod
"""Constructor-initialized instance of
:class:`SupportsInterfaceMethod`.
"""
token_state: TokenStateMethod
"""Constructor-initialized instance of
:class:`TokenStateMethod`.
"""
token_uri: TokenUriMethod
"""Constructor-initialized instance of
:class:`TokenUriMethod`.
"""
total_supply: TotalSupplyMethod
"""Constructor-initialized instance of
:class:`TotalSupplyMethod`.
"""
transfers_restricted: TransfersRestrictedMethod
"""Constructor-initialized instance of
:class:`TransfersRestrictedMethod`.
"""
unpause: UnpauseMethod
"""Constructor-initialized instance of
:class:`UnpauseMethod`.
"""
uri: UriMethod
"""Constructor-initialized instance of
:class:`UriMethod`.
"""
wrap_erc20: WrapErc20Method
"""Constructor-initialized instance of
:class:`WrapErc20Method`.
"""
wrap_erc721: WrapErc721Method
"""Constructor-initialized instance of
:class:`WrapErc721Method`.
"""
def __init__(
self,
web3_or_provider: Union[Web3, BaseProvider],
contract_address: str,
validator: NFTCollectionValidator = None,
):
"""Get an instance of wrapper for smart contract.
:param web3_or_provider: Either an instance of `web3.Web3`:code: or
`web3.providers.base.BaseProvider`:code:
:param contract_address: where the contract has been deployed
:param validator: for validation of method inputs.
"""
# pylint: disable=too-many-statements
self.contract_address = contract_address
if not validator:
validator = NFTCollectionValidator(
web3_or_provider, contract_address
)
web3 = None
if isinstance(web3_or_provider, BaseProvider):
web3 = Web3(web3_or_provider)
elif isinstance(web3_or_provider, Web3):
web3 = web3_or_provider
else:
raise TypeError(
"Expected parameter 'web3_or_provider' to be an instance of either"
+ " Web3 or BaseProvider"
)
# if any middleware was imported, inject it
try:
MIDDLEWARE
except NameError:
pass
else:
try:
for middleware in MIDDLEWARE:
web3.middleware_onion.inject(
middleware["function"],
layer=middleware["layer"],
)
except ValueError as value_error:
if value_error.args == (
"You can't add the same un-named instance twice",
):
pass
self._web3_eth = web3.eth
functions = self._web3_eth.contract(
address=to_checksum_address(contract_address),
abi=NFTCollection.abi(),
).functions
self.default_admin_role = DefaultAdminRoleMethod(
web3_or_provider, contract_address, functions.DEFAULT_ADMIN_ROLE
)
self.minter_role = MinterRoleMethod(
web3_or_provider, contract_address, functions.MINTER_ROLE
)
self.pauser_role = PauserRoleMethod(
web3_or_provider, contract_address, functions.PAUSER_ROLE
)
self.transfer_role = TransferRoleMethod(
web3_or_provider, contract_address, functions.TRANSFER_ROLE
)
self.contract_uri_ = ContractUri_Method(
web3_or_provider, contract_address, functions._contractURI
)
self.balance_of = BalanceOfMethod(
web3_or_provider, contract_address, functions.balanceOf, validator
)
self.balance_of_batch = BalanceOfBatchMethod(
web3_or_provider,
contract_address,
functions.balanceOfBatch,
validator,
)
self.burn = BurnMethod(
web3_or_provider, contract_address, functions.burn, validator
)
self.burn_batch = BurnBatchMethod(
web3_or_provider, contract_address, functions.burnBatch, validator
)
self.contract_uri = ContractUriMethod(
web3_or_provider, contract_address, functions.contractURI
)
self.create_native_tokens = CreateNativeTokensMethod(
web3_or_provider,
contract_address,
functions.createNativeTokens,
validator,
)
self.creator = CreatorMethod(
web3_or_provider, contract_address, functions.creator, validator
)
self.erc20_wrapped_tokens = Erc20WrappedTokensMethod(
web3_or_provider,
contract_address,
functions.erc20WrappedTokens,
validator,
)
self.erc721_wrapped_tokens = Erc721WrappedTokensMethod(
web3_or_provider,
contract_address,
functions.erc721WrappedTokens,
validator,
)
self.get_role_admin = GetRoleAdminMethod(
web3_or_provider,
contract_address,
functions.getRoleAdmin,
validator,
)
self.get_role_member = GetRoleMemberMethod(
web3_or_provider,
contract_address,
functions.getRoleMember,
validator,
)
self.get_role_member_count = GetRoleMemberCountMethod(
web3_or_provider,
contract_address,
functions.getRoleMemberCount,
validator,
)
self.grant_role = GrantRoleMethod(
web3_or_provider, contract_address, functions.grantRole, validator
)
self.has_role = HasRoleMethod(
web3_or_provider, contract_address, functions.hasRole, validator
)
self.is_approved_for_all = IsApprovedForAllMethod(
web3_or_provider,
contract_address,
functions.isApprovedForAll,
validator,
)
self.is_trusted_forwarder = IsTrustedForwarderMethod(
web3_or_provider,
contract_address,
functions.isTrustedForwarder,
validator,
)
self.mint = MintMethod(
web3_or_provider, contract_address, functions.mint, validator
)
self.mint_batch = MintBatchMethod(
web3_or_provider, contract_address, functions.mintBatch, validator
)
self.multicall = MulticallMethod(
web3_or_provider, contract_address, functions.multicall, validator
)
self.next_token_id = NextTokenIdMethod(
web3_or_provider, contract_address, functions.nextTokenId
)
self.on_erc1155_batch_received = OnErc1155BatchReceivedMethod(
web3_or_provider,
contract_address,
functions.onERC1155BatchReceived,
validator,
)
self.on_erc1155_received = OnErc1155ReceivedMethod(
web3_or_provider,
contract_address,
functions.onERC1155Received,
validator,
)
self.on_erc721_received = OnErc721ReceivedMethod(
web3_or_provider,
contract_address,
functions.onERC721Received,
validator,
)
self.pause = PauseMethod(
web3_or_provider, contract_address, functions.pause
)
self.paused = PausedMethod(
web3_or_provider, contract_address, functions.paused
)
self.redeem_erc20 = RedeemErc20Method(
web3_or_provider,
contract_address,
functions.redeemERC20,
validator,
)
self.redeem_erc721 = RedeemErc721Method(
web3_or_provider,
contract_address,
functions.redeemERC721,
validator,
)
self.renounce_role = RenounceRoleMethod(
web3_or_provider,
contract_address,
functions.renounceRole,
validator,
)
self.revoke_role = RevokeRoleMethod(
web3_or_provider, contract_address, functions.revokeRole, validator
)
self.royalty_bps = RoyaltyBpsMethod(
web3_or_provider, contract_address, functions.royaltyBps
)
self.royalty_info = RoyaltyInfoMethod(
web3_or_provider,
contract_address,
functions.royaltyInfo,
validator,
)
self.safe_batch_transfer_from = SafeBatchTransferFromMethod(
web3_or_provider,
contract_address,
functions.safeBatchTransferFrom,
validator,
)
self.safe_transfer_from = SafeTransferFromMethod(
web3_or_provider,
contract_address,
functions.safeTransferFrom,
validator,
)
self.set_approval_for_all = SetApprovalForAllMethod(
web3_or_provider,
contract_address,
functions.setApprovalForAll,
validator,
)
self.set_contract_uri = SetContractUriMethod(
web3_or_provider,
contract_address,
functions.setContractURI,
validator,
)
self.set_restricted_transfer = SetRestrictedTransferMethod(
web3_or_provider,
contract_address,
functions.setRestrictedTransfer,
validator,
)
self.set_royalty_bps = SetRoyaltyBpsMethod(
web3_or_provider,
contract_address,
functions.setRoyaltyBps,
validator,
)
self.supports_interface = SupportsInterfaceMethod(
web3_or_provider,
contract_address,
functions.supportsInterface,
validator,
)
self.token_state = TokenStateMethod(
web3_or_provider, contract_address, functions.tokenState, validator
)
self.token_uri = TokenUriMethod(
web3_or_provider, contract_address, functions.tokenURI, validator
)
self.total_supply = TotalSupplyMethod(
web3_or_provider,
contract_address,
functions.totalSupply,
validator,
)
self.transfers_restricted = TransfersRestrictedMethod(
web3_or_provider, contract_address, functions.transfersRestricted
)
self.unpause = UnpauseMethod(
web3_or_provider, contract_address, functions.unpause
)
self.uri = UriMethod(
web3_or_provider, contract_address, functions.uri, validator
)
self.wrap_erc20 = WrapErc20Method(
web3_or_provider, contract_address, functions.wrapERC20, validator
)
self.wrap_erc721 = WrapErc721Method(
web3_or_provider, contract_address, functions.wrapERC721, validator
)
def get_approval_for_all_event(
self, tx_hash: Union[HexBytes, bytes]
) -> Tuple[AttributeDict]:
"""Get log entry for ApprovalForAll event.
:param tx_hash: hash of transaction emitting ApprovalForAll event
"""
tx_receipt = self._web3_eth.getTransactionReceipt(tx_hash)
return (
self._web3_eth.contract(
address=to_checksum_address(self.contract_address),
abi=NFTCollection.abi(),
)
.events.ApprovalForAll()
.processReceipt(tx_receipt)
)
def get_erc20_redeemed_event(
self, tx_hash: Union[HexBytes, bytes]
) -> Tuple[AttributeDict]:
"""Get log entry for ERC20Redeemed event.
:param tx_hash: hash of transaction emitting ERC20Redeemed event
"""
tx_receipt = self._web3_eth.getTransactionReceipt(tx_hash)
return (
self._web3_eth.contract(
address=to_checksum_address(self.contract_address),
abi=NFTCollection.abi(),
)
.events.ERC20Redeemed()
.processReceipt(tx_receipt)
)
def get_erc20_wrapped_token_event(
self, tx_hash: Union[HexBytes, bytes]
) -> Tuple[AttributeDict]:
"""Get log entry for ERC20WrappedToken event.
:param tx_hash: hash of transaction emitting ERC20WrappedToken event
"""
tx_receipt = self._web3_eth.getTransactionReceipt(tx_hash)
return (
self._web3_eth.contract(
address=to_checksum_address(self.contract_address),
abi=NFTCollection.abi(),
)
.events.ERC20WrappedToken()
.processReceipt(tx_receipt)
)
def get_erc721_redeemed_event(
self, tx_hash: Union[HexBytes, bytes]
) -> Tuple[AttributeDict]:
"""Get log entry for ERC721Redeemed event.
:param tx_hash: hash of transaction emitting ERC721Redeemed event
"""
tx_receipt = self._web3_eth.getTransactionReceipt(tx_hash)
return (
self._web3_eth.contract(
address=to_checksum_address(self.contract_address),
abi=NFTCollection.abi(),
)
.events.ERC721Redeemed()
.processReceipt(tx_receipt)
)
def get_erc721_wrapped_token_event(
self, tx_hash: Union[HexBytes, bytes]
) -> Tuple[AttributeDict]:
"""Get log entry for ERC721WrappedToken event.
:param tx_hash: hash of transaction emitting ERC721WrappedToken event
"""
tx_receipt = self._web3_eth.getTransactionReceipt(tx_hash)
return (
self._web3_eth.contract(
address=to_checksum_address(self.contract_address),
abi=NFTCollection.abi(),
)
.events.ERC721WrappedToken()
.processReceipt(tx_receipt)
)
def get_native_tokens_event(
self, tx_hash: Union[HexBytes, bytes]
) -> Tuple[AttributeDict]:
"""Get log entry for NativeTokens event.
:param tx_hash: hash of transaction emitting NativeTokens event
"""
tx_receipt = self._web3_eth.getTransactionReceipt(tx_hash)
return (
self._web3_eth.contract(
address=to_checksum_address(self.contract_address),
abi=NFTCollection.abi(),
)
.events.NativeTokens()
.processReceipt(tx_receipt)
)
def get_paused_event(
self, tx_hash: Union[HexBytes, bytes]
) -> Tuple[AttributeDict]:
"""Get log entry for Paused event.
:param tx_hash: hash of transaction emitting Paused event
"""
tx_receipt = self._web3_eth.getTransactionReceipt(tx_hash)
return (
self._web3_eth.contract(
address=to_checksum_address(self.contract_address),
abi=NFTCollection.abi(),
)
.events.Paused()
.processReceipt(tx_receipt)
)
def get_restricted_transfer_updated_event(
self, tx_hash: Union[HexBytes, bytes]
) -> Tuple[AttributeDict]:
"""Get log entry for RestrictedTransferUpdated event.
:param tx_hash: hash of transaction emitting RestrictedTransferUpdated
event
"""
tx_receipt = self._web3_eth.getTransactionReceipt(tx_hash)
return (
self._web3_eth.contract(
address=to_checksum_address(self.contract_address),
abi=NFTCollection.abi(),
)
.events.RestrictedTransferUpdated()
.processReceipt(tx_receipt)
)
def get_role_admin_changed_event(
self, tx_hash: Union[HexBytes, bytes]
) -> Tuple[AttributeDict]:
"""Get log entry for RoleAdminChanged event.
:param tx_hash: hash of transaction emitting RoleAdminChanged event
"""
tx_receipt = self._web3_eth.getTransactionReceipt(tx_hash)
return (
self._web3_eth.contract(
address=to_checksum_address(self.contract_address),
abi=NFTCollection.abi(),
)
.events.RoleAdminChanged()
.processReceipt(tx_receipt)
)
def get_role_granted_event(
self, tx_hash: Union[HexBytes, bytes]
) -> Tuple[AttributeDict]:
"""Get log entry for RoleGranted event.
:param tx_hash: hash of transaction emitting RoleGranted event
"""
tx_receipt = self._web3_eth.getTransactionReceipt(tx_hash)
return (
self._web3_eth.contract(
address=to_checksum_address(self.contract_address),
abi=NFTCollection.abi(),
)
.events.RoleGranted()
.processReceipt(tx_receipt)
)
def get_role_revoked_event(
self, tx_hash: Union[HexBytes, bytes]
) -> Tuple[AttributeDict]:
"""Get log entry for RoleRevoked event.
:param tx_hash: hash of transaction emitting RoleRevoked event
"""
tx_receipt = self._web3_eth.getTransactionReceipt(tx_hash)
return (
self._web3_eth.contract(
address=to_checksum_address(self.contract_address),
abi=NFTCollection.abi(),
)
.events.RoleRevoked()
.processReceipt(tx_receipt)
)
def get_royalty_updated_event(
self, tx_hash: Union[HexBytes, bytes]
) -> Tuple[AttributeDict]:
"""Get log entry for RoyaltyUpdated event.
:param tx_hash: hash of transaction emitting RoyaltyUpdated event
"""
tx_receipt = self._web3_eth.getTransactionReceipt(tx_hash)
return (
self._web3_eth.contract(
address=to_checksum_address(self.contract_address),
abi=NFTCollection.abi(),
)
.events.RoyaltyUpdated()
.processReceipt(tx_receipt)
)
def get_transfer_batch_event(
self, tx_hash: Union[HexBytes, bytes]
) -> Tuple[AttributeDict]:
"""Get log entry for TransferBatch event.
:param tx_hash: hash of transaction emitting TransferBatch event
"""
tx_receipt = self._web3_eth.getTransactionReceipt(tx_hash)
return (
self._web3_eth.contract(
address=to_checksum_address(self.contract_address),
abi=NFTCollection.abi(),
)
.events.TransferBatch()
.processReceipt(tx_receipt)
)
def get_transfer_single_event(
self, tx_hash: Union[HexBytes, bytes]
) -> Tuple[AttributeDict]:
"""Get log entry for TransferSingle event.
:param tx_hash: hash of transaction emitting TransferSingle event
"""
tx_receipt = self._web3_eth.getTransactionReceipt(tx_hash)
return (
self._web3_eth.contract(
address=to_checksum_address(self.contract_address),
abi=NFTCollection.abi(),
)
.events.TransferSingle()
.processReceipt(tx_receipt)
)
def get_uri_event(
self, tx_hash: Union[HexBytes, bytes]
) -> Tuple[AttributeDict]:
"""Get log entry for URI event.
:param tx_hash: hash of transaction emitting URI event
"""
tx_receipt = self._web3_eth.getTransactionReceipt(tx_hash)
return (
self._web3_eth.contract(
address=to_checksum_address(self.contract_address),
abi=NFTCollection.abi(),
)
.events.URI()
.processReceipt(tx_receipt)
)
def get_unpaused_event(
self, tx_hash: Union[HexBytes, bytes]
) -> Tuple[AttributeDict]:
"""Get log entry for Unpaused event.
:param tx_hash: hash of transaction emitting Unpaused event
"""
tx_receipt = self._web3_eth.getTransactionReceipt(tx_hash)
return (
self._web3_eth.contract(
address=to_checksum_address(self.contract_address),
abi=NFTCollection.abi(),
)
.events.Unpaused()
.processReceipt(tx_receipt)
)
@staticmethod
def abi():
"""Return the ABI to the underlying contract."""
return json.loads(
'[{"inputs":[{"internalType":"address payable","name":"_controlCenter","type":"address"},{"internalType":"address","name":"_trustedForwarder","type":"address"},{"internalType":"string","name":"_uri","type":"string"}],"stateMutability":"nonpayable","type":"constructor"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"account","type":"address"},{"indexed":true,"internalType":"address","name":"operator","type":"address"},{"indexed":false,"internalType":"bool","name":"approved","type":"bool"}],"name":"ApprovalForAll","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"redeemer","type":"address"},{"indexed":true,"internalType":"uint256","name":"tokenId","type":"uint256"},{"indexed":true,"internalType":"address","name":"sourceOfUnderlying","type":"address"},{"indexed":false,"internalType":"uint256","name":"tokenAmountReceived","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"sharesRedeemed","type":"uint256"}],"name":"ERC20Redeemed","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"creator","type":"address"},{"indexed":true,"internalType":"address","name":"sourceOfUnderlying","type":"address"},{"indexed":false,"internalType":"uint256","name":"totalAmountOfUnderlying","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"shares","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"tokenId","type":"uint256"},{"indexed":false,"internalType":"string","name":"tokenURI","type":"string"}],"name":"ERC20WrappedToken","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"redeemer","type":"address"},{"indexed":true,"internalType":"address","name":"sourceOfUnderlying","type":"address"},{"indexed":false,"internalType":"uint256","name":"tokenIdOfUnderlying","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"tokenId","type":"uint256"}],"name":"ERC721Redeemed","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"creator","type":"address"},{"indexed":true,"internalType":"address","name":"sourceOfUnderlying","type":"address"},{"indexed":false,"internalType":"uint256","name":"tokenIdOfUnderlying","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"tokenId","type":"uint256"},{"indexed":false,"internalType":"string","name":"tokenURI","type":"string"}],"name":"ERC721WrappedToken","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"creator","type":"address"},{"indexed":false,"internalType":"uint256[]","name":"tokenIds","type":"uint256[]"},{"indexed":false,"internalType":"string[]","name":"tokenURIs","type":"string[]"},{"indexed":false,"internalType":"uint256[]","name":"tokenSupplies","type":"uint256[]"}],"name":"NativeTokens","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"internalType":"address","name":"account","type":"address"}],"name":"Paused","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"internalType":"bool","name":"transferable","type":"bool"}],"name":"RestrictedTransferUpdated","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"bytes32","name":"role","type":"bytes32"},{"indexed":true,"internalType":"bytes32","name":"previousAdminRole","type":"bytes32"},{"indexed":true,"internalType":"bytes32","name":"newAdminRole","type":"bytes32"}],"name":"RoleAdminChanged","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"bytes32"
,"name":"role","type":"bytes32"},{"indexed":true,"internalType":"address","name":"account","type":"address"},{"indexed":true,"internalType":"address","name":"sender","type":"address"}],"name":"RoleGranted","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"bytes32","name":"role","type":"bytes32"},{"indexed":true,"internalType":"address","name":"account","type":"address"},{"indexed":true,"internalType":"address","name":"sender","type":"address"}],"name":"RoleRevoked","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"internalType":"uint256","name":"royaltyBps","type":"uint256"}],"name":"RoyaltyUpdated","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"operator","type":"address"},{"indexed":true,"internalType":"address","name":"from","type":"address"},{"indexed":true,"internalType":"address","name":"to","type":"address"},{"indexed":false,"internalType":"uint256[]","name":"ids","type":"uint256[]"},{"indexed":false,"internalType":"uint256[]","name":"values","type":"uint256[]"}],"name":"TransferBatch","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"operator","type":"address"},{"indexed":true,"internalType":"address","name":"from","type":"address"},{"indexed":true,"internalType":"address","name":"to","type":"address"},{"indexed":false,"internalType":"uint256","name":"id","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"value","type":"uint256"}],"name":"TransferSingle","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"internalType":"string","name":"value","type":"string"},{"indexed":true,"internalType":"uint256","name":"id","type":"uint256"}],"name":"URI","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"internalType":"address","name":"account","type":"address"}],"name":"Unpaused","type":"event"},{"inputs":[],"name":"DEFAULT_ADMIN_ROLE","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"MINTER_ROLE","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"PAUSER_ROLE","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"TRANSFER_ROLE","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"_contractURI","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"account","type":"address"},{"internalType":"uint256","name":"id","type":"uint256"}],"name":"balanceOf","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address[]","name":"accounts","type":"address[]"},{"internalType":"uint256[]","name":"ids","type":"uint256[]"}],"name":"balanceOfBatch","outputs":[{"internalType":"uint256[]","name":"","type":"uint256[]"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"account","type":"address"},{"internalType":"uint256","name":"id","type":"uint256"},{"internalType":"uint256","name":"value","type":"uint256"}],"name":"burn","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"account","type":"address"},{"internalType":"uint256[]","name":"ids","type":"uint256[]"},{"
internalType":"uint256[]","name":"values","type":"uint256[]"}],"name":"burnBatch","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"contractURI","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"to","type":"address"},{"internalType":"string[]","name":"_nftURIs","type":"string[]"},{"internalType":"uint256[]","name":"_nftSupplies","type":"uint256[]"},{"internalType":"bytes","name":"data","type":"bytes"}],"name":"createNativeTokens","outputs":[{"internalType":"uint256[]","name":"nftIds","type":"uint256[]"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"_nftId","type":"uint256"}],"name":"creator","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"index_0","type":"uint256"}],"name":"erc20WrappedTokens","outputs":[{"internalType":"address","name":"source","type":"address"},{"internalType":"uint256","name":"shares","type":"uint256"},{"internalType":"uint256","name":"underlyingTokenAmount","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"index_0","type":"uint256"}],"name":"erc721WrappedTokens","outputs":[{"internalType":"address","name":"source","type":"address"},{"internalType":"uint256","name":"tokenId","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"bytes32","name":"role","type":"bytes32"}],"name":"getRoleAdmin","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"bytes32","name":"role","type":"bytes32"},{"internalType":"uint256","name":"index","type":"uint256"}],"name":"getRoleMember","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"bytes32","name":"role","type":"bytes32"}],"name":"getRoleMemberCount","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"bytes32","name":"role","type":"bytes32"},{"internalType":"address","name":"account","type":"address"}],"name":"grantRole","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"bytes32","name":"role","type":"bytes32"},{"internalType":"address","name":"account","type":"address"}],"name":"hasRole","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"account","type":"address"},{"internalType":"address","name":"operator","type":"address"}],"name":"isApprovedForAll","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"forwarder","type":"address"}],"name":"isTrustedForwarder","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"id","type":"uint256"},{"internalType":"uint256","name":"amount","type":"uint256"},{"internalType":"bytes","name":"data","type":"bytes"}],"name":"mint","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256[]","name":"i
ds","type":"uint256[]"},{"internalType":"uint256[]","name":"amounts","type":"uint256[]"},{"internalType":"bytes","name":"data","type":"bytes"}],"name":"mintBatch","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"bytes[]","name":"data","type":"bytes[]"}],"name":"multicall","outputs":[{"internalType":"bytes[]","name":"results","type":"bytes[]"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"nextTokenId","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"index_0","type":"address"},{"internalType":"address","name":"index_1","type":"address"},{"internalType":"uint256[]","name":"index_2","type":"uint256[]"},{"internalType":"uint256[]","name":"index_3","type":"uint256[]"},{"internalType":"bytes","name":"index_4","type":"bytes"}],"name":"onERC1155BatchReceived","outputs":[{"internalType":"bytes4","name":"","type":"bytes4"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"index_0","type":"address"},{"internalType":"address","name":"index_1","type":"address"},{"internalType":"uint256","name":"index_2","type":"uint256"},{"internalType":"uint256","name":"index_3","type":"uint256"},{"internalType":"bytes","name":"index_4","type":"bytes"}],"name":"onERC1155Received","outputs":[{"internalType":"bytes4","name":"","type":"bytes4"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"index_0","type":"address"},{"internalType":"address","name":"index_1","type":"address"},{"internalType":"uint256","name":"index_2","type":"uint256"},{"internalType":"bytes","name":"index_3","type":"bytes"}],"name":"onERC721Received","outputs":[{"internalType":"bytes4","name":"","type":"bytes4"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"pause","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"paused","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"_nftId","type":"uint256"},{"internalType":"uint256","name":"_amount","type":"uint256"}],"name":"redeemERC20","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"_nftId","type":"uint256"}],"name":"redeemERC721","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"bytes32","name":"role","type":"bytes32"},{"internalType":"address","name":"account","type":"address"}],"name":"renounceRole","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"bytes32","name":"role","type":"bytes32"},{"internalType":"address","name":"account","type":"address"}],"name":"revokeRole","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"royaltyBps","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"index_0","type":"uint256"},{"internalType":"uint256","name":"salePrice","type":"uint256"}],"name":"royaltyInfo","outputs":[{"internalType":"address","name":"receiver","type":"address"},{"internalType":"uint256","name":"royaltyAmount","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"from","type":"address"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint2
56[]","name":"ids","type":"uint256[]"},{"internalType":"uint256[]","name":"amounts","type":"uint256[]"},{"internalType":"bytes","name":"data","type":"bytes"}],"name":"safeBatchTransferFrom","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"from","type":"address"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"id","type":"uint256"},{"internalType":"uint256","name":"amount","type":"uint256"},{"internalType":"bytes","name":"data","type":"bytes"}],"name":"safeTransferFrom","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"operator","type":"address"},{"internalType":"bool","name":"approved","type":"bool"}],"name":"setApprovalForAll","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"string","name":"_URI","type":"string"}],"name":"setContractURI","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"bool","name":"_restrictedTransfer","type":"bool"}],"name":"setRestrictedTransfer","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"_royaltyBps","type":"uint256"}],"name":"setRoyaltyBps","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"bytes4","name":"interfaceId","type":"bytes4"}],"name":"supportsInterface","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"index_0","type":"uint256"}],"name":"tokenState","outputs":[{"internalType":"address","name":"creator","type":"address"},{"internalType":"string","name":"uri","type":"string"},{"internalType":"enum NFTCollection.UnderlyingType","name":"underlyingType","type":"uint8"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"_nftId","type":"uint256"}],"name":"tokenURI","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"id","type":"uint256"}],"name":"totalSupply","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"transfersRestricted","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"unpause","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"_nftId","type":"uint256"}],"name":"uri","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"_tokenContract","type":"address"},{"internalType":"uint256","name":"_tokenAmount","type":"uint256"},{"internalType":"uint256","name":"_numOfNftsToMint","type":"uint256"},{"internalType":"string","name":"_nftURI","type":"string"}],"name":"wrapERC20","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"_nftContract","type":"address"},{"internalType":"uint256","name":"_tokenId","type":"uint256"},{"internalType":"string","name":"_nftURI","type":"string"}],"name":"wrapERC721","outputs":[],"stateMutability":"nonpayable","type":"function"}]' # noqa: E501 (line-too-long)
)
# pylint: disable=too-many-lines
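# --- Illustrative usage sketch (not part of the generated file above) ---
# Minimal example of driving the NFTCollection wrapper defined above. The RPC
# endpoint, contract address, and token id are hypothetical placeholders; the
# wrapper only needs a Web3 instance (or BaseProvider) plus the address of a
# deployed contract.
from web3 import Web3

w3 = Web3(Web3.HTTPProvider("http://localhost:8545"))  # assumed local node
collection = NFTCollection(w3, "0x0000000000000000000000000000000000000000")

print(collection.transfers_restricted.call())  # view call, returns bool
print(collection.total_supply.call(0))         # view call, returns int
print(collection.uri.call(0))                  # view call, returns str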
| 37.684333 | 17,643 | 0.634865 | 192,613 | 0.992278 | 0 | 0 | 17,766 | 0.091524 | 0 | 0 | 56,492 | 0.291028 |
c45419a203ad566f8ae9d52cc297219542ecf9f1 | 237 | py | Python | sausage_grinder/urls.py | jesseerdmann/audiobonsai | ec1edcdbadc6b2aff3b743b5c42515f4d5638830 | ["Apache-2.0"] | null | null | null | sausage_grinder/urls.py | jesseerdmann/audiobonsai | ec1edcdbadc6b2aff3b743b5c42515f4d5638830 | ["Apache-2.0"] | null | null | null | sausage_grinder/urls.py | jesseerdmann/audiobonsai | ec1edcdbadc6b2aff3b743b5c42515f4d5638830 | ["Apache-2.0"] | null | null | null |
from django.urls import path
from . import views as sg
urlpatterns = [
path('artist', sg.artist),
path('genre', sg.genre),
path('release', sg.release),
path('track', sg.track),
path('', sg.sausage_grinder_index),
]
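# --- Illustrative sketch (not part of the file above) ---
# The patterns above resolve to plain function-based views imported from the
# app's views module. A hypothetical minimal counterpart for one of them:
from django.http import HttpRequest, HttpResponse

def artist(request: HttpRequest) -> HttpResponse:
    # Django calls the view with the incoming request; it returns a response.
    return HttpResponse("artist listing")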
| 19.75 | 39 | 0.637131 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 33 | 0.139241 |
c45577ce768212873fbaadfacdbe638ce864abf9 | 1,194 | py | Python | sails/ui/mmck/parameters/string.py | metrasynth/solar-sails | 3a10774dad29d85834d3acb38171741b3a11ef91 | ["MIT"] | 6 | 2016-11-22T14:32:55.000Z | 2021-08-15T01:35:33.000Z | sails/ui/mmck/parameters/string.py | metrasynth/solar-sails | 3a10774dad29d85834d3acb38171741b3a11ef91 | ["MIT"] | 2 | 2022-03-18T16:47:43.000Z | 2022-03-18T16:47:44.000Z | sails/ui/mmck/parameters/string.py | metrasynth/solar-sails | 3a10774dad29d85834d3acb38171741b3a11ef91 | ["MIT"] | 2 | 2019-07-09T23:44:08.000Z | 2021-08-15T01:35:37.000Z |
from PyQt5.QtCore import pyqtSlot
from PyQt5.QtWidgets import QComboBox
from PyQt5.QtWidgets import QLineEdit
from sf.mmck.parameters import String
from .manager import widget_class_for
from .widget import ParameterWidget
@widget_class_for(String)
class StringParameterWidget(ParameterWidget):
def setUp(self, ui):
super().setUp(ui)
if self.parameter.choices:
self.combobox = QComboBox(self)
self.combobox.setEditable(True)
self.combobox.setCurrentText(self.value)
self.combobox.insertItems(0, self.parameter.choices)
self.combobox.currentTextChanged.connect(self.on_combobox_currentTextChanged)
self.layout.addWidget(self.combobox)
else:
self.lineedit = QLineEdit(self)
self.lineedit.setText(self.value)
self.lineedit.textChanged.connect(self.on_lineedit_textChanged)
self.layout.addWidget(self.lineedit)
@pyqtSlot(str)
def on_combobox_currentTextChanged(self, value):
self.valueChanged.emit(value, self.name)
@pyqtSlot(str)
def on_lineedit_textChanged(self, value):
self.valueChanged.emit(value, self.name)
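# --- Illustrative sketch (not part of the file above) ---
# The widget above chooses an editable QComboBox when the parameter declares
# choices and a QLineEdit otherwise. A self-contained PyQt5 illustration of the
# same pattern, independent of the sf.mmck framework (names are made up):
import sys
from PyQt5.QtWidgets import QApplication, QComboBox, QLineEdit, QVBoxLayout, QWidget

def build_editor(choices, initial=""):
    """Return an editable combo box when choices exist, else a line edit."""
    if choices:
        box = QComboBox()
        box.setEditable(True)
        box.insertItems(0, choices)
        box.setCurrentText(initial)
        box.currentTextChanged.connect(lambda text: print("value:", text))
        return box
    edit = QLineEdit()
    edit.setText(initial)
    edit.textChanged.connect(lambda text: print("value:", text))
    return edit

if __name__ == "__main__":
    app = QApplication(sys.argv)
    window = QWidget()
    layout = QVBoxLayout(window)
    layout.addWidget(build_editor(["sine", "square", "saw"], "sine"))
    layout.addWidget(build_editor([], "free text"))
    window.show()
    sys.exit(app.exec_())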
| 35.117647 | 89 | 0.707705 | 943 | 0.789782 | 0 | 0 | 969 | 0.811558 | 0 | 0 | 0 | 0 |
c455ef3791cf634263613f0736425fbda6d62c4c | 550 | py | Python | plot_top_performers.py | jmphil09/mario_rl | 6e93c1318e9957d679a5ec8d29687756ac7fc4b1 | ["MIT"] | null | null | null | plot_top_performers.py | jmphil09/mario_rl | 6e93c1318e9957d679a5ec8d29687756ac7fc4b1 | ["MIT"] | null | null | null | plot_top_performers.py | jmphil09/mario_rl | 6e93c1318e9957d679a5ec8d29687756ac7fc4b1 | ["MIT"] | null | null | null |
from FitnessPlot import FitnessPlot
'''
for n in range(1,6):
plot = FitnessPlot(folder_prefix='data_top{}'.format(n))
plot.plot_all_workers()
plot.plot_workers_as_average()
'''
plot = FitnessPlot(folder_prefix='data_top1', num_workers=16)
worker_dict = plot.create_worker_dict()
#plot.plot_all_workers()
#plot.plot_workers_as_average()
#print(worker_dict)
for key, value in worker_dict.items():
    dict_len = len(value)
    # if dict_len < 100:
    #     print(key)
    #     print(dict_len)
    print(key)
    print(value[-1])
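# --- Illustrative sketch (not part of the script above) ---
# The loop above prints the last recorded fitness value per worker. A
# hypothetical extension that ranks workers by that final value (assuming
# worker_dict maps each worker key to an ordered list of fitness scores):
ranking = sorted(worker_dict.items(), key=lambda item: item[1][-1], reverse=True)
for worker, scores in ranking[:3]:
    print(worker, scores[-1])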
| 23.913043 | 61 | 0.703636 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 291 | 0.529091 |
c456582b3a99628d95abc79fe354227dac957e3b | 3,696 | py | Python | test/PySrc/tests/test_code_tracer_width.py | lifubang/live-py-plugin | 38a3cf447fd7d9c4e6014b71134e178b0d8a01de | ["MIT"] | 224 | 2015-03-22T23:40:52.000Z | 2022-03-01T21:45:51.000Z | test/PySrc/tests/test_code_tracer_width.py | lifubang/live-py-plugin | 38a3cf447fd7d9c4e6014b71134e178b0d8a01de | ["MIT"] | 371 | 2015-04-28T05:14:00.000Z | 2022-03-28T01:31:22.000Z | test/PySrc/tests/test_code_tracer_width.py | lifubang/live-py-plugin | 38a3cf447fd7d9c4e6014b71134e178b0d8a01de | ["MIT"] | 53 | 2015-10-30T07:52:07.000Z | 2022-02-28T12:56:35.000Z |
from space_tracer.main import replace_input, TraceRunner
def test_source_width_positive():
code = """\
i = 1 + 1
"""
expected_report = """\
i = 1 + | i = 2"""
with replace_input(code):
report = TraceRunner().trace_command(['space_tracer',
'--source_width', '8',
'--traced_file', 'foo.py'])
assert report == expected_report
def test_source_width_negative():
code = """\
i = 1 + 1
"""
expected_report = """\
i = 1 + | i = 2"""
with replace_input(code):
report = TraceRunner().trace_command(['space_tracer',
'--source_width', '-2',
'--traced_file', 'foo.py'])
assert report == expected_report
def test_source_indent():
code = """\
i = 1 + 1
"""
expected_report = """\
i = 1 + 1 | i = 2"""
with replace_input(code):
report = TraceRunner().trace_command(['space_tracer',
'--source_indent', '4',
'--traced_file', 'foo.py'])
assert report == expected_report
def test_source_indent_small():
code = """\
i = 1 + 1
"""
expected_report = """\
i = 1 + 1 | i = 2"""
with replace_input(code):
report = TraceRunner().trace_command(['space_tracer',
'--source_indent', '2',
'--traced_file', 'foo.py'])
assert report == expected_report
def test_source_indent_negative():
code = """\
i = 1 + 1
"""
expected_report = """\
= 1 + 1 | i = 2"""
with replace_input(code):
report = TraceRunner().trace_command(['space_tracer',
'--source_indent', '-2',
'--traced_file', 'foo.py'])
assert report == expected_report
def test_trace_width():
code = """\
i = 1 + 1
"""
expected_report = """\
i = 1 + 1 | i ="""
with replace_input(code):
report = TraceRunner().trace_command(['space_tracer',
'--trace_width', '15',
'--traced_file', 'foo.py'])
assert report == expected_report
def test_trace_width_negative():
code = """\
i = 1 + 1
s = 'a' * 10
"""
expected_report = """\
i = 1 + 1 | i = 2
s = 'a' * 10 | s = 'aaaaaa"""
with replace_input(code):
report = TraceRunner().trace_command(['space_tracer',
'--trace_width', '-5',
'--traced_file', 'foo.py'])
assert report == expected_report
def test_trace_width_without_source():
code = """\
i = 1 + 1
s = 'a' * 10
"""
expected_report = """\
i = 2
s = 'aaaaaa"""
with replace_input(code):
report = TraceRunner().trace_command(['space_tracer',
'--source_width', '0',
'--trace_width', '-5',
'--traced_file', 'foo.py'])
assert report == expected_report
def test_trace_offset():
code = """\
i = 1 + 1
s = 'a' * 10
"""
expected_report = """\
i = 1 + 1 | 2
s = 'a' * 10 | 'aaaaaaaaaa'"""
with replace_input(code):
report = TraceRunner().trace_command(['space_tracer',
'--trace_offset', '3',
'--traced_file', 'foo.py'])
assert report == expected_report
| 25.666667
| 73
| 0.446699
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,012
| 0.27381
|
c45760cde68ead756aaeedf9a4958bde55f0fdc2
| 458
|
py
|
Python
|
benchmark/src/benchmark/bench_logging.py
|
lwanfuturewei/QFlock
|
90d6875d9adc8fe2968694904f8421d41e30e189
|
[
"Apache-2.0"
] | null | null | null |
benchmark/src/benchmark/bench_logging.py
|
lwanfuturewei/QFlock
|
90d6875d9adc8fe2968694904f8421d41e30e189
|
[
"Apache-2.0"
] | null | null | null |
benchmark/src/benchmark/bench_logging.py
|
lwanfuturewei/QFlock
|
90d6875d9adc8fe2968694904f8421d41e30e189
|
[
"Apache-2.0"
] | 2
|
2022-03-03T15:28:23.000Z
|
2022-03-04T15:33:19.000Z
|
import logging
def setup_logger():
formatter = logging.Formatter('%(asctime)s.%(msecs)03d %(levelname)s %(message)s',
'%Y-%m-%d %H:%M:%S')
logging.basicConfig(level=logging.INFO,
format='%(asctime)s.%(msecs)03d %(levelname)-8s %(message)s',
datefmt='%Y-%m-%d %H:%M:%S')
root = logging.getLogger()
hdlr = root.handlers[0]
hdlr.setFormatter(formatter)
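# Hedged usage sketch (not part of the original file): setup_logger() installs a
# timestamped format on the root logger, after which ordinary logging calls pick it up.
# The message below is illustrative only.
if __name__ == '__main__':
    setup_logger()
    logging.getLogger(__name__).info("benchmark logging configured")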
| 26.941176
| 86
| 0.530568
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 142
| 0.310044
|
c45814f676d4f4897bba48b176daa7d8a452554e
| 6,921
|
py
|
Python
|
tools/configure-gateway/threescale/proxies.py
|
jparsai/f8a-3scale-connect-api
|
a782753d662eee5d450da3c20e9ae9eb13b8b560
|
[
"Apache-2.0"
] | 1
|
2018-09-14T05:18:52.000Z
|
2018-09-14T05:18:52.000Z
|
tools/configure-gateway/threescale/proxies.py
|
jparsai/f8a-3scale-connect-api
|
a782753d662eee5d450da3c20e9ae9eb13b8b560
|
[
"Apache-2.0"
] | 48
|
2017-12-05T12:05:56.000Z
|
2021-03-25T22:09:29.000Z
|
tools/configure-gateway/threescale/proxies.py
|
jparsai/f8a-3scale-connect-api
|
a782753d662eee5d450da3c20e9ae9eb13b8b560
|
[
"Apache-2.0"
] | 5
|
2018-01-29T04:53:13.000Z
|
2020-04-16T13:59:42.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""ThreeScale Proxies Rule interface for APIs."""
from .base import ThreeScale
import logging
import requests
import xmltodict
import json
logger = logging.getLogger(__name__)
class Proxies(ThreeScale):
"""ThreeScale Proxies create, update."""
response = None
def __init__(self):
"""Initialize object."""
super().__init__()
self.service_id = None
def update(self,
tracker,
service_id,
api_backend,
credentials_location='query',
auth_app_key='user_key',
endpoint=None,
auth_app_id=None,
auth_user_key=None,
error_auth_failed=None,
error_status_auth_failed=None,
error_headers_auth_failed=None,
error_auth_missing=None,
error_status_auth_missing=None,
error_headers_auth_missing=None,
error_no_match=None,
error_status_no_match=None,
error_headers_no_match=None,
oidc_issuer_endpoint=None,
sandbox_endpoint=None
):
"""Update policy."""
self.service_id = service_id
request_body = {
'access_token': self._access_token,
"api_backend": api_backend,
"credentials_location": credentials_location,
"auth_app_key": auth_app_key,
"endpoint": endpoint,
"auth_app_id": auth_app_id,
"auth_user_key": auth_user_key,
"error_auth_failed": error_auth_failed,
"error_status_auth_failed": error_status_auth_failed,
"error_headers_auth_failed": error_headers_auth_failed,
"error_auth_missing": error_auth_missing,
"error_status_auth_missing": error_status_auth_missing,
"error_headers_auth_missing": error_headers_auth_missing,
"error_no_match": error_no_match,
"error_status_no_match": error_status_no_match,
"error_headers_no_match": error_headers_no_match,
"oidc_issuer_endpoint": oidc_issuer_endpoint,
"sandbox_endpoint": sandbox_endpoint,
}
request_body = {k: v for k, v in request_body.items() if v}
_url = self._build_url(
self._endpoints.proxy_update.format(service_id=service_id))
_resp = requests.patch(_url, data=request_body)
logger.info("[PATCH] {} with STATUS CODE: {}".format(
_url, _resp.status_code))
if _resp.ok:
self.response = xmltodict.parse(
_resp.content, dict_constructor=dict)
logger.info(
"Successfully Updated Proxy: {}".format(api_backend))
return self.response
else:
logger.error("Update Proxy FAILED {} with STATUS CODE {}".format(
_url, _resp.status_code))
logger.error("FAILED RESPONSE: {}".format(_resp.content))
tracker._rollback()
def _get_highest_version(self, service_id=None, environment='sandbox'):
service_id = service_id or self.service_id
params = {
'access_token': self._access_token,
}
_url = self._build_url(
self._endpoints.proxy_config_list.format(service_id=service_id,
environment=environment))
_resp = requests.get(_url, params=params)
logger.info("[GET] {} with STATUS CODE: {}".format(
_url, _resp.status_code))
if _resp.ok:
output = _resp.json()
if output:
                highest_version = max([conf.get('proxy_config', {}).get('version', 2)
                                       for conf in output.get('proxy_configs', {})])
                logger.info("HIGHEST Version: {}".format(highest_version))
                return highest_version
else:
logger.error("Unable to fetch the latest version.")
return 2
def policy_update(self, tracker, headers, service_id=None):
"""Update the Proxy Policy Configuration."""
policies_config = [{
"name": "headers",
"configuration": {
"response": [],
"request":headers},
"version": "builtin",
"enabled": True
}]
service_id = service_id or self.service_id
request_body = {
'access_token': self._access_token,
'service_id': service_id,
'policies_config': json.dumps(policies_config)
}
_url = self._build_url(
self._endpoints.proxy_policy_update.format(service_id=service_id))
_resp = requests.put(_url, data=request_body)
logger.info("[PUT] {} with STATUS CODE: {}".format(
_url, _resp.status_code))
if _resp.ok:
self.response = _resp
logger.info("Successfully Updated Proxy Policy Config")
return self.response
else:
logger.error("Update Proxy Policy Config FAILED {} with STATUS CODE {}".format(
_url, _resp.status_code))
logger.error("FAILED RESPONSE: {}".format(_resp.content))
tracker._rollback()
def proxy_promote(self, tracker,
service_id=None,
environment='sandbox',
to='production'):
"""Promote Proxy to another environment."""
service_id = service_id or self.service_id
version = self._get_highest_version()
request_body = {
'access_token': self._access_token,
'to': to
}
_url = self._build_url(
self._endpoints.proxy_config_promote.format(service_id=service_id,
environment=environment,
version=version))
_resp = requests.post(_url, data=request_body)
logger.info("[POST] {} with STATUS CODE: {}".format(
_url, _resp.status_code))
if _resp.ok:
self.response = _resp
logger.info("Successfully Promoted Proxy to {}".format(to))
return self.response
else:
logger.error("Promote Proxy FAILED {} with STATUS CODE {}".format(
_url, _resp.status_code))
logger.error("FAILED RESPONSE: {}".format(_resp.content))
tracker._rollback()
def find(self):
"""Find the Mapping."""
raise NotImplementedError("Method find Not Implemented.")
def __repr__(self):
"""Representation of class."""
api_backend = self.response.get('proxy', {}).get('api_backend')
return "Class Mappings(id={})".format(api_backend)
| 38.237569
| 91
| 0.566681
| 6,692
| 0.966912
| 0
| 0
| 0
| 0
| 0
| 0
| 1,482
| 0.214131
|
c458cb4e772b1e30729560fd59117cb1dab40b05
| 241
|
py
|
Python
|
src/__main__.py
|
Grox2006/Kayambot
|
a49cf7fd16fdc049500ae645784cc671b04edf87
|
[
"MIT"
] | null | null | null |
src/__main__.py
|
Grox2006/Kayambot
|
a49cf7fd16fdc049500ae645784cc671b04edf87
|
[
"MIT"
] | null | null | null |
src/__main__.py
|
Grox2006/Kayambot
|
a49cf7fd16fdc049500ae645784cc671b04edf87
|
[
"MIT"
] | null | null | null |
import sys
from __init__ import Bot
MESSAGE_USAGE = "Usage is python {} [name] [token]"
if __name__ == "__main__":
if len(sys.argv) == 3:
Bot(sys.argv[1], sys.argv[2])
else:
print(MESSAGE_USAGE.format(sys.argv[0]))
| 21.909091
| 51
| 0.630705
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 45
| 0.186722
|
c45a35a45e18477dcb0c3a971fc4e41ecd533922
| 985
|
py
|
Python
|
app/__init__.py
|
logicalicy/flask-react-boilerplate
|
2a999c969a7fc7d244830ebba02a00f0feca79dd
|
[
"MIT"
] | 2
|
2017-02-27T16:48:08.000Z
|
2019-05-10T11:22:07.000Z
|
app/__init__.py
|
logicalicy/flask-react-boilerplate
|
2a999c969a7fc7d244830ebba02a00f0feca79dd
|
[
"MIT"
] | null | null | null |
app/__init__.py
|
logicalicy/flask-react-boilerplate
|
2a999c969a7fc7d244830ebba02a00f0feca79dd
|
[
"MIT"
] | null | null | null |
# Created with tutorials:
# https://www.digitalocean.com/community/tutorials/how-to-structure-large-flask-applications
# http://flask.pocoo.org/docs/0.12/tutorial
from flask import Flask, g, render_template
from flask_sqlalchemy import SQLAlchemy
import sqlite3
# Define WSGI application object.
app = Flask(__name__)
# Configurations
app.config.from_object('config')
app.config.from_envvar('CONFIG', silent=True)
# Define database object.
db = SQLAlchemy(app)
@app.errorhandler(404)
def not_found(error):
return render_template('404.html'), 404
# Import a module / component using its blueprint handler variable (mod_auth)
from app.api.entries.controllers import mod as entries_module
from app.site.controllers import mod as site_module
# Register blueprint(s)
app.register_blueprint(entries_module)
app.register_blueprint(site_module)
# app.register_blueprint(xyz_module)
# ..
# Build the database:
# This will create the database file using SQLAlchemy
db.create_all()
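# Hedged usage sketch (not part of the original file): one conventional way to serve the
# application object defined above during local development. The host, port and debug
# values are illustrative assumptions, not settings taken from this repository's config.
if __name__ == '__main__':
    app.run(host='127.0.0.1', port=5000, debug=True)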
| 25.921053
| 92
| 0.792893
| 0
| 0
| 0
| 0
| 88
| 0.08934
| 0
| 0
| 474
| 0.481218
|
c45c0b6aabc6d08c2689d66882739d5b4c1b5f06
| 19,075
|
py
|
Python
|
dumpcode/cpiter.py
|
gkfthddk/keras
|
46d96c65d69c39df298800336bbb4d867a2561fb
|
[
"MIT"
] | null | null | null |
dumpcode/cpiter.py
|
gkfthddk/keras
|
46d96c65d69c39df298800336bbb4d867a2561fb
|
[
"MIT"
] | null | null | null |
dumpcode/cpiter.py
|
gkfthddk/keras
|
46d96c65d69c39df298800336bbb4d867a2561fb
|
[
"MIT"
] | null | null | null |
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
import subprocess
import numpy as np
import datetime
import random
import warnings
import ROOT as rt
import math
from keras.preprocessing.sequence import pad_sequences
from keras.callbacks import Callback
from array import array
from sklearn.metrics import roc_auc_score, auc, roc_curve
class AddVal(Callback):
def __init__(self,valid_sets,savename):
self.valid_sets = valid_sets
self.epoch=[]
self.history={}
self.savename=savename
def on_train_begin(self,logs=None):
self.epoch=[]
self.history={}
def on_epoch_end(self, epoch, logs=None):
logs=logs or {}
self.epoch.append(epoch)
print("validation")
for i,j in logs.items():
self.history.setdefault(i,[]).append(j)
for valid_set in self.valid_sets:
valid,val_name=valid_set
#valid.reset()
#gen=valid.next()
#tar_set=[]
#pre_set=[]
atar_set=[]
apre_set=[]
X,Y=valid
#X=X[0]
"""for j in range(valid.totalnum()):
data,target=next(gen)
#print(target)
#tar_set=np.append(tar_set,target[:,0])
#pre_set=np.append(pre_set,self.model.predict(data,verbose=0)[:,0])
try:atar_set.extend(target[:,0])
except:print(np.array(target).shape)
apre_set.extend(self.model.predict(data,verbose=0)[:,0])
valid.reset()"""
#tar_set=np.array(tar_set)
#pre_set=np.array(pre_set)
atar_set=np.array(Y)[:,0]
apre_set=np.array(self.model.predict(X,verbose=0)[:,0])
#print(valid.totalnum(),valid.batch_size)
#print("############")
#print(tar_set)
#print("AAAAAAAAAAAAAAAAAAAA")
#print(atar_set)
auc_val=roc_auc_score(atar_set,apre_set)
results=self.model.evaluate(X,Y)
print(results,auc_val)
self.history.setdefault(val_name+"_auc",[]).append(auc_val)
for i,result in enumerate(results):
if(i==0):
name=val_name+"_loss"
else:
name=val_name+"_"+self.model.metrics[i-1][:3]
self.history.setdefault(name,[]).append(result)
f=open(self.savename+'/history','w')
f.write(str(self.history))
f.close()
class wkiter(object):
def __init__(self,data_path,data_names=['data'],label_names=['softmax_label'],batch_size=100,begin=0.0,end=1.0,rat=0.7,endcut=1,arnum=16,maxx=0.4,maxy=0.4,istrain=0, varbs=0,rc="rc",onehot=0,channel=64,order=1,eta=0.,etabin=2.4,pt=None,ptmin=0.,ptmax=2.,unscale=0):
self.eta=eta
self.pt=pt
self.ptmin=ptmin
self.ptmax=ptmax
self.etabin=etabin
self.channel=channel
self.istrain=istrain
self.unscale=unscale
#if(batch_size<100):
self.rand=0.5
# print("batch_size is small it might cause error")
self.count=0
self.rc=rc
self.onehot=onehot
self.order=1
#self.file=rt.TFile(data_path,'read')
dataname1=data_path[0]
dataname2=data_path[1]
self.qfile=rt.TFile(dataname1,'read')
self.gfile=rt.TFile(dataname2,'read')
print(dataname2)
self.gjet=self.gfile.Get("jetAnalyser")
self.gEntries=self.gjet.GetEntriesFast()
if(begin>1):
self.gBegin=int(begin)
else:
self.gBegin=int(begin*self.gEntries)
if(end>1):
self.gEnd=int(end)
else:
self.gEnd=int(self.gEntries*end)
self.a=self.gBegin
self.qjet=self.qfile.Get("jetAnalyser")
self.qEntries=self.qjet.GetEntriesFast()
if(begin>1):
self.qBegin=int(begin)
else:
self.qBegin=int(begin*self.qEntries)
if(end>1):
self.qEnd=int(end)
else:
self.qEnd=int(self.qEntries*end)
self.b=self.qBegin
self.ratt=rat
self.rat=sorted([1-rat,rat])
self.batch_size = batch_size
if(varbs==0):
self._provide_data = zip(data_names, [(self.batch_size, 3, 33, 33)])
else:
data_names=['images','variables']
self._provide_data = zip(data_names, [(self.batch_size, 3, 33, 33),(self.batch_size,5)])
self.varbs=varbs
self._provide_label = zip(label_names, [(self.batch_size,)])
self.arnum=arnum
self.maxx=maxx
self.maxy=maxy
self.endfile=0
self.endcut=endcut
qjetset=[]
gjetset=[]
qrnnset=[]
grnnset=[]
qptset=[]
gptset=[]
qetaset=[]
getaset=[]
qchadmultset=[]
gchadmultset=[]
qnhadmultset=[]
gnhadmultset=[]
qelectronmultset=[]
gelectronmultset=[]
qmuonmultset=[]
gmuonmultset=[]
qphotonmultset=[]
gphotonmultset=[]
qcmultset=[]
gcmultset=[]
qnmultset=[]
gnmultset=[]
qptdset=[]
gptdset=[]
qmajorset=[]
gmajorset=[]
qminorset=[]
gminorset=[]
for i in range(self.gEntries):
if(self.a>=self.gEnd):
self.a=self.gBegin
break
#if((self.a-self.gBegin)%int((self.gEnd-self.gBegin)/10)==0):print('.')
self.gjet.GetEntry(self.a)
##label q=1 g=0
self.a+=1
if(self.eta>abs(self.gjet.eta) or self.eta+self.etabin<abs(self.gjet.eta)):
continue
if(self.pt!=None):
if(self.pt*self.ptmin>self.gjet.pt or self.pt*self.ptmax<self.gjet.pt):
continue
gptset.append(self.gjet.pt)
getaset.append(self.gjet.eta)
gchadmultset.append(self.gjet.chad_mult)
gnhadmultset.append(self.gjet.nhad_mult)
gelectronmultset.append(self.gjet.electron_mult)
gmuonmultset.append(self.gjet.muon_mult)
gphotonmultset.append(self.gjet.photon_mult)
gcmultset.append(self.gjet.chad_mult+self.gjet.electron_mult+self.gjet.muon_mult)
gnmultset.append(self.gjet.nhad_mult+self.gjet.photon_mult)
gptdset.append(self.gjet.ptd)
gmajorset.append(self.gjet.major_axis)
gminorset.append(self.gjet.minor_axis)
if("c" in self.rc):
maxchadpt=1.*max(self.gjet.image_chad_pt_33)
maxnhadpt=1.*max(self.gjet.image_nhad_pt_33)
maxelecpt=1.*max(self.gjet.image_electron_pt_33)
maxmuonpt=1.*max(self.gjet.image_muon_pt_33)
maxphotonpt=1.*max(self.gjet.image_photon_pt_33)
maxchadmult=1.*max(self.gjet.image_chad_mult_33)
maxnhadmult=1.*max(self.gjet.image_nhad_mult_33)
maxelecmult=1.*max(self.gjet.image_electron_mult_33)
maxmuonmult=1.*max(self.gjet.image_muon_mult_33)
maxphotonmult=1.*max(self.gjet.image_photon_mult_33)
if(self.unscale==1 or maxchadpt==0):maxchadpt=1.
if(self.unscale==1 or maxnhadpt==0):maxnhadpt=1.
if(self.unscale==1 or maxelecpt==0):maxelecpt=1.
if(self.unscale==1 or maxmuonpt==0):maxmuonpt=1.
if(self.unscale==1 or maxphotonpt==0):maxphotonpt=1.
if(self.unscale==1 or maxchadmult==0):maxchadmult=1.
if(self.unscale==1 or maxnhadmult==0):maxnhadmult=1.
if(self.unscale==1 or maxelecmult==0):maxelecmult=1.
if(self.unscale==1 or maxmuonmult==0):maxmuonmult=1.
if(self.unscale==1 or maxphotonmult==0):maxphotonmult=1.
gjetset.append([(np.array(self.gjet.image_chad_pt_33)/maxchadpt).reshape(2*arnum+1,2*arnum+1),(np.array(self.gjet.image_nhad_pt_33)/maxnhadpt).reshape(2*arnum+1,2*arnum+1),(np.array(self.gjet.image_electron_pt_33)/maxelecpt).reshape(2*arnum+1,2*arnum+1),(np.array(self.gjet.image_muon_pt_33)/maxmuonpt).reshape(2*arnum+1,2*arnum+1),(np.array(self.gjet.image_photon_pt_33)/maxphotonpt).reshape(2*arnum+1,2*arnum+1),(np.array(self.gjet.image_chad_mult_33)/maxchadmult).reshape(2*arnum+1,2*arnum+1),(np.array(self.gjet.image_nhad_mult_33)/maxnhadmult).reshape(2*arnum+1,2*arnum+1),(np.array(self.gjet.image_electron_mult_33)/maxelecmult).reshape(2*arnum+1,2*arnum+1),(np.array(self.gjet.image_muon_mult_33)/maxmuonmult).reshape(2*arnum+1,2*arnum+1),(np.array(self.gjet.image_photon_mult_33)/maxphotonmult).reshape(2*arnum+1,2*arnum+1)])
if("r" in self.rc):
dau_pt=self.gjet.dau_pt
dau_deta=self.gjet.dau_deta
dau_dphi=self.gjet.dau_dphi
dau_charge=self.gjet.dau_charge
dau_pid=self.gjet.dau_pid
dau_is_e=np.zeros(len(dau_pid))
dau_is_mu=np.zeros(len(dau_pid))
dau_is_r=np.zeros(len(dau_pid))
dau_is_chad=np.zeros(len(dau_pid))
dau_is_nhad=np.zeros(len(dau_pid))
for t in range(len(dau_pid)):
if(abs(dau_pid[t])==11):dau_is_e[t]=1.
elif(abs(dau_pid[t])==13):dau_is_mu[t]=1.
elif(abs(dau_pid[t])==22):dau_is_r[t]=1.
elif(dau_charge[t]==0):dau_is_nhad[t]=1.
else:dau_is_chad[t]=1.
dausort=sorted(range(len(dau_pt)),key=lambda k: dau_pt[k],reverse=True)
if(self.order):
maxdaupt=1.*max(dau_pt)
maxdaudeta=1.*max(dau_deta)
maxdaudphi=1.*max(dau_dphi)
maxdaucharge=1.*max(dau_charge)
maxdauc=1.*max(dau_is_chad)
maxdaun=1.*max(dau_is_nhad)
maxdaue=1.*max(dau_is_e)
maxdaum=1.*max(dau_is_mu)
maxdaup=1.*max(dau_is_r)
if(self.unscale==1 or maxdaupt==0):maxdaupt=1.
if(self.unscale==1 or maxdaudeta==0):maxdaudeta=1.
if(self.unscale==1 or maxdaudphi==0):maxdaudphi=1.
if(self.unscale==1 or maxdaucharge==0):maxdaucharge=1.
if(self.unscale==1 or maxdauc==0):maxdauc=1.
if(self.unscale==1 or maxdaun==0):maxdaun=1.
if(self.unscale==1 or maxdaue==0):maxdaue=1.
if(self.unscale==1 or maxdaum==0):maxdaum=1.
if(self.unscale==1 or maxdaup==0):maxdaup=1.
grnnset.append([[dau_pt[dausort[i]]/maxdaupt, dau_deta[dausort[i]]/maxdaudeta, dau_dphi[dausort[i]]/maxdaudphi, dau_charge[dausort[i]]/maxdaucharge, dau_is_e[dausort[i]]/maxdaue, dau_is_mu[dausort[i]]/maxdaum, dau_is_r[dausort[i]]/maxdaup, dau_is_chad[dausort[i]]/maxdauc, dau_is_nhad[dausort[i]]/maxdaun] if len(dau_pt)>i else [0.,0.,0.,0.,0.,0.,0.,0.,0.] for i in range(self.channel)])
self.gjetset=np.array(gjetset)
del gjetset
self.grnnset=np.array(grnnset)
del grnnset
self.gptset=np.array(gptset)
del gptset
self.getaset=np.array(getaset)
del getaset
self.gptdset=np.array(gptdset)
del gptdset
self.gchadmultset=np.array(gchadmultset)
del gchadmultset
self.gnhadmultset=np.array(gnhadmultset)
del gnhadmultset
self.gcmultset=np.array(gcmultset)
del gcmultset
self.gnmultset=np.array(gnmultset)
del gnmultset
self.gelectronmultset=np.array(gelectronmultset)
del gelectronmultset
self.gmuonmultset=np.array(gmuonmultset)
del gmuonmultset
self.gphotonmultset=np.array(gphotonmultset)
del gphotonmultset
self.gmajorset=np.array(gmajorset)
del gmajorset
self.gminorset=np.array(gminorset)
del gminorset
for i in range(self.qEntries):
if(self.b>=self.qEnd):
self.b=self.qBegin
break
#if((self.b-self.qBegin)%int((self.qEnd-self.qBegin)/10)==0):print(',')
self.qjet.GetEntry(self.b)
##label q=1 g=0
self.b+=1
if(self.eta>abs(self.qjet.eta) or self.eta+self.etabin<abs(self.qjet.eta)):
continue
if(self.pt!=None):
if(self.pt*self.ptmin>self.qjet.pt or self.pt*self.ptmax<self.qjet.pt):
continue
qptset.append(self.qjet.pt)
qetaset.append(self.qjet.eta)
qchadmultset.append(self.qjet.chad_mult)
qnhadmultset.append(self.qjet.nhad_mult)
qelectronmultset.append(self.qjet.electron_mult)
qmuonmultset.append(self.qjet.muon_mult)
qphotonmultset.append(self.qjet.photon_mult)
qcmultset.append(self.qjet.chad_mult+self.qjet.electron_mult+self.qjet.muon_mult)
qnmultset.append(self.qjet.nhad_mult+self.qjet.photon_mult)
qptdset.append(self.qjet.ptd)
qmajorset.append(self.qjet.major_axis)
qminorset.append(self.qjet.minor_axis)
if("c" in self.rc):
maxchadpt=1.*max(self.qjet.image_chad_pt_33)
maxnhadpt=1.*max(self.qjet.image_nhad_pt_33)
maxelecpt=1.*max(self.qjet.image_electron_pt_33)
maxmuonpt=1.*max(self.qjet.image_muon_pt_33)
maxphotonpt=1.*max(self.qjet.image_photon_pt_33)
maxchadmult=1.*max(self.qjet.image_chad_mult_33)
maxnhadmult=1.*max(self.qjet.image_nhad_mult_33)
maxelecmult=1.*max(self.qjet.image_electron_mult_33)
maxmuonmult=1.*max(self.qjet.image_muon_mult_33)
maxphotonmult=1.*max(self.qjet.image_photon_mult_33)
if(self.unscale==1 or maxchadpt==0):maxchadpt=1.
if(self.unscale==1 or maxnhadpt==0):maxnhadpt=1.
if(self.unscale==1 or maxelecpt==0):maxelecpt=1.
if(self.unscale==1 or maxmuonpt==0):maxmuonpt=1.
if(self.unscale==1 or maxphotonpt==0):maxphotonpt=1.
if(self.unscale==1 or maxchadmult==0):maxchadmult=1.
if(self.unscale==1 or maxnhadmult==0):maxnhadmult=1.
if(self.unscale==1 or maxelecmult==0):maxelecmult=1.
if(self.unscale==1 or maxmuonmult==0):maxmuonmult=1.
if(self.unscale==1 or maxphotonmult==0):maxphotonmult=1.
qjetset.append([(np.array(self.qjet.image_chad_pt_33)/maxchadpt).reshape(2*arnum+1,2*arnum+1),(np.array(self.qjet.image_nhad_pt_33)/maxnhadpt).reshape(2*arnum+1,2*arnum+1),(np.array(self.qjet.image_electron_pt_33)/maxelecpt).reshape(2*arnum+1,2*arnum+1),(np.array(self.qjet.image_muon_pt_33)/maxmuonpt).reshape(2*arnum+1,2*arnum+1),(np.array(self.qjet.image_photon_pt_33)/maxphotonpt).reshape(2*arnum+1,2*arnum+1),(np.array(self.qjet.image_chad_mult_33)/maxchadmult).reshape(2*arnum+1,2*arnum+1),(np.array(self.qjet.image_nhad_mult_33)/maxnhadmult).reshape(2*arnum+1,2*arnum+1),(np.array(self.qjet.image_electron_mult_33)/maxelecmult).reshape(2*arnum+1,2*arnum+1),(np.array(self.qjet.image_muon_mult_33)/maxmuonmult).reshape(2*arnum+1,2*arnum+1),(np.array(self.qjet.image_photon_mult_33)/maxphotonmult).reshape(2*arnum+1,2*arnum+1)])
if("r" in self.rc):
dau_pt=self.qjet.dau_pt
dau_deta=self.qjet.dau_deta
dau_dphi=self.qjet.dau_dphi
dau_charge=self.qjet.dau_charge
dau_pid=self.qjet.dau_pid
dau_is_e=np.zeros(len(dau_pid))
dau_is_mu=np.zeros(len(dau_pid))
dau_is_r=np.zeros(len(dau_pid))
dau_is_chad=np.zeros(len(dau_pid))
dau_is_nhad=np.zeros(len(dau_pid))
for t in range(len(dau_pid)):
if(abs(dau_pid[t])==11):dau_is_e[t]=1.
elif(abs(dau_pid[t])==13):dau_is_mu[t]=1.
elif(abs(dau_pid[t])==22):dau_is_r[t]=1.
elif(dau_charge[t]==0):dau_is_nhad[t]=1.
else:dau_is_chad[t]=1.
dausort=sorted(range(len(dau_pt)),key=lambda k: dau_pt[k],reverse=True)
#dauset.append([[dau_pt[dausort[i]], dau_deta[dausort[i]], dau_dphi[dausort[i]], dau_charge[dausort[i]]] if len(dau_pt)>i else [0.,0.,0.,0.] for i in range(20)])
if(self.order):
maxdaupt=1.*max(dau_pt)
maxdaudeta=1.*max(dau_deta)
maxdaudphi=1.*max(dau_dphi)
maxdaucharge=1.*max(dau_charge)
maxdauc=1.*max(dau_is_chad)
maxdaun=1.*max(dau_is_nhad)
maxdaue=1.*max(dau_is_e)
maxdaum=1.*max(dau_is_mu)
maxdaup=1.*max(dau_is_r)
if(self.unscale==1 or maxdaupt==0):maxdaupt=1.
if(self.unscale==1 or maxdaudeta==0):maxdaudeta=1.
if(self.unscale==1 or maxdaudphi==0):maxdaudphi=1.
if(self.unscale==1 or maxdaucharge==0):maxdaucharge=1.
if(self.unscale==1 or maxdauc==0):maxdauc=1.
if(self.unscale==1 or maxdaun==0):maxdaun=1.
if(self.unscale==1 or maxdaue==0):maxdaue=1.
if(self.unscale==1 or maxdaum==0):maxdaum=1.
if(self.unscale==1 or maxdaup==0):maxdaup=1.
qrnnset.append([[dau_pt[dausort[i]]/maxdaupt, dau_deta[dausort[i]]/maxdaudeta, dau_dphi[dausort[i]]/maxdaudphi, dau_charge[dausort[i]]/maxdaucharge, dau_is_e[dausort[i]]/maxdaue, dau_is_mu[dausort[i]]/maxdaum, dau_is_r[dausort[i]]/maxdaup, dau_is_chad[dausort[i]]/maxdauc, dau_is_nhad[dausort[i]]/maxdaun] if len(dau_pt)>i else [0.,0.,0.,0.,0.,0.,0.,0.,0.] for i in range(self.channel)])
self.qjetset=np.array(qjetset)
del qjetset
self.qrnnset=np.array(qrnnset)
del qrnnset
self.qptset=np.array(qptset)
del qptset
self.qetaset=np.array(qetaset)
del qetaset
self.qptdset=np.array(qptdset)
del qptdset
self.qchadmultset=np.array(qchadmultset)
del qchadmultset
self.qnhadmultset=np.array(qnhadmultset)
del qnhadmultset
self.qcmultset=np.array(qcmultset)
del qcmultset
self.qnmultset=np.array(qnmultset)
del qnmultset
self.qelectronmultset=np.array(qelectronmultset)
del qelectronmultset
self.qmuonmultset=np.array(qmuonmultset)
del qmuonmultset
self.qphotonmultset=np.array(qphotonmultset)
del qphotonmultset
self.qmajorset=np.array(qmajorset)
del qmajorset
self.qminorset=np.array(qminorset)
del qminorset
"""if("r" in self.rc):
for c in range(channel):
for i in range(3):
#std=np.std(abs(np.append(self.qjetset[:,c,i],self.gjetset[:,c,i])))
#mean=np.mean(abs(np.append(self.qjetset[:,c,i],self.gjetset[:,c,i])))
self.qjetset[:,c,i]=(self.qjetset[:,c,i])#/mean
self.gjetset[:,c,i]=(self.gjetset[:,c,i])#/mean
"""
self.reset()
#print("length ",len(self.gjetset),len(self.qjetset))
def __iter__(self):
return self
def reset(self):
self.rand=0.5
self.gjet.GetEntry(self.gBegin)
self.qjet.GetEntry(self.qBegin)
self.a=self.gBegin
self.b=self.qBegin
self.endfile = 0
self.count=0
def __next__(self):
return self.next()
@property
def provide_data(self):
return self._provide_data
@property
def provide_label(self):
return self._provide_label
def close(self):
self.file.Close()
def sampleallnum(self):
return self.Entries
def trainnum(self):
return self.End-self.Begin
def totalnum(self):
return int(math.ceil(1.*(self.gEnd-self.gBegin+self.qEnd-self.qBegin)/(self.batch_size*1.00)))
def next(self):
while self.endfile==0:
self.count+=1
arnum=self.arnum
jetset=[]
variables=[]
labels=[]
for i in range(self.batch_size):
if(random.random()<0.5):
if(self.a-self.gBegin>=len(self.gjetset)):
self.a=self.gBegin
self.endfile=1
break
labels.append([0,1])
jetset.append(self.gjetset[self.a-self.gBegin])
self.a+=1
else:
if(self.b-self.qBegin>=len(self.qjetset)):
self.b=self.qBegin
self.endfile=1
break
labels.append([1,0])
jetset.append(self.qjetset[self.b-self.qBegin])
self.b+=1
data=[]
data.append(np.array(jetset))
label=np.array(labels)
#if(self.totalnum()<=self.count):
# if(self.istrain==1):print "\nreset\n"
# self.reset()
if(self.endfile==1):
#print "\nendd\n"
self.reset()
#print "\n",self.count,self.istrain,"\n"
yield data, label
#else:
#if(self.istrain==1):
# print "\n",datetime.datetime.now()
#raise StopIteration
| 39.739583
| 841
| 0.654522
| 18,731
| 0.981966
| 1,151
| 0.060341
| 132
| 0.00692
| 0
| 0
| 1,874
| 0.098244
|
c45c12b0519ee7503fa9f8fb44c7b896a2082873
| 210
|
py
|
Python
|
tools/opt.py
|
hmtrii/tirg
|
e404020795bb46fb01b6bd82a2618f9370174012
|
[
"Apache-2.0"
] | null | null | null |
tools/opt.py
|
hmtrii/tirg
|
e404020795bb46fb01b6bd82a2618f9370174012
|
[
"Apache-2.0"
] | null | null | null |
tools/opt.py
|
hmtrii/tirg
|
e404020795bb46fb01b6bd82a2618f9370174012
|
[
"Apache-2.0"
] | null | null | null |
class Opt:
def __init__(self):
self.dataset = "fashion200k"
self.dataset_path = "./dataset/Fashion200k"
self.batch_size = 32
self.embed_dim = 512
self.hashing = False
self.retrieve_by_random = True
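# Hedged usage sketch (not part of the original file): Opt is a plain options container,
# so downstream code presumably instantiates it and overrides fields as needed. The
# overridden value below is illustrative only.
opt = Opt()
opt.batch_size = 64  # override a default for a larger run (assumption)
print(opt.dataset, opt.dataset_path, opt.batch_size)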
| 26.25
| 45
| 0.728571
| 210
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 36
| 0.171429
|
c45d9da847d632f929a40311d340ee5e03a9dfff
| 287
|
py
|
Python
|
addons/iap_crm/models/crm_lead.py
|
SHIVJITH/Odoo_Machine_Test
|
310497a9872db7844b521e6dab5f7a9f61d365a4
|
[
"Apache-2.0"
] | null | null | null |
addons/iap_crm/models/crm_lead.py
|
SHIVJITH/Odoo_Machine_Test
|
310497a9872db7844b521e6dab5f7a9f61d365a4
|
[
"Apache-2.0"
] | null | null | null |
addons/iap_crm/models/crm_lead.py
|
SHIVJITH/Odoo_Machine_Test
|
310497a9872db7844b521e6dab5f7a9f61d365a4
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models
class Lead(models.Model):
_inherit = 'crm.lead'
reveal_id = fields.Char(string='Reveal ID', help="Technical ID of reveal request done by IAP.")
| 26.090909
| 99
| 0.703833
| 152
| 0.529617
| 0
| 0
| 0
| 0
| 0
| 0
| 163
| 0.567944
|
c45f0b40e801dd329eac9e771b4dd170e217817c
| 6,600
|
py
|
Python
|
vitrage/tests/unit/datasources/kubernetes/test_kubernetes_transformer.py
|
openstack/vitrage
|
95b33dbf39b040e23915882a2879c87aec239ca9
|
[
"Apache-2.0"
] | 89
|
2015-09-30T21:42:17.000Z
|
2022-03-28T16:31:19.000Z
|
vitrage/tests/unit/datasources/kubernetes/test_kubernetes_transformer.py
|
openstack/vitrage
|
95b33dbf39b040e23915882a2879c87aec239ca9
|
[
"Apache-2.0"
] | 4
|
2015-12-13T13:06:53.000Z
|
2016-01-03T19:51:28.000Z
|
vitrage/tests/unit/datasources/kubernetes/test_kubernetes_transformer.py
|
openstack/vitrage
|
95b33dbf39b040e23915882a2879c87aec239ca9
|
[
"Apache-2.0"
] | 43
|
2015-11-04T15:54:27.000Z
|
2021-12-10T14:24:03.000Z
|
# Copyright 2018 - Nokia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_log import log as logging
from testtools import matchers
from vitrage.common.constants import DatasourceAction
from vitrage.common.constants import DatasourceOpts as DSOpts
from vitrage.common.constants import DatasourceProperties as DSProps
from vitrage.common.constants import EntityCategory
from vitrage.common.constants import GraphAction
from vitrage.common.constants import UpdateMethod
from vitrage.common.constants import VertexProperties as VProps
from vitrage.datasources.kubernetes.properties import KUBERNETES_DATASOURCE
from vitrage.datasources.kubernetes.properties import KubernetesProperties \
as kubProp
from vitrage.datasources.kubernetes.transformer import KubernetesTransformer
from vitrage.datasources.nova.instance import NOVA_INSTANCE_DATASOURCE
from vitrage.datasources.nova.instance.transformer import InstanceTransformer
from vitrage.datasources import transformer_base as tbase
from vitrage.datasources.transformer_base import TransformerBase
from vitrage.tests import base
from vitrage.tests.mocks import mock_driver as mock_sync
from vitrage.tests.mocks import utils
LOG = logging.getLogger(__name__)
cluster_name = 'kubernetes'
class KubernetesTransformerTest(base.BaseTest):
OPTS = [
cfg.StrOpt(DSOpts.UPDATE_METHOD,
default=UpdateMethod.PULL),
cfg.StrOpt(DSOpts.CONFIG_FILE,
default=utils.get_resources_dir() +
'/kubernetes/kubernetes_config.yaml'),
]
# noinspection PyAttributeOutsideInit,PyPep8Naming
@classmethod
def setUpClass(cls):
super(KubernetesTransformerTest, cls).setUpClass()
cls.transformers = {}
cls.transformers[KUBERNETES_DATASOURCE] = KubernetesTransformer(
cls.transformers)
cls.transformers[NOVA_INSTANCE_DATASOURCE] = \
InstanceTransformer(cls.transformers)
def setUp(self):
super(KubernetesTransformerTest, self).setUp()
self.conf_reregister_opts(self.OPTS, group=KUBERNETES_DATASOURCE)
def test_snapshot_event_transform(self):
LOG.debug('Test tactual transform action for '
'snapshot and snapshot init events')
k8s_spec_list = \
mock_sync.simple_k8s_nodes_generators(nodes_num=2,
snapshot_events=1)
nodes_events = mock_sync.generate_random_events_list(k8s_spec_list)
for event in nodes_events:
k8s_wrapper = self.transformers[KUBERNETES_DATASOURCE].transform(
event)
# Test assertions
self.assertEqual(cluster_name, k8s_wrapper.vertex[VProps.NAME])
n_length = str(len(k8s_wrapper.neighbors))
self.assertThat(n_length, matchers.HasLength(1),
'Cluster vertex has one neighbor')
self._validate_cluster_neighbors(k8s_wrapper.neighbors, event)
datasource_action = event[DSProps.DATASOURCE_ACTION]
if datasource_action == DatasourceAction.INIT_SNAPSHOT:
self.assertEqual(GraphAction.CREATE_ENTITY, k8s_wrapper.action)
elif datasource_action == DatasourceAction.SNAPSHOT:
self.assertEqual(GraphAction.UPDATE_ENTITY, k8s_wrapper.action)
def test_build_cluster_key(self):
LOG.debug('Test build cluster key')
# Test setup
expected_key = 'RESOURCE:kubernetes:kubernetes'
instance_transformer = self.transformers[NOVA_INSTANCE_DATASOURCE]
# Test action
key_fields = instance_transformer._key_values(
KUBERNETES_DATASOURCE,
cluster_name)
# Test assertions
observed_key = tbase.build_key(key_fields)
self.assertEqual(expected_key, observed_key)
def _validate_cluster_neighbors(self, neighbor, event):
# Create expected neighbor
time = event[DSProps.SAMPLE_DATE]
external_id = event['resources'][0][kubProp.EXTERNALID]
properties = {
VProps.ID: external_id,
VProps.VITRAGE_TYPE: NOVA_INSTANCE_DATASOURCE,
VProps.VITRAGE_CATEGORY: EntityCategory.RESOURCE,
VProps.VITRAGE_SAMPLE_TIMESTAMP: time
}
nova_instance_tran = self.transformers[NOVA_INSTANCE_DATASOURCE]
expected_neighbor = \
nova_instance_tran.create_neighbor_placeholder_vertex(**properties)
self.assertEqual(expected_neighbor, neighbor[0].vertex)
# Validate neighbor edge
edge = neighbor[0].edge
entity_key = \
self.transformers[KUBERNETES_DATASOURCE]._create_entity_key(event)
entity_uuid = \
TransformerBase.uuid_from_deprecated_vitrage_id(entity_key)
self.assertEqual(edge.source_id, entity_uuid)
self.assertEqual(edge.target_id, neighbor[0].vertex.vertex_id)
def test_create_entity_key(self):
LOG.debug('Test get key from kubernetes transformer')
# Test setup
spec_list = mock_sync.simple_k8s_nodes_generators(nodes_num=1,
snapshot_events=1)
nodes_events = mock_sync.generate_random_events_list(spec_list)
kubernetes_transformer = self.transformers[KUBERNETES_DATASOURCE]
for event in nodes_events:
# Test action
observed_key = kubernetes_transformer._create_entity_key(event)
# Test assertions
observed_key_fields = observed_key.split(
TransformerBase.KEY_SEPARATOR)
self.assertEqual(EntityCategory.RESOURCE, observed_key_fields[0])
self.assertEqual(
KUBERNETES_DATASOURCE,
observed_key_fields[1]
)
key_values = kubernetes_transformer._key_values(
KUBERNETES_DATASOURCE,
cluster_name)
expected_key = tbase.build_key(key_values)
self.assertEqual(expected_key, observed_key)
| 39.759036
| 79
| 0.700455
| 4,820
| 0.730303
| 0
| 0
| 334
| 0.050606
| 0
| 0
| 1,021
| 0.154697
|
c45fabb5527e1d2513cfd056db4a65258232ae26
| 1,058
|
py
|
Python
|
two_children.py
|
daniel2019-max/HackerRank-preparation-month
|
400f8c0cfaa9fc8e13a683c15ecb5d2341d9c209
|
[
"MIT"
] | null | null | null |
two_children.py
|
daniel2019-max/HackerRank-preparation-month
|
400f8c0cfaa9fc8e13a683c15ecb5d2341d9c209
|
[
"MIT"
] | null | null | null |
two_children.py
|
daniel2019-max/HackerRank-preparation-month
|
400f8c0cfaa9fc8e13a683c15ecb5d2341d9c209
|
[
"MIT"
] | null | null | null |
# Two children, Lily and Ron, want to share a chocolate bar. Each of the squares has an integer on it.
# Lily decides to share a contiguous segment of the bar selected such that:
# The length of the segment matches Ron's birth month, and,
# The sum of the integers on the squares is equal to his birth day.
# Determine how many ways she can divide the chocolate.
# int s[n]: the numbers on each of the squares of chocolate
# int d: Ron's birth day
# int m: Ron's birth month
# Two children
def birthday(s, d, m):
    # Count contiguous length-m segments whose sum equals d.
    numberDivided = 0
    # No full-length segment exists when the bar is shorter than the birth month.
    numberIteration = max(len(s) - (m - 1), 0)
    for k in range(0, numberIteration):
        newArray = s[k:k+m]
        sumArray = sum(newArray)
        if sumArray == d:
            numberDivided += 1
    return numberDivided
s = '2 5 1 3 4 4 3 5 1 1 2 1 4 1 3 3 4 2 1'
caracteres = '18 7'
array = list(map(int, s.split()))
caracteresList = list(map(int, caracteres.split()))
print(birthday(array, caracteresList[0], caracteresList[1]))
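# Hedged check (not part of the original file): for the sample above (d=18, m=7) a hand
# count finds three length-7 segments summing to 18 (starting at indices 10, 11 and 12),
# so the script is expected to print 3. Verify independently before relying on it.
assert birthday(array, caracteresList[0], caracteresList[1]) == 3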
| 31.117647
| 102
| 0.670132
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 550
| 0.519849
|
c46046acfa73778c21a31da519b8cdbcc2cefaef
| 3,517
|
py
|
Python
|
sdk/python/pulumi_sonarqube/get_users.py
|
jshield/pulumi-sonarqube
|
53664a97903af3ecdf4f613117d83d0acae8e53e
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_sonarqube/get_users.py
|
jshield/pulumi-sonarqube
|
53664a97903af3ecdf4f613117d83d0acae8e53e
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_sonarqube/get_users.py
|
jshield/pulumi-sonarqube
|
53664a97903af3ecdf4f613117d83d0acae8e53e
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = [
'GetUsersResult',
'AwaitableGetUsersResult',
'get_users',
'get_users_output',
]
@pulumi.output_type
class GetUsersResult:
"""
A collection of values returned by getUsers.
"""
def __init__(__self__, email=None, id=None, is_local=None, login_name=None, name=None):
if email and not isinstance(email, str):
raise TypeError("Expected argument 'email' to be a str")
pulumi.set(__self__, "email", email)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if is_local and not isinstance(is_local, bool):
raise TypeError("Expected argument 'is_local' to be a bool")
pulumi.set(__self__, "is_local", is_local)
if login_name and not isinstance(login_name, str):
raise TypeError("Expected argument 'login_name' to be a str")
pulumi.set(__self__, "login_name", login_name)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
@property
@pulumi.getter
def email(self) -> str:
return pulumi.get(self, "email")
@property
@pulumi.getter
def id(self) -> str:
"""
The provider-assigned unique ID for this managed resource.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="isLocal")
def is_local(self) -> bool:
return pulumi.get(self, "is_local")
@property
@pulumi.getter(name="loginName")
def login_name(self) -> str:
return pulumi.get(self, "login_name")
@property
@pulumi.getter
def name(self) -> str:
return pulumi.get(self, "name")
class AwaitableGetUsersResult(GetUsersResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetUsersResult(
email=self.email,
id=self.id,
is_local=self.is_local,
login_name=self.login_name,
name=self.name)
def get_users(login_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetUsersResult:
"""
Use this data source to access information about an existing resource.
"""
__args__ = dict()
__args__['loginName'] = login_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('sonarqube:index/getUsers:getUsers', __args__, opts=opts, typ=GetUsersResult).value
return AwaitableGetUsersResult(
email=__ret__.email,
id=__ret__.id,
is_local=__ret__.is_local,
login_name=__ret__.login_name,
name=__ret__.name)
@_utilities.lift_output_func(get_users)
def get_users_output(login_name: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetUsersResult]:
"""
Use this data source to access information about an existing resource.
"""
...
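# Hedged usage sketch (not part of the generated file): inside a Pulumi program with the
# sonarqube provider configured, the data source above is typically consumed like this.
# The login name is a placeholder assumption.
admin = get_users(login_name="admin")
pulumi.export("admin_email", admin.email)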
| 31.972727
| 119
| 0.644868
| 2,007
| 0.570657
| 250
| 0.071083
| 1,991
| 0.566107
| 0
| 0
| 946
| 0.268979
|
c460565d7c12782c020979637ad1d6a595e59cab
| 504
|
py
|
Python
|
rl/valuefunction/FeatureExtractor.py
|
nickswalker/counterpoint-reinforcement-learning
|
1d0481bd2c9976533175339e411a41f4eb1650aa
|
[
"MIT"
] | 1
|
2016-12-09T18:29:30.000Z
|
2016-12-09T18:29:30.000Z
|
rl/valuefunction/FeatureExtractor.py
|
nickswalker/counterpoint-reinforcement-learning
|
1d0481bd2c9976533175339e411a41f4eb1650aa
|
[
"MIT"
] | null | null | null |
rl/valuefunction/FeatureExtractor.py
|
nickswalker/counterpoint-reinforcement-learning
|
1d0481bd2c9976533175339e411a41f4eb1650aa
|
[
"MIT"
] | null | null | null |
from abc import abstractmethod
from typing import List
from rl.action import Action
from rl.state import State
class StateActionFeatureExtractor:
@abstractmethod
def num_features(self) -> int:
pass
@abstractmethod
def extract(self, state: State, action: Action) -> List[float]:
pass
class StateFeatureExtractor:
@abstractmethod
def num_features(self) -> int:
pass
@abstractmethod
def extract(self, state: State) -> List[float]:
pass
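# Hedged illustration (not part of the original file): a minimal concrete
# StateFeatureExtractor. The single constant feature below is an invented example;
# real extractors in this project derive features from counterpoint states.
class ConstantBiasFeatureExtractor(StateFeatureExtractor):
    def num_features(self) -> int:
        return 1

    def extract(self, state: State) -> List[float]:
        # A constant bias feature, independent of the state.
        return [1.0]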
| 19.384615
| 67
| 0.684524
| 386
| 0.765873
| 0
| 0
| 302
| 0.599206
| 0
| 0
| 0
| 0
|
c4606fc8327fb668e73c4a4c899e2f956318eefb
| 563
|
py
|
Python
|
yxf_utils/jsonx.py
|
yanyaming/yxf_utils
|
c1e1f7012f089374088210e55a953548b3c4da8b
|
[
"MIT"
] | null | null | null |
yxf_utils/jsonx.py
|
yanyaming/yxf_utils
|
c1e1f7012f089374088210e55a953548b3c4da8b
|
[
"MIT"
] | null | null | null |
yxf_utils/jsonx.py
|
yanyaming/yxf_utils
|
c1e1f7012f089374088210e55a953548b3c4da8b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
通用json处理接口
"""
import json
# 输入单引号为边界的类json字符串(内部可能还有双引号),返回单引号为边界的python字典or列表对象。
def singleQuoteJsonStr_to_PythonObj(strr):
jsonObj = eval(strr) # 不能用内置函数解析。只能模拟执行。
return jsonObj # dict or list
# 输入完全正规的json字符串(键-值边界为双引号),返回单引号为边界的python字典or列表对象。
def jsonStr_to_PythonObj(strr):
jsonObj = json.loads(strr)
return jsonObj # dict or list
# 输入python列表或字典(边界为单引号的类json对象),返回边界为双引号的json字符串且双引号加斜杠转义。
def pythonObj_to_jsonStr(obj):
jsonStr = json.dumps(obj)
return jsonStr # str
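# Hedged usage sketch (not part of the original file), exercising the three helpers above
# on a tiny literal; note that eval-based parsing should only be used on trusted input.
if __name__ == '__main__':
    obj = singleQuoteJsonStr_to_PythonObj("{'a': 1, 'b': [2, 3]}")
    print(obj)                                  # {'a': 1, 'b': [2, 3]}
    print(jsonStr_to_PythonObj('{"a": 1}'))     # {'a': 1}
    print(pythonObj_to_jsonStr(obj))            # {"a": 1, "b": [2, 3]}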
| 20.851852
| 58
| 0.735346
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 566
| 0.666667
|
c4611f97e3d7c75a5d43b772cd3ffe6b29e5f96b
| 1,044
|
py
|
Python
|
ggshield/scan/scannable_errors.py
|
rgajason/gg-shield
|
45c3534bdd174880710b97aedac068f6ddd52eaf
|
[
"MIT"
] | null | null | null |
ggshield/scan/scannable_errors.py
|
rgajason/gg-shield
|
45c3534bdd174880710b97aedac068f6ddd52eaf
|
[
"MIT"
] | 1
|
2021-06-02T04:28:09.000Z
|
2021-06-02T04:28:09.000Z
|
ggshield/scan/scannable_errors.py
|
rgajason/gg-shield
|
45c3534bdd174880710b97aedac068f6ddd52eaf
|
[
"MIT"
] | null | null | null |
from ast import literal_eval
from typing import Dict, List
import click
from pygitguardian.models import Detail
from ggshield.text_utils import STYLE, display_error, format_text, pluralize
def handle_scan_error(detail: Detail, chunk: List[Dict[str, str]]) -> None:
if detail.status_code == 401:
raise click.UsageError(detail.detail)
display_error("Error scanning. Results may be incomplete.")
try:
details = literal_eval(detail.detail)
if isinstance(details, list) and details:
display_error(
f"Add the following {pluralize('file', len(details))}"
" to your paths-ignore:"
)
for i, inner_detail in enumerate(details):
if inner_detail:
click.echo(
f"- {format_text(chunk[i]['filename'], STYLE['filename'])}:"
f" {str(inner_detail)}",
err=True,
)
return
except Exception:
click.echo(f"Error {str(detail)}", err=True)
| 32.625
| 80
| 0.598659
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 227
| 0.217433
|
c461b7cff1ea76d96382e29fc4f6db6ef1e4b933
| 18,049
|
py
|
Python
|
Packs/Base/Scripts/DBotPreprocessTextData/DBotPreprocessTextData.py
|
matan-xmcyber/content
|
7f02301c140b35956af3cd20cb8dfc64f34afb3e
|
[
"MIT"
] | 1
|
2021-08-07T00:21:58.000Z
|
2021-08-07T00:21:58.000Z
|
Packs/Base/Scripts/DBotPreprocessTextData/DBotPreprocessTextData.py
|
matan-xmcyber/content
|
7f02301c140b35956af3cd20cb8dfc64f34afb3e
|
[
"MIT"
] | 48
|
2022-03-08T13:45:00.000Z
|
2022-03-31T14:32:05.000Z
|
Packs/Base/Scripts/DBotPreprocessTextData/DBotPreprocessTextData.py
|
matan-xmcyber/content
|
7f02301c140b35956af3cd20cb8dfc64f34afb3e
|
[
"MIT"
] | 2
|
2020-12-10T12:02:45.000Z
|
2020-12-15T09:20:01.000Z
|
# pylint: disable=no-member
from CommonServerUserPython import *
from CommonServerPython import *
from sklearn.feature_extraction.text import TfidfVectorizer
import pickle
import uuid
import spacy
import string
from html.parser import HTMLParser
from html import unescape
from re import compile as _Re
import pandas as pd
def hash_word(word, hash_seed):
return str(hash_djb2(word, int(hash_seed)))
def create_text_result(original_text, tokenized_text, original_words_to_tokens, hash_seed=None):
text_result = {
'originalText': original_text,
'tokenizedText': tokenized_text,
'originalWordsToTokens': original_words_to_tokens,
}
if hash_seed is not None:
hash_tokenized_text = ' '.join(hash_word(word, hash_seed) for word in tokenized_text.split())
words_to_hashed_tokens = {word: [hash_word(t, hash_seed) for t in tokens_list] for word, tokens_list in
original_words_to_tokens.items()}
text_result['hashedTokenizedText'] = hash_tokenized_text
text_result['wordsToHashedTokens'] = words_to_hashed_tokens
return text_result
class Tokenizer:
def __init__(self, clean_html=True, remove_new_lines=True, hash_seed=None, remove_non_english=True,
remove_stop_words=True, remove_punct=True, remove_non_alpha=True, replace_emails=True,
replace_numbers=True, lemma=True, replace_urls=True, language='English',
tokenization_method='byWords'):
self.number_pattern = "NUMBER_PATTERN"
self.url_pattern = "URL_PATTERN"
self.email_pattern = "EMAIL_PATTERN"
self.reserved_tokens = set([self.number_pattern, self.url_pattern, self.email_pattern])
self.clean_html = clean_html
self.remove_new_lines = remove_new_lines
self.hash_seed = hash_seed
self.remove_non_english = remove_non_english
self.remove_stop_words = remove_stop_words
self.remove_punct = remove_punct
self.remove_non_alpha = remove_non_alpha
self.replace_emails = replace_emails
self.replace_urls = replace_urls
self.replace_numbers = replace_numbers
self.lemma = lemma
self.language = language
self.tokenization_method = tokenization_method
self.max_text_length = 10 ** 5
self.html_patterns = [
re.compile(r"(?is)<(script|style).*?>.*?(</\1>)"),
re.compile(r"(?s)<!--(.*?)-->[\n]?"),
re.compile(r"(?s)<.*?>"),
re.compile(r" "),
re.compile(r" +")
]
self.nlp = None
self.html_parser = HTMLParser()
self._unicode_chr_splitter = _Re('(?s)((?:[\ud800-\udbff][\udc00-\udfff])|.)').split
self.languages_to_model_names = {'English': 'en_core_web_sm',
'German': 'de_core_news_sm',
'French': 'fr_core_news_sm',
'Spanish': 'es_core_news_sm',
'Portuguese': 'pt_core_news_sm',
'Italian': 'it_core_news_sm',
'Dutch': 'nl_core_news_sm'
}
self.spacy_count = 0
self.spacy_reset_count = 500
def handle_long_text(self):
return '', ''
def map_indices_to_words(self, text):
original_text_indices_to_words = {}
word_start = 0
while word_start < len(text) and text[word_start].isspace():
word_start += 1
for word in text.split():
for char_idx, char in enumerate(word):
original_text_indices_to_words[word_start + char_idx] = word
# find beginning of next word
word_start += len(word)
while word_start < len(text) and text[word_start].isspace():
word_start += 1
return original_text_indices_to_words
def remove_line_breaks(self, text):
return text.replace("\r", " ").replace("\n", " ")
def remove_multiple_whitespaces(self, text):
return re.sub(r"\s+", " ", text).strip()
def clean_html_from_text(self, text):
cleaned = text
for pattern in self.html_patterns:
cleaned = pattern.sub(" ", cleaned)
return unescape(cleaned).strip()
    def handle_tokenization_method(self, text):
language = self.language
if language in self.languages_to_model_names:
tokens_list, original_words_to_tokens = self.tokenize_text_spacy(text)
else:
tokens_list, original_words_to_tokens = self.tokenize_text_other(text)
tokenized_text = ' '.join(tokens_list).strip()
return tokenized_text, original_words_to_tokens
def tokenize_text_other(self, text):
tokens_list = []
tokenization_method = self.tokenization_method
if tokenization_method == 'byWords':
original_words_to_tokens = {}
for t in text.split():
token_without_punct = ''.join([c for c in t if c not in string.punctuation])
if len(token_without_punct) > 0:
tokens_list.append(token_without_punct)
original_words_to_tokens[token_without_punct] = t
elif tokenization_method == 'byLetters':
for t in text:
tokens_list += [chr for chr in self._unicode_chr_splitter(t) if chr and chr != ' ']
original_words_to_tokens = {c: t for c in tokens_list}
else:
return_error('Unsupported tokenization method: when language is "Other" ({})'.format(tokenization_method))
return tokens_list, original_words_to_tokens
def tokenize_text_spacy(self, text):
if self.nlp is None or self.spacy_count % self.spacy_reset_count == 0:
self.init_spacy_model(self.language)
doc = self.nlp(text) # type: ignore
self.spacy_count += 1
original_text_indices_to_words = self.map_indices_to_words(text)
tokens_list = []
original_words_to_tokens = {} # type: ignore
for word in doc:
if word.is_space:
continue
elif self.remove_stop_words and word.is_stop:
continue
elif self.remove_punct and word.is_punct:
continue
elif self.replace_emails and '@' in word.text:
tokens_list.append(self.email_pattern)
elif self.replace_urls and word.like_url:
tokens_list.append(self.url_pattern)
elif self.replace_numbers and (word.like_num or word.pos_ == 'NUM'):
tokens_list.append(self.number_pattern)
elif self.remove_non_alpha and not word.is_alpha:
continue
elif self.remove_non_english and word.text not in self.nlp.vocab: # type: ignore
continue
else:
if self.lemma and word.lemma_ != '-PRON-':
token_to_add = word.lemma_
else:
token_to_add = word.lower_
tokens_list.append(token_to_add)
original_word = original_text_indices_to_words[word.idx]
if original_word not in original_words_to_tokens:
original_words_to_tokens[original_word] = []
original_words_to_tokens[original_word].append(token_to_add)
return tokens_list, original_words_to_tokens
def init_spacy_model(self, language):
try:
self.nlp = spacy.load(self.languages_to_model_names[language],
disable=['tagger', 'parser', 'ner', 'textcat'])
except Exception:
return_error("The specified language is not supported in this docker. In order to pre-process text "
"using this language, it's required to change this docker. Please check at the documentation "
"or contact us for help.")
def word_tokenize(self, text):
if not isinstance(text, list):
text = [text]
result = []
for t in text:
original_text = t
if self.remove_new_lines:
t = self.remove_line_breaks(t)
if self.clean_html:
t = self.clean_html_from_text(t)
t = self.remove_multiple_whitespaces(t)
if len(t) < self.max_text_length:
                tokenized_text, original_words_to_tokens = self.handle_tokenization_method(t)
else:
tokenized_text, original_words_to_tokens = self.handle_long_text()
text_result = create_text_result(original_text, tokenized_text, original_words_to_tokens,
hash_seed=self.hash_seed)
result.append(text_result)
if len(result) == 1:
result = result[0] # type: ignore
return result
# define global parsers
DBOT_TEXT_FIELD = 'dbot_text'
DBOT_PROCESSED_TEXT_FIELD = 'dbot_processed_text'
CONTEXT_KEY = 'DBotPreProcessTextData'
HTML_PATTERNS = [
re.compile(r"(?is)<(script|style).*?>.*?(</\1>)"),
re.compile(r"(?s)<!--(.*?)-->[\n]?"),
re.compile(r"(?s)<.*?>"),
re.compile(r" "),
re.compile(r" +")
]
html_parser = HTMLParser()
tokenizer = None
def read_file(input_data, input_type):
data = [] # type: ignore
if not input_data:
return data
if input_type.endswith("string"):
if 'b64' in input_type:
input_data = base64.b64decode(input_data)
file_content = input_data.decode("utf-8")
else:
file_content = input_data
else:
res = demisto.getFilePath(input_data)
if not res:
return_error("Entry {} not found".format(input_data))
file_path = res['path']
if input_type.startswith('json'):
with open(file_path, 'r') as f:
file_content = f.read()
if input_type.startswith('csv'):
return pd.read_csv(file_path).fillna('').to_dict(orient='records')
elif input_type.startswith('json'):
return json.loads(file_content)
elif input_type.startswith('pickle'):
return pd.read_pickle(file_path, compression=None)
else:
return_error("Unsupported file type %s" % input_type)
def concat_text_fields(data, target_field, text_fields):
for d in data:
text = ''
for fields in text_fields:
for field in fields.strip().split("|"):
field = field.strip()
if "." in field:
value = demisto.dt(d, field)
if type(value) is list and len(value) > 0:
value = value[0]
else:
value = d.get(field) or d.get(field.lower(), '')
if value and isinstance(value, str):
text += value
text += ' '
break
text = text.strip()
d[target_field] = text
return data
def clean_html(text):
cleaned = text
for pattern in HTML_PATTERNS:
cleaned = pattern.sub(" ", cleaned)
return unescape(cleaned).strip()
def remove_line_breaks(text):
return re.sub(r"\s+", " ", text.replace("\r", " ").replace("\n", " ")).strip()
def pre_process_batch(data, source_text_field, target_text_field, remove_html_tags, pre_process_type, hash_seed):
raw_text_data = [x[source_text_field] for x in data]
if remove_html_tags:
raw_text_data = [clean_html(x) for x in raw_text_data]
raw_text_data = [remove_line_breaks(x) for x in raw_text_data]
tokenized_text_data = []
for raw_text in raw_text_data:
tokenized_text = pre_process_single_text(raw_text, hash_seed, pre_process_type)
if hash_seed is None:
tokenized_text_data.append(tokenized_text['tokenizedText'])
else:
tokenized_text_data.append(tokenized_text['hashedTokenizedText'])
for d, tokenized_text in zip(data, tokenized_text_data):
d[target_text_field] = tokenized_text
return data
def pre_process_single_text(raw_text, hash_seed, pre_process_type):
pre_process_func = PRE_PROCESS_TYPES[pre_process_type]
tokenized_text = pre_process_func(raw_text, hash_seed)
return tokenized_text
def pre_process_tokenizer(text, seed):
global tokenizer
if tokenizer is None:
tokenizer = Tokenizer(tokenization_method=demisto.args()['tokenizationMethod'],
language=demisto.args()['language'], hash_seed=seed)
processed_text = tokenizer.word_tokenize(text)
return processed_text
def pre_process_none(text, seed):
original_text = text
tokenized_text = text
original_words_to_tokens = {x: x for x in text.split()}
return create_text_result(original_text, tokenized_text, original_words_to_tokens, seed)
PRE_PROCESS_TYPES = {
'none': pre_process_none,
'nlp': pre_process_tokenizer,
}
def remove_short_text(data, text_field, target_text_field, remove_short_threshold):
description = ""
before_count = len(data)
data = [x for x in data if len(x[text_field].split(" ")) > remove_short_threshold and len(x[target_text_field]) > 0]
after_count = len(data)
dropped_count = before_count - after_count
if dropped_count > 0:
description += "Dropped %d samples shorter than %d words" % (dropped_count, remove_short_threshold) + "\n"
return data, description
def get_tf_idf_similarity_arr(documents):
tfidf = TfidfVectorizer(stop_words="english", min_df=1).fit_transform(documents)
pairwise_similarity = tfidf * tfidf.T
return pairwise_similarity.toarray()
def find_duplicate_indices(texts, dedup_threshold):
similarity_arr = get_tf_idf_similarity_arr(texts)
indices_to_remove = []
for i in range(similarity_arr.shape[0]):
for j in range(similarity_arr.shape[1]):
if j > i and similarity_arr[i][j] > dedup_threshold:
indices_to_remove.append(j)
return set(indices_to_remove)
def remove_duplicate_by_indices(data, duplicate_indices):
description = ""
data = [x for i, x in enumerate(data) if i not in duplicate_indices]
dropped_count = len(duplicate_indices)
if dropped_count > 0:
description += "Dropped %d samples duplicate to other samples" % dropped_count + "\n"
return data, description
def whitelist_dict_fields(data, fields):
fields = [x.strip() for x in fields] + [x.strip().lower() for x in fields]
new_data = []
for d in data:
new_data.append({k: v for k, v in d.items() if k in fields})
return new_data
def main():
text_fields = demisto.args()['textFields'].split(",")
input = demisto.args().get('input')
input_type = demisto.args()['inputType']
hash_seed = int(demisto.args().get('hashSeed')) if demisto.args().get('hashSeed') else None
remove_short_threshold = int(demisto.args().get('removeShortTextThreshold', 1))
de_dup_threshold = float(demisto.args()['dedupThreshold'])
pre_process_type = demisto.args()['preProcessType']
remove_html_tags = demisto.args()['cleanHTML'] == 'true'
whitelist_fields = demisto.args().get('whitelistFields').split(",") if demisto.args().get(
'whitelistFields') else None
    # if input is a single string (from DbotPredictPhishingWords):
if input_type == 'string':
res = pre_process_single_text(raw_text=demisto.args().get('input'),
hash_seed=hash_seed, pre_process_type=pre_process_type)
return res
output_original_text_fields = demisto.args().get('outputOriginalTextFields', 'false') == 'true'
description = ""
# read data
data = read_file(input, input_type)
# concat text fields
concat_text_fields(data, DBOT_TEXT_FIELD, text_fields)
description += "Read initial %d samples" % len(data) + "\n"
# clean text
if pre_process_type not in PRE_PROCESS_TYPES:
return_error('Pre-process type {} is not supported'.format(pre_process_type))
data = pre_process_batch(data, DBOT_TEXT_FIELD, DBOT_PROCESSED_TEXT_FIELD, remove_html_tags, pre_process_type,
hash_seed)
# remove short emails
data, desc = remove_short_text(data, DBOT_TEXT_FIELD, DBOT_PROCESSED_TEXT_FIELD, remove_short_threshold)
description += desc
# remove duplicates
try:
if 0 < de_dup_threshold < 1:
duplicate_indices = find_duplicate_indices([x[DBOT_PROCESSED_TEXT_FIELD] for x in data], de_dup_threshold)
data, desc = remove_duplicate_by_indices(data, duplicate_indices)
description += desc
    except Exception:
        # de-duplication is best effort; keep all samples if the similarity computation fails
        pass
    if output_original_text_fields:
        # default to an empty whitelist so the original text fields can be appended safely
        whitelist_fields = whitelist_fields or []
        for field in text_fields:
            whitelist_fields += [x.strip() for x in field.split('|')]
if whitelist_fields and len(whitelist_fields) > 0:
whitelist_fields.append(DBOT_PROCESSED_TEXT_FIELD)
data = whitelist_dict_fields(data, whitelist_fields)
description += "Done processing: %d samples" % len(data) + "\n"
# output
file_name = str(uuid.uuid4())
output_format = demisto.args()['outputFormat']
if output_format == 'pickle':
data_encoded = pickle.dumps(data, protocol=2)
elif output_format == 'json':
data_encoded = json.dumps(data, default=str) # type: ignore
else:
return_error("Invalid output format: %s" % output_format)
entry = fileResult(file_name, data_encoded)
entry['Contents'] = data
entry['HumanReadable'] = description
entry['EntryContext'] = {
CONTEXT_KEY: {
'Filename': file_name,
'FileFormat': output_format,
'TextField': DBOT_TEXT_FIELD,
'TextFieldProcessed': DBOT_PROCESSED_TEXT_FIELD,
}
}
return entry
if __name__ in ['builtins', '__main__']:
entry = main()
demisto.results(entry)
| 40.559551
| 120
| 0.633165
| 7,880
| 0.436589
| 0
| 0
| 0
| 0
| 0
| 0
| 2,134
| 0.118234
|
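A note on the de-duplication step above: find_duplicate_indices marks a sample as a duplicate when its pairwise TF-IDF cosine similarity with an earlier sample exceeds the threshold. A minimal standalone sketch of that idea follows; the sample texts and the 0.9 threshold are illustrative only, not values taken from the script.

from sklearn.feature_extraction.text import TfidfVectorizer

def near_duplicate_indices(texts, threshold=0.9):
    """Return indices of texts that are near-duplicates of an earlier text."""
    tfidf = TfidfVectorizer(stop_words="english", min_df=1).fit_transform(texts)
    # rows are L2-normalised, so the dot products below are cosine similarities
    similarity = (tfidf * tfidf.T).toarray()
    duplicates = set()
    for i in range(similarity.shape[0]):
        for j in range(i + 1, similarity.shape[1]):
            if similarity[i][j] > threshold:
                duplicates.add(j)
    return duplicates

samples = [
    "password reset requested for your account",
    "password reset was requested for your account",
    "weekly newsletter: new features this month",
]
print(near_duplicate_indices(samples))  # expected to flag index 1 as a duplicate of index 0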
c464ae6c792d78df3c469e563d6a59248c7a5e64
| 2,799
|
py
|
Python
|
punc_recover/tester/punc_tester.py
|
Z-yq/audioSamples.github.io
|
53c474288f0db1a3acfe40ba57a4cd5f2aecbcd3
|
[
"Apache-2.0"
] | 1
|
2022-03-03T02:51:55.000Z
|
2022-03-03T02:51:55.000Z
|
punc_recover/tester/punc_tester.py
|
RapidAI/TensorflowASR
|
084519b5a0464f465e1d72c24cba07c1ec55cd26
|
[
"Apache-2.0"
] | null | null | null |
punc_recover/tester/punc_tester.py
|
RapidAI/TensorflowASR
|
084519b5a0464f465e1d72c24cba07c1ec55cd26
|
[
"Apache-2.0"
] | null | null | null |
import logging
import os
import tensorflow as tf
from punc_recover.models.punc_transformer import PuncTransformer
from punc_recover.tester.base_tester import BaseTester
from utils.text_featurizers import TextFeaturizer
class PuncTester(BaseTester):
""" Trainer for CTC Models """
def __init__(self,
config,
):
super(PuncTester, self).__init__(config['running_config'])
self.model_config = config['model_config']
self.vocab_featurizer = TextFeaturizer(config['punc_vocab'])
self.bd_featurizer = TextFeaturizer(config['punc_biaodian'])
self.opt_config = config['optimizer_config']
self.eval_metrics = {
"acc": tf.keras.metrics.Mean(),
}
def _eval_step(self, batch):
x, labels = batch
mask = self.creat_mask(x)
pred_bd = self.model.inference(x, mask)
        acc = self.classes_acc(labels, pred_bd)
self.eval_metrics["acc"].update_state(acc)
def creat_mask(self, seq):
seq_pad = tf.cast(tf.equal(seq, 0), tf.float32)
return seq_pad[:, tf.newaxis, tf.newaxis, :] # (batch_size, 1, 1, seq_len)
    def classes_acc(self, real, pred):
        mask = tf.math.logical_not(tf.math.equal(real, 0))
        accs = tf.keras.metrics.sparse_categorical_accuracy(real, pred)
        mask = tf.cast(mask, dtype=accs.dtype)
        accs *= mask
        final = tf.reduce_sum(accs, -1) / tf.reduce_sum(mask, -1)
        return tf.reduce_mean(final)
def compile(self, ):
self.model = PuncTransformer(num_layers=self.model_config['num_layers'],
d_model=self.model_config['d_model'],
enc_embedding_dim=self.model_config['enc_embedding_dim'],
num_heads=self.model_config['num_heads'],
dff=self.model_config['dff'],
input_vocab_size=self.vocab_featurizer.num_classes,
bd_vocab_size=self.bd_featurizer.num_classes,
pe_input=self.model_config['pe_input'],
rate=self.model_config['rate'])
self.model._build()
        try:
            self.load_checkpoint()
        except Exception:
            logging.info('trainer resume failed')
self.model.summary(line_length=100)
def run(self, ):
self._eval_batches()
def load_checkpoint(self, ):
"""Load checkpoint."""
self.checkpoint_dir = os.path.join(self.running_config["outdir"], "checkpoints")
files = os.listdir(self.checkpoint_dir)
files.sort(key=lambda x: int(x.split('_')[-1].replace('.h5', '')))
self.model.load_weights(os.path.join(self.checkpoint_dir, files[-1]))
| 36.350649
| 94
| 0.599857
| 2,577
| 0.920686
| 0
| 0
| 0
| 0
| 0
| 0
| 292
| 0.104323
|
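The padding-aware accuracy in classes_acc above can be exercised on its own. Below is a small sketch with made-up labels and predictions, purely to show how label 0 (padding) is excluded from the score.

import tensorflow as tf

def masked_accuracy(real, pred):
    """Sparse categorical accuracy that ignores positions where the label is 0 (padding)."""
    mask = tf.math.logical_not(tf.math.equal(real, 0))
    accs = tf.keras.metrics.sparse_categorical_accuracy(real, pred)
    mask = tf.cast(mask, dtype=accs.dtype)
    accs *= mask
    per_sequence = tf.reduce_sum(accs, -1) / tf.reduce_sum(mask, -1)
    return tf.reduce_mean(per_sequence)

# toy batch: one sequence of length 4 whose last position is padding (label 0)
real = tf.constant([[2, 1, 3, 0]])
pred = tf.one_hot([[2, 1, 1, 0]], depth=4)  # third position is wrong; the padded one is ignored
print(float(masked_accuracy(real, pred)))   # 2 correct out of 3 unpadded positions -> ~0.667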
c464fcd2cef62b8d92f36f587767dd193dd32e2a
| 13,293
|
py
|
Python
|
moztrap/model/core/migrations/0003_auto__add_field_productversion_cc_version__add_field_product.py
|
mbeko/moztrap
|
db75e1f8756ef2c0c39652a66302b19c8afa0256
|
[
"BSD-2-Clause"
] | null | null | null |
moztrap/model/core/migrations/0003_auto__add_field_productversion_cc_version__add_field_product.py
|
mbeko/moztrap
|
db75e1f8756ef2c0c39652a66302b19c8afa0256
|
[
"BSD-2-Clause"
] | null | null | null |
moztrap/model/core/migrations/0003_auto__add_field_productversion_cc_version__add_field_product.py
|
mbeko/moztrap
|
db75e1f8756ef2c0c39652a66302b19c8afa0256
|
[
"BSD-2-Clause"
] | null | null | null |
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'ProductVersion.cc_version'
db.add_column('core_productversion', 'cc_version', self.gf('django.db.models.fields.IntegerField')(default=0), keep_default=False)
# Adding field 'Product.cc_version'
db.add_column('core_product', 'cc_version', self.gf('django.db.models.fields.IntegerField')(default=0), keep_default=False)
def backwards(self, orm):
# Deleting field 'ProductVersion.cc_version'
db.delete_column('core_productversion', 'cc_version')
# Deleting field 'Product.cc_version'
db.delete_column('core_product', 'cc_version')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'core.product': {
'Meta': {'ordering': "['name']", 'object_name': 'Product'},
'cc_version': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'created_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 2, 25, 0, 0, 59, 558711)'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'deleted_on': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'has_team': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'modified_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 2, 25, 0, 0, 59, 558895)'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'own_team': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'symmetrical': 'False', 'blank': 'True'})
},
'core.productversion': {
'Meta': {'ordering': "['product', 'order']", 'object_name': 'ProductVersion'},
'cc_version': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'created_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 2, 25, 0, 0, 59, 559819)'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'deleted_on': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'environments': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'productversion'", 'symmetrical': 'False', 'to': "orm['environments.Environment']"}),
'has_team': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'latest': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'modified_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 2, 25, 0, 0, 59, 560004)'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'own_team': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'symmetrical': 'False', 'blank': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'versions'", 'to': "orm['core.Product']"}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'core.user': {
'Meta': {'object_name': 'User', 'db_table': "'auth_user'", '_ormbases': ['auth.User'], 'proxy': 'True'}
},
'environments.category': {
'Meta': {'ordering': "['name']", 'object_name': 'Category'},
'cc_version': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'created_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 2, 25, 0, 0, 59, 562776)'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'deleted_on': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'modified_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 2, 25, 0, 0, 59, 562967)'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'environments.element': {
'Meta': {'ordering': "['name']", 'object_name': 'Element'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'elements'", 'to': "orm['environments.Category']"}),
'cc_version': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'created_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 2, 25, 0, 0, 59, 561818)'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'deleted_on': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'modified_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 2, 25, 0, 0, 59, 562003)'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'environments.environment': {
'Meta': {'object_name': 'Environment'},
'cc_version': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'created_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 2, 25, 0, 0, 59, 555711)'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'deleted_on': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'elements': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'environments'", 'symmetrical': 'False', 'to': "orm['environments.Element']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'modified_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 2, 25, 0, 0, 59, 555910)'}),
'profile': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'environments'", 'null': 'True', 'to': "orm['environments.Profile']"})
},
'environments.profile': {
'Meta': {'object_name': 'Profile'},
'cc_version': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'created_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 2, 25, 0, 0, 59, 557817)'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'deleted_on': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['auth.User']"}),
'modified_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 2, 25, 0, 0, 59, 558002)'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'})
}
}
complete_apps = ['core']
| 85.76129
| 187
| 0.568871
| 13,166
| 0.990446
| 0
| 0
| 0
| 0
| 0
| 0
| 9,412
| 0.708042
|
c4667c374455b11e101ec3e8d25bd29cd21c3a81
| 3,965
|
py
|
Python
|
tests/downloader_test.py
|
jkawamoto/roadie-gcp
|
96394a47d375bd01e167f351fc86a03905e98395
|
[
"MIT"
] | 1
|
2018-09-20T01:51:23.000Z
|
2018-09-20T01:51:23.000Z
|
tests/downloader_test.py
|
jkawamoto/roadie-gcp
|
96394a47d375bd01e167f351fc86a03905e98395
|
[
"MIT"
] | 9
|
2016-01-31T11:28:12.000Z
|
2021-04-30T20:43:39.000Z
|
tests/downloader_test.py
|
jkawamoto/roadie-gcp
|
96394a47d375bd01e167f351fc86a03905e98395
|
[
"MIT"
] | null | null | null |
#! /usr/bin/env python
#
# downloader_test.py
#
# Copyright (c) 2015-2016 Junpei Kawamoto
#
# This software is released under the MIT License.
#
# http://opensource.org/licenses/mit-license.php
#
""" Test for downloader module.
"""
import logging
import shutil
import sys
import unittest
import os
from os import path
import downloader # pylint: disable=import-error
TARGET_FILE = "bin/entrypoint.sh"
SAMPLE_FILE = "https://raw.githubusercontent.com/jkawamoto/roadie-gcp/master/bin/entrypoint.sh"
ORIGINAL_FILE = path.normpath(
path.join(path.dirname(__file__), "..", TARGET_FILE))
ARCHIVE_ROOT = "./roadie-gcp-20160618"
ZIP_FILE = "https://github.com/jkawamoto/roadie-gcp/archive/v20160618.zip"
TAR_FILE = "https://github.com/jkawamoto/roadie-gcp/archive/v20160618.tar.gz"
class TestDownload(unittest.TestCase):
""" Test case for download module.
"""
def test_download(self):
""" Test downloading a file.
"""
downloader.download(SAMPLE_FILE)
basename = path.basename(SAMPLE_FILE)
self.evaluate_file(basename, ORIGINAL_FILE)
os.remove(basename)
def test_set_destination(self):
""" Test downloading a file to another directory.
"""
downloader.download(SAMPLE_FILE + ":/tmp/")
target = "/tmp/" + path.basename(SAMPLE_FILE)
self.evaluate_file(target, ORIGINAL_FILE)
os.remove(target)
def test_rename(self):
""" Test downloading a file and renaming it.
"""
target = "test.md"
downloader.download(SAMPLE_FILE + ":" + target)
self.evaluate_file(target, ORIGINAL_FILE)
os.remove(target)
def test_set_destination_and_rename(self):
""" Test downloading a file to a directory and renaming it.
"""
target = "/tmp/test.md"
downloader.download(SAMPLE_FILE + ":" + target)
self.evaluate_file(target, ORIGINAL_FILE)
os.remove(target)
def test_download_zip(self):
""" Test downloading a zip file.
"""
downloader.download(ZIP_FILE)
target = path.join(ARCHIVE_ROOT, TARGET_FILE)
self.evaluate_file(target, ORIGINAL_FILE)
shutil.rmtree(ARCHIVE_ROOT)
def test_set_destination_zip(self):
""" Test downloading a zip file to a specified path.
"""
downloader.download(ZIP_FILE + ":/tmp/")
target = path.join("/tmp/", ARCHIVE_ROOT, TARGET_FILE)
self.evaluate_file(target, ORIGINAL_FILE)
shutil.rmtree(path.join("/tmp/", ARCHIVE_ROOT))
def test_download_tarball(self):
""" Test downloading a tarball file.
"""
downloader.download(TAR_FILE)
target = path.join(ARCHIVE_ROOT, TARGET_FILE)
self.evaluate_file(target, ORIGINAL_FILE)
shutil.rmtree(ARCHIVE_ROOT)
    def test_set_destination_tarball(self):
""" Test downloading a tarball file to a specified path.
"""
downloader.download(TAR_FILE + ":/tmp/")
target = path.join("/tmp/", ARCHIVE_ROOT, TARGET_FILE)
self.evaluate_file(target, ORIGINAL_FILE)
shutil.rmtree(path.join("/tmp/", ARCHIVE_ROOT))
def evaluate_file(self, target, original):
""" Evaluate existence and contents of the target file.
Args:
target: target file to be checked.
          original: original file whose contents will be compared with those
            of target.
"""
self.assertTrue(path.exists(target))
self.assertEqual(
self.read_file(target),
self.read_file(original))
@staticmethod
def read_file(fpath):
""" Open a file and read it.
Args:
fpath: Path for a file.
Returns:
Contents of the file.
"""
with open(fpath) as f:
return f.read()
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO, stream=sys.stderr)
unittest.main()
| 30.037879
| 95
| 0.642371
| 3,066
| 0.773266
| 0
| 0
| 246
| 0.062043
| 0
| 0
| 1,477
| 0.372509
|
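The tests above rely on a URL[:destination] spec (for example SAMPLE_FILE + ":/tmp/"), but the downloader module itself is not part of this snippet. The helper below is only a hypothetical sketch of how such a spec could be split; it is not the project's actual parser.

def split_spec(spec):
    """Hypothetical parser for 'URL[:destination]' strings.

    The scheme separator '://' also contains ':', so only the last ':' that
    appears after the scheme is treated as the destination separator.
    """
    scheme_end = spec.find("://") + 3
    sep = spec.rfind(":")
    if sep <= scheme_end:
        return spec, None  # no destination given
    return spec[:sep], spec[sep + 1:]

print(split_spec("https://example.com/a/entrypoint.sh"))            # (url, None)
print(split_spec("https://example.com/a/entrypoint.sh:/tmp/"))      # (url, '/tmp/')
print(split_spec("https://example.com/a/entrypoint.sh:/tmp/x.md"))  # (url, '/tmp/x.md')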
c466ca50010615bb02d62529ff22d41f7530666b
| 1,800
|
py
|
Python
|
ticle/plotters/plot_phase.py
|
muma7490/TICLE
|
bffa64ee488abac17809d02dfc176fe80128541a
|
[
"MIT"
] | null | null | null |
ticle/plotters/plot_phase.py
|
muma7490/TICLE
|
bffa64ee488abac17809d02dfc176fe80128541a
|
[
"MIT"
] | null | null | null |
ticle/plotters/plot_phase.py
|
muma7490/TICLE
|
bffa64ee488abac17809d02dfc176fe80128541a
|
[
"MIT"
] | null | null | null |
import matplotlib.pyplot as pl
import os
import numpy as np
from ticle.data.dataHandler import normalizeData,load_file
from ticle.analysis.analysis import get_phases,normalize_phase
pl.rc('xtick', labelsize='x-small')
pl.rc('ytick', labelsize='x-small')
pl.rc('font', family='serif')
pl.rcParams.update({'font.size': 20})
pl.tight_layout()
path = os.getcwd()
phase_dir = f"{path}/results/phase_plots"
try:
os.makedirs(phase_dir)
except FileExistsError:
pass
data_dir = f"{path}/data/"
data_list_file = f"{data_dir}/dataList.txt"
data_list = np.loadtxt(data_list_file)
for data in data_list:
star = f"0{int(data[0])}"
file_name = f"{data_dir}/{star}/{star}_LC_destepped.txt"
res_dir = f"{phase_dir}/{star}"
try:
os.mkdir(res_dir)
except FileExistsError:
pass
t_series = load_file(file_name)
t_series = normalizeData(t_series)
p = [(f"Phaseplot {star} - literature","literature",data[2]),
(f"Phaseplot {star} - P={data[1]} days",f"result",data[1])]
for title,save_text,period in p:
masks = get_phases(t_series,period)
fig_phase = pl.figure(figsize=(10,7))
for i in masks:
plot_data = normalize_phase(np.array((t_series[0][i],t_series[1][i])))
pl.plot(plot_data[0],plot_data[1],linewidth = 1)
pl.xlabel("Phase")
pl.ylabel("Flux")
pl.title(title)
fig_phase.savefig(f"{res_dir}/{star}_{save_text}_phase_.pdf")
fig_lightcurve = pl.figure(figsize=(10,7))
for i in masks:
pl.plot(t_series[0][i],t_series[1][i],linewidth = 1)
pl.xlabel("Period(days)")
pl.ylabel("Flux")
pl.title(f"{star} Lightcurve {save_text}")
fig_lightcurve.savefig(f"{res_dir}/{star}_{save_text}_lightcurve.pdf")
| 27.692308
| 82
| 0.648889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 453
| 0.251667
|
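get_phases and normalize_phase are imported from ticle.analysis.analysis and are not shown in this snippet. As a rough, generic illustration of phase folding (not the package's implementation), a light curve can be folded onto a known period like this:

import numpy as np

def fold_lightcurve(time, flux, period):
    """Map observation times onto a phase in [0, 1) for the given period and sort by phase."""
    phase = (time % period) / period
    order = np.argsort(phase)
    return phase[order], flux[order]

# synthetic sinusoidal light curve with a 2.5-day period
time = np.linspace(0.0, 25.0, 500)
flux = 1.0 + 0.05 * np.sin(2.0 * np.pi * time / 2.5)
phase, folded_flux = fold_lightcurve(time, flux, period=2.5)
print(phase[:3], folded_flux[:3])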
c467d3e82cd1949de48c0e1eac654f4ecca276b3
| 7,267
|
py
|
Python
|
src/putil/rabbitmq/rabbit_util.py
|
scionrep/scioncc_new
|
086be085b69711ee24c4c86ed42f2109ca0db027
|
[
"BSD-2-Clause"
] | 2
|
2015-10-05T20:36:35.000Z
|
2018-11-21T11:45:24.000Z
|
src/putil/rabbitmq/rabbit_util.py
|
scionrep/scioncc_new
|
086be085b69711ee24c4c86ed42f2109ca0db027
|
[
"BSD-2-Clause"
] | 21
|
2015-03-18T14:39:32.000Z
|
2016-07-01T17:16:29.000Z
|
src/putil/rabbitmq/rabbit_util.py
|
scionrep/scioncc_new
|
086be085b69711ee24c4c86ed42f2109ca0db027
|
[
"BSD-2-Clause"
] | 12
|
2015-03-18T10:53:49.000Z
|
2018-06-21T11:19:57.000Z
|
#!/usr/bin/python
import shlex
import simplejson
from putil.rabbitmq.rabbitmqadmin import Management, make_parser, LISTABLE, DELETABLE
class RabbitManagementUtil(object):
def __init__(self, config, options=None, sysname=None):
"""
Given a config object (system CFG or rabbit mgmt config), extracts the correct config
and prepares util for subsequent calls to RabbitMQ via management plugin REST API.
"""
self.mgmt_cfg = self.get_mgmt_config(config, sysname)
self.connect_str = self.build_connect_str(self.mgmt_cfg)
self.options = options
self.sysname = sysname
self.call_args = self.connect_str
if self.options:
self.call_args += "_" + self.options
self.parser = make_parser()
@staticmethod
def get_mgmt_config(config, sysname=None):
""" Returns the RabbitMq management config dict from indirect reference in container CFG
or from given config dict. """
if not config:
raise RuntimeError("Bad config argument")
if "container" in config and hasattr(config, "get_safe"):
mgmt_cfg_key = config.get_safe("container.messaging.management.server", "rabbit_manage")
mgmt_cfg = config.get_safe("server." + mgmt_cfg_key)
elif "host" in config:
mgmt_cfg = config
else:
raise RuntimeError("Bad RabbitMQ management config")
sysname = sysname or "scioncc"
mgmt_cfg = mgmt_cfg.copy()
mgmt_cfg["host"] = mgmt_cfg.get("host", None) or "localhost"
mgmt_cfg["port"] = mgmt_cfg.get("port", None) or "15672"
mgmt_cfg["username"] = mgmt_cfg.get("username", None) or "guest"
mgmt_cfg["password"] = mgmt_cfg.get("password", None) or "guest"
mgmt_cfg["vhost"] = mgmt_cfg.get("vhost", None) or "/"
mgmt_cfg["system_exchange"] = mgmt_cfg.get("system_exchange", None)
if not mgmt_cfg["system_exchange"] and "exchange" in config and hasattr(config, "get_safe"):
mgmt_cfg["system_exchange"] = "%s.%s" % (sysname, config.get_safe('exchange.core.system_xs', 'system'))
mgmt_cfg["events_xp"] = mgmt_cfg.get("events_xp", None)
if not mgmt_cfg["events_xp"] and "exchange" in config and hasattr(config, "get_safe"):
mgmt_cfg["events_xp"] = "%s.%s" % (mgmt_cfg["system_exchange"], config.get_safe('exchange.core.events', 'events'))
return mgmt_cfg
@staticmethod
def build_connect_str(mgmt_cfg):
connect_str = "-q -H {0} -P {1} -u {2} -p {3} -V {4}".format(
mgmt_cfg["host"], mgmt_cfg["port"], mgmt_cfg["username"], mgmt_cfg["password"], mgmt_cfg["vhost"])
return connect_str
@staticmethod
def get_mgmt_url(config, feats=None):
mgmt_cfg = RabbitManagementUtil.get_mgmt_config(config)
feats = feats or []
url = "http://%s:%s/api/%s" % (mgmt_cfg["host"], mgmt_cfg["port"], "/".join(feats))
return url
# -------------------------------------------------------------------------
# Util methods
def clean_by_prefix(self, prefix):
"""
Utility method to clean (sysname) prefixed exchanges and queues on a broker.
@param prefix The sysname / prefix to use to select exchanges and queues to delete.
Must be the prefix to the exchange or queue or this will not be deleted.
@returns A 2-tuple of (list of exchanges deleted, list of queues deleted).
"""
exchanges = self.list_names('exchanges')
deleted_exchanges = self.delete_names_with_prefix('exchange', exchanges, prefix)
queues = self.list_names('queues')
deleted_queues = self.delete_names_with_prefix('queue', queues, prefix)
return deleted_exchanges, deleted_queues
def clean_by_sysname(self, sysname=None):
sysname = sysname or self.sysname
if not sysname:
raise RuntimeError("Must provide sysname")
        return self.clean_by_prefix(sysname)
def declare_exchange(self, xp):
if xp == "events":
ex_name = self.mgmt_cfg["events_xp"]
else:
ex_name = self.mgmt_cfg["system_exchange"]
cmd_str = '{0} declare exchange name="{1}" durable=false auto_delete=true type=topic'.format(self.call_args, ex_name)
(options, args) = self.parser.parse_args(shlex.split(cmd_str))
mgmt = Management(options, args[1:])
mgmt.invoke_declare()
def declare_queue(self, xp, queue_name):
if xp == "events":
ex_name = self.mgmt_cfg["events_xp"]
else:
ex_name = self.mgmt_cfg["system_exchange"]
if queue_name.startswith(self.sysname):
qqueue_name = queue_name
else:
qqueue_name = ".".join([ex_name, queue_name])
cmd_str = '{0} declare queue name="{1}" durable=false auto_delete=false'.format(self.call_args, qqueue_name)
(options, args) = self.parser.parse_args(shlex.split(cmd_str))
mgmt = Management(options, args[1:])
mgmt.invoke_declare()
def bind_queue(self, xp, queue_name, binding):
if xp == "events":
ex_name = self.mgmt_cfg["events_xp"]
else:
ex_name = self.mgmt_cfg["system_exchange"]
if queue_name.startswith(self.sysname):
qqueue_name = queue_name
else:
qqueue_name = ".".join([ex_name, queue_name])
cmd_str = '{0} declare binding source="{1}" destination="{2}" destination_type=queue routing_key="{3}"'.format(
self.call_args, ex_name, qqueue_name, binding)
(options, args) = self.parser.parse_args(shlex.split(cmd_str))
mgmt = Management(options, args[1:])
mgmt.invoke_declare()
# TODO: Move the management calls from pyon.ion.exchange here
# -------------------------------------------------------------------------
# Helpers
def list_names(self, listable_type):
list_str = '%s list %s name' % (self.call_args, listable_type)
(options, args) = self.parser.parse_args(shlex.split(list_str))
mgmt = Management(options, args[1:])
uri = mgmt.list_show_uri(LISTABLE, 'list', mgmt.args[1:])
output_json = mgmt.get(uri)
listables = simplejson.loads(output_json)
return listables
def list_names_with_prefix(self, listables, name_prefix):
return [l['name'] for l in listables if l['name'].startswith(name_prefix)]
# This function works on exchange, queue, vhost, user
def delete_names_with_prefix(self, deletable_type, deleteable, name_prefix):
deleted = []
for d in deleteable:
try:
if d['name'].startswith(name_prefix):
delete_cmd = '%s delete %s name="%s"' % (self.call_args, deletable_type, d['name'])
(options, args) = self.parser.parse_args(shlex.split(delete_cmd))
mgmt = Management(options, args[1:])
mgmt.invoke_delete()
deleted.append(d['name'])
except KeyError:
                # some entries have no 'name' key; skip them
pass
return deleted
| 42.00578
| 126
| 0.610706
| 7,125
| 0.98046
| 0
| 0
| 2,190
| 0.301362
| 0
| 0
| 2,144
| 0.295032
|
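Because get_mgmt_config also accepts a plain dict containing a "host" key, the utility can be pointed straight at a broker's management API. A minimal usage sketch, assuming a local RabbitMQ broker with default guest credentials; every value below is a placeholder.

from putil.rabbitmq.rabbit_util import RabbitManagementUtil

# plain-dict branch of get_mgmt_config (the config carries a "host" key)
mgmt_config = {
    "host": "localhost",
    "port": "15672",
    "username": "guest",
    "password": "guest",
    "vhost": "/",
}

util = RabbitManagementUtil(mgmt_config, sysname="scioncc")
exchanges, queues = util.clean_by_sysname()  # deletes exchanges/queues whose names start with "scioncc"
print("deleted exchanges:", exchanges)
print("deleted queues:", queues)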
c4692b2cd0fdba89e13d15c53467b6b2f916be48
| 5,362
|
py
|
Python
|
gaternet/main.py
|
gunpowder78/google-research
|
d41bbaca1eb9bfd980ec2b3fd201c3ddb4d1f2e5
|
[
"Apache-2.0"
] | 1
|
2022-03-13T21:48:52.000Z
|
2022-03-13T21:48:52.000Z
|
gaternet/main.py
|
gunpowder78/google-research
|
d41bbaca1eb9bfd980ec2b3fd201c3ddb4d1f2e5
|
[
"Apache-2.0"
] | null | null | null |
gaternet/main.py
|
gunpowder78/google-research
|
d41bbaca1eb9bfd980ec2b3fd201c3ddb4d1f2e5
|
[
"Apache-2.0"
] | 1
|
2022-03-30T07:20:29.000Z
|
2022-03-30T07:20:29.000Z
|
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Loads a GaterNet checkpoint and tests on Cifar-10 test set."""
import argparse
import io
import os
from backbone_resnet import Network as Backbone
from gater_resnet import Gater
import torch
import torch.nn as nn
import torch.nn.functional as F
from torchvision import datasets
from torchvision import transforms
def load_from_state(state_dict, model):
"""Loads the state dict of a checkpoint into model."""
tem_dict = dict()
for k in state_dict.keys():
tem_dict[k.replace('module.', '')] = state_dict[k]
state_dict = tem_dict
ckpt_key = set(state_dict.keys())
model_key = set(model.state_dict().keys())
print('Keys not in current model: {}\n'.format(ckpt_key - model_key))
print('Keys not in checkpoint: {}\n'.format(model_key - ckpt_key))
model.load_state_dict(state_dict, strict=True)
print('Successfully reload from state.')
return model
def test(backbone, gater, device, test_loader):
"""Tests the model on a test set."""
backbone.eval()
gater.eval()
loss = 0
correct = 0
with torch.no_grad():
for data, target in test_loader:
data, target = data.to(device), target.to(device)
gate = gater(data)
output = backbone(data, gate)
loss += F.cross_entropy(output, target, size_average=False).item()
pred = output.max(1, keepdim=True)[1]
correct += pred.eq(target.view_as(pred)).sum().item()
loss /= len(test_loader.dataset)
acy = 100. * correct / len(test_loader.dataset)
print('\nTest set: Average loss: {:.4f}, Accuracy: {}/{} ({:.4f}%)\n'.format(
loss, correct, len(test_loader.dataset), acy))
return acy
def run(args, device, test_loader):
"""Loads checkpoint into GaterNet and runs test on the test data."""
with open(args.checkpoint_file, 'rb') as fin:
inbuffer = io.BytesIO(fin.read())
state_dict = torch.load(inbuffer, map_location='cpu')
print('Successfully load checkpoint file.\n')
backbone = Backbone(depth=args.backbone_depth, num_classes=10)
print('Loading checkpoint weights into backbone.')
backbone = load_from_state(state_dict['backbone_state_dict'], backbone)
backbone = nn.DataParallel(backbone).to(device)
print('Backbone is ready after loading checkpoint and moving to device:')
print(backbone)
n_params_b = sum(
[param.view(-1).size()[0] for param in backbone.parameters()])
print('Number of parameters in backbone: {}\n'.format(n_params_b))
gater = Gater(depth=20,
bottleneck_size=8,
gate_size=backbone.module.gate_size)
print('Loading checkpoint weights into gater.')
gater = load_from_state(state_dict['gater_state_dict'], gater)
gater = nn.DataParallel(gater).to(device)
print('Gater is ready after loading checkpoint and moving to device:')
print(gater)
n_params_g = sum(
[param.view(-1).size()[0] for param in gater.parameters()])
print('Number of parameters in gater: {}'.format(n_params_g))
print('Total number of parameters: {}\n'.format(n_params_b + n_params_g))
print('Running test on test data.')
test(backbone, gater, device, test_loader)
def parse_flags():
"""Parses input arguments."""
parser = argparse.ArgumentParser(description='GaterNet')
parser.add_argument('--no-cuda', action='store_true', default=False,
help='disables CUDA training')
parser.add_argument('--backbone-depth', type=int, default=20,
help='resnet depth of the backbone subnetwork')
parser.add_argument('--checkpoint-file', type=str, default=None,
help='checkpoint file to run test')
parser.add_argument('--data-dir', type=str, default=None,
help='the directory for storing data')
args = parser.parse_args()
return args
def main(args):
print('Input arguments:\n{}\n'.format(args))
use_cuda = not args.no_cuda and torch.cuda.is_available()
print('use_cuda: {}'.format(use_cuda))
device = torch.device('cuda' if use_cuda else 'cpu')
torch.backends.cudnn.benchmark = True
print('device: {}'.format(device))
if not os.path.isdir(args.data_dir):
os.mkdir(args.data_dir)
kwargs = {'num_workers': 8, 'pin_memory': True} if use_cuda else {}
normalize_mean = [0.4914, 0.4822, 0.4465]
normalize_std = [0.2470, 0.2435, 0.2616]
test_loader = torch.utils.data.DataLoader(
datasets.CIFAR10(
args.data_dir,
train=False,
download=True,
transform=transforms.Compose([
transforms.ToTensor(),
transforms.Normalize(normalize_mean, normalize_std)])
),
batch_size=1000, shuffle=False, drop_last=False, **kwargs)
print('Successfully get data loader.')
run(args, device, test_loader)
if __name__ == '__main__':
main(parse_flags())
| 35.276316
| 79
| 0.693398
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,785
| 0.332898
|
c46ae74020d50b1e15aaa99acf255cf154208cb8
| 251
|
pyw
|
Python
|
client.pyw
|
thatfuckingbird/hydrus-websocket-server
|
b55454740dca5101448bf92224432f8bdbec7e77
|
[
"WTFPL"
] | 1,417
|
2015-01-22T00:50:30.000Z
|
2022-03-30T18:44:55.000Z
|
client.pyw
|
thatfuckingbird/hydrus-websocket-server
|
b55454740dca5101448bf92224432f8bdbec7e77
|
[
"WTFPL"
] | 975
|
2015-01-05T01:41:40.000Z
|
2022-03-31T06:01:50.000Z
|
client.pyw
|
thatfuckingbird/hydrus-websocket-server
|
b55454740dca5101448bf92224432f8bdbec7e77
|
[
"WTFPL"
] | 163
|
2015-02-04T13:09:35.000Z
|
2022-03-23T01:00:05.000Z
|
#!/usr/bin/env python3
# Hydrus is released under WTFPL
# You just DO WHAT THE FUCK YOU WANT TO.
# https://github.com/sirkris/WTFPL/blob/master/WTFPL.md
from hydrus import hydrus_client
if __name__ == '__main__':
hydrus_client.boot()
| 19.307692
| 55
| 0.709163
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 159
| 0.633466
|
c46b9bf38daa8aa62af17faaff944dc07ddd1de9
| 5,776
|
py
|
Python
|
fixEngine/fixEngine.py
|
HNGlez/ExchangeConnector
|
5176437963a3e9e671bb059c599c79f39439f4d4
|
[
"MIT"
] | null | null | null |
fixEngine/fixEngine.py
|
HNGlez/ExchangeConnector
|
5176437963a3e9e671bb059c599c79f39439f4d4
|
[
"MIT"
] | null | null | null |
fixEngine/fixEngine.py
|
HNGlez/ExchangeConnector
|
5176437963a3e9e671bb059c599c79f39439f4d4
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
ExchangeConnector fixEngine
Copyright (c) 2020 Hugo Nistal Gonzalez
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import asyncio
import simplefix
import threading
import logging
import time
import sys
import configparser
from fixClientMessages import FixClientMessages
from connectionHandler import FIXConnectionHandler, SocketConnectionState
class FixEngine(FIXConnectionHandler):
def __init__(self, config, reader, writer, messageListener):
FIXConnectionHandler.__init__(self, config, reader, writer, messageListener)
self._config = config
self._logout = False
self._engineLogger.info(f"Socket Connection Open to {config['SocketHost']}:{config['SocketPort']}")
self.clientMessage = FixClientMessages(config['SenderCompID'], config['TargetCompID'], config['SenderPassword'], config['BeginString'], config.getint('HeartBeatInterval'))
asyncio.ensure_future(self._handleEngine())
def getConnectionState(self):
return self._connectionState
async def _sessionMessageHandler(self, message: simplefix.FixMessage) -> bool:
""" Handle Session Message."""
assert isinstance(message, simplefix.FixMessage)
# NEED TO ADD HANDLING OF BUSINESS REJECTS
msgType = message.get(simplefix.TAG_MSGTYPE)
if msgType == simplefix.MSGTYPE_LOGON: # Handle logon
if self._connectionState == SocketConnectionState.LOGGED_IN:
                self._engineLogger.warning(f"{self._config['SenderCompID']} already logged in -> Ignoring Login Request.")
else:
self._connectionState = SocketConnectionState.LOGGED_IN
self._engineLogger.info(f"{self._config['SenderCompID']} session -> LOGON")
self._config['HeartBeatInterval'] = str(message.get(simplefix.TAG_HEARTBTINT).decode())
return True
elif self._connectionState == SocketConnectionState.LOGGED_IN:
if msgType == simplefix.MSGTYPE_TEST_REQUEST: # Send test heartbeat when requested
msg = self.clientMessage.sendHeartbeat()
msg.append_pair(simplefix.TAG_TESTREQID, message.get(simplefix.TAG_TESTREQID))
await self.sendMessage(msg)
return True
elif msgType == simplefix.MSGTYPE_LOGOUT: # Handle Logout
self._connectionState = SocketConnectionState.LOGGED_OUT
self._engineLogger.info(f"{self._config['SenderCompID']} session -> LOGOUT")
self.handleClose()
return True
elif msgType == simplefix.MSGTYPE_HEARTBEAT:
msg = self.clientMessage.sendHeartbeat()
msg.append_pair(simplefix.TAG_TESTREQID, message.get(simplefix.TAG_TESTREQID))
await self.sendMessage(msg)
return True
elif message.get(simplefix.TAG_RESETSEQNUMFLAG) == simplefix.RESETSEQNUMFLAG_YES: # If ResetSeqNum = Y Then Reset sequence
self._session.resetSeqNo()
self._engineLogger.info("Resetting Sequence Number to 1")
return True
else:
return False
else:
self._engineLogger.warning(f"Cannot process message. {self._config['SenderCompID']} is not logged in.")
return False
async def _handleEngine(self):
await self.logon()
while self._connectionState != SocketConnectionState.DISCONNECTED:
if self._connectionState != SocketConnectionState.LOGGED_OUT:
await self.readMessage()
await self.expectedHeartbeat(self._config.getint('HeartBeatInterval'))
else:
await self.logon()
class FIXClient:
def __init__(self, configFile, gateway, listener):
self._config = self.loadConfig(configFile, gateway)
self._reader = None
self._writer = None
self._client = None
self._messageListener = listener
async def startClient(self, loop):
""" Creates Socket Connection and Runs Main Loop."""
self._reader, self._writer = await asyncio.open_connection(self._config["SocketHost"], self._config["SocketPort"], loop=loop)
self._connectionState = SocketConnectionState.CONNECTED
self._client = FixEngine(self._config, self._reader, self._writer, self._messageListener)
def loadConfig(self, filePath, gateway):
        parser = configparser.ConfigParser()  # SafeConfigParser was removed in Python 3.12; ConfigParser is the same class
parser.read(filePath)
if parser.has_section(gateway):
return parser[gateway]
else:
raise Exception(f"{gateway} section not found in configuration file {filePath}")
def getClient(self):
return self._client
| 46.208
| 179
| 0.691136
| 4,381
| 0.758483
| 0
| 0
| 0
| 0
| 3,082
| 0.533587
| 1,935
| 0.335007
|
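A minimal sketch of how FIXClient might be driven, based only on the constructor and startClient(loop) shown above; the config file name, the gateway section and the listener are placeholders, not values from the project.

import asyncio

from fixEngine import FIXClient  # assumes the module is importable from the working directory

def print_listener(message):
    # placeholder application-level listener; the exact callback signature is an assumption
    print("received:", message)

def run():
    loop = asyncio.get_event_loop()
    # "config.ini" and the "EXCHANGE" section name are illustrative values only
    client = FIXClient("config.ini", "EXCHANGE", print_listener)
    loop.run_until_complete(client.startClient(loop))
    loop.run_forever()  # keep the engine's read/heartbeat loop running

if __name__ == "__main__":
    run()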
c46bcfd7797c21307852fe37265fa68fac0dbbc3
| 570
|
py
|
Python
|
plugins/session_list/views.py
|
farazkhanfk7/ajenti
|
ff51635bea0d29bf9f35dd7912f145398040541d
|
[
"MIT"
] | 1
|
2021-04-27T07:16:01.000Z
|
2021-04-27T07:16:01.000Z
|
plugins/session_list/views.py
|
farazkhanfk7/ajenti
|
ff51635bea0d29bf9f35dd7912f145398040541d
|
[
"MIT"
] | null | null | null |
plugins/session_list/views.py
|
farazkhanfk7/ajenti
|
ff51635bea0d29bf9f35dd7912f145398040541d
|
[
"MIT"
] | null | null | null |
from jadi import component
from aj.api.http import url, HttpPlugin
from aj.auth import authorize
from aj.api.endpoint import endpoint, EndpointError
import aj
import gevent
@component(HttpPlugin)
class Handler(HttpPlugin):
def __init__(self, context):
self.context = context
@url(r'/api/session_list/list')
@endpoint(api=True)
def handle_api_list_sessions(self, http_context):
if http_context.method == 'GET':
self.context.worker.update_sessionlist()
gevent.sleep(1)
return aj.sessions
| 25.909091
| 53
| 0.687719
| 358
| 0.62807
| 0
| 0
| 381
| 0.668421
| 0
| 0
| 30
| 0.052632
|
c46cb76d02d71b063cedf52c09eb7f327cd308da
| 10,606
|
py
|
Python
|
now/collection/prov_execution/argument_captors.py
|
CrystalMei/Prov_Build
|
695576c36b7d5615f1cc568954658f8a7ce9eeba
|
[
"MIT"
] | 2
|
2017-11-10T16:17:11.000Z
|
2021-12-19T18:43:22.000Z
|
now/collection/prov_execution/argument_captors.py
|
CrystalMei/Prov_Build
|
695576c36b7d5615f1cc568954658f8a7ce9eeba
|
[
"MIT"
] | null | null | null |
now/collection/prov_execution/argument_captors.py
|
CrystalMei/Prov_Build
|
695576c36b7d5615f1cc568954658f8a7ce9eeba
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2016 Universidade Federal Fluminense (UFF)
# Copyright (c) 2016 Polytechnic Institute of New York University.
# Copyright (c) 2018, 2019, 2020 President and Fellows of Harvard College.
# This file is part of ProvBuild.
"""Capture arguments from calls"""
from __future__ import (absolute_import, print_function,
division, unicode_literals)
import weakref
import itertools
import inspect
from future.utils import viewitems
from ...utils.functions import abstract
from ..prov_definition.utils import ClassDef, Assert, With, Decorator
WITHOUT_PARAMS = (ClassDef, Assert, With)
class ArgumentCaptor(object): # pylint: disable=too-few-public-methods
"""Collect arguments during calls"""
def __init__(self, provider):
self.provider = weakref.proxy(provider)
def capture(self, frame, activation): # pylint: disable=unused-argument, no-self-use
"""Abstract method for capture"""
abstract()
class ProfilerArgumentCaptor(ArgumentCaptor): # pylint: disable=too-few-public-methods
"""Collect arguments for profiler"""
def __init__(self, *args, **kwargs):
super(ProfilerArgumentCaptor, self).__init__(*args, **kwargs)
self.f_locals = {}
def capture(self, frame, activation):
"""Store argument object values
Arguments:
frame -- current frame, after trace call
activation -- current activation
"""
provider = self.provider
self.f_locals = values = frame.f_locals
code = frame.f_code
names = code.co_varnames
nargs = code.co_argcount
# Capture args
for var in itertools.islice(names, 0, nargs):
try:
provider.object_values.add(
var,
provider.serialize(values[var]), "ARGUMENT", activation.id)
activation.args.append(var)
except Exception: # pylint: disable=broad-except
# ignoring any exception during capture
pass
# Capture *args
if code.co_flags & inspect.CO_VARARGS: # pylint: disable=no-member
varargs = names[nargs]
provider.object_values.add(
varargs,
provider.serialize(values[varargs]), "ARGUMENT", activation.id)
activation.starargs.append(varargs)
nargs += 1
# Capture **kwargs
if code.co_flags & inspect.CO_VARKEYWORDS: # pylint: disable=no-member
kwargs = values[names[nargs]]
for key in kwargs:
provider.object_values.add(
key, provider.serialize(kwargs[key]), "ARGUMENT",
activation.id)
activation.kwargs.append(names[nargs])
class InspectProfilerArgumentCaptor(ArgumentCaptor): # pylint: disable=too-few-public-methods
"""This Argument Captor uses the inspect.getargvalues that is slower
because it considers the existence of anonymous tuple
"""
def capture(self, frame, activation):
"""Store argument object values
Arguments:
frame -- current frame, after trace call
activation -- current activation
"""
provider = self.provider
# ToDo #75: inspect.getargvalues was deprecated on Python 3.5
# ToDo #75: use inspect.signature instead
(args, varargs, keywords, values) = inspect.getargvalues(frame)
for arg in args:
try:
provider.object_values.add(
arg, provider.serialize(values[arg]), "ARGUMENT",
activation.id)
activation.args.append(arg)
except Exception: # ignoring any exception during capture # pylint: disable=broad-except
pass
if varargs:
provider.object_values.add(
varargs, provider.serialize(values[varargs]), "ARGUMENT",
activation.id)
activation.starargs.append(varargs)
if keywords:
for key, value in viewitems(values[keywords]):
provider.object_values.add(
key, provider.serialize(value), "ARGUMENT", activation.id)
activation.kwargs.append(key)
class SlicingArgumentCaptor(ProfilerArgumentCaptor):
"""Create Slicing Variables for Arguments and dependencies between
Parameters and Arguments"""
def __init__(self, *args, **kwargs):
super(SlicingArgumentCaptor, self).__init__(*args, **kwargs)
self.caller, self.activation = None, None
self.filename, self.line = "", 0
self.frame = None
def match_arg(self, passed, arg):
"""Match passed arguments with param
Arguments:
passed -- Call Variable name
arg -- Argument name
"""
provider = self.provider
activation = self.activation
context = activation.context
if arg in context:
act_var = context[arg]
else:
vid = provider.add_variable(activation.id, arg,
self.line, self.f_locals, "param")
act_var = provider.variables[vid]
context[arg] = act_var
if passed:
caller = self.caller
target = provider.find_variable(caller, passed, self.filename)
if target is not None:
provider.dependencies.add(
act_var.activation_id, act_var.id,
target.activation_id, target.id, "parameter"
)
def match_args(self, params, arg):
"""Match passed argument with param
Arguments:
params -- Call Variable names
arg -- Argument name
"""
for param in params:
self.match_arg(param, arg)
def _defined_call(self, activation):
"""Return a call extracted from AST if it has arguments
or None, otherwise
Arguments:
activation -- current activation
"""
if not activation.with_definition or activation.is_main:
return
if activation.is_comprehension():
return
provider = self.provider
lineno, lasti = activation.line, activation.lasti
filename = activation.filename
function_name = activation.name
if (function_name == "__enter__" and
lasti in provider.with_enter_by_lasti[filename][lineno]):
activation.has_parameters = False
return
if (function_name == "__exit__" and
lasti in provider.with_exit_by_lasti[filename][lineno]):
activation.has_parameters = False
return
if lasti in provider.iters[filename][lineno]:
activation.has_parameters = False
provider.next_is_iter = True
return
try:
call = provider.call_by_lasti[filename][lineno][lasti]
except (IndexError, KeyError):
# call not found
# ToDo: show in dev-mode
return
if (isinstance(call, WITHOUT_PARAMS) or
(isinstance(call, Decorator) and not call.is_fn)):
activation.has_parameters = False
return
return call
def capture(self, frame, activation): # pylint: disable=too-many-locals
"""Match call parameters to function arguments
Arguments:
frame -- current frame, after trace call
activation -- current activation
"""
super(SlicingArgumentCaptor, self).capture(frame, activation)
provider = self.provider
self.frame = frame
call = self._defined_call(activation)
if not call:
return
self.filename = activation.filename
self.line = frame.f_lineno
self.caller, self.activation = provider.current_activation, activation
match_args, match_arg = self.match_args, self.match_arg
act_args_index = activation.args.index
# Check if it has starargs and kwargs
sub = -[bool(activation.starargs), bool(activation.kwargs)].count(True)
order = activation.args + activation.starargs + activation.kwargs
activation_arguments = len(order) + sub
used = [0 for _ in order]
j = 0
# Match positional arguments
for i, call_arg in enumerate(call.args):
if call_arg:
j = i if i < activation_arguments else sub
act_arg = order[j]
match_args(call_arg, act_arg)
used[j] += 1
# Match keyword arguments
for act_arg, call_arg in viewitems(call.keywords):
try:
i = act_args_index(act_arg)
match_args(call_arg, act_arg)
used[i] += 1
except ValueError:
for kwargs in activation.kwargs:
match_args(call_arg, kwargs)
# Match kwargs, starargs
# ToDo #75: Python 3.5 supports multiple keyword arguments and starargs
# ToDo #75: improve matching
# Ignore default params
# Do not match f(**kwargs) with def(*args)
args = [(k, order[k]) for k in range(len(used)) if not used[k]]
for star in call.kwargs + call.starargs:
for i, act_arg in args:
match_args(star, act_arg)
used[i] += 1
# Create variables for unmatched arguments
args = [(k, order[k]) for k in range(len(used)) if not used[k]]
for i, act_arg in args:
match_arg(None, act_arg)
# Create dependencies between all parameters
# ToDo #35: improve dependencies to use references.
# Do not create dependencies between all parameters
all_args = list(provider.find_variables(
self.caller, call.all_args(), activation.filename))
if all_args:
graybox = provider.create_func_graybox(activation.id, activation.line)
provider.add_dependencies(graybox, all_args)
provider.add_inter_dependencies(frame.f_back.f_locals, all_args,
self.caller, activation.line,
[(graybox, graybox.name)])
| 36.826389
| 127
| 0.581463
| 9,973
| 0.940317
| 0
| 0
| 0
| 0
| 0
| 0
| 2,669
| 0.25165
|
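The captors above read call arguments straight from the interpreter frame. Outside of noWorkflow's tracer, the frame introspection used by InspectProfilerArgumentCaptor can be reproduced with a small profile hook; this standalone sketch only mirrors the idea and is not part of the package.

import inspect
import sys

captured = []

def trace_calls(frame, event, arg):
    """Profile hook that records argument names and values for every Python-level call."""
    if event == "call":
        args, varargs, keywords, values = inspect.getargvalues(frame)
        captured.append({
            "function": frame.f_code.co_name,
            "args": {name: values[name] for name in args},
            "varargs": values.get(varargs) if varargs else None,
            "kwargs": values.get(keywords) if keywords else None,
        })

def add(a, b, *rest, **options):
    return a + b + sum(rest)

sys.setprofile(trace_calls)
add(1, 2, 3, scale=10)
sys.setprofile(None)
print(captured)  # one record for 'add' with its positional, *args and **kwargs values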
c46dc4849d73685f3bf2bf7edc6ed45dee20d695
| 307
|
py
|
Python
|
Python/Day8 DictionariesAndMaps.py
|
codePerfectPlus/30-DaysOfCode-With-Python-And-JavaScript
|
570fa12ed30659fa394d86e12583b69f35a2e7a7
|
[
"MIT"
] | 8
|
2020-08-03T01:53:13.000Z
|
2022-01-09T14:47:58.000Z
|
Python/Day8 DictionariesAndMaps.py
|
codePerfectPlus/30-DaysOfCode-With-Python-And-JavaScript
|
570fa12ed30659fa394d86e12583b69f35a2e7a7
|
[
"MIT"
] | null | null | null |
Python/Day8 DictionariesAndMaps.py
|
codePerfectPlus/30-DaysOfCode-With-Python-And-JavaScript
|
570fa12ed30659fa394d86e12583b69f35a2e7a7
|
[
"MIT"
] | 4
|
2020-09-29T11:28:53.000Z
|
2021-06-02T15:34:55.000Z
|
N = int(input())
entry = [input().split() for _ in range(N)]
phoneBook = {name: number for name, number in entry}
while True:
try:
name = input()
if name in phoneBook:
print(f"{name}={phoneBook[name]}")
else:
print("Not found")
    except EOFError:  # stop when there is no more input
break
| 21.928571
| 52
| 0.534202
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 38
| 0.123779
|
c46f3c278fa8309cddd52d6eeccf2dae6ea924e2
| 1,850
|
py
|
Python
|
10. Recurrent Neural Network/10-1) Recurrent Neural Network, RNN.py
|
choijiwoong/-ROKA-torch-tutorial-files
|
c298fdf911cd64757895c3ab9f71ae7c3467c545
|
[
"Unlicense"
] | null | null | null |
10. Recurrent Neural Network/10-1) Recurrent Neural Network, RNN.py
|
choijiwoong/-ROKA-torch-tutorial-files
|
c298fdf911cd64757895c3ab9f71ae7c3467c545
|
[
"Unlicense"
] | null | null | null |
10. Recurrent Neural Network/10-1) Recurrent Neural Network, RNN.py
|
choijiwoong/-ROKA-torch-tutorial-files
|
c298fdf911cd64757895c3ab9f71ae7c3467c545
|
[
"Unlicense"
] | null | null | null |
#Sequence model. != Recursive Neural Network
#memory cell or RNN cell
#hidden state
#one-to-many_image captioning, many-to-one_sentiment classfication || spam detection, many-to-many_chat bot
#2) create RNN in python
import numpy as np
timesteps=10 # number of time steps (length of the sentence)
input_size=4 # dimension of the input (word-vector dimension)
hidden_size=8 # capacity of the memory cell (size of the hidden state)
inputs=np.random.random((timesteps, input_size)) # 2D tensor holding the inputs
hidden_state_t=np.zeros((hidden_size,)) # create the hidden state with hidden_size and initialise it to zeros
print(hidden_state_t)
Wx=np.random.random((hidden_size, input_size)) # input weights
Wh=np.random.random((hidden_size, hidden_size)) # hidden-state weights
b=np.random.random((hidden_size,))
print(np.shape(Wx))
print(np.shape(Wh))
print(np.shape(b))
total_hidden_states=[]
#memory cell work
for input_t in inputs:
    output_t=np.tanh(np.dot(Wx,input_t)+np.dot(Wh,hidden_state_t)+b)
    total_hidden_states.append(list(output_t)) # accumulate the hidden state of each time step
    print(np.shape(total_hidden_states))
    hidden_state_t=output_t
total_hidden_states=np.stack(total_hidden_states, axis=0) # stack for a clean output
print(total_hidden_states)
#3) nn.RNN() in pytorch
import torch
import torch.nn as nn
input_size=5 # input size
hidden_size=8 # size of the hidden state
inputs=torch.Tensor(1, 10, 5) # batch size 1, 10 time steps, 5-dimensional input vectors
cell=nn.RNN(input_size, hidden_size, batch_first=True) # the first dimension of the input tensor is the batch size
outputs, _status=cell(inputs) # returns two outputs: the hidden states of every time step and the hidden state of the last time step
print(outputs.shape)
#4) Deep Recurrent Neural Network
inputs=torch.Tensor(1, 10, 5)
cell=nn.RNN(input_size=5, hidden_size=8, num_layers=2, batch_first=True) # two stacked hidden layers (cells)
outputs, _status=cell(inputs) # run the deeper RNN so the shapes below refer to it
print(outputs.shape)
print(_status.shape) # (number of layers, batch size, hidden-state size)
#5) Bidirectional Recurrent Neural Network
inputs=torch.Tensor(1, 10, 5)
cell=nn.RNN(input_size=5, hidden_size=8, num_layers=2, batch_first=True, bidirectional=True) # bidirectional recurrence
outputs, _status=cell(inputs)
print(outputs.shape) # hidden size doubled because the two directions are concatenated
print(_status.shape) # number of layers doubled (one set per direction)
| 30.327869
| 107
| 0.778378
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,001
| 0.447674
|
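Following up on the bidirectional example above, the doubled hidden dimension of the outputs can be split back into its forward and backward halves. A short sketch using the same batch_first=True convention:

import torch
import torch.nn as nn

inputs = torch.randn(1, 10, 5)  # (batch, time steps, input size)
cell = nn.RNN(input_size=5, hidden_size=8, num_layers=2,
              batch_first=True, bidirectional=True)
outputs, _status = cell(inputs)

forward_out = outputs[:, :, :8]   # hidden states of the forward direction
backward_out = outputs[:, :, 8:]  # hidden states of the backward direction
print(forward_out.shape, backward_out.shape)  # torch.Size([1, 10, 8]) twice

# _status has shape (num_layers * num_directions, batch, hidden_size) = (4, 1, 8)
print(_status.shape)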
c46f42400056a3b7b9402bc800d3e92633345822
| 720
|
py
|
Python
|
WeLearn/M3-Python/L3-Python_Object/pet.py
|
munoz196/moonyosCSSIrep
|
cdfcd2ae061293471ecdf2d370a27f163efeba97
|
[
"Apache-2.0"
] | null | null | null |
WeLearn/M3-Python/L3-Python_Object/pet.py
|
munoz196/moonyosCSSIrep
|
cdfcd2ae061293471ecdf2d370a27f163efeba97
|
[
"Apache-2.0"
] | null | null | null |
WeLearn/M3-Python/L3-Python_Object/pet.py
|
munoz196/moonyosCSSIrep
|
cdfcd2ae061293471ecdf2d370a27f163efeba97
|
[
"Apache-2.0"
] | null | null | null |
pet = {
"name":"Doggo",
"animal":"dog",
"species":"labrador",
"age":"5"
}
class Pet(object):
def __init__(self, name, age, animal):
self.name = name
self.age = age
self.animal = animal
        self.is_hungry = False
        self.mood = "happy"
def eat(self):
print("> %s is eating..." % self.name)
if self.is_hungry:
self.is_hungry = False
else:
print("> %s may have eaten too much." % self.name)
self.mood = "lethargic "
my_pet= Pet("Fido", 3, "dog")
my_pet.is_hungry= True
print("is my pet hungry? %s"% my_pet.is_hungry)
my_pet.eat()
print("how about now? %s" % my_pet.is_hungry)
print ("My pet is feeling %s" % my_pet.mood)
| 22.5
| 62
| 0.566667
| 437
| 0.606944
| 0
| 0
| 0
| 0
| 0
| 0
| 196
| 0.272222
|
c470769346abfe53705868b77ccb1792faae0816
| 1,260
|
py
|
Python
|
src/repositories/example_repo.py
|
pybokeh/dagster-examples
|
459cfbe00585f1d123e49058685c74149efb867d
|
[
"MIT"
] | null | null | null |
src/repositories/example_repo.py
|
pybokeh/dagster-examples
|
459cfbe00585f1d123e49058685c74149efb867d
|
[
"MIT"
] | null | null | null |
src/repositories/example_repo.py
|
pybokeh/dagster-examples
|
459cfbe00585f1d123e49058685c74149efb867d
|
[
"MIT"
] | null | null | null |
from dagster import job, repository
from ops.sklearn_ops import (
fetch_freehand_text_to_generic_data,
separate_features_from_target_label,
label_encode_target,
count_tfid_transform_train,
count_tfid_transform_test,
create_sgd_classifier_model,
predict
)
@job(
description="Scikit-Learn multi-class text classification: classify free-hand text computer skills descriptions to generic descriptions"
)
def text_classify():
X_train, y_train = separate_features_from_target_label.alias("separate_features_from_target_train")(
fetch_freehand_text_to_generic_data.alias("fetch_training_data")()
)
df_test = fetch_freehand_text_to_generic_data.alias("fetch_test_data")()
y_encoded_train, label_encoder_train = label_encode_target.alias("label_encode_train")(y_train)
X_encoded_train, count_vect, tfid_vect = count_tfid_transform_train.alias("count_tfid_transform_train")(X_train)
clf = create_sgd_classifier_model(X_encoded_train, y_encoded_train)
X_encoded_test = count_tfid_transform_test(df_test, count_vect, tfid_vect)
predict(X_encoded_test, clf, label_encoder_train)
@repository
def examples_repo():
return [
text_classify,
]
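# --- Editor's addition: a hedged usage sketch, not part of the original repository. ---
# One way to smoke-test the job locally; assumes the ops above need no extra run_config.
if __name__ == "__main__":
    result = text_classify.execute_in_process()
    print("text_classify succeeded:", result.success)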
| 33.157895
| 141
| 0.768254
| 0
| 0
| 0
| 0
| 955
| 0.757937
| 0
| 0
| 247
| 0.196032
|
c4721b4a3c1999fdb50a16efbe7e2d5c42d79e86
| 551
|
py
|
Python
|
exercicios/Maior_e_Menor_Valores.py
|
jeversonneves/Python
|
c31779d8db64b22711fe612cc943da8c5e51788b
|
[
"MIT"
] | null | null | null |
exercicios/Maior_e_Menor_Valores.py
|
jeversonneves/Python
|
c31779d8db64b22711fe612cc943da8c5e51788b
|
[
"MIT"
] | null | null | null |
exercicios/Maior_e_Menor_Valores.py
|
jeversonneves/Python
|
c31779d8db64b22711fe612cc943da8c5e51788b
|
[
"MIT"
] | null | null | null |
resposta = 'S'
soma = quant = media = maior = menor = 0
while resposta in 'Ss':
n = int(input('Digite um número: '))
soma += n
quant += 1
if quant == 1:
maior = menor = n
else:
if n > maior:
maior = n
elif n < menor:
menor = n
resposta = str(input('Quer continuar? [S/N]: ')).upper().strip()[0]
media = soma / quant
print('Você digitou {} números e a soma foi de {} e media de {}.'.format(quant, soma, media))
print('O maior número {} e o menor número {}.'.format(maior, menor))
| 30.611111
| 93
| 0.548094
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 156
| 0.280576
|
c472af02ddcb4584d404fd75d6b5093bc3a9b31d
| 554
|
py
|
Python
|
rbc/opening/opening.py
|
rebuildingcode/hardware
|
df38d4b955047fdea69dda6b662c56ac301799a2
|
[
"BSD-3-Clause"
] | null | null | null |
rbc/opening/opening.py
|
rebuildingcode/hardware
|
df38d4b955047fdea69dda6b662c56ac301799a2
|
[
"BSD-3-Clause"
] | 27
|
2019-09-04T06:29:34.000Z
|
2020-04-19T19:41:44.000Z
|
rbc/opening/opening.py
|
rebuildingcode/hardware
|
df38d4b955047fdea69dda6b662c56ac301799a2
|
[
"BSD-3-Clause"
] | 2
|
2020-02-28T02:56:31.000Z
|
2020-02-28T03:12:07.000Z
|
from shapely.geometry import Polygon
from ..point import Point
class Opening(Polygon):
"""
Openings are rectangular only.
"""
def __init__(self, width, height):
self.width = width
self.height = height
points = [
Point(0, 0), Point(0, height), Point(width, height), Point(width, 0)
]
super().__init__(shell=[(pt.x, pt.y) for pt in points])
def plot(self):
"""
- [ ] plot plan view
- [ ] plot elevation view
"""
pass # pragma: no cover
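# --- Editor's addition: a hedged usage sketch, not part of the original module. ---
# Opening inherits from shapely's Polygon, so the standard geometric properties apply;
# the width/height values below are illustrative only.
if __name__ == "__main__":  # pragma: no cover
    door = Opening(width=0.9, height=2.1)
    print(door.area)    # 1.89
    print(door.bounds)  # (0.0, 0.0, 0.9, 2.1)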
| 20.518519
| 80
| 0.539711
| 487
| 0.879061
| 0
| 0
| 0
| 0
| 0
| 0
| 142
| 0.256318
|
c47376723d72b33e6ef5ded0c99f0808db10a51e
| 4,252
|
py
|
Python
|
AI/Housing Prices Prediction/HousePricesNN.py
|
n0rel/self
|
f9f44af42aa652f9a72279e44ffd8d4387a4bdae
|
[
"MIT"
] | null | null | null |
AI/Housing Prices Prediction/HousePricesNN.py
|
n0rel/self
|
f9f44af42aa652f9a72279e44ffd8d4387a4bdae
|
[
"MIT"
] | null | null | null |
AI/Housing Prices Prediction/HousePricesNN.py
|
n0rel/self
|
f9f44af42aa652f9a72279e44ffd8d4387a4bdae
|
[
"MIT"
] | null | null | null |
import numpy as np
import pandas as pd
from sklearn.preprocessing import LabelEncoder, MinMaxScaler
from numpy.random import uniform
import matplotlib.pyplot as plt
def relu(x):
return x * (x > 0)
def relu_deriv(x):
return 1 * (x > 0)
class NeuralNetwork:
"""
Our NN will predict a housing price given 6 parameters, meaning:
* Input Size: 6
* Error Function: MSE
Hidden Layer:
* Neurons: Param
* Input: 6
* Output: Param
* Activation: ReLU
Output Layer:
* Neurons: 1
* Input: Param
* Output: 1
* Activation: Linear
"""
def __init__(self, hidden_neurons, alpha):
self.hidden_neurons = hidden_neurons
self.alpha = alpha
self.hidden_weights = uniform(low=(-1) * np.sqrt(2 / 6), high=np.sqrt(2 / 6), size=(hidden_neurons, 6))
self.output_weights = uniform(low=-0.1, high=0.1, size=(1, hidden_neurons))
def f_propagate(self, x):
z1 = x.dot(self.hidden_weights.transpose())
g = relu(z1)
output = g.dot(self.output_weights.transpose())
return [g, output, z1]
def fit(self, x, y, epochs):
# ERROR CHECKING
error = []
for epoch in range(epochs):
print(epoch)
# ERROR CHECKING
error_sum = 0
# Arrays used to store incrementing weight changes
hidden_w_sum = np.zeros(shape=self.hidden_weights.shape)
output_w_sum = np.zeros(shape=self.output_weights.shape)
for sample_x, sample_y in zip(x, y):
forward_values = self.f_propagate(sample_x)
output_delta = (forward_values[1] - sample_y) * forward_values[0]
output_delta1 = forward_values[1] - sample_y
# hidden_delta1 = (output_delta1 * self.output_weights).dot(np.outer(relu_deriv(forward_values[2]), sample_x)) # Shape: (Neurons,6)
hidden_delta1 = np.outer(output_delta1 * self.output_weights * relu_deriv(forward_values[2]), sample_x)
output_w_sum += output_delta
hidden_w_sum += hidden_delta1
# ERROR CHECKING
error_sum += abs(sample_y - forward_values[1])
# ERROR CHECKING
error.append(error_sum / len(x))
self.output_weights -= self.alpha * output_w_sum / len(x)
self.hidden_weights -= self.alpha * hidden_w_sum / len(x)
plt.plot(error)
plt.show()
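# --- Editor's addition: a hedged illustration, not part of the original script. ---
# A single forward pass on dummy data to show the shapes produced by f_propagate;
# the _demo_* names and values are illustrative only.
_demo_nn = NeuralNetwork(hidden_neurons=4, alpha=0.1)
_demo_x = np.ones((1, 6))  # one sample: bias column + 5 features
_demo_g, _demo_pred, _demo_z1 = _demo_nn.f_propagate(_demo_x)
print(_demo_g.shape, _demo_pred.shape, _demo_z1.shape)  # (1, 4) (1, 1) (1, 4)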
# Import Data
training_amount = 4000
input_scaler = MinMaxScaler((-1, 1))
output_scaler = MinMaxScaler((-1, 1))
data = pd.read_csv('USA_Housing.csv').drop(columns=['Address'])
data = np.insert(data.to_numpy(), 0, np.ones((1, len(data))), axis=1)
x_scaled, y_scaled = input_scaler.fit_transform(data[:, :6]), output_scaler.fit_transform(data[:, 6:7])
x_train, y_train = x_scaled[:training_amount], y_scaled[:training_amount]
x_test, y_test = x_scaled[training_amount:], y_scaled[training_amount:]
hidden_neurons = 10
# Create NN & train it
nn = NeuralNetwork(hidden_neurons, 0.7)
nn.fit(x_train, y_train, epochs=75)
error = 0
amount_to_check = 20
for x, y in zip(x_test[:amount_to_check, :], y_test[:amount_to_check]):
error += abs(output_scaler.inverse_transform(y.reshape(-1, 1))[0][0] -
output_scaler.inverse_transform(nn.f_propagate(x)[1].reshape(-1, 1))[0][0])
print(
f"{output_scaler.inverse_transform(nn.f_propagate(x)[1].reshape(-1, 1))[0][0]} -> {output_scaler.inverse_transform(y.reshape(-1, 1))[0][0]}")
print(f"{(error / len(x_test)):.9f}")
"""
# Keras Version of NN
model = keras.models.Sequential()
model.add(keras.layers.Dense(hidden_neurons, input_dim=5, activation='relu', kernel_initializer='he_normal'))
model.add(keras.layers.Dense(1, input_dim=hidden_neurons, activation='linear'))
model.compile(loss='mse', optimizer='adam', metrics=['mse'])
history = model.fit(x_train, y_train, epochs=10, batch_size=10)
plt.plot(history.history['mse'])
plt.show()
for x, y in zip(model.predict(x_test), y_test):
print(f"{output_scaler.inverse_transform(y.reshape(-1, 1))[0][0]} -> {output_scaler.inverse_transform(x.reshape(-1, 1))[0][0]}")
"""
| 33.480315
| 149
| 0.63476
| 2,302
| 0.541392
| 0
| 0
| 0
| 0
| 0
| 0
| 1,480
| 0.348071
|
c4737a166e262dfedd58077027d802632dac9651
| 7,829
|
py
|
Python
|
tests/test_export_keyword_template_catalina_10_15_4.py
|
PabloKohan/osxphotos
|
2cf3b6bb674c312240c4b12c5d7b558f15be7c85
|
[
"MIT"
] | null | null | null |
tests/test_export_keyword_template_catalina_10_15_4.py
|
PabloKohan/osxphotos
|
2cf3b6bb674c312240c4b12c5d7b558f15be7c85
|
[
"MIT"
] | null | null | null |
tests/test_export_keyword_template_catalina_10_15_4.py
|
PabloKohan/osxphotos
|
2cf3b6bb674c312240c4b12c5d7b558f15be7c85
|
[
"MIT"
] | null | null | null |
import pytest
from osxphotos._constants import _UNKNOWN_PERSON
PHOTOS_DB = "./tests/Test-10.15.4.photoslibrary/database/photos.db"
TOP_LEVEL_FOLDERS = ["Folder1"]
TOP_LEVEL_CHILDREN = ["SubFolder1", "SubFolder2"]
FOLDER_ALBUM_DICT = {"Folder1": [], "SubFolder1": [], "SubFolder2": ["AlbumInFolder"]}
ALBUM_NAMES = ["Pumpkin Farm", "AlbumInFolder", "Test Album", "Test Album"]
ALBUM_PARENT_DICT = {
"Pumpkin Farm": None,
"AlbumInFolder": "SubFolder2",
"Test Album": None,
}
ALBUM_FOLDER_NAMES_DICT = {
"Pumpkin Farm": [],
"AlbumInFolder": ["Folder1", "SubFolder2"],
"Test Album": [],
}
ALBUM_LEN_DICT = {"Pumpkin Farm": 3, "AlbumInFolder": 2, "Test Album": 1}
ALBUM_PHOTO_UUID_DICT = {
"Pumpkin Farm": [
"F12384F6-CD17-4151-ACBA-AE0E3688539E",
"D79B8D77-BFFC-460B-9312-034F2877D35B",
"1EB2B765-0765-43BA-A90C-0D0580E6172C",
],
"Test Album": [
"F12384F6-CD17-4151-ACBA-AE0E3688539E",
"D79B8D77-BFFC-460B-9312-034F2877D35B",
],
"AlbumInFolder": [
"3DD2C897-F19E-4CA6-8C22-B027D5A71907",
"E9BC5C36-7CD1-40A1-A72B-8B8FAC227D51",
],
}
UUID_DICT = {
"two_albums": "F12384F6-CD17-4151-ACBA-AE0E3688539E",
"in_album": "E9BC5C36-7CD1-40A1-A72B-8B8FAC227D51",
"xmp": "F12384F6-CD17-4151-ACBA-AE0E3688539E",
}
def test_exiftool_json_sidecar_keyword_template_long(caplog):
import osxphotos
from osxphotos._constants import _MAX_IPTC_KEYWORD_LEN
import json
photosdb = osxphotos.PhotosDB(dbfile=PHOTOS_DB)
photos = photosdb.photos(uuid=[UUID_DICT["in_album"]])
json_expected = json.loads(
"""
[{"_CreatedBy": "osxphotos, https://github.com/RhetTbull/osxphotos",
"EXIF:ImageDescription": "Bride Wedding day",
"XMP:Description": "Bride Wedding day",
"XMP:TagsList": ["wedding", "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"],
"IPTC:Keywords": ["wedding", "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"],
"XMP:PersonInImage": ["Maria"],
"XMP:Subject": ["wedding", "Maria"],
"EXIF:DateTimeOriginal": "2019:04:15 14:40:24",
"EXIF:OffsetTimeOriginal": "-04:00", "EXIF:ModifyDate": "2019:11:24 13:09:17"}]
"""
)[0]
long_str = "x" * (_MAX_IPTC_KEYWORD_LEN + 1)
json_got = photos[0]._exiftool_json_sidecar(keyword_template=[long_str])
json_got = json.loads(json_got)[0]
assert "Some keywords exceed max IPTC Keyword length" in caplog.text
# some gymnastics to account for different sort order in different pythons
for k, v in json_got.items():
if type(v) in (list, tuple):
assert sorted(json_expected[k]) == sorted(v)
else:
assert json_expected[k] == v
for k, v in json_expected.items():
if type(v) in (list, tuple):
assert sorted(json_got[k]) == sorted(v)
else:
assert json_got[k] == v
def test_exiftool_json_sidecar_keyword_template():
import osxphotos
import json
photosdb = osxphotos.PhotosDB(dbfile=PHOTOS_DB)
photos = photosdb.photos(uuid=[UUID_DICT["in_album"]])
json_expected = json.loads(
"""
[{"_CreatedBy": "osxphotos, https://github.com/RhetTbull/osxphotos",
"EXIF:ImageDescription": "Bride Wedding day",
"XMP:Description": "Bride Wedding day",
"XMP:TagsList": ["wedding", "Folder1/SubFolder2/AlbumInFolder"],
"IPTC:Keywords": ["wedding", "Folder1/SubFolder2/AlbumInFolder"],
"XMP:PersonInImage": ["Maria"],
"XMP:Subject": ["wedding", "Maria"],
"EXIF:DateTimeOriginal": "2019:04:15 14:40:24",
"EXIF:OffsetTimeOriginal": "-04:00", "EXIF:ModifyDate": "2019:11:24 13:09:17"}]
"""
)[0]
json_got = photos[0]._exiftool_json_sidecar(keyword_template=["{folder_album}"])
json_got = json.loads(json_got)[0]
# some gymnastics to account for different sort order in different pythons
for k, v in json_got.items():
if type(v) in (list, tuple):
assert sorted(json_expected[k]) == sorted(v)
else:
assert json_expected[k] == v
for k, v in json_expected.items():
if type(v) in (list, tuple):
assert sorted(json_got[k]) == sorted(v)
else:
assert json_got[k] == v
def test_xmp_sidecar_keyword_template():
import osxphotos
photosdb = osxphotos.PhotosDB(dbfile=PHOTOS_DB)
photos = photosdb.photos(uuid=[UUID_DICT["xmp"]])
xmp_expected = """<!-- Created with osxphotos https://github.com/RhetTbull/osxphotos -->
<x:xmpmeta xmlns:x="adobe:ns:meta/" x:xmptk="XMP Core 5.4.0">
<!-- mirrors Photos 5 "Export IPTC as XMP" option -->
<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#">
<rdf:Description rdf:about=""
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:photoshop="http://ns.adobe.com/photoshop/1.0/">
<dc:description>Girls with pumpkins</dc:description>
<dc:title>Can we carry this?</dc:title>
<!-- keywords and persons listed in <dc:subject> as Photos does -->
<dc:subject>
<rdf:Seq>
<rdf:li>Kids</rdf:li>
<rdf:li>Suzy</rdf:li>
<rdf:li>Katie</rdf:li>
</rdf:Seq>
</dc:subject>
<photoshop:DateCreated>2018-09-28T15:35:49.063000-04:00</photoshop:DateCreated>
</rdf:Description>
<rdf:Description rdf:about=""
xmlns:Iptc4xmpExt='http://iptc.org/std/Iptc4xmpExt/2008-02-29/'>
<Iptc4xmpExt:PersonInImage>
<rdf:Bag>
<rdf:li>Suzy</rdf:li>
<rdf:li>Katie</rdf:li>
</rdf:Bag>
</Iptc4xmpExt:PersonInImage>
</rdf:Description>
<rdf:Description rdf:about=""
xmlns:digiKam='http://www.digikam.org/ns/1.0/'>
<digiKam:TagsList>
<rdf:Seq>
<rdf:li>Kids</rdf:li>
<rdf:li>Pumpkin Farm</rdf:li>
<rdf:li>Test Album</rdf:li>
<rdf:li>2018</rdf:li>
</rdf:Seq>
</digiKam:TagsList>
</rdf:Description>
<rdf:Description rdf:about=""
xmlns:xmp='http://ns.adobe.com/xap/1.0/'>
<xmp:CreateDate>2018-09-28T15:35:49</xmp:CreateDate>
<xmp:ModifyDate>2018-09-28T15:35:49</xmp:ModifyDate>
</rdf:Description>
<rdf:Description rdf:about=""
xmlns:exif='http://ns.adobe.com/exif/1.0/'>
</rdf:Description>
</rdf:RDF>
</x:xmpmeta>"""
xmp_expected_lines = [line.strip() for line in xmp_expected.split("\n")]
xmp_got = photos[0]._xmp_sidecar(
keyword_template=["{created.year}", "{folder_album}"]
)
xmp_got_lines = [line.strip() for line in xmp_got.split("\n")]
for line_expected, line_got in zip(
sorted(xmp_expected_lines), sorted(xmp_got_lines)
):
assert line_expected == line_got
| 35.107623
| 107
| 0.606463
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 4,434
| 0.566356
|
c47490ec669bdd7c9794f49ba2d2ebd89aed558a
| 32,808
|
py
|
Python
|
video_level_models.py
|
pomonam/youtube-8m
|
2d0b9b361785743ec397c6104feb30bb581700e5
|
[
"Apache-2.0"
] | 43
|
2018-10-03T13:29:45.000Z
|
2020-10-12T09:33:44.000Z
|
video_level_models.py
|
pomonam/LearnablePoolingMethodsForVideoClassification
|
2d0b9b361785743ec397c6104feb30bb581700e5
|
[
"Apache-2.0"
] | 1
|
2018-10-01T01:50:56.000Z
|
2019-01-07T17:53:37.000Z
|
video_level_models.py
|
pomonam/LearnablePoolingMethodsForVideoClassification
|
2d0b9b361785743ec397c6104feb30bb581700e5
|
[
"Apache-2.0"
] | 3
|
2018-11-20T14:43:17.000Z
|
2019-07-26T13:25:14.000Z
|
# Copyright 2018 Deep Topology All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains model definitions."""
# noinspection PyUnresolvedReferences
import pathmagic
from tensorflow import flags
import attention_modules
import fish_modules  # referenced below by the Fish* models (FishGate); assumed to live alongside attention_modules
import tensorflow as tf
import tensorflow.contrib.slim as slim
import models
import math
FLAGS = flags.FLAGS
flags.DEFINE_integer(
"moe_num_mixtures", 2,
"The number of mixtures (excluding the dummy 'expert') used for MoeModel.")
###############################################################################
# Baseline (Benchmark) models #################################################
###############################################################################
flags.DEFINE_float(
"moe_l2", 1e-8,
"L2 penalty for MoeModel.")
flags.DEFINE_integer(
"moe_low_rank_gating", -1,
"Low rank gating for MoeModel.")
flags.DEFINE_bool(
"moe_prob_gating", False,
"Prob gating for MoeModel.")
flags.DEFINE_string(
"moe_prob_gating_input", "prob",
"input Prob gating for MoeModel.")
class MoeModel(models.BaseModel):
"""A softmax over a mixture of logistic models (with L2 regularization)."""
def create_model(self,
model_input,
vocab_size,
is_training,
num_mixtures=None,
l2_penalty=1e-8,
**unused_params):
"""Creates a Mixture of (Logistic) Experts model.
It also includes the possibility of gating the probabilities
The model consists of a per-class softmax distribution over a
configurable number of logistic classifiers. One of the classifiers in the
mixture is not trained, and always predicts 0.
Args:
model_input: 'batch_size' x 'num_features' matrix of input features.
vocab_size: The number of classes in the dataset.
is_training: Is this the training phase ?
num_mixtures: The number of mixtures (excluding a dummy 'expert' that
always predicts the non-existence of an entity).
l2_penalty: How much to penalize the squared magnitudes of parameter
values.
Returns:
A dictionary with a tensor containing the probability predictions of the
model in the 'predictions' key. The dimensions of the tensor are
batch_size x num_classes.
"""
num_mixtures = num_mixtures or FLAGS.moe_num_mixtures
low_rank_gating = FLAGS.moe_low_rank_gating
l2_penalty = FLAGS.moe_l2
gating_probabilities = FLAGS.moe_prob_gating
gating_input = FLAGS.moe_prob_gating_input
input_size = model_input.get_shape().as_list()[1]
remove_diag = FLAGS.gating_remove_diag
if low_rank_gating == -1:
gate_activations = slim.fully_connected(
model_input,
vocab_size * (num_mixtures + 1),
activation_fn=None,
biases_initializer=None,
weights_regularizer=slim.l2_regularizer(l2_penalty),
scope="gates")
else:
gate_activations1 = slim.fully_connected(
model_input,
low_rank_gating,
activation_fn=None,
biases_initializer=None,
weights_regularizer=slim.l2_regularizer(l2_penalty),
scope="gates1")
gate_activations = slim.fully_connected(
gate_activations1,
vocab_size * (num_mixtures + 1),
activation_fn=None,
biases_initializer=None,
weights_regularizer=slim.l2_regularizer(l2_penalty),
scope="gates2")
expert_activations = slim.fully_connected(
model_input,
vocab_size * num_mixtures,
activation_fn=None,
weights_regularizer=slim.l2_regularizer(l2_penalty),
scope="experts")
gating_distribution = tf.nn.softmax(tf.reshape(
gate_activations,
[-1, num_mixtures + 1])) # (Batch * #Labels) x (num_mixtures + 1)
expert_distribution = tf.nn.sigmoid(tf.reshape(
expert_activations,
[-1, num_mixtures])) # (Batch * #Labels) x num_mixtures
probabilities_by_class_and_batch = tf.reduce_sum(
gating_distribution[:, :num_mixtures] * expert_distribution, 1)
probabilities = tf.reshape(probabilities_by_class_and_batch,
[-1, vocab_size])
if gating_probabilities:
if gating_input == 'prob':
gating_weights = tf.get_variable("gating_prob_weights",
[vocab_size, vocab_size],
initializer=tf.random_normal_initializer(
stddev=1 / math.sqrt(vocab_size)))
gates = tf.matmul(probabilities, gating_weights)
else:
gating_weights = tf.get_variable("gating_prob_weights",
[input_size, vocab_size],
initializer=tf.random_normal_initializer(
stddev=1 / math.sqrt(vocab_size)))
gates = tf.matmul(model_input, gating_weights)
if remove_diag:
# removes diagonals coefficients
diagonals = tf.matrix_diag_part(gating_weights)
gates = gates - tf.multiply(diagonals, probabilities)
gates = slim.batch_norm(
gates,
center=True,
scale=True,
is_training=is_training,
scope="gating_prob_bn")
gates = tf.sigmoid(gates)
probabilities = tf.multiply(probabilities, gates)
return {"predictions": probabilities}
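# --- Editor's addition: a hedged NumPy illustration, not used by any model above. ---
# It mirrors the mixture described in the docstring: softmax gates over
# (num_mixtures + 1) entries, sigmoid experts, and an implicit extra expert that
# always predicts 0, so only the first num_mixtures gate entries contribute.
def _moe_combination_sketch(gate_activations, expert_activations, num_mixtures):
    """Illustrative only; inputs have shape (batch * labels, num_mixtures(+1))."""
    import numpy as np
    gates = np.exp(gate_activations - gate_activations.max(axis=1, keepdims=True))
    gates = gates / gates.sum(axis=1, keepdims=True)      # softmax gating distribution
    experts = 1.0 / (1.0 + np.exp(-expert_activations))   # sigmoid expert predictions
    # the (num_mixtures + 1)-th expert predicts 0, so it only absorbs gating mass
    return (gates[:, :num_mixtures] * experts).sum(axis=1)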
class FishMoeModel(models.BaseModel):
"""A softmax over a mixture of logistic models (with L2 regularization)."""
def create_model(self,
model_input,
vocab_size,
is_training,
num_mixtures=None,
l2_penalty=1e-8,
filter_size=2,
**unused_params):
"""Creates a Mixture of (Logistic) Experts model.
It also includes the possibility of gating the probabilities
The model consists of a per-class softmax distribution over a
configurable number of logistic classifiers. One of the classifiers in the
mixture is not trained, and always predicts 0.
Args:
model_input: 'batch_size' x 'num_features' matrix of input features.
vocab_size: The number of classes in the dataset.
is_training: Is this the training phase ?
num_mixtures: The number of mixtures (excluding a dummy 'expert' that
always predicts the non-existence of an entity).
l2_penalty: How much to penalize the squared magnitudes of parameter
values.
Returns:
A dictionary with a tensor containing the probability predictions of the
model in the 'predictions' key. The dimensions of the tensor are
batch_size x num_classes.
"""
num_mixtures = num_mixtures or FLAGS.moe_num_mixtures
l2_penalty = FLAGS.moe_l2
gate_activations = slim.fully_connected(
model_input,
vocab_size * (num_mixtures + 1),
activation_fn=None,
biases_initializer=None,
weights_regularizer=slim.l2_regularizer(l2_penalty),
scope="gates")
expert_activations = slim.fully_connected(
model_input,
vocab_size * num_mixtures,
activation_fn=None,
weights_regularizer=slim.l2_regularizer(l2_penalty),
scope="experts")
gating_distribution = tf.nn.softmax(tf.reshape(
gate_activations,
[-1, num_mixtures + 1])) # (Batch * #Labels) x (num_mixtures + 1)
expert_distribution = tf.nn.sigmoid(tf.reshape(
expert_activations,
[-1, num_mixtures])) # (Batch * #Labels) x num_mixtures
probabilities_by_class_and_batch = tf.reduce_sum(
gating_distribution[:, :num_mixtures] * expert_distribution, 1)
probabilities = tf.reshape(probabilities_by_class_and_batch,
[-1, vocab_size])
probabilities = tf.layers.batch_normalization(probabilities, training=is_training)
fish_gate = fish_modules.FishGate(hidden_size=vocab_size,
k=2,
dropout_rate=0.9,
is_training=is_training)
probabilities = fish_gate.forward(probabilities)
probabilities = tf.contrib.layers.layer_norm(probabilities)
probabilities = tf.layers.dense(probabilities, vocab_size, use_bias=True, activation=tf.nn.softmax)
return {"predictions": probabilities}
class FishMoeModel2(models.BaseModel):
"""A softmax over a mixture of logistic models (with L2 regularization)."""
def create_model(self,
model_input,
vocab_size,
is_training,
num_mixtures=None,
l2_penalty=1e-8,
filter_size=2,
**unused_params):
"""Creates a Mixture of (Logistic) Experts model.
It also includes the possibility of gating the probabilities
The model consists of a per-class softmax distribution over a
configurable number of logistic classifiers. One of the classifiers in the
mixture is not trained, and always predicts 0.
Args:
model_input: 'batch_size' x 'num_features' matrix of input features.
vocab_size: The number of classes in the dataset.
is_training: Is this the training phase ?
num_mixtures: The number of mixtures (excluding a dummy 'expert' that
always predicts the non-existence of an entity).
l2_penalty: How much to penalize the squared magnitudes of parameter
values.
Returns:
A dictionary with a tensor containing the probability predictions of the
model in the 'predictions' key. The dimensions of the tensor are
batch_size x num_classes.
"""
num_mixtures = num_mixtures or FLAGS.moe_num_mixtures
l2_penalty = FLAGS.moe_l2
gate_activations = slim.fully_connected(
model_input,
vocab_size * (num_mixtures + 1),
activation_fn=None,
biases_initializer=None,
weights_regularizer=slim.l2_regularizer(l2_penalty),
scope="gates")
expert_activations = slim.fully_connected(
model_input,
vocab_size * num_mixtures,
activation_fn=None,
weights_regularizer=slim.l2_regularizer(l2_penalty),
scope="experts")
gating_distribution = tf.nn.softmax(tf.reshape(
gate_activations,
[-1, num_mixtures + 1])) # (Batch * #Labels) x (num_mixtures + 1)
expert_distribution = tf.nn.sigmoid(tf.reshape(
expert_activations,
[-1, num_mixtures])) # (Batch * #Labels) x num_mixtures
probabilities_by_class_and_batch = tf.reduce_sum(
gating_distribution[:, :num_mixtures] * expert_distribution, 1)
probabilities = tf.reshape(probabilities_by_class_and_batch,
[-1, vocab_size])
fish_gate = fish_modules.FishGate(hidden_size=vocab_size,
k=filter_size,
dropout_rate=0.8,
is_training=is_training)
probabilities = fish_gate.forward(probabilities)
# probabilities = tf.layers.dense(probabilities, vocab_size, use_bias=True, activation=tf.nn.softmax)
return {"predictions": probabilities}
class FishMoeModel4(models.BaseModel):
"""A softmax over a mixture of logistic models (with L2 regularization)."""
def create_model(self,
model_input,
vocab_size,
is_training,
num_mixtures=None,
l2_penalty=1e-8,
filter_size=2,
**unused_params):
"""Creates a Mixture of (Logistic) Experts model.
It also includes the possibility of gating the probabilities
The model consists of a per-class softmax distribution over a
configurable number of logistic classifiers. One of the classifiers in the
mixture is not trained, and always predicts 0.
Args:
model_input: 'batch_size' x 'num_features' matrix of input features.
vocab_size: The number of classes in the dataset.
is_training: Is this the training phase ?
num_mixtures: The number of mixtures (excluding a dummy 'expert' that
always predicts the non-existence of an entity).
l2_penalty: How much to penalize the squared magnitudes of parameter
values.
Returns:
A dictionary with a tensor containing the probability predictions of the
model in the 'predictions' key. The dimensions of the tensor are
batch_size x num_classes.
"""
num_mixtures = num_mixtures or FLAGS.moe_num_mixtures
l2_penalty = FLAGS.moe_l2
fc1 = tf.layers.dense(model_input, vocab_size, activation=tf.nn.relu,
kernel_regularizer=slim.l2_regularizer(l2_penalty))
fc1 = tf.layers.batch_normalization(fc1, training=is_training)
if is_training:
fc1 = tf.nn.dropout(fc1, keep_prob=0.9)
fc2 = tf.layers.dense(fc1, vocab_size, activation=tf.nn.relu,
kernel_regularizer=slim.l2_regularizer(l2_penalty))
fc2 = tf.layers.batch_normalization(fc2, training=is_training)
if is_training:
fc2 = tf.nn.dropout(fc2, keep_prob=0.9)
fc3 = tf.layers.dense(fc2, vocab_size, activation=tf.nn.sigmoid,
kernel_regularizer=slim.l2_regularizer(l2_penalty))
fc3 = tf.layers.batch_normalization(fc3, training=is_training)
if is_training:
fc3 = tf.nn.dropout(fc3, keep_prob=0.9)
fish_gate = fish_modules.FishGate(hidden_size=vocab_size,
k=filter_size,
dropout_rate=0.9,
is_training=is_training)
probabilities = fish_gate.forward(fc3)
# probabilities = tf.layers.dense(probabilities, vocab_size, use_bias=True, activation=tf.nn.softmax)
return {"predictions": probabilities}
class FishMoeModel3(models.BaseModel):
"""A softmax over a mixture of logistic models (with L2 regularization)."""
def create_model(self,
model_input,
vocab_size,
is_training,
num_mixtures=None,
l2_penalty=1e-6,
filter_size=2,
**unused_params):
"""Creates a Mixture of (Logistic) Experts model.
It also includes the possibility of gating the probabilities
The model consists of a per-class softmax distribution over a
configurable number of logistic classifiers. One of the classifiers in the
mixture is not trained, and always predicts 0.
Args:
model_input: 'batch_size' x 'num_features' matrix of input features.
vocab_size: The number of classes in the dataset.
is_training: Is this the training phase ?
num_mixtures: The number of mixtures (excluding a dummy 'expert' that
always predicts the non-existence of an entity).
l2_penalty: How much to penalize the squared magnitudes of parameter
values.
Returns:
A dictionary with a tensor containing the probability predictions of the
model in the 'predictions' key. The dimensions of the tensor are
batch_size x num_classes.
"""
num_mixtures = num_mixtures or FLAGS.moe_num_mixtures
l2_penalty = FLAGS.moe_l2
gate_activations = slim.fully_connected(
model_input,
vocab_size * (num_mixtures + 1),
activation_fn=None,
biases_initializer=None,
weights_regularizer=slim.l2_regularizer(l2_penalty),
scope="gates")
expert_activations = slim.fully_connected(
model_input,
vocab_size * num_mixtures,
activation_fn=None,
weights_regularizer=slim.l2_regularizer(l2_penalty),
scope="experts")
gating_distribution = tf.nn.softmax(tf.reshape(
gate_activations,
[-1, num_mixtures + 1])) # (Batch * #Labels) x (num_mixtures + 1)
expert_distribution = tf.nn.sigmoid(tf.reshape(
expert_activations,
[-1, num_mixtures])) # (Batch * #Labels) x num_mixtures
probabilities_by_class_and_batch = tf.reduce_sum(
gating_distribution[:, :num_mixtures] * expert_distribution, 1)
probabilities0 = tf.reshape(probabilities_by_class_and_batch,
[-1, vocab_size])
probabilities0 = tf.layers.batch_normalization(probabilities0, training=is_training)
r_activation0 = tf.layers.dense(probabilities0, vocab_size * filter_size, use_bias=True, activation=tf.nn.relu)
r_activation0 = tf.layers.batch_normalization(r_activation0, training=is_training)
        if is_training:
            # note: tf.layers.dropout takes a drop *rate* (so 0.9 drops 90% of units),
            # unlike tf.nn.dropout elsewhere in this file, which takes keep_prob
            r_activation0 = tf.layers.dropout(r_activation0, 0.9)
r_activation1 = tf.layers.dense(r_activation0, vocab_size, use_bias=True, activation=None)
probabilities1 = probabilities0 + r_activation1
probabilities1 = tf.contrib.layers.layer_norm(probabilities1)
probabilities1 = tf.layers.batch_normalization(probabilities1, training=is_training)
probabilities2 = tf.layers.dense(probabilities1, vocab_size, use_bias=True, activation=tf.nn.softmax)
return {"predictions": probabilities2}
class MoeModel2(models.BaseModel):
"""A softmax over a mixture of logistic models (with L2 regularization)."""
def create_model(self,
model_input,
vocab_size,
is_training,
num_mixtures=None,
l2_penalty=1e-8,
**unused_params):
"""Creates a Mixture of (Logistic) Experts model.
It also includes the possibility of gating the probabilities
The model consists of a per-class softmax distribution over a
configurable number of logistic classifiers. One of the classifiers in the
mixture is not trained, and always predicts 0.
Args:
model_input: 'batch_size' x 'num_features' matrix of input features.
vocab_size: The number of classes in the dataset.
is_training: Is this the training phase ?
num_mixtures: The number of mixtures (excluding a dummy 'expert' that
always predicts the non-existence of an entity).
l2_penalty: How much to penalize the squared magnitudes of parameter
values.
Returns:
A dictionary with a tensor containing the probability predictions of the
model in the 'predictions' key. The dimensions of the tensor are
batch_size x num_classes.
"""
num_mixtures = 3
low_rank_gating = FLAGS.moe_low_rank_gating
l2_penalty = FLAGS.moe_l2
gating_probabilities = FLAGS.moe_prob_gating
gating_input = FLAGS.moe_prob_gating_input
if low_rank_gating == -1:
gate_activations = slim.fully_connected(
model_input,
vocab_size * (num_mixtures + 1),
activation_fn=None,
biases_initializer=None,
weights_regularizer=slim.l2_regularizer(l2_penalty),
scope="gates")
else:
gate_activations1 = slim.fully_connected(
model_input,
low_rank_gating,
activation_fn=None,
biases_initializer=None,
weights_regularizer=slim.l2_regularizer(l2_penalty),
scope="gates1")
gate_activations = slim.fully_connected(
gate_activations1,
vocab_size * (num_mixtures + 1),
activation_fn=None,
biases_initializer=None,
weights_regularizer=slim.l2_regularizer(l2_penalty),
scope="gates2")
expert_activations = slim.fully_connected(
model_input,
vocab_size * num_mixtures,
activation_fn=None,
weights_regularizer=slim.l2_regularizer(l2_penalty),
scope="experts")
gating_distribution = tf.nn.softmax(tf.reshape(
gate_activations,
[-1, num_mixtures + 1])) # (Batch * #Labels) x (num_mixtures + 1)
expert_distribution = tf.nn.sigmoid(tf.reshape(
expert_activations,
[-1, num_mixtures])) # (Batch * #Labels) x num_mixtures
probabilities_by_class_and_batch = tf.reduce_sum(
gating_distribution[:, :num_mixtures] * expert_distribution, 1)
probabilities = tf.reshape(probabilities_by_class_and_batch,
[-1, vocab_size])
filter1 = tf.layers.dense(probabilities,
vocab_size * 2,
use_bias=True,
activation=tf.nn.relu,
name="v-filter1")
filter1 = tf.layers.batch_normalization(filter1, training=is_training)
if is_training:
filter1 = tf.nn.dropout(filter1, 0.8)
filter2 = tf.layers.dense(filter1,
vocab_size,
use_bias=False,
activation=None,
name="v-filter2")
probabilities = probabilities + filter2
probabilities = tf.nn.relu(probabilities)
probabilities = tf.layers.batch_normalization(probabilities, training=is_training)
probabilities = tf.layers.dense(probabilities, vocab_size, use_bias=True,
activation=tf.nn.sigmoid, name="v-final_output")
return {"predictions": probabilities}
class JuhanMoeModel(models.BaseModel):
"""A softmax over a mixture of logistic models (with L2 regularization)."""
def create_model(self,
model_input,
vocab_size,
is_training,
num_mixtures=None,
l2_penalty=1e-8,
**unused_params):
"""Creates a Mixture of (Logistic) Experts model.
The model consists of a per-class softmax distribution over a
configurable number of logistic classifiers. One of the classifiers in the
mixture is not trained, and always predicts 0.
Args:
model_input: 'batch_size' x 'num_features' matrix of input features.
vocab_size: The number of classes in the dataset.
num_mixtures: The number of mixtures (excluding a dummy 'expert' that
always predicts the non-existence of an entity).
l2_penalty: How much to penalize the squared magnitudes of parameter
values.
Returns:
A dictionary with a tensor containing the probability predictions of the
model in the 'predictions' key. The dimensions of the tensor are
batch_size x num_classes.
"""
num_mixtures = 3
gate_activations = slim.fully_connected(
model_input,
vocab_size * (num_mixtures + 1),
activation_fn=None,
biases_initializer=None,
weights_regularizer=slim.l2_regularizer(l2_penalty),
scope="gates")
expert_activations = slim.fully_connected(
model_input,
vocab_size * num_mixtures,
activation_fn=None,
weights_regularizer=slim.l2_regularizer(l2_penalty),
scope="experts")
gating_distribution = tf.nn.softmax(tf.reshape(
gate_activations,
[-1, num_mixtures + 1])) # (Batch * #Labels) x (num_mixtures + 1)
expert_distribution = tf.nn.sigmoid(tf.reshape(
expert_activations,
[-1, num_mixtures])) # (Batch * #Labels) x num_mixtures
final_probabilities_by_class_and_batch = tf.reduce_sum(
gating_distribution[:, :num_mixtures] * expert_distribution, 1)
probabilities = tf.reshape(final_probabilities_by_class_and_batch,
[-1, vocab_size])
if is_training:
probabilities = tf.nn.dropout(probabilities, 0.8)
filter1 = tf.layers.dense(probabilities,
vocab_size * 2,
use_bias=True,
activation=tf.nn.leaky_relu,
name="v-filter1")
filter1 = tf.layers.batch_normalization(filter1, training=is_training)
if is_training:
filter1 = tf.nn.dropout(filter1, 0.8)
filter2 = tf.layers.dense(filter1,
vocab_size,
use_bias=False,
activation=None,
name="v-filter2")
probabilities = probabilities + filter2
probabilities = tf.nn.leaky_relu(probabilities)
probabilities = tf.layers.batch_normalization(probabilities, training=is_training)
probabilities = tf.layers.dense(probabilities, vocab_size, use_bias=True,
activation=tf.nn.sigmoid, name="v-final_output")
return {"predictions": probabilities}
class FourLayerBatchNeuralModel(models.BaseModel):
def create_model(self,
model_input,
vocab_size,
is_training,
l2_penalty=1e-7,
**unused_params):
model_input_dim = model_input.get_shape().as_list()[1]
fc1_weights = tf.get_variable("fc1_weights",
[model_input_dim, vocab_size],
initializer=tf.contrib.layers.xavier_initializer())
tf.summary.histogram("fc1_weights", fc1_weights)
fc1_activation = tf.matmul(model_input, fc1_weights)
fc1_activation = tf.nn.relu(fc1_activation)
fc1_activation = slim.batch_norm(
fc1_activation,
center=True,
scale=True,
is_training=is_training,
scope="fc1_activation_bn")
fc2_weights = tf.get_variable("fc2_weights",
[vocab_size, vocab_size],
initializer=tf.contrib.layers.xavier_initializer())
tf.summary.histogram("fc2_weights", fc2_weights)
fc2_activation = tf.matmul(fc1_activation, fc2_weights)
fc2_activation = tf.nn.relu(fc2_activation)
fc2_activation = slim.batch_norm(
fc2_activation,
center=True,
scale=True,
is_training=is_training,
scope="fc2_activation_bn")
fc3_weights = tf.get_variable("fc3_weights",
[vocab_size, vocab_size],
initializer=tf.contrib.layers.xavier_initializer())
tf.summary.histogram("fc3_weights", fc3_weights)
fc3_activation = tf.matmul(fc2_activation, fc3_weights)
fc3_activation = tf.nn.relu(fc3_activation)
fc3_activation = slim.batch_norm(
fc3_activation,
center=True,
scale=True,
is_training=is_training,
scope="fc3_activation_bn")
fc4_weights = tf.get_variable("fc4_weights",
[vocab_size, vocab_size],
initializer=tf.contrib.layers.xavier_initializer())
fc4_activation = tf.matmul(fc3_activation, fc4_weights)
cluster_biases = tf.get_variable("fc4_bias",
[vocab_size],
initializer=tf.constant_initializer(0.01))
tf.summary.histogram("fc4_bias", cluster_biases)
fc4_activation += cluster_biases
fc4_activation = tf.sigmoid(fc4_activation)
return {"predictions": fc4_activation}
class ClassLearningThreeNnModel(models.BaseModel):
def create_model(self,
model_input,
vocab_size,
is_training,
l2_penalty=1e-8,
ortho_reg=0,
**unused_params):
fc1 = slim.fully_connected(
model_input, vocab_size, activation_fn=None, biases_initializer=None,
weights_regularizer=slim.l2_regularizer(l2_penalty))
fc1 = tf.contrib.layers.layer_norm(inputs=fc1, center=True, scale=True, activation_fn=tf.nn.leaky_relu)
if is_training:
fc1 = tf.nn.dropout(fc1, keep_prob=0.5)
fc2 = slim.fully_connected(
fc1, vocab_size, activation_fn=None, biases_initializer=None,
weights_regularizer=slim.l2_regularizer(l2_penalty))
fc2 = tf.contrib.layers.layer_norm(inputs=fc2, center=True, scale=True, activation_fn=tf.nn.leaky_relu)
if is_training:
fc2 = tf.nn.dropout(fc2, keep_prob=0.5)
fc3 = slim.fully_connected(
fc2, vocab_size, activation_fn=tf.nn.sigmoid, biases_initializer=tf.constant_initializer(0.1),
weights_regularizer=slim.l2_regularizer(l2_penalty))
return {"predictions": fc3,
"regularization_loss": ortho_reg}
class ClassLearningFourNnModel(models.BaseModel):
def create_model(self,
model_input,
vocab_size,
is_training,
l2_penalty=1e-8,
ortho_reg=0,
**unused_params):
fc1 = slim.fully_connected(
model_input, vocab_size, activation_fn=None, biases_initializer=None,
weights_regularizer=slim.l2_regularizer(l2_penalty))
fc1 = tf.contrib.layers.layer_norm(inputs=fc1, center=True, scale=True, activation_fn=tf.nn.leaky_relu)
# if is_training:
# fc1 = tf.nn.dropout(fc1, keep_prob=0.5)
fc2 = slim.fully_connected(
fc1, vocab_size, activation_fn=None, biases_initializer=None,
weights_regularizer=slim.l2_regularizer(l2_penalty))
fc2 = tf.contrib.layers.layer_norm(inputs=fc2, center=True, scale=True, activation_fn=tf.nn.leaky_relu)
# if is_training:
# fc2 = tf.nn.dropout(fc2, keep_prob=0.5)
fc3 = slim.fully_connected(
fc2, vocab_size, activation_fn=None, biases_initializer=None,
weights_regularizer=slim.l2_regularizer(l2_penalty))
fc3 = tf.contrib.layers.layer_norm(inputs=fc3, center=True, scale=True, activation_fn=tf.nn.leaky_relu)
fc4 = slim.fully_connected(
fc3, vocab_size, activation_fn=tf.nn.sigmoid, biases_initializer=tf.constant_initializer(0.1),
weights_regularizer=slim.l2_regularizer(l2_penalty))
return {"predictions": fc4,
"regularization_loss": ortho_reg}
| 43.802403
| 119
| 0.596745
| 31,214
| 0.951414
| 0
| 0
| 0
| 0
| 0
| 0
| 9,976
| 0.304072
|
c474a170eb0e1f1c4fbbb4250190b02bde10d265
| 4,537
|
py
|
Python
|
tests/test_refinement.py
|
qfardet/Pandora2D
|
9b36d29a199f2acc67499d22b796c7dd6867bc5f
|
[
"Apache-2.0"
] | 4
|
2022-02-09T10:07:03.000Z
|
2022-03-08T05:16:30.000Z
|
tests/test_refinement.py
|
qfardet/Pandora2D
|
9b36d29a199f2acc67499d22b796c7dd6867bc5f
|
[
"Apache-2.0"
] | null | null | null |
tests/test_refinement.py
|
qfardet/Pandora2D
|
9b36d29a199f2acc67499d22b796c7dd6867bc5f
|
[
"Apache-2.0"
] | 4
|
2022-02-03T09:21:28.000Z
|
2022-03-25T07:32:13.000Z
|
#!/usr/bin/env python
# coding: utf8
#
# Copyright (c) 2021 Centre National d'Etudes Spatiales (CNES).
#
# This file is part of PANDORA2D
#
# https://github.com/CNES/Pandora2D
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Test refinement step
"""
import unittest
import numpy as np
import xarray as xr
import pytest
from pandora2d import refinement, common
class TestRefinement(unittest.TestCase):
"""
    TestRefinement class allows testing of the refinement module
"""
@staticmethod
def test_check_conf():
"""
Test the interpolation method
"""
refinement.AbstractRefinement(**{"refinement_method": "interpolation"}) # type: ignore
with pytest.raises(KeyError):
refinement.AbstractRefinement(**{"refinement_method": "wta"}) # type: ignore
@staticmethod
def test_refinement_method_subpixel():
"""
test refinement
"""
cv = np.zeros((3, 3, 5, 5))
cv[:, :, 2, 2] = np.ones([3, 3])
cv[:, :, 2, 3] = np.ones([3, 3])
cv[:, :, 3, 2] = np.ones([3, 3])
cv[:, :, 3, 3] = np.ones([3, 3])
c_row = [0, 1, 2]
c_col = [0, 1, 2]
# First pixel in the image that is fully computable (aggregation windows are complete)
row = np.arange(c_row[0], c_row[-1] + 1)
col = np.arange(c_col[0], c_col[-1] + 1)
disparity_range_col = np.arange(-2, 2 + 1)
disparity_range_row = np.arange(-2, 2 + 1)
cost_volumes_test = xr.Dataset(
{"cost_volumes": (["row", "col", "disp_col", "disp_row"], cv)},
coords={"row": row, "col": col, "disp_col": disparity_range_col, "disp_row": disparity_range_row},
)
cost_volumes_test.attrs["measure"] = "zncc"
cost_volumes_test.attrs["window_size"] = 1
cost_volumes_test.attrs["type_measure"] = "max"
data = np.array(
([[0.4833878, 0.4833878, 0.4833878], [0.4833878, 0.4833878, 0.4833878], [0.4833878, 0.4833878, 0.4833878]]),
dtype=np.float64,
)
dataset_disp_map = common.dataset_disp_maps(data, data)
test = refinement.AbstractRefinement(**{"refinement_method": "interpolation"}) # type: ignore
delta_x, delta_y = test.refinement_method(cost_volumes_test, dataset_disp_map)
np.testing.assert_allclose(data, delta_y, rtol=1e-06)
np.testing.assert_allclose(data, delta_x, rtol=1e-06)
@staticmethod
def test_refinement_method_pixel():
"""
test refinement
"""
cv = np.zeros((3, 3, 5, 5))
cv[:, :, 1, 3] = np.ones([3, 3])
c_row = [0, 1, 2]
c_col = [0, 1, 2]
# First pixel in the image that is fully computable (aggregation windows are complete)
row = np.arange(c_row[0], c_row[-1] + 1)
col = np.arange(c_col[0], c_col[-1] + 1)
disparity_range_col = np.arange(-2, 2 + 1)
disparity_range_row = np.arange(-2, 2 + 1)
cost_volumes_test = xr.Dataset(
{"cost_volumes": (["row", "col", "disp_col", "disp_row"], cv)},
coords={"row": row, "col": col, "disp_col": disparity_range_col, "disp_row": disparity_range_row},
)
cost_volumes_test.attrs["measure"] = "zncc"
cost_volumes_test.attrs["window_size"] = 1
cost_volumes_test.attrs["type_measure"] = "max"
gt_delta_y = np.array(
([[-1, -1, -1], [-1, -1, -1], [-1, -1, -1]]),
dtype=np.float64,
)
gt_delta_x = np.array(
([[1, 1, 1], [1, 1, 1], [1, 1, 1]]),
dtype=np.float64,
)
dataset_disp_map = common.dataset_disp_maps(gt_delta_y, gt_delta_x)
test = refinement.AbstractRefinement(**{"refinement_method": "interpolation"}) # type: ignore
delta_x, delta_y = test.refinement_method(cost_volumes_test, dataset_disp_map)
np.testing.assert_allclose(gt_delta_y, delta_y, rtol=1e-06)
np.testing.assert_allclose(gt_delta_x, delta_x, rtol=1e-06)
| 32.407143
| 120
| 0.60745
| 3,664
| 0.807582
| 0
| 0
| 3,528
| 0.777606
| 0
| 0
| 1,535
| 0.338329
|
c474f216680e6a9b4d600c4b0a1221fea638bba3
| 9,353
|
py
|
Python
|
goblet/tests/test_scheduler.py
|
Aaron-Gill/goblet
|
30c0dd73b2f39e443adb2ccda6f9009e980c53ee
|
[
"Apache-2.0"
] | null | null | null |
goblet/tests/test_scheduler.py
|
Aaron-Gill/goblet
|
30c0dd73b2f39e443adb2ccda6f9009e980c53ee
|
[
"Apache-2.0"
] | null | null | null |
goblet/tests/test_scheduler.py
|
Aaron-Gill/goblet
|
30c0dd73b2f39e443adb2ccda6f9009e980c53ee
|
[
"Apache-2.0"
] | null | null | null |
from unittest.mock import Mock
from goblet import Goblet
from goblet.resources.scheduler import Scheduler
from goblet.test_utils import (
get_responses,
get_response,
mock_dummy_function,
dummy_function,
)
class TestScheduler:
def test_add_schedule(self, monkeypatch):
app = Goblet(function_name="goblet_example")
monkeypatch.setenv("GOOGLE_PROJECT", "TEST_PROJECT")
monkeypatch.setenv("GOOGLE_LOCATION", "us-central1")
app.schedule("* * * * *", description="test")(dummy_function)
scheduler = app.handlers["schedule"]
assert len(scheduler.resources) == 1
        schedule_json = {
"name": "projects/TEST_PROJECT/locations/us-central1/jobs/goblet_example-dummy_function",
"schedule": "* * * * *",
"timeZone": "UTC",
"description": "test",
"attemptDeadline": None,
"retry_config": None,
"httpTarget": {
"body": None,
"headers": {
"X-Goblet-Type": "schedule",
"X-Goblet-Name": "dummy_function",
},
"httpMethod": "GET",
"oidcToken": {},
},
}
        assert scheduler.resources["dummy_function"]["job_json"] == schedule_json
assert scheduler.resources["dummy_function"]["func"] == dummy_function
def test_multiple_schedules(self, monkeypatch):
app = Goblet(function_name="goblet_example")
monkeypatch.setenv("GOOGLE_PROJECT", "TEST_PROJECT")
monkeypatch.setenv("GOOGLE_LOCATION", "us-central1")
app.schedule("1 * * * *", description="test")(dummy_function)
app.schedule("2 * * * *", headers={"test": "header"})(dummy_function)
app.schedule("3 * * * *", httpMethod="POST")(dummy_function)
scheduler = app.handlers["schedule"]
assert len(scheduler.resources) == 3
        schedule_json = {
"name": "projects/TEST_PROJECT/locations/us-central1/jobs/goblet_example-dummy_function",
"schedule": "1 * * * *",
"timeZone": "UTC",
"description": "test",
"attemptDeadline": None,
"retry_config": None,
"httpTarget": {
"body": None,
"headers": {
"X-Goblet-Type": "schedule",
"X-Goblet-Name": "dummy_function",
},
"httpMethod": "GET",
"oidcToken": {},
},
}
        assert scheduler.resources["dummy_function"]["job_json"] == schedule_json
assert (
scheduler.resources["dummy_function-2"]["job_json"]["httpTarget"][
"headers"
]["test"]
== "header"
)
assert (
scheduler.resources["dummy_function-2"]["job_json"]["httpTarget"][
"headers"
]["X-Goblet-Name"]
== "dummy_function-2"
)
assert (
scheduler.resources["dummy_function-3"]["job_json"]["httpTarget"][
"headers"
]["X-Goblet-Name"]
== "dummy_function-3"
)
assert (
scheduler.resources["dummy_function-3"]["job_json"]["httpTarget"][
"httpMethod"
]
== "POST"
)
def test_call_scheduler(self, monkeypatch):
app = Goblet(function_name="goblet_example")
monkeypatch.setenv("GOOGLE_PROJECT", "TEST_PROJECT")
monkeypatch.setenv("GOOGLE_LOCATION", "us-central1")
mock = Mock()
app.schedule("* * * * *", description="test")(mock_dummy_function(mock))
headers = {
"X-Goblet-Name": "dummy_function",
"X-Goblet-Type": "schedule",
"X-Cloudscheduler": True,
}
mock_event = Mock()
mock_event.headers = headers
app(mock_event, None)
assert mock.call_count == 1
def test_deploy_schedule(self, monkeypatch):
monkeypatch.setenv("GOOGLE_PROJECT", "goblet")
monkeypatch.setenv("GOOGLE_LOCATION", "us-central1")
monkeypatch.setenv("GOBLET_TEST_NAME", "schedule-deploy")
monkeypatch.setenv("GOBLET_HTTP_TEST", "REPLAY")
goblet_name = "goblet_example"
scheduler = Scheduler(goblet_name)
scheduler.register_job(
"test-job", None, kwargs={"schedule": "* * * * *", "kwargs": {}}
)
scheduler.deploy()
responses = get_responses("schedule-deploy")
assert goblet_name in responses[0]["body"]["name"]
assert (
responses[1]["body"]["httpTarget"]["headers"]["X-Goblet-Name"] == "test-job"
)
assert (
responses[1]["body"]["httpTarget"]["headers"]["X-Goblet-Type"] == "schedule"
)
assert responses[1]["body"]["schedule"] == "* * * * *"
def test_deploy_schedule_cloudrun(self, monkeypatch):
monkeypatch.setenv("GOOGLE_PROJECT", "goblet")
monkeypatch.setenv("GOOGLE_LOCATION", "us-central1")
monkeypatch.setenv("GOBLET_TEST_NAME", "schedule-deploy-cloudrun")
monkeypatch.setenv("GOBLET_HTTP_TEST", "REPLAY")
scheduler = Scheduler("goblet", backend="cloudrun")
cloudrun_url = "https://goblet-12345.a.run.app"
service_account = "SERVICE_ACCOUNT@developer.gserviceaccount.com"
scheduler.register_job(
"test-job", None, kwargs={"schedule": "* * * * *", "kwargs": {}}
)
scheduler._deploy(config={"scheduler": {"serviceAccount": service_account}})
responses = get_responses("schedule-deploy-cloudrun")
assert responses[0]["body"]["status"]["url"] == cloudrun_url
assert (
responses[1]["body"]["httpTarget"]["oidcToken"]["serviceAccountEmail"]
== service_account
)
assert (
responses[1]["body"]["httpTarget"]["oidcToken"]["audience"] == cloudrun_url
)
assert responses[1]["body"]["schedule"] == "* * * * *"
def test_deploy_multiple_schedule(self, monkeypatch):
monkeypatch.setenv("GOOGLE_PROJECT", "goblet")
monkeypatch.setenv("GOOGLE_LOCATION", "us-central1")
monkeypatch.setenv("GOBLET_TEST_NAME", "schedule-deploy-multiple")
monkeypatch.setenv("GOBLET_HTTP_TEST", "REPLAY")
goblet_name = "goblet-test-schedule"
scheduler = Scheduler(goblet_name)
scheduler.register_job(
"test-job", None, kwargs={"schedule": "* * 1 * *", "kwargs": {}}
)
scheduler.register_job(
"test-job",
None,
kwargs={"schedule": "* * 2 * *", "kwargs": {"httpMethod": "POST"}},
)
scheduler.register_job(
"test-job",
None,
kwargs={
"schedule": "* * 3 * *",
"kwargs": {"headers": {"X-HEADER": "header"}},
},
)
scheduler.deploy()
post_job_1 = get_response(
"schedule-deploy-multiple",
"post-v1-projects-goblet-locations-us-central1-jobs_1.json",
)
post_job_2 = get_response(
"schedule-deploy-multiple",
"post-v1-projects-goblet-locations-us-central1-jobs_2.json",
)
post_job_3 = get_response(
"schedule-deploy-multiple",
"post-v1-projects-goblet-locations-us-central1-jobs_3.json",
)
assert (
post_job_1["body"]["httpTarget"]["headers"]["X-Goblet-Name"] == "test-job"
)
assert (
post_job_2["body"]["httpTarget"]["headers"]["X-Goblet-Name"] == "test-job-2"
)
assert post_job_2["body"]["httpTarget"]["httpMethod"] == "POST"
assert (
post_job_3["body"]["httpTarget"]["headers"]["X-Goblet-Name"] == "test-job-3"
)
assert post_job_3["body"]["httpTarget"]["headers"]["X-HEADER"] == "header"
def test_destroy_schedule(self, monkeypatch):
monkeypatch.setenv("GOOGLE_PROJECT", "goblet")
monkeypatch.setenv("GOOGLE_LOCATION", "us-central1")
monkeypatch.setenv("GOBLET_TEST_NAME", "schedule-destroy")
monkeypatch.setenv("GOBLET_HTTP_TEST", "REPLAY")
goblet_name = "goblet_example"
scheduler = Scheduler(goblet_name)
scheduler.register_job(
"test-job", None, kwargs={"schedule": "* * * * *", "kwargs": {}}
)
scheduler.destroy()
responses = get_responses("schedule-destroy")
assert len(responses) == 1
assert responses[0]["body"] == {}
def test_sync_schedule(self, monkeypatch):
monkeypatch.setenv("GOOGLE_PROJECT", "goblet")
monkeypatch.setenv("GOOGLE_LOCATION", "us-central1")
monkeypatch.setenv("GOBLET_TEST_NAME", "schedule-sync")
monkeypatch.setenv("GOBLET_HTTP_TEST", "REPLAY")
goblet_name = "goblet"
scheduler = Scheduler(goblet_name)
scheduler.register_job(
"scheduled_job", None, kwargs={"schedule": "* * * * *", "kwargs": {}}
)
scheduler.sync(dryrun=True)
scheduler.sync(dryrun=False)
responses = get_responses("schedule-sync")
assert len(responses) == 3
assert responses[1] == responses[2]
assert responses[0]["body"] == {}
| 36.678431
| 101
| 0.5626
| 9,128
| 0.975944
| 0
| 0
| 0
| 0
| 0
| 0
| 3,303
| 0.353149
|
c475cdfc5c22b9c5d0eee35b59b44abcb5b1b364
| 1,027
|
py
|
Python
|
arachnado/rpc/sites.py
|
wigginzz/arachnado
|
8de92625262958e886263b4ccb189f4fc62d7400
|
[
"MIT"
] | 2
|
2017-12-26T14:50:14.000Z
|
2018-06-12T07:04:08.000Z
|
arachnado/rpc/sites.py
|
wigginzz/arachnado
|
8de92625262958e886263b4ccb189f4fc62d7400
|
[
"MIT"
] | null | null | null |
arachnado/rpc/sites.py
|
wigginzz/arachnado
|
8de92625262958e886263b4ccb189f4fc62d7400
|
[
"MIT"
] | null | null | null |
import logging
from functools import partial
from arachnado.storages.mongotail import MongoTailStorage
class Sites(object):
""" 'Known sites' object exposed via JSON-RPC """
logger = logging.getLogger(__name__)
def __init__(self, handler, site_storage, **kwargs):
self.handler = handler
self.storage = site_storage # type: MongoTailStorage
def list(self):
return self.storage.fetch()
def post(self, site):
self.storage.create(site)
def patch(self, site):
self.storage.update(site)
def delete(self, site):
self.storage.delete(site)
def subscribe(self):
for event_name in self.storage.available_events:
self.storage.subscribe(
event_name,
partial(self._publish, event=event_name)
)
def _on_close(self):
self.storage.unsubscribe(self.storage.available_events)
def _publish(self, event, data):
self.handler.write_event('sites.{}'.format(event), data)
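# --- Editor's addition: a hedged usage sketch; the demo classes below are
# illustrative stand-ins, not Arachnado components. ---
if __name__ == "__main__":  # pragma: no cover
    class _DemoHandler(object):
        def write_event(self, event, data):
            print(event, data)

    class _DemoStorage(object):
        available_events = ["created"]

        def subscribe(self, event_name, callback):
            callback(data={"url": "example.com"})

        def unsubscribe(self, events):
            pass

    sites = Sites(_DemoHandler(), _DemoStorage())
    sites.subscribe()  # prints: sites.created {'url': 'example.com'}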
| 26.333333
| 64
| 0.650438
| 920
| 0.895813
| 0
| 0
| 0
| 0
| 0
| 0
| 83
| 0.080818
|
c476f31483a0cfb0e93a77ded50e7c656f3f727f
| 16,628
|
py
|
Python
|
src/players.py
|
deacona/the-ball-is-round
|
8e91a72084d13d754deb82e4852fa37a86a77084
|
[
"MIT"
] | null | null | null |
src/players.py
|
deacona/the-ball-is-round
|
8e91a72084d13d754deb82e4852fa37a86a77084
|
[
"MIT"
] | null | null | null |
src/players.py
|
deacona/the-ball-is-round
|
8e91a72084d13d754deb82e4852fa37a86a77084
|
[
"MIT"
] | null | null | null |
"""players module.
Used for players data processes
"""
import numpy as np
import pandas as pd
import src.config as config
import src.utilities as utilities
from src.utilities import logging
pd.set_option("display.max_columns", 500)
pd.set_option("display.expand_frame_repr", False)
# master_file = config.MASTER_FILES["ftb_players"]
# distance_columns = ["Age", "ChancesInvolved", "DefensiveActions", "FoulsCommited", "FoulsSuffered", "Height", "Minutes", "NPG+A", "Points", "Weight", "SuccessfulPasses"]
def get_outfile(source_name):
"""Return outfile stub for given source.
INPUT:
source_name: String containing name of the data source
OUTPUT:
outfile_stub: Stub to use when saving output
"""
logging.info("Mapping {0} to outfile".format(source_name))
if source_name == "tmk_cnt":
outfile_stub = "players_contract"
    elif source_name == "tmk_psm":
        outfile_stub = "players_performance"
    else:
        raise ValueError("Unrecognised source_name: {0}".format(source_name))
logging.debug(outfile_stub)
return outfile_stub
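# --- Editor's addition: a hedged usage sketch, illustrative only. ---
if __name__ == "__main__":  # pragma: no cover
    assert get_outfile("tmk_cnt") == "players_contract"
    assert get_outfile("tmk_psm") == "players_performance"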
def clean_data(source_name, directory=config.MASTER_DIR):
"""Clean raw player data and save processed version.
INPUT:
source_name: String containing name of the data source
directory: Directory to save output to
OUTPUT:
df: Dataframe containing the cleaned data
"""
logging.info("Loading {0} data".format(source_name))
if source_name == "tmk_cnt":
source_header = [
"Shirt number",
"Position",
"Name",
"Date of birth",
"Nationality",
"Height",
"Foot",
"Joined",
"Signed from",
"Contract expires",
"Market value",
]
drop_cols = ["Nationality", "Signed from", "Competition"]
notna_cols = ["Market value"]
elif source_name == "tmk_psm":
source_header = [
"Shirt number",
"Position",
"Name",
"Age",
"Nationality",
"In squad",
"Games started",
"Goals",
"Assists",
"Yellow cards",
"Second yellow cards",
"Red cards",
"Substitutions on",
"Substitutions off",
"PPG",
"Minutes played",
]
drop_cols = ["Nationality"]
notna_cols = ["In squad"]
df = utilities.folder_loader(
source_name[:3], source_name, "comp_season", source_header=source_header
)
## Name and Position are mis-aligned in the source files
df["Name"].fillna(method="bfill", inplace=True)
df["Position"] = df.Name.shift(-1)
df.loc[df.Position == df.Name, "Position"] = df.Name.shift(-2)
df.drop(axis=1, columns=drop_cols, inplace=True)
df.dropna(subset=notna_cols, inplace=True)
df = df.apply(lambda x: x.str.strip() if x.dtype == "object" else x)
df = df.replace("-", np.nan)
df = df.replace("Was not used during this season", np.nan)
df = df.replace("Not in squad during this season", np.nan)
df = df.replace("Not used during this season", np.nan)
df["Shirt number"] = pd.to_numeric(df["Shirt number"], downcast="integer")
df["Position group"] = None
df.loc[
(df.Position.str.upper().str.contains("KEEPER"))
| (df.Position.str.upper().str.contains("GOAL")),
"Position group",
] = "G"
df.loc[
(df.Position.str.upper().str.contains("BACK"))
| (df.Position.str.upper().str.contains("DEF")),
"Position group",
] = "D"
df.loc[
(df.Position.str.upper().str.contains("MID"))
| (df.Position.str.upper().str.contains("MIT"))
| (df.Position.str.upper().str.contains("WING")),
"Position group",
] = "M"
df.loc[
(df.Position.str.upper().str.contains("STRIKER"))
| (df.Position.str.upper().str.contains("FORW")),
"Position group",
] = "F"
if source_name == "tmk_cnt":
df["Age"] = (
df["Date of birth"].str.extract(r".*([0-9]{2})", expand=False).astype("int")
)
df["Date of birth"] = pd.to_datetime(
df["Date of birth"].str.extract(r"(.*) \([0-9]{2}\)", expand=False),
format="%b %d, %Y",
)
df["Joined"] = pd.to_datetime(df.Joined, format="%b %d, %Y")
df["Contract expires"] = pd.to_datetime(
df["Contract expires"], format="%d.%m.%Y"
)
df["Height"] = (
df["Height"]
.str.strip()
.str.replace(" ", "")
.str.replace(",", "")
.str.replace("m", "")
.replace({"-": np.nan, "": np.nan})
.astype(float)
)
df.loc[
df.Name.isin(df[df.Height.notna()].Name.values)
& df.Name.isin(df[df.Height.isna()].Name.values),
"Height",
] = (
df.loc[
df.Name.isin(df[df.Height.notna()].Name.values)
& df.Name.isin(df[df.Height.isna()].Name.values)
]
.sort_values(by=["Name", "Season"])
.Height.fillna(method="bfill")
)
df.loc[
df.Name.isin(df[df.Foot.notna()].Name.values)
& df.Name.isin(df[df.Foot.isna()].Name.values),
"Foot",
] = (
df.loc[
df.Name.isin(df[df.Foot.notna()].Name.values)
& df.Name.isin(df[df.Foot.isna()].Name.values)
]
.sort_values(by=["Name", "Season"])
.Foot.fillna(method="bfill")
)
df["Market value"] = (
df["Market value"]
.str.strip()
.replace({"-": np.nan})
.replace(r"[£kmTh\.]", "", regex=True)
.astype(float)
* df["Market value"]
.str.extract(r"[\d\.]+([kmTh\.]+)", expand=False)
.fillna(1)
.replace(["k", "Th.", "m"], [10 ** 3, 10 ** 3, 10 ** 6])
.astype(int)
/ 10 ** 6
)
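        # Worked examples (illustrative, assuming Transfermarkt-style inputs) of
        # the conversion above:
        #   "£900Th." -> digits kept: 900, multiplier "Th." -> 10**3, so 900 * 10**3 / 10**6 = 0.9
        #   "£250k"   -> digits kept: 250, multiplier "k"   -> 10**3, so 250 * 10**3 / 10**6 = 0.25
        # i.e. "Market value" ends up expressed in millions of pounds.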
elif source_name == "tmk_psm":
df["PPG"] = df["PPG"].str.strip().replace(r"[,]", ".", regex=True).astype(float)
df["Minutes played"] = (
df["Minutes played"]
.str.strip()
.replace(r"[.\']", "", regex=True)
.astype(float)
)
df[
[
"In squad",
"Games started",
"Goals",
"Assists",
"Yellow cards",
"Second yellow cards",
"Red cards",
"Substitutions on",
"Substitutions off",
"PPG",
"Minutes played",
]
] = df[
[
"In squad",
"Games started",
"Goals",
"Assists",
"Yellow cards",
"Second yellow cards",
"Red cards",
"Substitutions on",
"Substitutions off",
"PPG",
"Minutes played",
]
].fillna(
0
)
df[
[
"In squad",
"Games started",
"Goals",
"Assists",
"Yellow cards",
"Second yellow cards",
"Red cards",
"Substitutions on",
"Substitutions off",
"PPG",
"Minutes played",
]
] = df[
[
"In squad",
"Games started",
"Goals",
"Assists",
"Yellow cards",
"Second yellow cards",
"Red cards",
"Substitutions on",
"Substitutions off",
"PPG",
"Minutes played",
]
].astype(
float
)
logging.debug(df.describe(include="all"))
logging.info("Saving processed data to ")
utilities.save_master(df, get_outfile(source_name), directory=directory)
return df
# def get_players():
# """
# INPUT:
# None
# OUTPUT:
# df - Dataframe of aggregated player data
# """
# logging.info("Fetching aggregated player data")
# # fetch from master csv
# # df = pd.read_csv(master_file, sep='|', encoding="ISO-8859-1")
# df = utilities.get_master("players")
# # filter unwanted records
# df = df[(df["Season"] >= "s1314") & (df["Competition"].isin(["chm", "cpo", "prm"]))]
# df.dropna(subset=["Name"], inplace=True)
# # select columns
# group_key = "Name"
# max_cols = ["Age", "Height", "Weight"]
# # p90_cols = ["AerialsWon", "ChancesInvolved", "DefensiveActions", "Dispossesed", "Dribbles", "FoulsCommited", "FoulsSuffered", "NPG+A", "SuccessfulPasses"]
# p90_cols = [
# "AerialsWon",
# "Assists",
# "BadControl",
# "Blocks",
# "CalledOffside",
# "Clearances",
# "Crosses",
# "Dispossesed",
# "Dribbles",
# "DribblesAgainst",
# "FirstYellowCards",
# "FoulsCommited",
# "FoulsSuffered",
# "GoalsConceded",
# "Interceptions",
# "KeyPasses",
# "LongBalls",
# "NonPenaltyGoals",
# "OffsidesWon",
# "OwnGoals",
# "Passes",
# "PenaltyGoals",
# "RedCards",
# "Saves",
# "Shots",
# "ShotsFaced",
# "ShotsOnTarget",
# "Tackles",
# "ThroughBalls",
# "YellowCards",
# ]
# pGm_cols = ["Appearances", "Minutes", "Points"]
# sum_cols = p90_cols + pGm_cols
# selected_columns = [group_key] + max_cols + sum_cols
# df = df[selected_columns]
# # aggregate to player level
# df_max = df[[group_key] + max_cols].groupby(group_key).max()
# df_sum = df[[group_key] + sum_cols].groupby(group_key).sum()
# df = pd.concat([df_max, df_sum], axis=1)
# df = df[(df["Minutes"] >= 900)]
# # convert action totals to per90
# for col in p90_cols:
# df[col + "P90"] = 90 * df[col] / df["Minutes"]
# for col in pGm_cols:
# df[col + "PGm"] = df[col] / df["Appearances"]
# for col in sum_cols:
# del df[col]
# del df["AppearancesPGm"]
# logging.debug(df.describe(include="all"))
# return df
# def find_similar():
# players = get_players()
# # print players
# print("\nNumber of players included: " + str(len(players)))
# # Normalize all of the numeric columns
# players_normalized = (players - players.mean()) / players.std()
# players_normalized.fillna(0, inplace=True)
# # players_normalized.info()
# # print players_normalized.describe(include="all")
# # print players_normalized.index.values
# for (
# name
# ) in (
# players_normalized.index.values
# ): # ["Adam Clayton", "Ben Gibson", "Daniel Ayala", "Tomas Mejias"]:
# # print "\n###############################"
# print("\n" + name, end=" ")
# # selected_player = players.loc[name]
# # print selected_player.name
# # print selected_player.to_frame().T #.name
# # Normalize all of the numeric columns
# selected_normalized = players_normalized.loc[name]
# # print selected_normalized
# # Find the distance between select player and everyone else.
# euclidean_distances = players_normalized.apply(
# lambda row: distance.euclidean(row, selected_normalized), axis=1
# )
# # Create a new dataframe with distances.
# distance_frame = pd.DataFrame(
# data={"dist": euclidean_distances, "idx": euclidean_distances.index}
# )
# distance_frame.sort_values("dist", inplace=True)
# most_similar_players = distance_frame.iloc[1:4]["idx"]
# # most_similar_players = players.loc[nearest_neighbours] #["Name"]
# # print most_similar_players
# print("... is similar to... ", end=" ")
# print(list(most_similar_players.index.values))
# def make_prediction():
# players = get_players()
# pred_col = "AssistsP90"
# x_columns = list(players.columns.values)
# x_columns.remove(pred_col)
# y_column = [pred_col]
# # # The columns that we will be making predictions with.
# # x_columns = ['Age', 'Height', 'Weight', 'AerialsWonP90', 'AssistsP90', 'BadControlP90', 'BlocksP90', 'CalledOffsideP90', 'ClearancesP90', 'CrossesP90', 'DispossesedP90', 'DribblesP90', 'DribblesAgainstP90', 'FirstYellowCardsP90', 'FoulsCommitedP90', 'FoulsSufferedP90', 'GoalsConcededP90', 'InterceptionsP90', 'KeyPassesP90', 'LongBallsP90', 'NonPenaltyGoalsP90', 'OffsidesWonP90', 'OwnGoalsP90', 'PassesP90', 'PenaltyGoalsP90', 'RedCardsP90', 'SavesP90', 'ShotsP90', 'ShotsFacedP90', 'ShotsOnTargetP90', 'TacklesP90', 'ThroughBallsP90', 'YellowCardsP90', 'MinutesPGm']
# # print x_columns
# # # The column that we want to predict.
# # y_column = [pred_col]
# # print y_column
# ###Generating training and testing sets
# # Randomly shuffle the index of nba.
# random_indices = permutation(players.index)
# # Set a cutoff for how many items we want in the test set (in this case 1/3 of the items)
# test_cutoff = math.floor(len(players) / 3)
# # Generate the test set by taking the first 1/3 of the randomly shuffled indices.
# test = players.loc[random_indices[1:test_cutoff]]
# test.fillna(0, inplace=True)
# # test.info()
# # print test.describe(include="all")
# # Generate the train set with the rest of the data.
# train = players.loc[random_indices[test_cutoff:]]
# train.fillna(0, inplace=True)
# # train.info()
# # print train.describe(include="all")
# ###Using sklearn for k nearest neighbors
# # print "Using sklearn for k nearest neighbors..."
# from sklearn.neighbors import KNeighborsRegressor
# # Create the knn model.
# # Look at the five closest neighbors.
# knn = KNeighborsRegressor(n_neighbors=5)
# # print knn
# # Fit the model on the training data.
# knn.fit(train[x_columns], train[y_column])
# # print knn
# # Make point predictions on the test set using the fit model.
# predictions = knn.predict(test[x_columns])
# # print "\nPredicted PointsPGm:"
# # print predictions.shape
# ###Computing error
# # Get the actual values for the test set.
# actual = test[y_column].copy()
# # Compute the mean squared error of our predictions.
# mse = (((predictions - actual) ** 2).sum()) / len(predictions)
# print("\nMean Squared Error:")
# print(mse)
# actual["Predicted" + pred_col] = predictions
# actual["Diff"] = actual[pred_col] - actual["Predicted" + pred_col]
# print("\nActual and Predicted " + pred_col + ":")
# print(actual.sort_values(["Diff"], ascending=False))
# def test_opinions():
# players = get_players()
# players = players.reset_index()
# players = players[
# players["Name"].isin(
# [
# "Alvaro Negredo",
# "Patrick Bamford",
# "Jordan Rhodes",
# "Garcia Kike",
# "Cristhian Stuani",
# "David Nugent",
# "Danny Graham",
# "Jelle Vossen",
# "Kei Kamara",
# ]
# )
# ]
# # df_info(players)
# players["ShotAccuracy"] = players["ShotsOnTargetP90"] / players["ShotsP90"]
# players["ShotEfficiency"] = (
# players["NonPenaltyGoalsP90"] + players["PenaltyGoalsP90"].fillna(0)
# ) / players["ShotsP90"]
# players["ShotPercentage"] = (
# players["NonPenaltyGoalsP90"] + players["PenaltyGoalsP90"].fillna(0)
# ) / players["ShotsOnTargetP90"]
# players = players[
# [
# "Name",
# "NonPenaltyGoalsP90",
# "PenaltyGoalsP90",
# "ShotsP90",
# "ShotsOnTargetP90",
# "ShotAccuracy",
# "ShotEfficiency",
# "ShotPercentage",
# ]
# ]
# # df_info(players)
# print(players.describe())
# print(players)
def main():
"""Use the Main for CLI usage."""
logging.info("Executing players module")
clean_data("tmk_cnt")
clean_data("tmk_psm")
# get_players()
# find_similar()
# make_prediction()
# test_opinions()
if __name__ == "__main__":
main()
| 31.793499
| 580
| 0.53446
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 10,830
| 0.651272
|
c47739874e06f42c7eb96ea82d6382fed8af2e9d
| 2,035
|
py
|
Python
|
Z_ALL_FILE/Py/code_qry.py
|
omikabir/omEngin
|
b8c04a5c2c12ffc3d0b67c2ceba9e5741d3f9195
|
[
"Apache-2.0"
] | null | null | null |
Z_ALL_FILE/Py/code_qry.py
|
omikabir/omEngin
|
b8c04a5c2c12ffc3d0b67c2ceba9e5741d3f9195
|
[
"Apache-2.0"
] | null | null | null |
Z_ALL_FILE/Py/code_qry.py
|
omikabir/omEngin
|
b8c04a5c2c12ffc3d0b67c2ceba9e5741d3f9195
|
[
"Apache-2.0"
] | 1
|
2021-04-29T21:46:02.000Z
|
2021-04-29T21:46:02.000Z
|
import pandas as pd
import os
import pyodbc  # required by codechk() below for the database connection
#opt = itertools.islice(ls, len(ls))
#st = map(lambda x : )
def parsecode(txt):
    """Collect every known incident code that occurs in txt."""
    df = pd.read_csv(os.getcwd() + '\\OMDB.csv')
    ls = df['Code'].to_list()
    code = []
    for i in range(len(ls)):
        if ls[i] in txt:
            n = txt.find(ls[i])
            st = txt[n:n+7]
            code.append(st)
            txt = txt.replace(ls[i], '')
    # The original returned from inside the loop, which stopped the scan at the
    # first non-matching code; returning after the loop checks every code.
    return code if code else ''
def qry_by_code(code, tbl = None, col = None):
if tbl is None and col is None:
a1 = "select Incident_Notification,Down_Time,Up_Time,Major_Cause,Action_Taken,Link_ID_Site_ID,Incident_ID from incident_tracker_v2 where ("
a2 = " No_of_2G_Impacted_sites Like '%" + code + "%' or No_of_3G_Impacted_sites like '%" + code + "%' or No_of_4G_Impacted_Sites like '%" + code + "%' or Incident_Notification Like '%" + code
a3 = "%') order by Down_Time desc"
aa = a1 + a2 + a3
return aa
else:
return ""
def codechk(txt):
    rs = parsecode(txt.upper())
    st = 0
    print('ret val', rs)
    if len(rs) == 1:
        code = rs[0]
        rn = 0
        try:
            cd = int(code[6:7])  # validity check: 7th character must be a digit
            qry = qry_by_code(code)
            # `soc` (the ODBC connection string) is expected to be defined elsewhere.
            conn = pyodbc.connect(soc)
            df = pd.read_sql(qry, con=conn)
            if df.shape[0] != 0:
                if df.shape[0] > 3:
                    st = "last 3 incidents out of " + str(df.shape[0])
                    rn = 3
                else:
                    st = "incidents found " + str(df.shape[0]) + chr(10)
                    rn = df.shape[0]
                for i in range(rn):
                    tmp = chr(10)
                    for j in df:
                        tmp = tmp + chr(10) + str(df.loc[i, j])
                    st = st + chr(10) + str(i) + tmp
        except Exception:
            print('not code')
        return st
    else:
        return st
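# --- Hedged usage sketch (illustrative only) ---
# qry_by_code() only builds a SQL string, so it can be exercised without a
# database connection; the incident code below is hypothetical.
if __name__ == "__main__":
    print(qry_by_code("DHK1234"))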
| 28.263889
| 200
| 0.456511
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 455
| 0.223587
|
c478a3bd10411c7f1ec8a901267dc3442748c724
| 1,463
|
py
|
Python
|
eats/tests/common/base_test_setup.py
|
Etiqa/eats
|
8c8e2da93d0014f6fbb208185712c5526dba1174
|
[
"BSD-2-Clause"
] | null | null | null |
eats/tests/common/base_test_setup.py
|
Etiqa/eats
|
8c8e2da93d0014f6fbb208185712c5526dba1174
|
[
"BSD-2-Clause"
] | 5
|
2021-03-18T21:34:44.000Z
|
2022-03-11T23:35:23.000Z
|
eats/tests/common/base_test_setup.py
|
Etiqa/eats
|
8c8e2da93d0014f6fbb208185712c5526dba1174
|
[
"BSD-2-Clause"
] | null | null | null |
import socket
import unittest
from eats.webdriver import PytractorWebDriver
from eats.tests.common import SimpleWebServerProcess as SimpleServer
def _get_local_ip_addr():
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(("gmail.com",80))
local_ip_addr = s.getsockname()[0]
s.close()
return local_ip_addr
class PytractorTestBaseSetup(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.process = SimpleServer()
cls.process.run()
def setUp(self):
self.base_url = "http://{}:{}".format(_get_local_ip_addr(), SimpleServer.PORT)
self.driver = self.get_driver()
self.driver.ignore_synchronization = False
@classmethod
def tearDownClass(cls):
cls.process.stop()
def tearDown(self):
self.driver.quit()
class FirefoxRemoteWebDriverTest(object):
def get_driver(self):
return PytractorWebDriver(
test_timeout=3000,
command_executor='http://{}:4444/wd/hub'.format(_get_local_ip_addr()),
desired_capabilities={'browserName': 'firefox', 'version': '', 'platform': 'ANY'}
)
class ChromeRemoteWebDriverTest(object):
def get_driver(self):
return PytractorWebDriver(
test_timeout=3000,
command_executor='http://{}:4444/wd/hub'.format(_get_local_ip_addr()),
desired_capabilities={'browserName': 'chrome', 'version': '', 'platform': 'ANY'}
)
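# --- Hedged usage sketch (not part of the original module) ---
# The two *RemoteWebDriverTest classes above are mixins that only supply
# get_driver(); a concrete test case would presumably combine one of them with
# PytractorTestBaseSetup, e.g.:
#
# class MyChromeTest(ChromeRemoteWebDriverTest, PytractorTestBaseSetup):
#     def test_homepage_loads(self):
#         self.driver.get(self.base_url)
#         self.assertIn("html", self.driver.page_source.lower())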
| 30.479167
| 93
| 0.663705
| 1,119
| 0.764867
| 0
| 0
| 167
| 0.114149
| 0
| 0
| 166
| 0.113465
|
c47907817d94beb66a4ec9f0e248f596065c0464
| 231
|
py
|
Python
|
autoprep/service/sqlite_project_service.py
|
haginot/auto-prep
|
b1de3eceba5b82432e7042e7e62270df467ed828
|
[
"Apache-2.0"
] | null | null | null |
autoprep/service/sqlite_project_service.py
|
haginot/auto-prep
|
b1de3eceba5b82432e7042e7e62270df467ed828
|
[
"Apache-2.0"
] | 4
|
2019-01-15T01:55:46.000Z
|
2019-02-21T04:15:25.000Z
|
autoprep/service/sqlite_project_service.py
|
haginot/auto-prep
|
b1de3eceba5b82432e7042e7e62270df467ed828
|
[
"Apache-2.0"
] | null | null | null |
from autoprep.service.project_service import ProjectService
class SQLiteProjectService(ProjectService):
def get_projects(self):
pass
def get_project(self):
pass
def save_project(self):
pass
| 16.5
| 59
| 0.69697
| 168
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
c479ce0c9f3fb47a8ec7bf6ff4db304b73d1a05c
| 2,262
|
py
|
Python
|
p1_navigation/model.py
|
Alexandr0s93/deep-reinforcement-learning
|
02a508d25d2ba3c76c76a8410b3ae27f0d14e13f
|
[
"MIT"
] | null | null | null |
p1_navigation/model.py
|
Alexandr0s93/deep-reinforcement-learning
|
02a508d25d2ba3c76c76a8410b3ae27f0d14e13f
|
[
"MIT"
] | null | null | null |
p1_navigation/model.py
|
Alexandr0s93/deep-reinforcement-learning
|
02a508d25d2ba3c76c76a8410b3ae27f0d14e13f
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn as nn
class QNetwork(nn.Module):
"""Actor (Policy) Model using a Single DQN."""
def __init__(self, state_size, action_size, seed):
"""Initialize parameters and build model.
Params
======
state_size (int): Dimension of each state
action_size (int): Dimension of each action
seed (int): Random seed
"""
super(QNetwork, self).__init__()
self.seed = torch.manual_seed(seed)
# Define Deep Q-Network Layers
self.dqn_layers = nn.Sequential(
nn.Linear(state_size, 128),
nn.ReLU(),
nn.Linear(128, 64),
nn.ReLU(),
nn.Linear(64, 32),
nn.ReLU(),
nn.Linear(32, action_size)
)
def forward(self, state):
"""Build a network that maps state -> action values."""
q_values = self.dqn_layers(state)
return q_values
class DuelQNetwork(nn.Module):
"""Actor (Policy) Model using a Duel DQN."""
def __init__(self, state_size, action_size, seed):
"""Initialize parameters and build model.
Params
======
state_size (int): Dimension of each state
action_size (int): Dimension of each action
seed (int): Random seed
"""
super(DuelQNetwork, self).__init__()
self.seed = torch.manual_seed(seed)
# Define Feature Layers
self.feature_layers = nn.Sequential(
nn.Linear(state_size, 128),
nn.ReLU(),
nn.Linear(128, 64),
nn.ReLU(),
nn.Linear(64, 32),
nn.ReLU()
)
# Define Value Stream
self.value_stream = nn.Sequential(
nn.Linear(32, 1)
)
# Define Advantage Layers
self.advantage_stream = nn.Sequential(
nn.Linear(32, action_size)
)
def forward(self, state):
"""Build a network that maps state -> action values."""
x = self.feature_layers(state)
values = self.value_stream(x)
advantages = self.advantage_stream(x)
q_values = values + (advantages - advantages.mean())
return q_values
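# --- Hedged usage sketch (not part of the original module) ---
# Quick shape check for both networks; the 37-dimensional state and 4 actions
# match the Banana environment this project targets, but are assumptions here.
if __name__ == "__main__":
    state = torch.randn(1, 37)
    for net_cls in (QNetwork, DuelQNetwork):
        net = net_cls(state_size=37, action_size=4, seed=0)
        print(net_cls.__name__, net(state).shape)  # -> torch.Size([1, 4])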
| 30.16
| 63
| 0.545977
| 2,219
| 0.98099
| 0
| 0
| 0
| 0
| 0
| 0
| 757
| 0.33466
|
c479cee1b61267e6a98fae5c6efa9dd6f54fec33
| 74
|
py
|
Python
|
const.py
|
TakosukeGH/pmx_bone_importer
|
412cc066867cb0e0fd889101630277f9f9ba3a6a
|
[
"MIT"
] | null | null | null |
const.py
|
TakosukeGH/pmx_bone_importer
|
412cc066867cb0e0fd889101630277f9f9ba3a6a
|
[
"MIT"
] | null | null | null |
const.py
|
TakosukeGH/pmx_bone_importer
|
412cc066867cb0e0fd889101630277f9f9ba3a6a
|
[
"MIT"
] | 1
|
2019-10-05T01:18:54.000Z
|
2019-10-05T01:18:54.000Z
|
ADDON_NAME = "pmx_bone_importer"
LOG_FILE_NAME = "pmx_bone_importer.log"
| 18.5
| 39
| 0.810811
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 42
| 0.567568
|
c47bf0eadf4438f1d2983cdc88c09d3954cd62d8
| 17,789
|
py
|
Python
|
pox/lib/interfaceio/__init__.py
|
korrigans84/pox_network
|
cd58d95d97c94b3d139bc2026fd1be0a30987911
|
[
"Apache-2.0"
] | 416
|
2015-01-05T18:16:36.000Z
|
2022-03-28T21:44:26.000Z
|
pox/lib/interfaceio/__init__.py
|
korrigans84/pox_network
|
cd58d95d97c94b3d139bc2026fd1be0a30987911
|
[
"Apache-2.0"
] | 140
|
2015-01-18T23:32:34.000Z
|
2022-03-17T05:40:24.000Z
|
pox/lib/interfaceio/__init__.py
|
korrigans84/pox_network
|
cd58d95d97c94b3d139bc2026fd1be0a30987911
|
[
"Apache-2.0"
] | 344
|
2015-01-08T06:44:23.000Z
|
2022-03-26T04:06:27.000Z
|
# Copyright 2017 James McCauley
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Input and output from network interfaces.
This wraps PCap, TunTap, etc., to provide a simple, universal, cooperative
interface to network interfaces.
Currently limited to Linux.
"""
from pox.lib.pxpcap import PCap
from queue import Queue
from pox.lib.revent import Event, EventMixin
from pox.lib.ioworker.io_loop import ReadLoop
from pox.core import core
import struct
from fcntl import ioctl
import socket
from pox.lib.addresses import EthAddr, IPAddr
from pox.lib.addresses import parse_cidr, cidr_to_netmask
import os
import ctypes
IFNAMESIZ = 16
IFREQ_SIZE = 40
# from linux/if_tun.h
TUNSETIFF = 0x400454ca
TUNGETIFF = 0x800454d2
IFF_TUN = 0x0001
IFF_TAP = 0x0002
IFF_NO_PI = 0x1000
IFF_ONE_QUEUE = 0x2000
IFF_VNET_HDR = 0x4000
IFF_TUN_EXCL = 0x8000
IFF_MULTI_QUEUE = 0x0100
IFF_ATTACH_QUEUE = 0x0200
IFF_DETACH_QUEUE = 0x0400
IFF_PERSIST = 0x0800
IFF_NOFILTER = 0x1000
#from linux/if.h (flags)
IFF_UP = 1<<0
IFF_BROADCAST = 1<<1
IFF_DEBUG = 1<<2
IFF_LOOPBACK = 1<<3
IFF_POINTOPOINT = 1<<4
IFF_NOTRAILERS = 1<<5
IFF_RUNNING = 1<<6
IFF_NOARP = 1<<7
IFF_PROMISC = 1<<8
IFF_ALLMULTI = 1<<9
IFF_MASTER = 1<<10
IFF_SLAVE = 1<<11
IFF_MULTICAST = 1<<12
IFF_PORTSEL = 1<<13
IFF_AUTOMEDIA = 1<<14
IFF_DYNAMIC = 1<<15
IFF_LOWER_UP = 1<<16
IFF_DORMANT = 1<<17
IFF_ECHO = 1<<18
# Unless IFF_NO_PI, there's a header on packets:
# 16 bits of flags
# 16 bits (big endian?) protocol number
# from /usr/include/linux/sockios.h
SIOCGIFHWADDR = 0x8927
SIOCGIFMTU = 0x8921
SIOCSIFMTU = 0x8922
SIOCGIFFLAGS = 0x8913
SIOCSIFFLAGS = 0x8914
SIOCSIFHWADDR = 0x8924
SIOCGIFNETMASK = 0x891b
SIOCSIFNETMASK = 0x891c
SIOCGIFADDR = 0x8915
SIOCSIFADDR = 0x8916
SIOCGIFBRDADDR = 0x8919
SIOCSIFBRDADDR = 0x891a
SIOCSIFNAME = 0x8923
SIOCADDRT = 0x890B # rtentry (route.h) for IPv4, in6_rtmsg for IPv6
SIOCDELRT = 0x890C
# from /usr/include/linux/if_arp.h
ARPHRD_ETHER = 1
ARPHRD_IEEE802 = 1
ARPHRD_IEEE1394 = 24
ARPHRD_EUI64 = 27
ARPHRD_LOOPBACK = 772
ARPHRD_IPGRE = 778
ARPHRD_IEE802_TR = 800
ARPHRD_IEE80211 = 801
ARPHRD_IEE80211_PRISM = 802
ARPHRD_IEE80211_RADIOTAP = 803
ARPHRD_IP6GRE = 823
class rtentry (object):
"""
Wrapper for Linux rtentry
Only tries to capture IPv4 usage.
Possibly better done with ctypes.
"""
# flags
RTF_UP = 0x0001 # usable
RTF_GATEWAY = 0x0002 # dst is gateway
RTF_HOST = 0x0004 # host route
RTF_REINSTATE = 0x0008 # reinstate after timeout
RTF_DYNAMIC = 0x0010 # created dynamically (by redirect)
RTF_MODIFIED = 0x0020 # modified dynamically (by redirect)
RTF_MSS = 0x0040 # use specific MSS for this route
RTF_WINDOW = 0x0080 # use per-route window clamping
RTF_IRTT = 0x0100 # use initial RTT
RTF_REJECT = 0x0200 # reject route
# fields
rt_hash = 0
rt_dst = IPAddr("0.0.0.0")
rt_gateway = IPAddr("0.0.0.0")
rt_genmask = IPAddr("0.0.0.0")
rt_flags = 0
rt_refcnt = 0
rt_use = 0
rt_ifp = 0 # ptr to struct ifnet
rt_metric = 0
rt_dev = None # device name
rt_mss = 0
rt_window = 0 # window clamping
rt_irtt = 0 # initial RTT
def pack (self):
if self.rt_dev:
s = ctypes.c_char_p(self.rt_dev + "\0") # Null terminator necessary?
dev = ctypes.cast(s, ctypes.c_void_p).value
self._buf = s # You must use the resulting packed string before changing
# rt_dev!
else:
dev = 0
return struct.pack("L16s16s16shhLPhPLLH",
self.rt_hash,
sockaddr_in(self.rt_dst).pack(),
sockaddr_in(self.rt_gateway).pack(),
sockaddr_in(self.rt_genmask).pack(),
self.rt_flags,
self.rt_refcnt,
self.rt_use,
self.rt_ifp,
self.rt_metric,
dev,
self.rt_mss,
self.rt_window,
self.rt_irtt)
class sockaddr_in (object):
"""
Wrapper for sockaddr_in
"""
sin_family = socket.AF_INET
sin_port = 0
sin_addr = IPAddr("0.0.0.0")
def __init__ (self, addr=None, port=None):
if addr is not None:
self.sin_addr = IPAddr(addr)
if port is not None:
self.sin_port = port
def pack (self):
r = struct.pack("hH", self.sin_family, self.sin_port)
r += self.sin_addr.raw
r += ("\0" * 8)
return r
class Interface (object):
"""
Simple interface to tun/tap driver
Currently only for Linux. IIRC, shouldn't be too hard to adapt for BSD.
Other OSes will probably need a fair amount of work.
"""
#TODO: Setters
def __init__ (self, name):
self._name = name
def __str__ (self):
return "%s('%s')" % (type(self).__name__, self.name)
@property
def name (self):
return self._name.rstrip("\0")
@name.setter
def name (self, value):
if len(value) > IFNAMESIZ: raise RuntimeError("Name too long")
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
ifr = struct.pack(str(IFNAMESIZ) + "s", self.name)
ifr += value
ifr += "\0" * (IFREQ_SIZE - len(ifr))
ret = ioctl(sock, SIOCSIFNAME, ifr)
self._name = value
@property
def ipv6_enabled (self):
f = file("/proc/sys/net/ipv6/conf/%s/disable_ipv6" % (self.name,), "r")
with f:
return f.read()[0] == "0" # Note inversion!
@ipv6_enabled.setter
def ipv6_enabled (self, value):
f = file("/proc/sys/net/ipv6/conf/%s/disable_ipv6" % (self.name,), "w")
with f:
f.write("0" if value else "1") # Note inversion!
@property
def ip_forwarding (self):
f = file("/proc/sys/net/ipv4/conf/%s/forwarding" % (self.name,), "r")
with f:
return f.read()[0] == "1"
@ip_forwarding.setter
def ip_forwarding (self, value):
f = file("/proc/sys/net/ipv4/conf/%s/forwarding" % (self.name,), "w")
with f:
f.write("1" if value else "0")
@property
def mtu (self):
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
ifr = struct.pack(str(IFNAMESIZ) + "s", self.name)
ifr += "\0" * (IFREQ_SIZE - len(ifr))
ret = ioctl(sock, SIOCGIFMTU, ifr)
return struct.unpack("I", ret[IFNAMESIZ:][:4])[0]
@mtu.setter
def mtu (self, value):
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
ifr = struct.pack(str(IFNAMESIZ) + "sI", self.name, value)
ifr += "\0" * (IFREQ_SIZE - len(ifr))
ret = ioctl(sock, SIOCSIFMTU, ifr)
@property
def flags (self):
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
ifr = struct.pack(str(IFNAMESIZ) + "s", self.name)
ifr += "\0" * (IFREQ_SIZE - len(ifr))
ret = ioctl(sock, SIOCGIFFLAGS, ifr)
return struct.unpack("H", ret[IFNAMESIZ:IFNAMESIZ+2])[0]
@flags.setter
def flags (self, value):
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
ifr = struct.pack(str(IFNAMESIZ) + "sH", self.name, value)
ifr += "\0" * (IFREQ_SIZE - len(ifr))
ret = ioctl(sock, SIOCSIFFLAGS, ifr)
def set_flags (self, flags, on=True):
if on:
self.flags |= flags
else:
self.unset_flags(flags)
def unset_flags (self, flags):
self.flags = self.flags & (flags ^ 0xffFF)
@property
def promiscuous (self):
return bool(self.flags & IFF_PROMISC)
@promiscuous.setter
def promiscuous (self, value):
self.set_flags(IFF_PROMISC, value)
@property
def is_up (self):
return (self.flags & IFF_UP) != 0
@is_up.setter
def is_up (self, value):
self.set_flags(IFF_UP, value)
@property
def is_running (self):
return (self.flags & IFF_RUNNING) != 0
@property
def arp_enabled (self):
return (self.flags & IFF_NOARP) == 0
@arp_enabled.setter
def arp_enabled (self, value):
self.set_flags(IFF_NOARP, not value)
@property
def ip_addr (self):
try:
return self._ioctl_get_ipv4(SIOCGIFADDR)
except IOError as e:
if e.errno == 99: return None
raise
@ip_addr.setter
def ip_addr (self, value):
return self._ioctl_set_ipv4(SIOCSIFADDR, value)
@property
def netmask (self):
try:
return self._ioctl_get_ipv4(SIOCGIFNETMASK)
except IOError as e:
if e.errno == 99: return None
raise
@netmask.setter
def netmask (self, value):
return self._ioctl_set_ipv4(SIOCSIFNETMASK, value)
@property
def broadcast_addr (self):
try:
return self._ioctl_get_ipv4(SIOCGIFBRDADDR)
except IOError as e:
if e.errno == 99: return None
raise
@broadcast_addr.setter
def broadcast_addr (self, value):
return self._ioctl_set_ipv4(SIOCSIFBRDADDR, value)
@property
def eth_addr (self):
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
ifr = struct.pack(str(IFNAMESIZ) + "s", self.name)
ifr += "\0" * (IFREQ_SIZE - len(ifr))
ret = ioctl(sock, SIOCGIFHWADDR, ifr)
sa = ret[IFNAMESIZ:] # sockaddr
return self._get_eth(sa)
@eth_addr.setter
def eth_addr (self, value):
value = EthAddr(value).raw
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
ifr = struct.pack(str(IFNAMESIZ) + "sH", self.name, ARPHRD_ETHER)
ifr += value # Append to sockaddr
ifr += "\0" * (IFREQ_SIZE - len(ifr))
ret = ioctl(sock, SIOCSIFHWADDR, ifr)
def _ioctl_get_ipv4 (self, which):
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
ifr = struct.pack(str(IFNAMESIZ) + "s", self.name)
ifr += "\0" * (IFREQ_SIZE - len(ifr))
ret = ioctl(sock, which, ifr)
return self._get_ipv4(ret[IFNAMESIZ:])
def _ioctl_set_ipv4 (self, which, value):
value = IPAddr(value)
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
ifr = struct.pack(str(IFNAMESIZ) + "sHHI", self.name, socket.AF_INET, 0,
value.toUnsigned(networkOrder=True))
ifr += "\0" * (IFREQ_SIZE - len(ifr))
ret = ioctl(sock, which, ifr)
@staticmethod
def _get_ipv4 (sa):
sa_family = struct.unpack("H", sa[:2])[0]
if sa_family == socket.AF_INET:
return IPAddr(sa[4:8])
else:
raise RuntimeError("Unsupported hardware type %s for %s (expected %s)"
% (sa_family, self, socket.AF_INET))
@staticmethod
def _get_eth (sa):
sa_family = struct.unpack("H", sa[:2])[0]
if sa_family == ARPHRD_ETHER:
return EthAddr(sa[2:8])
else:
raise RuntimeError("Unsupported hardware type %s (expected %s)"
% (sa_family, ARPHRD_ETHER))
def add_default_route (self, *args, **kw):
return self.add_route("0.0.0.0/0", *args, **kw)
def add_route (self, network, gateway=None, dev=(), metric=0):
"""
Add routing table entry
If dev is unspecified, it defaults to this device
"""
return self._add_del_route(network, gateway, dev, metric, SIOCADDRT)
def del_route (self, network, gateway=None, dev=(), metric=0):
"""
Remove a routing table entry
If dev is unspecified, it defaults to this device
"""
return self._add_del_route(network, gateway, dev, metric, SIOCDELRT)
def _add_del_route (self, network, gateway=None, dev=(), metric=0,
command=None):
"""
Add or remove a routing table entry
If dev is unspecified, it defaults to this device
"""
r = rtentry()
if isinstance(network, tuple):
addr,mask = network
addr = str(addr)
if isinstance(mask, int):
mask = cidr_to_netmask(mask)
mask = str(mask)
network = "%s/%s" % (addr,mask)
host = False
if isinstance(network, IPAddr) or (isinstance(network, str)
and "/" not in network):
host = True
network,bits = parse_cidr(network)
r.rt_dst = network
r.rt_genmask = cidr_to_netmask(bits)
if gateway is not None:
r.rt_gateway = IPAddr(gateway)
r.rt_flags |= r.RTF_GATEWAY
r.rt_metric = metric
if dev is (): dev = self
if isinstance(dev, Interface): dev = dev.name
if dev: r.rt_dev = dev
if host: r.rt_flags |= r.RTF_HOST
r.rt_flags |= r.RTF_UP
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
rv = ioctl(sock, command, r.pack())
class TunTap (object):
"""
Simple wrapper for tun/tap interfaces
Looks like a file-like object. You should be able to read/write it, select
on it, etc.
"""
def __init__ (self, name=None, tun=False, raw=False):
"""
Create tun or tap
By default, it creates a new tun or tap with a default name. If you
specify a name, it will either try to create it (if it doesn't exist),
or try to use an existing interface (for which you must have permission).
Defaults to tap (Ethernet) mode. Specify tun=True for tun (IP) mode.
Specify raw=True to skip the 32 bits of flag/protocol metadata.
"""
if name is None: name = ""
openflags = os.O_RDWR
try:
      openflags |= os.O_BINARY  # fixed typo: was "openflow", which silently failed inside this try/except
except:
pass
self._f = os.open("/dev/net/tun", openflags)
# an ifreq is IFREQ_SIZE bytes long, starting with an interface name
# (IFNAMESIZ bytes) followed by a big union.
self.is_tun = tun
self.is_tap = not tun
self.is_raw = raw
flags = 0
if tun: flags |= IFF_TUN
else: flags |= IFF_TAP
if raw: flags |= IFF_NO_PI
ifr = struct.pack(str(IFNAMESIZ) + "sH", name, flags)
ifr += "\0" * (IFREQ_SIZE - len(ifr))
ret = ioctl(self.fileno(), TUNSETIFF, ifr)
self.name = ret[:IFNAMESIZ]
iflags = flags
ifr = struct.pack(str(IFNAMESIZ) + "sH", name, 0)
ifr += "\0" * (IFREQ_SIZE - len(ifr))
ret = ioctl(self.fileno(), TUNGETIFF, ifr)
flags = struct.unpack("H", ret[IFNAMESIZ:IFNAMESIZ+2])[0]
self.is_tun = (flags & IFF_TUN) == IFF_TUN
self.is_tap = not self.is_tun
#self.is_raw = (flags & IFF_NO_PI) == IFF_NO_PI
def fileno (self):
return self._f
def write (self, data):
return os.write(self.fileno(), data)
def read (self, n):
return os.read(self.fileno(), n)
def close (self):
return os.close(self.fileno())
@property
def eth_addr (self):
return Interface(self.name).eth_addr
class RXData (Event):
"""
Event fired when an interface receives data
"""
def __init__ (self, interface, data):
self.interface = interface
self.data = data
class PCapInterface (Interface, EventMixin):
_eventMixin_events = set([
RXData,
])
def __init__ (self, name):
Interface.__init__(self, name)
EventMixin.__init__(self)
self._q = Queue()
p = PCap(name, callback=self._pcap_cb, start=False)
p.set_direction(True, False) # Incoming, not outgoing
p.start()
self.pcap = p
core.add_listener(self._handle_GoingDownEvent)
def _handle_GoingDownEvent (self, event):
self.close()
def send (self, data):
if self.pcap is None: return
self.pcap.inject(data)
def _pcap_cb (self, obj, data, sec, usec, length):
"""
Handles incoming data from pcap
This may not be on the right thread, so we just push it to a thread-safe
queue and poke the cooperative thread, which will pop it later.
"""
do_read = self._q.empty()
self._q.put((obj,data))
if do_read: core.callLater(self._queue_read)
def _queue_read (self):
anything = False
    for _ in range(10): # at most 10 at once
try:
data = self._q.get(False)
self._q.task_done()
anything = True
except:
break
pcap,data = data
self.raiseEventNoErrors(RXData, self, data)
if anything:
# Check for remainders later
core.callLater(self._queue_read)
def __del__ (self):
self.close()
def close (self):
if self.pcap:
self.pcap.close()
self.pcap = None
class TapInterface (Interface, EventMixin):
_eventMixin_events = set([
RXData,
])
io_loop = None
max_read_size = 1600
default_send_protocol = None
def __init__ (self, name="", tun=False, raw=False, protocol=None):
self.tap = None
self.last_flags = None
self.last_protocol = None
if protocol: self.default_send_protocol = protocol
self.io_loop = ReadLoop.singleton
Interface.__init__(self, name)
EventMixin.__init__(self)
self.tap = TunTap(name, raw=raw, tun=tun)
if not name: self._name = self.tap.name
self.io_loop.add(self)
@property
def is_tap (self):
return self.tap.is_tap
@property
def is_tun (self):
return self.tap.is_tun
def send (self, data, flags=0, protocol=None):
if not self.tap.is_raw:
if protocol is None: protocol = self.default_send_protocol or 0
#FIXME: In the "0" case above, should we fall back to using the Etherype
# in the packet?
if flags or protocol:
flags = struct.pack("!HH", flags, protocol) # Flags reversed?
else:
flags = "\0\0\0\0"
data = flags + data
self.tap.write(data)
def _do_rx (self):
data = self.tap.read(self.max_read_size)
if not self.tap.is_raw:
flags,proto = struct.unpack("!HH", data[:4])
#FIXME: This may invert the flags...
self.last_flags = flags
self.last_protocol = proto
data = data[4:] # Cut off header
self.raiseEvent(RXData, self, data)
def fileno (self):
# Support fileno so that this can be used in IO loop directly
return self.tap.fileno()
def close (self):
if self.tap:
self.tap.close()
self.tap = None
self.io_loop.remove(self)
def __del__ (self):
self.close()
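# --- Hedged usage sketch (not part of the original module) ---
# Rough illustration of how a TapInterface might be consumed from a POX
# component; the launch function and handler below are hypothetical, and the
# sketch assumes POX's cooperative loop (core) is already running.
#
# def launch (ifname=""):
#   def _handle_rx (event):
#     print("got %s bytes on %s" % (len(event.data), event.interface))
#   tap = TapInterface(ifname)
#   tap.addListener(RXData, _handle_rx)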
| 27.034954
| 78
| 0.652426
| 15,027
| 0.844736
| 0
| 0
| 4,902
| 0.275564
| 0
| 0
| 3,910
| 0.219799
|
c47c240782affe27a9180c58c326bd1012c03ca6
| 5,754
|
py
|
Python
|
icarus_simulator/strategies/atk_geo_constraint/geo_constr_strat.py
|
RubenFr/ICARUS-framework
|
e57a1f50c3bb9522b2a279fee6b625628afd056f
|
[
"MIT"
] | 5
|
2021-08-31T08:07:41.000Z
|
2022-01-04T02:09:25.000Z
|
icarus_simulator/strategies/atk_geo_constraint/geo_constr_strat.py
|
RubenFr/ICARUS-framework
|
e57a1f50c3bb9522b2a279fee6b625628afd056f
|
[
"MIT"
] | 3
|
2021-09-23T09:06:35.000Z
|
2021-12-08T04:53:01.000Z
|
icarus_simulator/strategies/atk_geo_constraint/geo_constr_strat.py
|
RubenFr/ICARUS-framework
|
e57a1f50c3bb9522b2a279fee6b625628afd056f
|
[
"MIT"
] | 2
|
2022-01-19T17:50:56.000Z
|
2022-03-06T18:59:41.000Z
|
# 2020 Tommaso Ciussani and Giacomo Giuliari
import os
import json
import numpy as np
from typing import Set, List
from geopy.distance import great_circle
from scipy.spatial import cKDTree  # public import path; scipy.spatial.ckdtree is deprecated
from shapely.geometry import Polygon, shape, Point
from icarus_simulator.sat_core.coordinate_util import geo2cart
from icarus_simulator.strategies.atk_geo_constraint.base_geo_constraint_strat import (
BaseGeoConstraintStrat,
)
from icarus_simulator.structure_definitions import GridPos
dirname = os.path.dirname(__file__)
strategies_dirname = os.path.split(dirname)[0]
library_dirname = os.path.split(strategies_dirname)[0]
data_dirname = os.path.join(library_dirname, "data")
COUNTRIES_FILE: str = os.path.join(data_dirname, "natural_earth_world_small.geo.json")
class GeoConstrStrat(BaseGeoConstraintStrat):
def __init__(self, geo_names: List[str], **kwargs):
super().__init__()
self.geo_names = geo_names
if len(kwargs) > 0:
pass # Appease the unused param inspection
@property
def name(self) -> str:
return "geo"
@property
def param_description(self) -> str:
return ",".join(self.geo_names)
def compute(self, grid_pos: GridPos) -> Set[int]:
allowed = set()
geo_data = load_country_geojson()
for s in self.geo_names:
allowed.update(get_allowed_gridpoints(s, grid_pos, geo_data))
return allowed
# noinspection PyTypeChecker
def get_allowed_gridpoints(geo_location: str, grid_pos: GridPos, geo_data) -> Set[int]:
# Get a list of all possible source points
if geo_location in geo_data["countries"]:
indices = [geo_data["countries"][geo_location]]
elif geo_location in geo_data["subregions"]:
indices = geo_data["subregions"][geo_location]
elif geo_location in geo_data["continents"]:
indices = geo_data["continents"][geo_location]
else:
raise ValueError("Invalid geographic constraint")
geometries = [geo_data["geometries"][index] for index in indices]
allowed_points = set()
# Create a unique shape, union of all shapes in the region, and take the points include within
shp = Polygon()
for idx, geo in enumerate(geometries):
shp = shp.union(shape(geo))
for idx, pos in grid_pos.items():
if Point(pos.lat, pos.lon).within(shp):
allowed_points.add(idx)
# Extract the border points
x, y = [], []
    if shp.geom_type == "MultiPolygon":
        for shap in shp.geoms:
            x1, y1 = shap.exterior.xy
            x.extend(x1)
            y.extend(y1)
    else:
        x1, y1 = shp.exterior.xy
        x.extend(x1)
        y.extend(y1)
# plotter.plot_points({idx: GeodeticPosInfo({"lat": x[idx], "lon": y[idx], "elev": 0.0})
# for idx in range(len(x))}, "GRID", "TEST", "aa", "asas",)
grid_cart = np.zeros((len(grid_pos), 3))
grid_map = {}
i = 0
for idx, pos in grid_pos.items():
grid_map[i] = idx
grid_cart[i] = geo2cart({"elev": 0, "lon": pos.lon, "lat": pos.lat})
i += 1
# Put the homogeneous grid into a KD-tree and query the border points to include also point slightly in the sea
kd = cKDTree(grid_cart)
for idx in range(len(x)):
_, closest_grid_idx = kd.query(
geo2cart({"elev": 0, "lon": y[idx], "lat": x[idx]}), k=1
)
grid_id = grid_map[closest_grid_idx]
if (
great_circle(
(grid_pos[grid_id].lat, grid_pos[grid_id].lon), (x[idx], y[idx])
).meters
< 300000
):
# 300000 -> number elaborated to keep the out-of-coast values without including wrong points
allowed_points.add(grid_map[closest_grid_idx])
return allowed_points
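# --- Hedged illustration (not part of the original module) ---
# The "snap border points to the closest grid point" step above, shown in
# isolation with toy cartesian coordinates instead of geo2cart() output:
#   grid = np.array([[0.0, 0.0, 1.0], [0.0, 1.0, 0.0], [1.0, 0.0, 0.0]])
#   kd = cKDTree(grid)
#   dist, idx = kd.query([0.1, 0.0, 0.9], k=1)  # idx == 0 -> nearest grid point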
# noinspection PyTypeChecker
def load_country_geojson():
new_data = {"geometries": [], "countries": {}, "continents": {}, "subregions": {}}
with open(COUNTRIES_FILE, encoding="utf-8") as f:
data = json.load(f)
new_data["geometries"] = [""] * len(data["features"])
for idx, feature in enumerate(data["features"]):
props = feature["properties"]
code = props["iso_a3"]
if code == "-99":
continue
continent = props["continent"]
subregion = props["region_wb"]
subregion2 = props["subregion"]
if continent not in new_data["continents"]:
new_data["continents"][continent] = []
if subregion not in new_data["subregions"]:
new_data["subregions"][subregion] = []
if subregion2 not in new_data["subregions"]:
new_data["subregions"][subregion2] = []
new_data["continents"][continent].append(idx)
new_data["subregions"][subregion].append(idx)
new_data["subregions"][subregion2].append(idx)
new_data["countries"][code] = idx
new_data["geometries"][idx] = feature["geometry"]
geom = new_data["geometries"][idx]
if geom["type"] == "MultiPolygon":
for l1 in range(len(geom["coordinates"])):
for l2 in range(len(geom["coordinates"][l1])):
for l3 in range(len(geom["coordinates"][l1][l2])):
geom["coordinates"][l1][l2][l3] = geom["coordinates"][l1][l2][
l3
][::-1]
elif geom["type"] == "Polygon":
for l1 in range(len(geom["coordinates"])):
for l2 in range(len(geom["coordinates"][l1])):
geom["coordinates"][l1][l2] = geom["coordinates"][l1][l2][::-1]
print(f"Available subregions: {list(new_data['subregions'].keys())}")
return new_data
| 38.36
| 115
| 0.616093
| 656
| 0.114008
| 0
| 0
| 146
| 0.025374
| 0
| 0
| 1,395
| 0.24244
|
c47c8df17ea394b09ef2defebfcd36f91bad20ef
| 8,861
|
py
|
Python
|
grafeas/models/deployable_deployment_details.py
|
nyc/client-python
|
e73eab8953abf239305080673f7c96a54b776f72
|
[
"Apache-2.0"
] | null | null | null |
grafeas/models/deployable_deployment_details.py
|
nyc/client-python
|
e73eab8953abf239305080673f7c96a54b776f72
|
[
"Apache-2.0"
] | null | null | null |
grafeas/models/deployable_deployment_details.py
|
nyc/client-python
|
e73eab8953abf239305080673f7c96a54b776f72
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Grafeas API
An API to insert and retrieve annotations on cloud artifacts. # noqa: E501
OpenAPI spec version: v1alpha1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from grafeas.models.deployment_details_platform import DeploymentDetailsPlatform # noqa: F401,E501
class DeployableDeploymentDetails(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'user_email': 'str',
'deploy_time': 'datetime',
'undeploy_time': 'datetime',
'config': 'str',
'address': 'str',
'resource_uri': 'list[str]',
'platform': 'DeploymentDetailsPlatform'
}
attribute_map = {
'user_email': 'user_email',
'deploy_time': 'deploy_time',
'undeploy_time': 'undeploy_time',
'config': 'config',
'address': 'address',
'resource_uri': 'resource_uri',
'platform': 'platform'
}
def __init__(self, user_email=None, deploy_time=None, undeploy_time=None, config=None, address=None, resource_uri=None, platform=None): # noqa: E501
"""DeployableDeploymentDetails - a model defined in Swagger""" # noqa: E501
self._user_email = None
self._deploy_time = None
self._undeploy_time = None
self._config = None
self._address = None
self._resource_uri = None
self._platform = None
self.discriminator = None
if user_email is not None:
self.user_email = user_email
if deploy_time is not None:
self.deploy_time = deploy_time
if undeploy_time is not None:
self.undeploy_time = undeploy_time
if config is not None:
self.config = config
if address is not None:
self.address = address
if resource_uri is not None:
self.resource_uri = resource_uri
if platform is not None:
self.platform = platform
@property
def user_email(self):
"""Gets the user_email of this DeployableDeploymentDetails. # noqa: E501
Identity of the user that triggered this deployment. # noqa: E501
:return: The user_email of this DeployableDeploymentDetails. # noqa: E501
:rtype: str
"""
return self._user_email
@user_email.setter
def user_email(self, user_email):
"""Sets the user_email of this DeployableDeploymentDetails.
Identity of the user that triggered this deployment. # noqa: E501
:param user_email: The user_email of this DeployableDeploymentDetails. # noqa: E501
:type: str
"""
self._user_email = user_email
@property
def deploy_time(self):
"""Gets the deploy_time of this DeployableDeploymentDetails. # noqa: E501
Beginning of the lifetime of this deployment. # noqa: E501
:return: The deploy_time of this DeployableDeploymentDetails. # noqa: E501
:rtype: datetime
"""
return self._deploy_time
@deploy_time.setter
def deploy_time(self, deploy_time):
"""Sets the deploy_time of this DeployableDeploymentDetails.
Beginning of the lifetime of this deployment. # noqa: E501
:param deploy_time: The deploy_time of this DeployableDeploymentDetails. # noqa: E501
:type: datetime
"""
self._deploy_time = deploy_time
@property
def undeploy_time(self):
"""Gets the undeploy_time of this DeployableDeploymentDetails. # noqa: E501
End of the lifetime of this deployment. # noqa: E501
:return: The undeploy_time of this DeployableDeploymentDetails. # noqa: E501
:rtype: datetime
"""
return self._undeploy_time
@undeploy_time.setter
def undeploy_time(self, undeploy_time):
"""Sets the undeploy_time of this DeployableDeploymentDetails.
End of the lifetime of this deployment. # noqa: E501
:param undeploy_time: The undeploy_time of this DeployableDeploymentDetails. # noqa: E501
:type: datetime
"""
self._undeploy_time = undeploy_time
@property
def config(self):
"""Gets the config of this DeployableDeploymentDetails. # noqa: E501
Configuration used to create this deployment. # noqa: E501
:return: The config of this DeployableDeploymentDetails. # noqa: E501
:rtype: str
"""
return self._config
@config.setter
def config(self, config):
"""Sets the config of this DeployableDeploymentDetails.
Configuration used to create this deployment. # noqa: E501
:param config: The config of this DeployableDeploymentDetails. # noqa: E501
:type: str
"""
self._config = config
@property
def address(self):
"""Gets the address of this DeployableDeploymentDetails. # noqa: E501
Address of the runtime element hosting this deployment. # noqa: E501
:return: The address of this DeployableDeploymentDetails. # noqa: E501
:rtype: str
"""
return self._address
@address.setter
def address(self, address):
"""Sets the address of this DeployableDeploymentDetails.
Address of the runtime element hosting this deployment. # noqa: E501
:param address: The address of this DeployableDeploymentDetails. # noqa: E501
:type: str
"""
self._address = address
@property
def resource_uri(self):
"""Gets the resource_uri of this DeployableDeploymentDetails. # noqa: E501
Output only. Resource URI for the artifact being deployed taken from the deployable field with the same name. # noqa: E501
:return: The resource_uri of this DeployableDeploymentDetails. # noqa: E501
:rtype: list[str]
"""
return self._resource_uri
@resource_uri.setter
def resource_uri(self, resource_uri):
"""Sets the resource_uri of this DeployableDeploymentDetails.
Output only. Resource URI for the artifact being deployed taken from the deployable field with the same name. # noqa: E501
:param resource_uri: The resource_uri of this DeployableDeploymentDetails. # noqa: E501
:type: list[str]
"""
self._resource_uri = resource_uri
@property
def platform(self):
"""Gets the platform of this DeployableDeploymentDetails. # noqa: E501
Platform hosting this deployment. # noqa: E501
:return: The platform of this DeployableDeploymentDetails. # noqa: E501
:rtype: DeploymentDetailsPlatform
"""
return self._platform
@platform.setter
def platform(self, platform):
"""Sets the platform of this DeployableDeploymentDetails.
Platform hosting this deployment. # noqa: E501
:param platform: The platform of this DeployableDeploymentDetails. # noqa: E501
:type: DeploymentDetailsPlatform
"""
self._platform = platform
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, DeployableDeploymentDetails):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
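# --- Hedged usage sketch (not part of the generated module) ---
# Constructing a details object and serialising it with the generated helpers;
# the field values below are hypothetical.
if __name__ == "__main__":
    details = DeployableDeploymentDetails(
        user_email="dev@example.com",
        config="deployment.yaml",
        resource_uri=["gcr.io/example/image"],
    )
    print(details.to_dict())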
| 31.091228
| 153
| 0.623632
| 8,473
| 0.956213
| 0
| 0
| 5,003
| 0.564609
| 0
| 0
| 5,055
| 0.570477
|
c47e515541dd250050db71c9315d649403e7ce2b
| 1,575
|
py
|
Python
|
lib/python/test/test_trans.py
|
qxo/cat
|
08170af3c8e2ae3724036833d67312964721c99b
|
[
"Apache-2.0"
] | 5
|
2018-12-13T17:46:39.000Z
|
2022-03-29T02:07:47.000Z
|
lib/python/test/test_trans.py
|
qxo/cat
|
08170af3c8e2ae3724036833d67312964721c99b
|
[
"Apache-2.0"
] | 42
|
2019-12-08T18:41:13.000Z
|
2021-08-28T13:08:55.000Z
|
lib/python/test/test_trans.py
|
qxo/cat
|
08170af3c8e2ae3724036833d67312964721c99b
|
[
"Apache-2.0"
] | 8
|
2018-12-25T04:19:01.000Z
|
2021-03-24T17:02:44.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Author: stdrickforce (Tengyuan Fan)
# Email: <stdrickforce@gmail.com> <fantengyuan@baixing.com>
import cat
import time
def ignore_exception(func):
def wraps(*args, **kwargs):
try:
return func(*args, **kwargs)
except Exception:
pass
return wraps
@ignore_exception
@cat.transaction("Trans", "T1")
def test1():
'''
Use via decorator
'''
    print(1 / 0) # NOTE will cause a ZeroDivisionError
def test2():
'''
Use via context manager
'''
def do_something():
import random
if random.random() < 0.1:
raise Exception("error occured!")
with cat.Transaction("Trans", "T2") as t:
cat.log_event("Event", "E2")
try:
do_something()
except Exception:
t.set_status(cat.CAT_ERROR)
t.add_data("context-manager")
t.add_data("foo", "bar")
def test3():
try:
trans = cat.Transaction("Trans", "T3")
trans.add_data("content")
trans.add_data("key", "val")
trans.set_status("error")
trans.set_duration(500)
trans.set_duration_start(time.time() * 1000 - 30 * 1000)
trans.set_timestamp(time.time() * 1000 - 30 * 1000)
finally:
# NOTE don't forget to complete the transaction!
trans.complete()
if __name__ == '__main__':
cat.init("pycat", debug=True, logview=False)
for i in range(100):
test1()
test2()
test3()
time.sleep(0.01)
time.sleep(1)
| 22.183099
| 64
| 0.572698
| 0
| 0
| 0
| 0
| 158
| 0.100317
| 0
| 0
| 429
| 0.272381
|
c47eb0be6f206f7a309aab7d8baf760825081212
| 19,781
|
py
|
Python
|
src/ui/ui_hw_recovery_wdg.py
|
frosted97/dash-masternode-tool
|
d824740309ab878d745e41d39f274e952111542f
|
[
"MIT"
] | 75
|
2017-03-20T06:33:14.000Z
|
2022-02-15T16:16:45.000Z
|
src/ui/ui_hw_recovery_wdg.py
|
frosted97/dash-masternode-tool
|
d824740309ab878d745e41d39f274e952111542f
|
[
"MIT"
] | 42
|
2017-10-25T06:34:54.000Z
|
2022-02-10T20:53:46.000Z
|
src/ui/ui_hw_recovery_wdg.py
|
frosted97/dash-masternode-tool
|
d824740309ab878d745e41d39f274e952111542f
|
[
"MIT"
] | 98
|
2017-03-20T05:27:36.000Z
|
2022-03-20T05:03:08.000Z
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file ui_hw_recovery_wdg.ui
#
# Created by: PyQt5 UI code generator
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_WdgRecoverHw(object):
def setupUi(self, WdgRecoverHw):
WdgRecoverHw.setObjectName("WdgRecoverHw")
WdgRecoverHw.resize(587, 352)
self.verticalLayout_4 = QtWidgets.QVBoxLayout(WdgRecoverHw)
self.verticalLayout_4.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_4.setSpacing(6)
self.verticalLayout_4.setObjectName("verticalLayout_4")
self.pages = QtWidgets.QStackedWidget(WdgRecoverHw)
self.pages.setObjectName("pages")
self.page0 = QtWidgets.QWidget()
self.page0.setObjectName("page0")
self.verticalLayout = QtWidgets.QVBoxLayout(self.page0)
self.verticalLayout.setContentsMargins(0, 0, 0, 0)
self.verticalLayout.setSpacing(6)
self.verticalLayout.setObjectName("verticalLayout")
self.label_2 = QtWidgets.QLabel(self.page0)
self.label_2.setObjectName("label_2")
self.verticalLayout.addWidget(self.label_2)
self.gbSeedSource = QtWidgets.QGroupBox(self.page0)
self.gbSeedSource.setTitle("")
self.gbSeedSource.setFlat(False)
self.gbSeedSource.setObjectName("gbSeedSource")
self.verticalLayout_5 = QtWidgets.QVBoxLayout(self.gbSeedSource)
self.verticalLayout_5.setContentsMargins(6, 6, 6, 6)
self.verticalLayout_5.setSpacing(8)
self.verticalLayout_5.setObjectName("verticalLayout_5")
self.rbSeedSourceHwScreen = QtWidgets.QRadioButton(self.gbSeedSource)
self.rbSeedSourceHwScreen.setChecked(False)
self.rbSeedSourceHwScreen.setObjectName("rbSeedSourceHwScreen")
self.verticalLayout_5.addWidget(self.rbSeedSourceHwScreen)
self.rbSeedSourceAppWords = QtWidgets.QRadioButton(self.gbSeedSource)
self.rbSeedSourceAppWords.setChecked(False)
self.rbSeedSourceAppWords.setObjectName("rbSeedSourceAppWords")
self.verticalLayout_5.addWidget(self.rbSeedSourceAppWords)
self.rbSeedSourceAppEntropy = QtWidgets.QRadioButton(self.gbSeedSource)
self.rbSeedSourceAppEntropy.setObjectName("rbSeedSourceAppEntropy")
self.verticalLayout_5.addWidget(self.rbSeedSourceAppEntropy)
self.verticalLayout.addWidget(self.gbSeedSource)
self.lblActionTypeMessage = QtWidgets.QLabel(self.page0)
self.lblActionTypeMessage.setWordWrap(True)
self.lblActionTypeMessage.setObjectName("lblActionTypeMessage")
self.verticalLayout.addWidget(self.lblActionTypeMessage)
spacerItem = QtWidgets.QSpacerItem(20, 288, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.verticalLayout.addItem(spacerItem)
self.pages.addWidget(self.page0)
self.page1 = QtWidgets.QWidget()
self.page1.setObjectName("page1")
self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.page1)
self.verticalLayout_2.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_2.setSpacing(6)
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.label = QtWidgets.QLabel(self.page1)
self.label.setObjectName("label")
self.verticalLayout_2.addWidget(self.label)
self.gbNumberOfMnemonicWords = QtWidgets.QGroupBox(self.page1)
self.gbNumberOfMnemonicWords.setTitle("")
self.gbNumberOfMnemonicWords.setObjectName("gbNumberOfMnemonicWords")
self.verticalLayout_8 = QtWidgets.QVBoxLayout(self.gbNumberOfMnemonicWords)
self.verticalLayout_8.setContentsMargins(6, 6, 6, 6)
self.verticalLayout_8.setSpacing(8)
self.verticalLayout_8.setObjectName("verticalLayout_8")
self.rbWordsCount24 = QtWidgets.QRadioButton(self.gbNumberOfMnemonicWords)
self.rbWordsCount24.setChecked(True)
self.rbWordsCount24.setObjectName("rbWordsCount24")
self.verticalLayout_8.addWidget(self.rbWordsCount24)
self.rbWordsCount18 = QtWidgets.QRadioButton(self.gbNumberOfMnemonicWords)
self.rbWordsCount18.setObjectName("rbWordsCount18")
self.verticalLayout_8.addWidget(self.rbWordsCount18)
self.rbWordsCount12 = QtWidgets.QRadioButton(self.gbNumberOfMnemonicWords)
self.rbWordsCount12.setObjectName("rbWordsCount12")
self.verticalLayout_8.addWidget(self.rbWordsCount12)
self.verticalLayout_2.addWidget(self.gbNumberOfMnemonicWords)
self.lblPage1Message = QtWidgets.QLabel(self.page1)
self.lblPage1Message.setText("")
self.lblPage1Message.setWordWrap(True)
self.lblPage1Message.setObjectName("lblPage1Message")
self.verticalLayout_2.addWidget(self.lblPage1Message)
spacerItem1 = QtWidgets.QSpacerItem(20, 310, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.verticalLayout_2.addItem(spacerItem1)
self.pages.addWidget(self.page1)
self.page2 = QtWidgets.QWidget()
self.page2.setObjectName("page2")
self.verticalLayout_6 = QtWidgets.QVBoxLayout(self.page2)
self.verticalLayout_6.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_6.setSpacing(6)
self.verticalLayout_6.setObjectName("verticalLayout_6")
self.lblStep1HexEntropy = QtWidgets.QLabel(self.page2)
self.lblStep1HexEntropy.setObjectName("lblStep1HexEntropy")
self.verticalLayout_6.addWidget(self.lblStep1HexEntropy)
self.edtHexEntropy = QtWidgets.QLineEdit(self.page2)
self.edtHexEntropy.setObjectName("edtHexEntropy")
self.verticalLayout_6.addWidget(self.edtHexEntropy)
spacerItem2 = QtWidgets.QSpacerItem(20, 365, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.verticalLayout_6.addItem(spacerItem2)
self.pages.addWidget(self.page2)
self.page3 = QtWidgets.QWidget()
self.page3.setObjectName("page3")
self.verticalLayout_7 = QtWidgets.QVBoxLayout(self.page3)
self.verticalLayout_7.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_7.setSpacing(6)
self.verticalLayout_7.setObjectName("verticalLayout_7")
self.lblStepWordListTitle = QtWidgets.QLabel(self.page3)
self.lblStepWordListTitle.setWordWrap(True)
self.lblStepWordListTitle.setOpenExternalLinks(True)
self.lblStepWordListTitle.setObjectName("lblStepWordListTitle")
self.verticalLayout_7.addWidget(self.lblStepWordListTitle)
self.pages.addWidget(self.page3)
self.page4 = QtWidgets.QWidget()
self.page4.setObjectName("page4")
self.verticalLayout_3 = QtWidgets.QVBoxLayout(self.page4)
self.verticalLayout_3.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_3.setSpacing(6)
self.verticalLayout_3.setObjectName("verticalLayout_3")
self.lblStep1HexEntropy_2 = QtWidgets.QLabel(self.page4)
self.lblStep1HexEntropy_2.setObjectName("lblStep1HexEntropy_2")
self.verticalLayout_3.addWidget(self.lblStep1HexEntropy_2)
self.gridLayout_2 = QtWidgets.QGridLayout()
self.gridLayout_2.setSpacing(6)
self.gridLayout_2.setObjectName("gridLayout_2")
self.horizontalLayout_3 = QtWidgets.QHBoxLayout()
self.horizontalLayout_3.setObjectName("horizontalLayout_3")
self.lblPinMessage = QtWidgets.QLabel(self.page4)
self.lblPinMessage.setText("")
self.lblPinMessage.setWordWrap(False)
self.lblPinMessage.setObjectName("lblPinMessage")
self.horizontalLayout_3.addWidget(self.lblPinMessage)
self.edtPrimaryPIN = QtWidgets.QLineEdit(self.page4)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.edtPrimaryPIN.sizePolicy().hasHeightForWidth())
self.edtPrimaryPIN.setSizePolicy(sizePolicy)
self.edtPrimaryPIN.setLayoutDirection(QtCore.Qt.LeftToRight)
self.edtPrimaryPIN.setEchoMode(QtWidgets.QLineEdit.Password)
self.edtPrimaryPIN.setObjectName("edtPrimaryPIN")
self.horizontalLayout_3.addWidget(self.edtPrimaryPIN)
self.btnShowPIN = QtWidgets.QToolButton(self.page4)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.btnShowPIN.sizePolicy().hasHeightForWidth())
self.btnShowPIN.setSizePolicy(sizePolicy)
self.btnShowPIN.setMinimumSize(QtCore.QSize(21, 21))
self.btnShowPIN.setMaximumSize(QtCore.QSize(21, 21))
self.btnShowPIN.setText("")
self.btnShowPIN.setObjectName("btnShowPIN")
self.horizontalLayout_3.addWidget(self.btnShowPIN)
self.edtSecondaryPIN = QtWidgets.QLineEdit(self.page4)
self.edtSecondaryPIN.setEchoMode(QtWidgets.QLineEdit.Password)
self.edtSecondaryPIN.setObjectName("edtSecondaryPIN")
self.horizontalLayout_3.addWidget(self.edtSecondaryPIN)
self.btnShowSecondaryPIN = QtWidgets.QToolButton(self.page4)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.btnShowSecondaryPIN.sizePolicy().hasHeightForWidth())
self.btnShowSecondaryPIN.setSizePolicy(sizePolicy)
self.btnShowSecondaryPIN.setMinimumSize(QtCore.QSize(21, 21))
self.btnShowSecondaryPIN.setMaximumSize(QtCore.QSize(21, 21))
self.btnShowSecondaryPIN.setText("")
self.btnShowSecondaryPIN.setObjectName("btnShowSecondaryPIN")
self.horizontalLayout_3.addWidget(self.btnShowSecondaryPIN)
self.gridLayout_2.addLayout(self.horizontalLayout_3, 2, 1, 1, 1)
self.chbUsePassphrase = QtWidgets.QCheckBox(self.page4)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.chbUsePassphrase.sizePolicy().hasHeightForWidth())
self.chbUsePassphrase.setSizePolicy(sizePolicy)
self.chbUsePassphrase.setLayoutDirection(QtCore.Qt.RightToLeft)
self.chbUsePassphrase.setText("Use passphrase")
self.chbUsePassphrase.setObjectName("chbUsePassphrase")
self.gridLayout_2.addWidget(self.chbUsePassphrase, 3, 0, 1, 1)
self.chbUsePIN = QtWidgets.QCheckBox(self.page4)
self.chbUsePIN.setLayoutDirection(QtCore.Qt.RightToLeft)
self.chbUsePIN.setChecked(True)
self.chbUsePIN.setObjectName("chbUsePIN")
self.gridLayout_2.addWidget(self.chbUsePIN, 2, 0, 1, 1)
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.setObjectName("horizontalLayout")
self.edtDeviceLabel = QtWidgets.QLineEdit(self.page4)
self.edtDeviceLabel.setPlaceholderText("")
self.edtDeviceLabel.setObjectName("edtDeviceLabel")
self.horizontalLayout.addWidget(self.edtDeviceLabel)
self.gridLayout_2.addLayout(self.horizontalLayout, 1, 1, 1, 1)
self.lblDeviceLabel = QtWidgets.QLabel(self.page4)
self.lblDeviceLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.lblDeviceLabel.setObjectName("lblDeviceLabel")
self.gridLayout_2.addWidget(self.lblDeviceLabel, 1, 0, 1, 1)
self.horizontalLayout_4 = QtWidgets.QHBoxLayout()
self.horizontalLayout_4.setObjectName("horizontalLayout_4")
self.lblPassphraseMessage = QtWidgets.QLabel(self.page4)
self.lblPassphraseMessage.setText("")
self.lblPassphraseMessage.setWordWrap(False)
self.lblPassphraseMessage.setObjectName("lblPassphraseMessage")
self.horizontalLayout_4.addWidget(self.lblPassphraseMessage)
self.edtPassphrase = QtWidgets.QLineEdit(self.page4)
self.edtPassphrase.setEchoMode(QtWidgets.QLineEdit.Password)
self.edtPassphrase.setObjectName("edtPassphrase")
self.horizontalLayout_4.addWidget(self.edtPassphrase)
self.btnShowPassphrase = QtWidgets.QToolButton(self.page4)
self.btnShowPassphrase.setMinimumSize(QtCore.QSize(21, 21))
self.btnShowPassphrase.setMaximumSize(QtCore.QSize(21, 21))
self.btnShowPassphrase.setText("")
self.btnShowPassphrase.setObjectName("btnShowPassphrase")
self.horizontalLayout_4.addWidget(self.btnShowPassphrase)
spacerItem3 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_4.addItem(spacerItem3)
self.gridLayout_2.addLayout(self.horizontalLayout_4, 3, 1, 1, 1)
self.lblDeviceWordsInputType = QtWidgets.QLabel(self.page4)
self.lblDeviceWordsInputType.setAlignment(QtCore.Qt.AlignCenter)
self.lblDeviceWordsInputType.setObjectName("lblDeviceWordsInputType")
self.gridLayout_2.addWidget(self.lblDeviceWordsInputType, 0, 0, 1, 1)
self.gbDeviceWordsInputType = QtWidgets.QGroupBox(self.page4)
self.gbDeviceWordsInputType.setObjectName("gbDeviceWordsInputType")
self.horizontalLayout_2 = QtWidgets.QHBoxLayout(self.gbDeviceWordsInputType)
self.horizontalLayout_2.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.rbScrambledWords = QtWidgets.QRadioButton(self.gbDeviceWordsInputType)
self.rbScrambledWords.setLayoutDirection(QtCore.Qt.LeftToRight)
self.rbScrambledWords.setChecked(True)
self.rbScrambledWords.setObjectName("rbScrambledWords")
self.horizontalLayout_2.addWidget(self.rbScrambledWords)
self.rbWordsMatrix = QtWidgets.QRadioButton(self.gbDeviceWordsInputType)
self.rbWordsMatrix.setLayoutDirection(QtCore.Qt.LeftToRight)
self.rbWordsMatrix.setChecked(False)
self.rbWordsMatrix.setObjectName("rbWordsMatrix")
self.horizontalLayout_2.addWidget(self.rbWordsMatrix)
spacerItem4 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_2.addItem(spacerItem4)
self.gridLayout_2.addWidget(self.gbDeviceWordsInputType, 0, 1, 1, 1)
self.verticalLayout_3.addLayout(self.gridLayout_2)
self.lblOptionsPageMessage = QtWidgets.QLabel(self.page4)
self.lblOptionsPageMessage.setText("")
self.lblOptionsPageMessage.setObjectName("lblOptionsPageMessage")
self.verticalLayout_3.addWidget(self.lblOptionsPageMessage)
spacerItem5 = QtWidgets.QSpacerItem(20, 293, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.verticalLayout_3.addItem(spacerItem5)
self.lblOptionsEntropy = QtWidgets.QLabel(self.page4)
self.lblOptionsEntropy.setStyleSheet("font-size:11px")
self.lblOptionsEntropy.setWordWrap(True)
self.lblOptionsEntropy.setOpenExternalLinks(True)
self.lblOptionsEntropy.setTextInteractionFlags(QtCore.Qt.LinksAccessibleByMouse|QtCore.Qt.TextSelectableByKeyboard|QtCore.Qt.TextSelectableByMouse)
self.lblOptionsEntropy.setObjectName("lblOptionsEntropy")
self.verticalLayout_3.addWidget(self.lblOptionsEntropy)
self.btnPreviewAddresses = QtWidgets.QPushButton(self.page4)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.btnPreviewAddresses.sizePolicy().hasHeightForWidth())
self.btnPreviewAddresses.setSizePolicy(sizePolicy)
self.btnPreviewAddresses.setLayoutDirection(QtCore.Qt.LeftToRight)
self.btnPreviewAddresses.setAutoDefault(False)
self.btnPreviewAddresses.setObjectName("btnPreviewAddresses")
self.verticalLayout_3.addWidget(self.btnPreviewAddresses)
self.pages.addWidget(self.page4)
self.verticalLayout_4.addWidget(self.pages)
self.retranslateUi(WdgRecoverHw)
self.pages.setCurrentIndex(4)
QtCore.QMetaObject.connectSlotsByName(WdgRecoverHw)
def retranslateUi(self, WdgRecoverHw):
_translate = QtCore.QCoreApplication.translate
WdgRecoverHw.setWindowTitle(_translate("WdgRecoverHw", "Form"))
self.label_2.setText(_translate("WdgRecoverHw", "<b>Source of the recovery seed</b>"))
self.rbSeedSourceHwScreen.setText(_translate("WdgRecoverHw", "Recover from seed words using hardware wallet screen (secure)"))
self.rbSeedSourceAppWords.setText(_translate("WdgRecoverHw", "Recover from seed words using in-app editor (convenient but insecure)"))
self.rbSeedSourceAppEntropy.setText(_translate("WdgRecoverHw", "Recover from hexadecimal entropy (insecure)"))
self.lblActionTypeMessage.setText(_translate("WdgRecoverHw", "..."))
self.label.setText(_translate("WdgRecoverHw", "<b>Number of words of the recovery seed</b>"))
self.rbWordsCount24.setText(_translate("WdgRecoverHw", "24"))
self.rbWordsCount18.setText(_translate("WdgRecoverHw", "18"))
self.rbWordsCount12.setText(_translate("WdgRecoverHw", "12"))
self.lblStep1HexEntropy.setText(_translate("WdgRecoverHw", "<b>Enter the hexadecimal entropy of the recovery seed</b>"))
self.edtHexEntropy.setPlaceholderText(_translate("WdgRecoverHw", "32/24/16-byte hexadecimal string"))
self.lblStepWordListTitle.setText(_translate("WdgRecoverHw", "<b>Enter the words of your recovery seed</b>"))
self.lblStep1HexEntropy_2.setText(_translate("WdgRecoverHw", "<b>Tune hardware wallet options as needed</b>"))
self.edtPrimaryPIN.setPlaceholderText(_translate("WdgRecoverHw", "PIN"))
self.btnShowPIN.setToolTip(_translate("WdgRecoverHw", "Show PIN"))
self.edtSecondaryPIN.setToolTip(_translate("WdgRecoverHw", "<html><head/><body><p>This PIN will be used to activate passphrase saved in your Ledger Nano S.</p></body></html>"))
self.edtSecondaryPIN.setPlaceholderText(_translate("WdgRecoverHw", "Secondary PIN"))
self.btnShowSecondaryPIN.setToolTip(_translate("WdgRecoverHw", "Show secondary PIN"))
        self.chbUsePassphrase.setWhatsThis(_translate("WdgRecoverHw", "<html><head/><body><p>Check the link attached <a href=\"dash.org\">dash.org</a></p></body></html>"))
self.chbUsePIN.setText(_translate("WdgRecoverHw", "Use PIN"))
self.lblDeviceLabel.setText(_translate("WdgRecoverHw", "Device label"))
        self.edtPassphrase.setToolTip(_translate("WdgRecoverHw", "<html><head/><body><p>This passphrase (if used) will be saved in your Ledger Nano S device and will be secured with the secondary PIN.</p></body></html>"))
self.edtPassphrase.setPlaceholderText(_translate("WdgRecoverHw", "Passphrase"))
self.btnShowPassphrase.setToolTip(_translate("WdgRecoverHw", "Show passphrase"))
        self.lblDeviceWordsInputType.setText(_translate("WdgRecoverHw", "Input type on device"))
self.rbScrambledWords.setText(_translate("WdgRecoverHw", "Scrambled words"))
self.rbWordsMatrix.setText(_translate("WdgRecoverHw", "Word matrix"))
self.lblOptionsEntropy.setText(_translate("WdgRecoverHw", "Entropy:"))
self.btnPreviewAddresses.setText(_translate("WdgRecoverHw", "Show preview"))
| 64.016181
| 222
| 0.745008
| 19,443
| 0.982913
| 0
| 0
| 0
| 0
| 0
| 0
| 2,711
| 0.137051
|
c47ed8028e53c0742399199be9ea4ca791d59010
| 1,108
|
py
|
Python
|
datahandler/analyser.py
|
ameliecordier/IIK
|
57b40d6b851a1c2369604049d1820e5b572c6227
|
[
"MIT"
] | null | null | null |
datahandler/analyser.py
|
ameliecordier/IIK
|
57b40d6b851a1c2369604049d1820e5b572c6227
|
[
"MIT"
] | null | null | null |
datahandler/analyser.py
|
ameliecordier/IIK
|
57b40d6b851a1c2369604049d1820e5b572c6227
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import csv
from matplotlib import pyplot as plt
from matplotlib.backends.backend_pdf import PdfPages
def isValid(p, ep):
return p in ep.patterns
# CLASS ANALYSER
class Analyser:
"""
    Representation of an analysis result
"""
def __init__(self):
"""
        :param results: holds the analysis results
"""
self.results = []
def addResult(self, result):
"""
        Adds a list of results to the full set of results
        :param result: the result row
:return: None
"""
self.results.append(result)
def __str__(self):
"""
        Displays the results on the standard output
"""
return "Résultats : %r" % self.results
def toFile(self, filename):
with open(filename, "w") as outfile:
            fieldnames = ['idxExpert', 'idxMining', 'pattern expert', 'pattern mining', 'full pattern']
w = csv.DictWriter(outfile, delimiter=";", fieldnames=fieldnames)
w.writeheader()
w.writerows(self.results)
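# Hedged usage sketch (not part of the original module): result rows are assumed to be
# dicts keyed by the fieldnames that toFile() writes; the output path is made up.
if __name__ == "__main__":
    analyser = Analyser()
    analyser.addResult({'idxExpert': 0, 'idxMining': 3,
                        'pattern expert': 'A-B', 'pattern mining': 'A-B-C',
                        'full pattern': 'A-B-C-D'})
    print(analyser)
    analyser.toFile("analysis_results.csv")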
| 25.181818
| 104
| 0.598375
| 921
| 0.82453
| 0
| 0
| 0
| 0
| 0
| 0
| 484
| 0.433303
|
c47ef70151ad606b1f9596045a1960c4c4dec6a6
| 1,948
|
py
|
Python
|
binary_trees/next_right.py
|
xxaxdxcxx/miscellaneous-code
|
cdb88783f39e1b9a89fdb12f7cddfe62619e4357
|
[
"MIT"
] | null | null | null |
binary_trees/next_right.py
|
xxaxdxcxx/miscellaneous-code
|
cdb88783f39e1b9a89fdb12f7cddfe62619e4357
|
[
"MIT"
] | null | null | null |
binary_trees/next_right.py
|
xxaxdxcxx/miscellaneous-code
|
cdb88783f39e1b9a89fdb12f7cddfe62619e4357
|
[
"MIT"
] | null | null | null |
# Definition for binary tree with next pointer.
class TreeLinkNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
self.next = None
class Solution:
# @param root, a tree link node
# @return nothing
def connect(self, root):
node = root
current = None
candidate = None
next_start = None
if node is None:
return
while node is not None:
# loop through nodes in this level, assigning nexts
# assumption: previous level (node's level)
# has all nexts assigned correctly
# assign left's next to right if applicable
if node.left is not None:
# tells loop where to start for next level
if next_start is None:
next_start = node.left
if node.right is not None:
node.left.next = node.right
current = node.right
else:
current = node.left
else:
if node.right is not None:
if next_start is None:
next_start = node.right
current = node.right
else:
node = node.next
continue
while candidate is None:
node = node.next
if node is None:
break
if node.left is None:
if node.right is None:
continue
else:
candidate = node.right
else:
candidate = node.left
current.next = candidate
candidate = None
# end of inner loop, through nodes in a level
if node is None:
node = next_start
next_start = None
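# Hedged usage sketch (not in the original file): builds a minimal two-level tree and
# checks that connect() links the root's children left to right.
if __name__ == "__main__":
    root = TreeLinkNode(1)
    root.left, root.right = TreeLinkNode(2), TreeLinkNode(3)
    Solution().connect(root)
    assert root.left.next is root.right
    assert root.right.next is None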
| 31.419355
| 63
| 0.464579
| 1,896
| 0.973306
| 0
| 0
| 0
| 0
| 0
| 0
| 353
| 0.181211
|
c47f26765a0cb339776a2ad95fc385826831ad79
| 982
|
py
|
Python
|
6.all_species/species_data/merge_species_data.py
|
oaxiom/episcan
|
b6616536d621ff02b92a7678f80b5bfbd38c6dc8
|
[
"MIT"
] | null | null | null |
6.all_species/species_data/merge_species_data.py
|
oaxiom/episcan
|
b6616536d621ff02b92a7678f80b5bfbd38c6dc8
|
[
"MIT"
] | null | null | null |
6.all_species/species_data/merge_species_data.py
|
oaxiom/episcan
|
b6616536d621ff02b92a7678f80b5bfbd38c6dc8
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import sys, os, glob
from glbase3 import *
all_species = glload('species_annotations/species.glb')
newl = []
for file in glob.glob('pep_counts/*.txt'):
oh = open(file, 'rt')
count = int(oh.readline().split()[0])
oh.close()
species_name = os.path.split(file)[1].split('.')[0].lower() # seems a simple rule
assembly_name = os.path.split(file)[1].replace('.txt', '')
if count < 5000:
continue
newl.append({'species': species_name, 'assembly_name': assembly_name, 'num_pep': count})
pep_counts = genelist()
pep_counts.load_list(newl)
all_species = all_species.map(genelist=pep_counts, key='species')
all_species = all_species.removeDuplicates('name')
print(all_species)
all_species = all_species.getColumns(['name', 'species', 'division' ,'num_pep', 'assembly_name'])
all_species.sort('name')
all_species.saveTSV('all_species.tsv')
all_species.save('all_species.glb')
# and add the peptide counts for all species
| 25.179487
| 97
| 0.701629
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 290
| 0.295316
|
c481812f6f75096a79bbca57dd3f97e48ea22078
| 3,845
|
py
|
Python
|
modules/lex_managers/lex_intent_manager.py
|
adamhamden/lex-bot
|
3c21b8d60607950c707b97ff5ba8491d40e31592
|
[
"MIT"
] | null | null | null |
modules/lex_managers/lex_intent_manager.py
|
adamhamden/lex-bot
|
3c21b8d60607950c707b97ff5ba8491d40e31592
|
[
"MIT"
] | null | null | null |
modules/lex_managers/lex_intent_manager.py
|
adamhamden/lex-bot
|
3c21b8d60607950c707b97ff5ba8491d40e31592
|
[
"MIT"
] | null | null | null |
import boto3
from prettytable import PrettyTable
class LexIntentManager:
def __init__(self):
self.client = boto3.client('lex-models')
def create_new_intent(self, intent_name, description="n/a", sample_utterances=[], slot_types=[]):
intent_info = {'name': intent_name,
'description': description,
'sampleUtterances': sample_utterances,
'fulfillmentActivity': {'type':'ReturnIntent'}}
try:
            response = self.client.get_intent(name=intent_name, version='$LATEST')
intent_info['checksum'] = response['checksum']
except self.client.exceptions.NotFoundException:
pass
slots_info = self._slot_type_constructor(slot_types)
intent_info['slots'] = slots_info
self.client.put_intent(**intent_info)
print("Successfully created intent {}".format(intent_name))
def get_intent_list(self):
response = self.client.get_intents()
intent_list = []
for intent in response['intents']:
            intent_list.append(intent['name'])
        return intent_list
def print_intents(self):
response = self.client.get_intents()
table = PrettyTable()
table.field_names = ['intent_name', 'description', 'version']
for intent in response['intents']:
try:
table.add_row([intent['name'], intent['description'], intent['version']])
except KeyError:
table.add_row([intent['name'], "n/a", intent['version']])
print(table)
@staticmethod
def _slot_type_constructor(slot_types):
slots_info = []
for slot_type in slot_types:
slot_name = "sample_" + slot_type
slot_required = input("Will the slot {} be required [Required / Optional]: ".format(slot_type))
slot_version = '$LATEST'
slot_prompt = str(input("Provide an elicitation prompt for slot {}: ".format(slot_type)))
slot_max_attempts = int(input("What is the max attempts to allow when filling slot {}: ".format(slot_type)))
slot_sample_utterances = []
while True:
slot_sample_utterances.append(
str(input("Please enter a sample utterance for slot {}: ".format(slot_type))).replace("this",
"{" + slot_name + "}"))
if input("Would you like to add another utterance [True / False]: ") == "False":
break
print("{} - req: {} - prompt: {} - max_attempt: {} - sampleUtterances {}".format(slot_type, slot_required,
slot_prompt,
slot_max_attempts,
slot_sample_utterances))
slot_info = {'name': slot_name,
'slotConstraint': slot_required,
'slotType': slot_type,
'slotTypeVersion': slot_version,
'valueElicitationPrompt': {
'messages': [
{
'contentType': 'PlainText',
'content': slot_prompt,
},
],
'maxAttempts': slot_max_attempts,
},
'sampleUtterances': slot_sample_utterances
}
slots_info.append(slot_info)
return slots_info
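# Hedged usage sketch (not part of the original module): requires AWS credentials with
# Amazon Lex model-building permissions; the intent name, utterance and slot type below
# are made up, and _slot_type_constructor() will prompt interactively for slot details.
if __name__ == "__main__":
    manager = LexIntentManager()
    manager.print_intents()
    manager.create_new_intent("OrderPizza",
                              description="example intent",
                              sample_utterances=["I want a pizza"],
                              slot_types=["PizzaSize"])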
| 39.639175
| 129
| 0.490507
| 3,792
| 0.986216
| 0
| 0
| 2,255
| 0.586476
| 0
| 0
| 783
| 0.203641
|
c4819144b63cb938bdc3a631c3adcbd846e22f52
| 80
|
py
|
Python
|
src/__init__.py
|
Victorpc98/CE888-Project
|
99c20adc78eb53ac4d3c87543ef8da1ef4d10adc
|
[
"MIT"
] | 1
|
2020-04-18T21:03:28.000Z
|
2020-04-18T21:03:28.000Z
|
src/__init__.py
|
Victorpc98/CE888-Project
|
99c20adc78eb53ac4d3c87543ef8da1ef4d10adc
|
[
"MIT"
] | null | null | null |
src/__init__.py
|
Victorpc98/CE888-Project
|
99c20adc78eb53ac4d3c87543ef8da1ef4d10adc
|
[
"MIT"
] | null | null | null |
import sys
sys.path.append("..") # Adds higher directory to python modules path.
| 40
| 69
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 51
| 0.6375
|
c4821b9a95d728a178a666ea50065578f645972b
| 7,025
|
py
|
Python
|
wxtbx/wx4_compatibility.py
|
dperl-sol/cctbx_project
|
b9e390221a2bc4fd00b9122e97c3b79c632c6664
|
[
"BSD-3-Clause-LBNL"
] | 155
|
2016-11-23T12:52:16.000Z
|
2022-03-31T15:35:44.000Z
|
wxtbx/wx4_compatibility.py
|
dperl-sol/cctbx_project
|
b9e390221a2bc4fd00b9122e97c3b79c632c6664
|
[
"BSD-3-Clause-LBNL"
] | 590
|
2016-12-10T11:31:18.000Z
|
2022-03-30T23:10:09.000Z
|
wxtbx/wx4_compatibility.py
|
dperl-sol/cctbx_project
|
b9e390221a2bc4fd00b9122e97c3b79c632c6664
|
[
"BSD-3-Clause-LBNL"
] | 115
|
2016-11-15T08:17:28.000Z
|
2022-02-09T15:30:14.000Z
|
from __future__ import absolute_import, division, print_function
'''
Author : Lyubimov, A.Y.
Created : 04/14/2014
Last Changed: 11/05/2018
Description : wxPython 3-4 compatibility tools
The context managers, classes, and other tools below can be used to make the
GUI code compatible with wxPython 3 and 4. Mostly, the tools convert the
functions, enumerations, and classes which have been renamed in wxPython 4;
the name mismatches result in exceptions.
Use case 1: subclassing wx.PyControl or wx.Control:
from wxtbx import wx4_compatibility as wx4c
WxCtrl = wx4c.get_wx_mod(wx, wx.Control)
class MyCustomControl(WxCtrl): ...
Use case 2: brush style (NOTE: you can do that with fonts as well, but it
doesn't seem to be necessary):
from wxtbx import wx4_compatibility as wx4c
bkgrd = self.GetBackgroundColour()
with wx4c.set_brush_style(wx.BRUSHSTYLE_SOLID) as bstyle:
brush = wx.Brush(bkgrd, bstyle)
Use case 3: Toolbars
from wxtbx import wx4_compatibility as wx4c, bitmaps
class MyFrame(wx.Frame):
def __init__(self, parent, id, title, *args, **kwargs):
wx.Frame.__init__(self, parent, id, title, *args, **kwargs)
self.toolbar = wx4c.ToolBar(self, style=wx.TB_TEXT)
self.quit_button = self.toolbar.AddTool(toolId=wx.ID_ANY,
label='Quit',
kind=wx.ITEM_NORMAL,
                                               bitmap=bitmaps.fetch_icon_bitmap('actions', 'exit'),
shortHelp='Exit program')
...
self.SetToolBar(self.toolbar)
self.toolbar.Realize()
'''
import wx
from contextlib import contextmanager
import importlib
wx4 = wx.__version__[0] == '4'
modnames = [
('PyControl', 'Control'),
('PyDataObjectSimple', 'DataObjectSimple'),
('PyDropTarget', 'DropTarget'),
('PyEvtHandler', 'EvtHandler'),
('PyImageHandler', 'ImageHandler'),
('PyLocale', 'Locale'),
('PyLog', 'Log'),
('PyPanel', 'Panel'),
('PyPickerBase', 'PickerBase'),
('PyPreviewControlBar', 'PreviewControlBar'),
('PyPreviewFrame', 'PreviewFrame'),
('PyPrintPreview', 'PrintPreview'),
('PyScrolledWindow', 'ScrolledWindow'),
('PySimpleApp', 'App'),
('PyTextDataObject', 'TextDataObject'),
('PyTimer', 'Timer'),
('PyTipProvider', 'adv.TipProvider'),
('PyValidator', 'Validator'),
  ('PyWindow', 'Window')
]
font_families = [
(wx.DEFAULT, wx.FONTFAMILY_DEFAULT),
(wx.DECORATIVE, wx.FONTFAMILY_DECORATIVE),
(wx.ROMAN, wx.FONTFAMILY_ROMAN),
(wx.SCRIPT, wx.FONTFAMILY_SCRIPT),
(wx.SWISS, wx.FONTFAMILY_SWISS),
(wx.MODERN, wx.FONTFAMILY_MODERN),
(wx.TELETYPE, wx.FONTFAMILY_TELETYPE)
]
font_weights = [
(wx.NORMAL, wx.FONTWEIGHT_NORMAL),
(wx.LIGHT, wx.FONTWEIGHT_LIGHT),
(wx.BOLD, wx.FONTWEIGHT_BOLD)
]
font_styles = [
(wx.NORMAL, wx.FONTSTYLE_NORMAL),
(wx.ITALIC, wx.FONTSTYLE_ITALIC),
(wx.SLANT, wx.FONTSTYLE_SLANT)
]
pen_styles = [
(wx.SOLID, wx.PENSTYLE_SOLID),
(wx.DOT, wx.PENSTYLE_DOT),
(wx.LONG_DASH, wx.PENSTYLE_LONG_DASH),
(wx.SHORT_DASH, wx.PENSTYLE_SHORT_DASH),
(wx.DOT_DASH, wx.PENSTYLE_DOT_DASH),
(wx.USER_DASH, wx.PENSTYLE_USER_DASH),
(wx.TRANSPARENT, wx.PENSTYLE_TRANSPARENT)
]
brush_styles = [
(wx.SOLID, wx.BRUSHSTYLE_SOLID),
(wx.TRANSPARENT, wx.BRUSHSTYLE_TRANSPARENT),
(wx.STIPPLE_MASK_OPAQUE, wx.BRUSHSTYLE_STIPPLE_MASK_OPAQUE),
(wx.STIPPLE_MASK, wx.BRUSHSTYLE_STIPPLE_MASK),
(wx.STIPPLE, wx.BRUSHSTYLE_STIPPLE),
(wx.BDIAGONAL_HATCH, wx.BRUSHSTYLE_BDIAGONAL_HATCH),
(wx.CROSSDIAG_HATCH, wx.BRUSHSTYLE_CROSSDIAG_HATCH),
(wx.FDIAGONAL_HATCH, wx.BRUSHSTYLE_FDIAGONAL_HATCH),
(wx.CROSS_HATCH, wx.BRUSHSTYLE_CROSS_HATCH),
(wx.HORIZONTAL_HATCH, wx.BRUSHSTYLE_HORIZONTAL_HATCH),
(wx.VERTICAL_HATCH, wx.BRUSHSTYLE_VERTICAL_HATCH),
]
def find_module(module):
for m in modnames:
if module.__name__ in m:
return m
def find_enum(enums, item):
for en in enums:
if item in en:
value = en[1] if wx4 else en[0]
return value
def get_wx_mod(base, module):
mname = find_module(module)[1] if wx4 else find_module(module)[0]
bname = base.__name__
if '.' in mname:
spl = [i for i in mname.split('.') if i != bname]
modname = '.'.join(spl[:-1])
mod = importlib.import_module('{}.{}'.format(bname, modname))
return getattr(mod, spl[-1])
else:
return getattr(base, mname)
@contextmanager
def wx_mod(base, module):
''' Identify and import the appropriate wxPython module '''
yield get_wx_mod(base, module)
@contextmanager
def set_font_style(style):
yield find_enum(font_styles, style)
@contextmanager
def set_font_weight(weight):
yield find_enum(font_weights, weight)
@contextmanager
def set_font_family(family):
yield find_enum(font_families, family)
@contextmanager
def set_pen_style(style):
yield find_enum(pen_styles, style)
@contextmanager
def set_brush_style(style):
yield find_enum(brush_styles, style)
@contextmanager
def create_measuring_context():
dc = wx.GraphicsContext.Create() if wx4 else \
wx.GraphicsContext.CreateMeasuringContext()
yield dc
class Wx3ToolBar(wx.ToolBar):
''' Special toolbar class that accepts wxPython 4-style AddTool command and
converts it to a wxPython 3-style AddLabelTool command '''
def __init__(self, parent, id=wx.ID_ANY, pos=wx.DefaultPosition,
size=wx.DefaultSize, style=wx.TB_HORIZONTAL, name='toolbar'):
wx.ToolBar.__init__(self, parent, id, pos, size, style, name)
def AddTool(self, toolId, label, bitmap, bmpDisabled=wx.NullBitmap,
kind=wx.ITEM_NORMAL, shortHelp='', longHelp='',
clientData=None):
''' Override to make this a very thin wrapper for AddLabelTool, which in
wxPython 3 is the same as AddTool in wxPython 4 '''
return self.AddLabelTool(id=toolId, label=label, bitmap=bitmap,
bmpDisabled=bmpDisabled, kind=kind,
shortHelp=shortHelp, longHelp=longHelp,
clientData=clientData)
class Wx4ToolBar(wx.ToolBar):
''' Special toolbar class that accepts wxPython 3-style AddLabelTool command
and converts it to a wxPython 4-style AddTool command '''
def __init__(self, parent, id=wx.ID_ANY, pos=wx.DefaultPosition,
size=wx.DefaultSize, style=wx.TB_HORIZONTAL, name='toolbar'):
wx.ToolBar.__init__(self, parent, id, pos, size, style, name)
def AddLabelTool(self, id, label, bitmap, bmpDisabled=wx.NullBitmap,
kind=wx.ITEM_NORMAL, shortHelp='', longHelp='',
clientData=None):
''' Override to make this a very thin wrapper for AddTool, which in
wxPython 4 is the same as AddLabelTool in wxPython 3 '''
return self.AddTool(toolId=id, label=label, bitmap=bitmap,
bmpDisabled=bmpDisabled, kind=kind,
shortHelp=shortHelp, longHelp=longHelp,
clientData=clientData)
# Use this ToolBar class to create toolbars in frames
ToolBar = Wx4ToolBar if wx4 else Wx3ToolBar
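# Hedged usage sketch (not part of the original module): resolving version-specific font
# enums through the context managers above; the point size and attributes are arbitrary.
if __name__ == "__main__":
    _app = wx.App(False)
    with set_font_family(wx.SWISS) as family, \
         set_font_style(wx.NORMAL) as style, \
         set_font_weight(wx.BOLD) as weight:
        demo_font = wx.Font(10, family, style, weight)
    print(demo_font.IsOk())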
| 32.981221
| 96
| 0.691103
| 1,841
| 0.262064
| 588
| 0.083701
| 700
| 0.099644
| 0
| 0
| 2,739
| 0.389893
|
c483b92cbfbdabe1b45008c539e6179a5bd43a9f
| 1,548
|
py
|
Python
|
BMVC_version/utils.py
|
ZhengyuZhao/ACE
|
5065cde807fe689115849c55d440783d8a471901
|
[
"MIT"
] | 19
|
2020-05-13T07:51:00.000Z
|
2021-06-13T11:03:47.000Z
|
BMVC_version/utils.py
|
ZhengyuZhao/AdvCF
|
5065cde807fe689115849c55d440783d8a471901
|
[
"MIT"
] | 1
|
2020-09-09T09:39:28.000Z
|
2020-09-10T20:30:02.000Z
|
BMVC_version/utils.py
|
ZhengyuZhao/AdvCF
|
5065cde807fe689115849c55d440783d8a471901
|
[
"MIT"
] | 3
|
2020-09-05T11:32:23.000Z
|
2021-03-30T01:41:07.000Z
|
import torch
import torch.nn as nn
import csv
#image quantization
def quantization(x):
x_quan=torch.round(x*255)/255
return x_quan
#piecewise-linear color filter
def CF(img, param,pieces):
param=param[:,:,None,None]
color_curve_sum = torch.sum(param, 4) + 1e-30
total_image = img * 0
for i in range(pieces):
total_image += torch.clamp(img - 1.0 * i /pieces, 0, 1.0 / pieces) * param[:, :, :, :, i]
total_image *= pieces/ color_curve_sum
return total_image
#parsing the data annotation
def load_ground_truth(csv_filename):
image_id_list = []
label_ori_list = []
label_tar_list = []
with open(csv_filename) as csvfile:
reader = csv.DictReader(csvfile, delimiter=',')
for row in reader:
image_id_list.append( row['ImageId'] )
label_ori_list.append( int(row['TrueLabel']) )
label_tar_list.append( int(row['TargetClass']) )
return image_id_list,label_ori_list,label_tar_list
# simple Module to normalize an image
class Normalize(nn.Module):
def __init__(self, mean, std):
super(Normalize, self).__init__()
self.mean = torch.Tensor(mean)
self.std = torch.Tensor(std)
def forward(self, x):
return (x - self.mean.type_as(x)[None,:,None,None]) / self.std.type_as(x)[None,:,None,None]
# values are standard normalization for ImageNet images,
# from https://github.com/pytorch/examples/blob/master/imagenet/main.py
norm = Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
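# Hedged usage sketch (not part of the original module): applies the piecewise-linear
# color filter to a random batch; batch size, resolution and number of pieces are arbitrary.
if __name__ == "__main__":
    demo_img = torch.rand(2, 3, 224, 224)
    demo_param = torch.rand(2, 3, 64)  # one filter curve per color channel, 64 pieces
    filtered = quantization(CF(demo_img, demo_param, pieces=64))
    print(filtered.shape)  # expected: torch.Size([2, 3, 224, 224])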
| 29.769231
| 99
| 0.660207
| 306
| 0.197674
| 0
| 0
| 0
| 0
| 0
| 0
| 278
| 0.179587
|
c4844ed8e45f32c88606465081cf2391a8999d1d
| 4,849
|
py
|
Python
|
lemonpie/_nbdev.py
|
corazonlabs/ehr_preprocessing
|
5bf3be1f04d9dc6db002b58331800b30cf668e69
|
[
"Apache-2.0"
] | 3
|
2021-04-03T01:16:18.000Z
|
2021-07-31T20:44:47.000Z
|
lemonpie/_nbdev.py
|
corazonlabs/ehr_preprocessing
|
5bf3be1f04d9dc6db002b58331800b30cf668e69
|
[
"Apache-2.0"
] | 5
|
2021-03-30T21:23:47.000Z
|
2022-02-26T10:17:12.000Z
|
lemonpie/_nbdev.py
|
vin00d/lemonpie
|
5bf3be1f04d9dc6db002b58331800b30cf668e69
|
[
"Apache-2.0"
] | 1
|
2020-11-26T00:35:28.000Z
|
2020-11-26T00:35:28.000Z
|
# AUTOGENERATED BY NBDEV! DO NOT EDIT!
__all__ = ["index", "modules", "custom_doc_links", "git_url"]
index = {"get_device": "00_basics.ipynb",
"settings_template": "00_basics.ipynb",
"read_settings": "00_basics.ipynb",
"DEVICE": "00_basics.ipynb",
"settings": "00_basics.ipynb",
"DATA_STORE": "00_basics.ipynb",
"LOG_STORE": "00_basics.ipynb",
"MODEL_STORE": "00_basics.ipynb",
"EXPERIMENT_STORE": "00_basics.ipynb",
"PATH_1K": "00_basics.ipynb",
"PATH_10K": "00_basics.ipynb",
"PATH_20K": "00_basics.ipynb",
"PATH_100K": "00_basics.ipynb",
"FILENAMES": "00_basics.ipynb",
"SYNTHEA_DATAGEN_DATES": "00_basics.ipynb",
"CONDITIONS": "00_basics.ipynb",
"LOG_NUMERICALIZE_EXCEP": "00_basics.ipynb",
"read_raw_ehrdata": "01_preprocessing_clean.ipynb",
"split_patients": "01_preprocessing_clean.ipynb",
"split_ehr_dataset": "01_preprocessing_clean.ipynb",
"cleanup_pts": "01_preprocessing_clean.ipynb",
"cleanup_obs": "01_preprocessing_clean.ipynb",
"cleanup_algs": "01_preprocessing_clean.ipynb",
"cleanup_crpls": "01_preprocessing_clean.ipynb",
"cleanup_meds": "01_preprocessing_clean.ipynb",
"cleanup_img": "01_preprocessing_clean.ipynb",
"cleanup_procs": "01_preprocessing_clean.ipynb",
"cleanup_cnds": "01_preprocessing_clean.ipynb",
"cleanup_immns": "01_preprocessing_clean.ipynb",
"cleanup_dataset": "01_preprocessing_clean.ipynb",
"extract_ys": "01_preprocessing_clean.ipynb",
"insert_age": "01_preprocessing_clean.ipynb",
"clean_raw_ehrdata": "01_preprocessing_clean.ipynb",
"load_cleaned_ehrdata": "01_preprocessing_clean.ipynb",
"load_ehr_vocabcodes": "01_preprocessing_clean.ipynb",
"EhrVocab": "02_preprocessing_vocab.ipynb",
"ObsVocab": "02_preprocessing_vocab.ipynb",
"EhrVocabList": "02_preprocessing_vocab.ipynb",
"get_all_emb_dims": "02_preprocessing_vocab.ipynb",
"collate_codes_offsts": "03_preprocessing_transform.ipynb",
"get_codenums_offsts": "03_preprocessing_transform.ipynb",
"get_demographics": "03_preprocessing_transform.ipynb",
"Patient": "03_preprocessing_transform.ipynb",
"get_pckl_dir": "03_preprocessing_transform.ipynb",
"PatientList": "03_preprocessing_transform.ipynb",
"cpu_cnt": "03_preprocessing_transform.ipynb",
"create_all_ptlists": "03_preprocessing_transform.ipynb",
"preprocess_ehr_dataset": "03_preprocessing_transform.ipynb",
"EHRDataSplits": "04_data.ipynb",
"LabelEHRData": "04_data.ipynb",
"EHRDataset": "04_data.ipynb",
"EHRData": "04_data.ipynb",
"accuracy": "05_metrics.ipynb",
"null_accuracy": "05_metrics.ipynb",
"ROC": "05_metrics.ipynb",
"MultiLabelROC": "05_metrics.ipynb",
"plot_rocs": "05_metrics.ipynb",
"plot_train_valid_rocs": "05_metrics.ipynb",
"auroc_score": "05_metrics.ipynb",
"auroc_ci": "05_metrics.ipynb",
"save_to_checkpoint": "06_learn.ipynb",
"load_from_checkpoint": "06_learn.ipynb",
"get_loss_fn": "06_learn.ipynb",
"RunHistory": "06_learn.ipynb",
"train": "06_learn.ipynb",
"evaluate": "06_learn.ipynb",
"fit": "06_learn.ipynb",
"predict": "06_learn.ipynb",
"plot_loss": "06_learn.ipynb",
"plot_losses": "06_learn.ipynb",
"plot_aurocs": "06_learn.ipynb",
"plot_train_valid_aurocs": "06_learn.ipynb",
"plot_fit_results": "06_learn.ipynb",
"summarize_prediction": "06_learn.ipynb",
"count_parameters": "06_learn.ipynb",
"dropout_mask": "07_models.ipynb",
"InputDropout": "07_models.ipynb",
"linear_layer": "07_models.ipynb",
"create_linear_layers": "07_models.ipynb",
"init_lstm": "07_models.ipynb",
"EHR_LSTM": "07_models.ipynb",
"init_cnn": "07_models.ipynb",
"conv_layer": "07_models.ipynb",
"EHR_CNN": "07_models.ipynb",
"get_data": "08_experiment.ipynb",
"get_optimizer": "08_experiment.ipynb",
"get_model": "08_experiment.ipynb",
"Experiment": "08_experiment.ipynb"}
modules = ["basics.py",
"preprocessing/clean.py",
"preprocessing/vocab.py",
"preprocessing/transform.py",
"data.py",
"metrics.py",
"learn.py",
"models.py",
"experiment.py"]
doc_url = "https://corazonlabs.github.io/lemonpie/"
git_url = "https://github.com/corazonlabs/lemonpie/tree/main/"
def custom_doc_links(name): return None
| 44.486239
| 70
| 0.630852
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 3,500
| 0.721798
|
c4847cc6bababbdf22257962d4c32b15d776c5ed
| 8,277
|
py
|
Python
|
tensorboard/plugins/graph_edit/c2graph_util.py
|
qzhong0605/tensorboardplugins
|
92bfc7ca96b933cdbdf074a08f26f5c715d8421d
|
[
"Apache-2.0"
] | null | null | null |
tensorboard/plugins/graph_edit/c2graph_util.py
|
qzhong0605/tensorboardplugins
|
92bfc7ca96b933cdbdf074a08f26f5c715d8421d
|
[
"Apache-2.0"
] | null | null | null |
tensorboard/plugins/graph_edit/c2graph_util.py
|
qzhong0605/tensorboardplugins
|
92bfc7ca96b933cdbdf074a08f26f5c715d8421d
|
[
"Apache-2.0"
] | null | null | null |
# Convert the caffe2 model into tensorboard GraphDef
#
# The details of caffe2 model is on the compat/proto/caffe2/caffe2.proto
# And the details of GraphDef model is on the compat/proto/graph.proto
#
################################################################################
from tensorboard.compat.proto import graph_pb2
from tensorboard.compat.proto import attr_value_pb2
from tensorboard.compat.proto import node_def_pb2
from tensorboard.compat.proto import tensor_shape_pb2
from tensorboard.compat.proto import tensor_pb2
from tensorboard.compat.proto import types_pb2
from tensorboard.compat.proto.caffe2 import caffe2_pb2
from tensorboard.util import tb_logging
from tensorboard.plugins.graph_edit import tbgraph_base
from google.protobuf import text_format
logger = tb_logging.get_logger()
class C2Graph(tbgraph_base.TBGraph):
""" In order to visualize the caffe2 model graph, it converts the caffe2
    format model graph into the tensorboard-format model graph.
The information about caffe2 model is on the proto
`compat/proto/caffe2/caffe2.proto`. And the tensorboard model is on the
proto `compat/proto/graph.proto`
    In order to avoid clashes when the same tensor name is produced by different
    operators, we adopt the SSA form, which is used to differentiate the tensors
"""
def __init__(self, predict_net, init_net, predict_net_type="pb"):
super(C2Graph, self).__init__()
self._predict_net = caffe2_pb2.NetDef()
if predict_net_type == "pb":
with open(predict_net, "rb") as predict_stream:
self._predict_net.ParseFromString(predict_stream.read())
logger.info("parse caffe2 predict net {} with protobuf format".format(predict_net))
elif predict_net_type == "txt":
with open(predict_net, "r") as predict_stream:
text_format.Parse(predict_stream.read(), self._predict_net)
logger.info("parse caffe2 predict net {} with text format".format(predict_net))
else:
raise NotImplementedError("The predict net type: {} doesn't support".format(predict_net_type))
self._init_net = caffe2_pb2.NetDef()
with open(init_net, "rb") as init_stream:
self._init_net.ParseFromString(init_stream.read())
logger.info("load caffe2 init net {} with protobuf format".format(init_net))
# a map from node key to node, where the node key is globaly unique
self.nodes = {}
# a map from caffe2 operator to output, which is a SSA-format
self.c2_op_out = {}
# record the blob version for inplace-change
self.blob_version = {}
# a map from node name to shape info
self.shapes = {}
# a map from node name to dtype
self.types = {}
def _build_nodes_shapetype(self):
""" Build an inner node shape information given the weights information for network """
# add shape information
if self._init_net is None:
return
for init_op in self._init_net.op:
for init_arg in init_op.arg:
if init_arg.name == "shape":
self.shapes[init_op.output[0]] = init_arg.ints
elif init_arg.name == "values":
if len(init_arg.floats):
self.types[init_op.output[0]] = types_pb2.DT_FLOAT
elif len(init_arg.ints):
self.types[init_op.output[0]] = types_pb2.DT_INT64
elif len(init_arg.strings):
self.types[init_op.output[0]] = types_pb2.DT_STRING
else:
raise NotImplementedError("Not Supported Field: {}".format(init_arg))
def _add_node_shapetype(self, node, shape_name):
""" build an internal node shape map if given the weights information """
if shape_name in self.shapes:
tensor_shape = tensor_shape_pb2.TensorShapeProto()
for shape_i in self.shapes[shape_name]:
shape_dim = tensor_shape_pb2.TensorShapeProto.Dim()
shape_dim.size = shape_i
tensor_shape.dim.extend([shape_dim])
attr_value = attr_value_pb2.AttrValue()
attr_value.shape.CopyFrom(tensor_shape)
node.attr['shape'].CopyFrom(attr_value)
# add optional dtype
if shape_name in self.types:
attr_value = attr_value_pb2.AttrValue()
attr_value.type = self.types[shape_name]
node.attr['dtype'].CopyFrom(attr_value)
def _MakeSSAName(self, name):
""" It's used to make a unique name through a ssa-based format for `name`
"""
if name not in self.blob_version:
self.blob_version[name] = 0
else:
self.blob_version[name] += 1
ret_name = "{}_{}".format(name, self.blob_version[name])
return ret_name
def convert_to_nodes(self, c2_op):
""" Convert a caffe2 OperatorDef into TB nodes
The nodes for TensorBoard have only inputs and don't have outputs. Therefore
        a caffe2 operator may be converted into multiple nodes
Arg:
c2_op: a caffe2 OperatorDef
"""
new_node = node_def_pb2.NodeDef()
new_node.op = c2_op.type
for c2_input in c2_op.input:
if c2_input not in self.blob_version:
# These inputs are weights or input data for current
# tensorboard node. Therefore, the `op` is set to
# `Initialization`
in_node = node_def_pb2.NodeDef()
self._add_node_shapetype(in_node, c2_input)
self.blob_version[c2_input] = 0
in_node.name = '{}_{}'.format(c2_input, self.blob_version[c2_input])
in_node.op = "Initialization"
self.nodes["{}_{}".format(c2_input, 0)] = in_node
self._tb_graph.node.extend([in_node])
new_node.input.append('{}_{}'.format(c2_input, self.blob_version[c2_input]))
if len(c2_op.output) == 0:
# There are no outputs for current C2 operator. Therefore, the node
# name is set to C2 operation type
new_node.name = self._MakeSSAName(c2_op.type)
else:
new_node.name = self._MakeSSAName(c2_op.output[0])
# If more than one output, we build `Sibling` tensorboard node for
# other outpouts
for c2_output in c2_op.output[1:]:
sibling_node = node_def_pb2.NodeDef()
sibling_node.op = 'Sibling'
sibling_node.name = self._MakeSSAName(c2_output)
sibling_node.input.extend([new_node.name])
self._add_node_shapetype(sibling_node, c2_output)
self.nodes[sibling_node.name] = sibling_node
self._tb_graph.node.extend([sibling_node])
# add argument
for c2_arg in c2_op.arg:
attr = attr_value_pb2.AttrValue()
if c2_arg.HasField('i'):
attr.i = c2_arg.i
elif c2_arg.HasField('f'):
attr.f = c2_arg.f
elif c2_arg.HasField('s'):
attr.s = c2_arg.s
elif len(c2_arg.floats):
list_value = attr_value_pb2.AttrValue.ListValue()
                list_value.f.extend(c2_arg.floats)
                attr.list.CopyFrom(list_value)
elif len(c2_arg.ints):
list_value = attr_value_pb2.AttrValue.ListValue()
list_value.i.extend(c2_arg.ints)
attr.list.CopyFrom(list_value)
elif len(c2_arg.strings):
list_value = attr_value_pb2.AttrValue.ListValue()
list_value.s.extend(c2_arg.strings)
attr.list.CopyFrom(list_value)
new_node.attr[c2_arg.name].CopyFrom(attr)
self._add_node_shapetype(new_node, c2_op.output[0])
self.nodes[new_node.name] = new_node
self._tb_graph.node.extend([new_node])
def ConvertNet(self):
""" Convert the full network of caffe2 into TB network """
self._build_nodes_shapetype()
for c2_op in self._predict_net.op:
self.convert_to_nodes(c2_op)
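# Hedged usage sketch (not part of the original module): the file names below are
# placeholders for a caffe2 predict/init net pair serialized as protobuf.
if __name__ == "__main__":
    c2_graph = C2Graph("predict_net.pb", "init_net.pb", predict_net_type="pb")
    c2_graph.ConvertNet()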
| 44.5
| 106
| 0.618461
| 7,467
| 0.902138
| 0
| 0
| 0
| 0
| 0
| 0
| 2,241
| 0.27075
|
c4852e08624ac34e2478471564d3403491679e03
| 1,251
|
py
|
Python
|
src/Homework2_1.py
|
alexaquino/TUM-AUTONAVx
|
95c6829fa2e31e1a11bf2c7726386593e7adbdce
|
[
"MIT"
] | null | null | null |
src/Homework2_1.py
|
alexaquino/TUM-AUTONAVx
|
95c6829fa2e31e1a11bf2c7726386593e7adbdce
|
[
"MIT"
] | null | null | null |
src/Homework2_1.py
|
alexaquino/TUM-AUTONAVx
|
95c6829fa2e31e1a11bf2c7726386593e7adbdce
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# The MIT License (MIT)
# Copyright (c) 2014 Alex Aquino dos Santos
# Technische Universität München (TUM)
# Autonomous Navigation for Flying Robots
# Homework 2.1
from plot import plot
class UserCode:
def __init__(self):
# initialize data you want to store in this object between calls to the measurement_callback() method
self.last_yaw_velocity = 0
self.max_roll_angle = 0
self.max_pitch_angle = 0
self.max_yaw_velocity = 0
def measurement_callback(self, t, dt, navdata):
'''
:param t: time since simulation start
:param dt: time since last call to measurement_callback
:param navdata: measurements of the quadrotor
'''
# add your plot commands here
self.max_roll_angle = max(self.max_roll_angle, abs(navdata.rotX))
self.max_pitch_angle = max(self.max_pitch_angle, abs(navdata.rotY))
self.max_yaw_velocity = max(self.max_yaw_velocity, abs((navdata.rotZ - self.last_yaw_velocity) / dt))
self.last_yaw_velocity = navdata.rotZ
plot("max_roll_angle", self.max_roll_angle)
plot("max_pitch_angle", self.max_pitch_angle)
plot("max_yaw_velocity", self.max_yaw_velocity)
| 34.75
| 109
| 0.686651
| 1,038
| 0.828412
| 0
| 0
| 0
| 0
| 0
| 0
| 542
| 0.432562
|
c4855377edb8f2377a14569ead5ae6f4b477315f
| 1,651
|
py
|
Python
|
src_tf/templates/tf_estimator_template/model/example.py
|
ashishpatel26/finch
|
bf2958c0f268575e5d51ad08fbc08b151cbea962
|
[
"MIT"
] | 1
|
2019-02-12T09:22:00.000Z
|
2019-02-12T09:22:00.000Z
|
src_tf/templates/tf_estimator_template/model/example.py
|
loopzxl/finch
|
bf2958c0f268575e5d51ad08fbc08b151cbea962
|
[
"MIT"
] | null | null | null |
src_tf/templates/tf_estimator_template/model/example.py
|
loopzxl/finch
|
bf2958c0f268575e5d51ad08fbc08b151cbea962
|
[
"MIT"
] | 1
|
2020-10-15T21:34:17.000Z
|
2020-10-15T21:34:17.000Z
|
from configs import args
import tensorflow as tf
def forward(x, mode):
is_training = (mode == tf.estimator.ModeKeys.TRAIN)
x = tf.contrib.layers.embed_sequence(x, args.vocab_size, args.embed_dim)
x = tf.layers.dropout(x, 0.2, training=is_training)
feat_map = []
for k_size in [3, 4, 5]:
_x = tf.layers.conv1d(x, args.filters, k_size, activation=tf.nn.relu)
_x = tf.layers.max_pooling1d(_x, _x.get_shape().as_list()[1], 1)
_x = tf.reshape(_x, (tf.shape(x)[0], args.filters))
feat_map.append(_x)
x = tf.concat(feat_map, -1)
x = tf.layers.dense(x, args.filters, tf.nn.relu)
logits = tf.layers.dense(x, args.n_class)
return logits
def model_fn(features, labels, mode):
logits = forward(features, mode)
if mode == tf.estimator.ModeKeys.PREDICT:
preds = tf.argmax(logits, -1)
return tf.estimator.EstimatorSpec(mode, predictions=preds)
if mode == tf.estimator.ModeKeys.TRAIN:
global_step = tf.train.get_global_step()
LR = {'start': 5e-3, 'end': 5e-4, 'steps': 1500}
lr_op = tf.train.exponential_decay(
LR['start'], global_step, LR['steps'], LR['end']/LR['start'])
loss_op = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(
logits=logits, labels=labels))
train_op = tf.train.AdamOptimizer(lr_op).minimize(
loss_op, global_step=global_step)
lth = tf.train.LoggingTensorHook({'lr': lr_op}, every_n_iter=100)
return tf.estimator.EstimatorSpec(
mode=mode, loss=loss_op, train_op=train_op, training_hooks=[lth])
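# Hedged usage sketch (not part of the original module): wiring model_fn into a
# tf.estimator.Estimator; the model_dir is a placeholder and an input_fn yielding
# (features, labels) still has to be supplied before training.
if __name__ == '__main__':
    estimator = tf.estimator.Estimator(model_fn, model_dir='./model_ckpt')
    # estimator.train(input_fn=..., steps=...)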
| 34.395833
| 80
| 0.637795
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 49
| 0.029679
|
c485ee350fbe503865765122e5205b0c6d84fd8d
| 1,300
|
py
|
Python
|
{{cookiecutter.project_slug}}/core/management/commands/snippets/fastapi_project/core/security.py
|
claysllanxavier/django-cookiecutter
|
97de7ff4ed3dc94c32bf756a57aee0664a888cbc
|
[
"BSD-3-Clause"
] | 8
|
2021-08-13T17:48:27.000Z
|
2022-02-22T02:34:15.000Z
|
{{cookiecutter.project_slug}}/core/management/commands/snippets/fastapi_project/core/security.py
|
claysllanxavier/django-cookiecutter
|
97de7ff4ed3dc94c32bf756a57aee0664a888cbc
|
[
"BSD-3-Clause"
] | 2
|
2022-03-24T20:39:00.000Z
|
2022-03-24T20:39:48.000Z
|
{{cookiecutter.project_slug}}/core/management/commands/snippets/fastapi_project/core/security.py
|
claysllanxavier/django-cookiecutter
|
97de7ff4ed3dc94c32bf756a57aee0664a888cbc
|
[
"BSD-3-Clause"
] | 2
|
2021-09-21T00:05:27.000Z
|
2022-01-03T10:50:05.000Z
|
from datetime import datetime, timedelta
from typing import Any, Union
from jose import jwt
from passlib.context import CryptContext
from .config import settings
pwd_context = CryptContext(
default="django_pbkdf2_sha256",
schemes=["django_argon2", "django_bcrypt", "django_bcrypt_sha256",
"django_pbkdf2_sha256", "django_pbkdf2_sha1",
"django_disabled"])
ALGORITHM = "HS256"
ACCESS_TOKEN_EXPIRE_MINUTES = 60 * 24 * 8 # 8 days
'''
Security configuration for the JWT tokens
- Functions for verifying and creating password hashes
- Function for creating a valid JWT token
'''
def verify_password(plain_password: str, hashed_password: str) -> bool:
return pwd_context.verify(plain_password, hashed_password)
def get_password_hash(password: str) -> str:
return pwd_context.hash(password)
def create_access_token(
subject: Union[str, Any], expires_delta: timedelta = None
) -> str:
if expires_delta:
expire = datetime.utcnow() + expires_delta
else:
expire = datetime.utcnow() + timedelta(
minutes=ACCESS_TOKEN_EXPIRE_MINUTES
)
to_encode = {"exp": expire, "sub": str(subject)}
encoded_jwt = jwt.encode(to_encode, settings.app_secret, algorithm=ALGORITHM)
return encoded_jwt
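# Hedged usage sketch (not part of the original module): assumes settings.app_secret is
# configured and that the module is run inside its package (e.g. python -m core.security),
# since it uses a relative import; the subject id and lifetime below are made up.
if __name__ == "__main__":
    hashed = get_password_hash("s3cret")
    assert verify_password("s3cret", hashed)
    print(create_access_token(subject=42, expires_delta=timedelta(minutes=15)))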
| 29.545455
| 81
| 0.713077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 319
| 0.243511
|
c487c6e672ed0de9246b310bca5ef690e836e2e6
| 10,241
|
py
|
Python
|
margarita/main.py
|
w0de/margarita
|
50c7c07b8ee3d5d6c801833be7c147533c33fd70
|
[
"Unlicense"
] | 3
|
2018-07-27T22:19:02.000Z
|
2019-09-06T18:08:58.000Z
|
margarita/main.py
|
w0de/margarita
|
50c7c07b8ee3d5d6c801833be7c147533c33fd70
|
[
"Unlicense"
] | null | null | null |
margarita/main.py
|
w0de/margarita
|
50c7c07b8ee3d5d6c801833be7c147533c33fd70
|
[
"Unlicense"
] | 1
|
2019-05-21T18:07:46.000Z
|
2019-05-21T18:07:46.000Z
|
#!/usr/bin/env python
from flask import Flask
from flask import jsonify, render_template, redirect, abort
from flask import request, Response
from saml_auth import BaseAuth, SamlAuth
import os, sys
try:
import json
except ImportError:
# couldn't find json, try simplejson library
import simplejson as json
import getopt
from operator import itemgetter
from distutils.version import LooseVersion
from reposadolib import reposadocommon
apple_catalog_version_map = {
'index-10.14-10.13-10.12-10.11-10.10-10.9-mountainlion-lion-snowleopard-leopard.merged-1.sucatalog': '10.14',
'index-10.13-10.12-10.11-10.10-10.9-mountainlion-lion-snowleopard-leopard.merged-1.sucatalog': '10.13',
'index-10.12-10.11-10.10-10.9-mountainlion-lion-snowleopard-leopard.merged-1.sucatalog': '10.12',
'index-10.11-10.10-10.9-mountainlion-lion-snowleopard-leopard.merged-1.sucatalog': '10.11',
'index-10.10-10.9-mountainlion-lion-snowleopard-leopard.merged-1.sucatalog': '10.10',
'index-10.9-mountainlion-lion-snowleopard-leopard.merged-1.sucatalog': '10.9',
'index-mountainlion-lion-snowleopard-leopard.merged-1.sucatalog': '10.8',
'index-lion-snowleopard-leopard.merged-1.sucatalog': '10.7',
'index-leopard-snowleopard.merged-1.sucatalog': '10.6',
'index-leopard.merged-1.sucatalog': '10.5',
'index-1.sucatalog': '10.4',
'index.sucatalog': '10.4',
}
BASE_AUTH_CLASS = BaseAuth
def build_app():
app = Flask(__name__)
app.config.update(
{
"DEBUG": os.environ.get('DEBUG', False),
"LOCAL_DEBUG": os.environ.get('LOCAL_DEBUG', False),
"SECRET_KEY": os.environ.get("SECRET_KEY", "insecure"),
"SAML_PATH": os.environ.get(
"SAML_PATH",
os.path.join(os.path.dirname(os.path.dirname(__file__)), "saml"),
),
"SAML_AUTH_ENABLED": bool(os.environ.get("SAML_AUTH_ENABLED", False)),
}
)
if app.config["SAML_AUTH_ENABLED"]:
auth = SamlAuth(app, auth_path="saml2", exemptions=["/<name>", "/test", "/status"])
else:
        auth = BASE_AUTH_CLASS(app, is_admin=(lambda: app.config["LOCAL_DEBUG"]), is_auth=(lambda: True))
return app, auth
app, auth = build_app()
# cache the keys of the catalog version map dict
apple_catalog_suffixes = apple_catalog_version_map.keys()
def versions_from_catalogs(cats):
'''Given an iterable of catalogs return the corresponding OS X versions'''
versions = set()
for cat in cats:
# take the last portion of the catalog URL path
short_cat = cat.split('/')[-1]
if short_cat in apple_catalog_suffixes:
versions.add(apple_catalog_version_map[short_cat])
return versions
def json_response(r):
'''Glue for wrapping raw JSON responses'''
return Response(json.dumps(r), status=200, mimetype='application/json')
@app.route('/')
def index():
return render_template('margarita.html')
@app.route('/branches', methods=['GET'])
def list_branches():
'''Returns catalog branch names and associated updates'''
catalog_branches = reposadocommon.getCatalogBranches()
return json_response(catalog_branches.keys())
def get_description_content(html):
if len(html) == 0:
return None
# in the interest of (attempted) speed, try to avoid regexps
lwrhtml = html.lower()
celem = 'p'
startloc = lwrhtml.find('<' + celem + '>')
if startloc == -1:
startloc = lwrhtml.find('<' + celem + ' ')
if startloc == -1:
celem = 'body'
startloc = lwrhtml.find('<' + celem)
if startloc != -1:
startloc += 6 # length of <body>
if startloc == -1:
# no <p> nor <body> tags. bail.
return None
endloc = lwrhtml.rfind('</' + celem + '>')
if endloc == -1:
endloc = len(html)
elif celem != 'body':
# if the element is a body tag, then don't include it.
# DOM parsing will just ignore it anyway
endloc += len(celem) + 3
return html[startloc:endloc]
def product_urls(cat_entry):
    '''Retrieve package URLs for a given reposado product CatalogEntry.
Will rewrite URLs to be served from local reposado repo if necessary.'''
packages = cat_entry.get('Packages', [])
pkg_urls = []
for package in packages:
pkg_urls.append({
'url': reposadocommon.rewriteOneURL(package['URL']),
'size': package['Size'],
})
return pkg_urls
@app.route('/products', methods=['GET'])
def products():
products = reposadocommon.getProductInfo()
catalog_branches = reposadocommon.getCatalogBranches()
prodlist = []
for prodid in products.keys():
if 'title' in products[prodid] and 'version' in products[prodid] and 'PostDate' in products[prodid]:
prod = {
'title': products[prodid]['title'],
'version': products[prodid]['version'],
'PostDate': products[prodid]['PostDate'].strftime('%Y-%m-%d'),
'description': get_description_content(products[prodid]['description']),
'id': prodid,
'depr': len(products[prodid].get('AppleCatalogs', [])) < 1,
'branches': [],
'oscatalogs': sorted(versions_from_catalogs(products[prodid].get('OriginalAppleCatalogs')), key=LooseVersion, reverse=True),
'packages': product_urls(products[prodid]['CatalogEntry']),
}
for branch in catalog_branches.keys():
if prodid in catalog_branches[branch]:
prod['branches'].append(branch)
prodlist.append(prod)
else:
print 'Invalid update!'
sprodlist = sorted(prodlist, key=itemgetter('PostDate'), reverse=True)
return json_response({'products': sprodlist, 'branches': catalog_branches.keys()})
@app.route('/new_branch/<branchname>', methods=['POST'])
def new_branch(branchname):
catalog_branches = reposadocommon.getCatalogBranches()
if branchname in catalog_branches:
reposadocommon.print_stderr('Branch %s already exists!', branchname)
abort(401)
catalog_branches[branchname] = []
reposadocommon.writeCatalogBranches(catalog_branches)
return jsonify(result='success')
@app.route('/delete_branch/<branchname>', methods=['POST'])
def delete_branch(branchname):
catalog_branches = reposadocommon.getCatalogBranches()
if not branchname in catalog_branches:
reposadocommon.print_stderr('Branch %s does not exist!', branchname)
return
del catalog_branches[branchname]
# this is not in the common library, so we have to duplicate code
# from repoutil
for catalog_URL in reposadocommon.pref('AppleCatalogURLs'):
localcatalogpath = reposadocommon.getLocalPathNameFromURL(catalog_URL)
# now strip the '.sucatalog' bit from the name
if localcatalogpath.endswith('.sucatalog'):
localcatalogpath = localcatalogpath[0:-10]
branchcatalogpath = localcatalogpath + '_' + branchname + '.sucatalog'
if os.path.exists(branchcatalogpath):
reposadocommon.print_stdout(
'Removing %s', os.path.basename(branchcatalogpath))
os.remove(branchcatalogpath)
reposadocommon.writeCatalogBranches(catalog_branches)
    return jsonify(result=True)
@app.route('/add_all/<branchname>', methods=['POST'])
def add_all(branchname):
products = reposadocommon.getProductInfo()
catalog_branches = reposadocommon.getCatalogBranches()
catalog_branches[branchname] = products.keys()
reposadocommon.writeCatalogBranches(catalog_branches)
reposadocommon.writeAllBranchCatalogs()
return jsonify(result=True)
@app.route('/process_queue', methods=['POST'])
def process_queue():
catalog_branches = reposadocommon.getCatalogBranches()
for change in request.json:
prodId = change['productId']
branch = change['branch']
if branch not in catalog_branches.keys():
print 'No such catalog'
continue
if change['listed']:
# if this change /was/ listed, then unlist it
if prodId in catalog_branches[branch]:
print 'Removing product %s from branch %s' % (prodId, branch, )
catalog_branches[branch].remove(prodId)
else:
# if this change /was not/ listed, then list it
if prodId not in catalog_branches[branch]:
print 'Adding product %s to branch %s' % (prodId, branch, )
catalog_branches[branch].append(prodId)
print 'Writing catalogs'
reposadocommon.writeCatalogBranches(catalog_branches)
reposadocommon.writeAllBranchCatalogs()
return jsonify(result=True)
@app.route('/dup_apple/<branchname>', methods=['POST'])
def dup_apple(branchname):
catalog_branches = reposadocommon.getCatalogBranches()
if branchname not in catalog_branches.keys():
print 'No branch ' + branchname
return jsonify(result=False)
# generate list of (non-deprecated) updates
products = reposadocommon.getProductInfo()
prodlist = []
for prodid in products.keys():
if len(products[prodid].get('AppleCatalogs', [])) >= 1:
prodlist.append(prodid)
catalog_branches[branchname] = prodlist
print 'Writing catalogs'
reposadocommon.writeCatalogBranches(catalog_branches)
reposadocommon.writeAllBranchCatalogs()
return jsonify(result=True)
@app.route('/dup/<frombranch>/<tobranch>', methods=['POST'])
def dup(frombranch, tobranch):
catalog_branches = reposadocommon.getCatalogBranches()
if frombranch not in catalog_branches.keys() or tobranch not in catalog_branches.keys():
        print 'No branch ' + frombranch + ' or ' + tobranch
return jsonify(result=False)
catalog_branches[tobranch] = catalog_branches[frombranch]
print 'Writing catalogs'
reposadocommon.writeCatalogBranches(catalog_branches)
reposadocommon.writeAllBranchCatalogs()
return jsonify(result=True)
@app.route('/config_data', methods=['POST'])
def config_data():
# catalog_branches = reposadocommon.getCatalogBranches()
check_prods = request.json
if len(check_prods) > 0:
cd_prods = reposadocommon.check_or_remove_config_data_attribute(check_prods, suppress_output=True)
else:
cd_prods = []
response_prods = {}
for prod_id in check_prods:
response_prods.update({prod_id: True if prod_id in cd_prods else False})
print response_prods
return json_response(response_prods)
@app.route('/remove_config_data/<product>', methods=['POST'])
def remove_config_data(product):
# catalog_branches = reposadocommon.getCatalogBranches()
check_prods = request.json
products = reposadocommon.check_or_remove_config_data_attribute([product, ], remove_attr=True, suppress_output=True)
return json_response(products)
@app.route('/status')
def status():
return jsonify(state='calmer than you')
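# Hedged usage sketch (not part of the original module): running the Flask app directly;
# the host and port below are placeholders.
if __name__ == '__main__':
    app.run(host='0.0.0.0', port=8089)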
| 31.804348
| 128
| 0.721023
| 0
| 0
| 0
| 0
| 6,308
| 0.615955
| 0
| 0
| 3,054
| 0.298213
|
c48919ef78498ed664eb6156c8117a86edb141da
| 3,344
|
py
|
Python
|
python/pato/transport/uart.py
|
kloper/pato
|
bfbbee4109227735934f990c5909616a6e8af0b9
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
python/pato/transport/uart.py
|
kloper/pato
|
bfbbee4109227735934f990c5909616a6e8af0b9
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
python/pato/transport/uart.py
|
kloper/pato
|
bfbbee4109227735934f990c5909616a6e8af0b9
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
# -*- python -*-
"""@file
@brief pyserial transport for pato
Copyright (c) 2014-2015 Dimitry Kloper <kloper@users.sf.net>.
All rights reserved.
@page License
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the
distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
The views and conclusions contained in the software and documentation
are those of the authors and should not be interpreted as representing
official policies, either expressed or implied, of the Pato Project.
"""
import serial
from util.protocol import ProtocolException
class Uart(object):
"""
@brief Communication transport using any UART TTL cable (FTDI)
A simple transport that allows python code running on PC to talk
with Pato via UART (using any UART cable or dongle e.g. FTDI),
while Pato is compiled with UART interface.
This requires python pyserial package to be installed.
"""
def __init__(self, *args, **kwargs):
"""
@brief Constructor
@param[in] args arguments for pyserial
@param[in] kwargs keyword arguments for pyserial
"""
self.serial = serial.Serial(*args, **kwargs)
def query(self, request):
"""
@brief Generic query (request/reply) method via pyserial interface.
Send request packet to Pato via serial interface and wait for reply
packet.
If send and/or receive return unexpected result,
@ref ProtocolException is thrown.
@param[in] request regular list of bytes representing packet to be sent
via the bridge.
@returns Received reply packet
@throws ProtocolException upon send or receive error
"""
        # bytearray() serializes the list of ints correctly on both Python 2 and 3
        # (bytes(list) on Python 2 would write the list's repr instead).
        bytes_written = self.serial.write(bytearray(request))
if bytes_written != len(request):
raise ProtocolException("Failed to send request")
reply_size = 5
reply = self.serial.read(reply_size)
if len(reply) != reply_size:
raise ProtocolException("Failed to receive reply")
        # bytearray() yields integer byte values on both Python 2 and 3
        # (serial.read() returns str on Python 2 and bytes on Python 3).
        reply = list(bytearray(reply))
return reply
def close(self):
"""
@brief Close serial line to bridge
"""
self.serial.close()
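# A minimal usage sketch (assumptions: a Pato board attached on /dev/ttyUSB0 at
# 9600 baud, and a placeholder 5-byte request packet -- consult the Pato protocol
# documentation for real packet contents):
#
#   uart = Uart('/dev/ttyUSB0', baudrate=9600, timeout=1.0)
#   try:
#       reply = uart.query([0x00, 0x00, 0x00, 0x00, 0x00])
#       print(reply)
#   finally:
#       uart.close()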
| 34.122449
| 79
| 0.712022
| 1,658
| 0.495813
| 0
| 0
| 0
| 0
| 0
| 0
| 2,707
| 0.80951
|
c489ac681275868dff6ed544c5b85d56c81ef128
| 4,072
|
py
|
Python
|
PYQT5/Games/RockPapperScissorsGame.py
|
Amara-Manikanta/Python-GUI
|
0356e7cae7f1c51d0781bf431c386ee7262608b1
|
[
"MIT"
] | null | null | null |
PYQT5/Games/RockPapperScissorsGame.py
|
Amara-Manikanta/Python-GUI
|
0356e7cae7f1c51d0781bf431c386ee7262608b1
|
[
"MIT"
] | null | null | null |
PYQT5/Games/RockPapperScissorsGame.py
|
Amara-Manikanta/Python-GUI
|
0356e7cae7f1c51d0781bf431c386ee7262608b1
|
[
"MIT"
] | null | null | null |
import sys
from PyQt5.QtWidgets import *
from PyQt5.QtGui import QFont, QPixmap
from PyQt5.QtCore import QTimer
from random import randint
font = QFont("Times", 14)
buttonFont = QFont("Arial", 12)
computerScore = 0
playerScore = 0
class Windows(QWidget):
def __init__(self):
super().__init__()
self.setWindowTitle("Using Spinboxes")
self.setGeometry(350, 150, 550, 500)
self.UI()
def UI(self):
############################Score Borad#############################
self.scorecomputerText = QLabel("Computer Score : ", self)
self.scorecomputerText.move(30, 20)
self.scorecomputerText.setFont(font)
self.scorePlayerText = QLabel("Your Score : ", self)
self.scorePlayerText.setFont(font)
self.scorePlayerText.move(330, 20)
##########################Images###################################
self.imageComputer = QLabel(self)
self.imageComputer.setPixmap(QPixmap("Images/rock.png"))
self.imageComputer.move(50, 100)
self.imagePlayer = QLabel(self)
self.imagePlayer.setPixmap(QPixmap("Images/rock.png"))
self.imagePlayer.move(330, 100)
self.imagegame = QLabel(self)
self.imagegame.setPixmap(QPixmap("Images/game.png"))
self.imagegame.move(230, 145)
##################Buttons#########################
startButton = QPushButton("Start", self)
startButton.setFont(buttonFont)
startButton.move(90, 250)
startButton.clicked.connect(self.funcstart)
stopButton = QPushButton("Stop", self)
stopButton.setFont(buttonFont)
stopButton.move(350, 250)
stopButton.clicked.connect(self.funcstop)
######################Timer##########################
self.timer = QTimer(self)
self.timer.setInterval(50)
self.timer.timeout.connect(self.playGame)
self.show()
def playGame(self):
self.rndcomputer = randint(1, 3)
if self.rndcomputer == 1:
self.imageComputer.setPixmap(QPixmap("Images/rock.png"))
elif self.rndcomputer == 2:
self.imageComputer.setPixmap(QPixmap("Images/paper.png"))
else:
self.imageComputer.setPixmap(QPixmap("Images/scissors.png"))
self.rndplayer = randint(1, 3)
if self.rndplayer == 1:
self.imagePlayer.setPixmap(QPixmap("Images/rock.png"))
elif self.rndplayer == 2:
self.imagePlayer.setPixmap(QPixmap("Images/paper.png"))
else:
self.imagePlayer.setPixmap(QPixmap("Images/scissors.png"))
def funcstart(self):
self.timer.start()
def funcstop(self):
global computerScore
global playerScore
        self.timer.stop()
        # Guard: "Stop" may be clicked before "Start", in which case no hands exist yet.
        if not hasattr(self, 'rndcomputer'):
            return
if (self.rndcomputer == 1 and self.rndplayer == 1) or (self.rndcomputer == 2 and self.rndplayer == 2) or (
self.rndcomputer == 3 and self.rndplayer == 3):
mbox = QMessageBox.information(self, "Information", "Draw Game")
elif (self.rndcomputer == 1 and self.rndplayer == 2) or (self.rndcomputer == 2 and self.rndplayer == 3) or (
self.rndcomputer == 3 and self.rndplayer == 1):
mbox = QMessageBox.information(self, "Information", "you win!")
playerScore += 1
self.scorePlayerText.setText("Your Score:" + str(playerScore))
elif (self.rndcomputer == 1 and self.rndplayer == 3) or (self.rndcomputer == 2 and self.rndplayer == 1) or (
self.rndcomputer == 3 and self.rndplayer == 2):
mbox = QMessageBox.information(self, "Information", "Computer wins!")
computerScore += 1
self.scorecomputerText.setText("Computer Score:" + str(computerScore))
if computerScore == 5 or playerScore == 5:
mbox = QMessageBox.information(self, "Information", "Game Over")
sys.exit()
def main():
App = QApplication(sys.argv)
window = Windows()
sys.exit(App.exec_())
if __name__ == '__main__':
main()
| 34.218487
| 116
| 0.590128
| 3,699
| 0.908399
| 0
| 0
| 0
| 0
| 0
| 0
| 619
| 0.152014
|
c489f0bb6aee13c77e0b4caf8c6ecbaa282336f5
| 539
|
py
|
Python
|
services/neural/traindatabase.py
|
vitorecomp/hackaton-deep-learn
|
962eac133ac92d56d8a55136773c2afe4da2e0b5
|
[
"MIT"
] | null | null | null |
services/neural/traindatabase.py
|
vitorecomp/hackaton-deep-learn
|
962eac133ac92d56d8a55136773c2afe4da2e0b5
|
[
"MIT"
] | null | null | null |
services/neural/traindatabase.py
|
vitorecomp/hackaton-deep-learn
|
962eac133ac92d56d8a55136773c2afe4da2e0b5
|
[
"MIT"
] | null | null | null |
from os import walk
import h5py
import numpy as np
from config.Database import Base
from config.Database import engine
from config.Database import Session
from models.Music import Music
from kmeans.kmeans import Kmeans
mypath = './dataset/datatr/'
def main():
files = []
# 2 - generate database schema
Base.metadata.create_all(engine)
# 3 - create a new session
session = Session()
musics = session.query(Music).all()
musics, distances = Kmeans.split(musics)
session.commit()
return
if __name__ == "__main__":
main()
| 15.852941
| 41
| 0.736549
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 85
| 0.157699
|
c48abebb839f713d689a09683874c38aef9511d6
| 1,128
|
py
|
Python
|
projects/TGS_salt/binary_classifier/model.py
|
liaopeiyuan/ml-arsenal-public
|
f8938ce3cb58b35fc7cc20d096c39a85ec9780b2
|
[
"Apache-2.0"
] | 280
|
2018-10-21T01:07:18.000Z
|
2021-12-30T11:29:48.000Z
|
projects/TGS_salt/binary_classifier/model.py
|
liaopeiyuan/ml-arsenal-public
|
f8938ce3cb58b35fc7cc20d096c39a85ec9780b2
|
[
"Apache-2.0"
] | 3
|
2018-11-13T08:04:48.000Z
|
2020-04-17T09:20:03.000Z
|
projects/TGS_salt/binary_classifier/model.py
|
liaopeiyuan/ml-arsenal-public
|
f8938ce3cb58b35fc7cc20d096c39a85ec9780b2
|
[
"Apache-2.0"
] | 59
|
2018-10-21T04:38:23.000Z
|
2021-03-29T07:58:47.000Z
|
import torch.nn as nn
import pretrainedmodels
class classifier(nn.Module):
def __init__(self, model_name='resnet32'):
super(classifier, self).__init__()
# Load pretrained ImageNet model
self.model = pretrainedmodels.__dict__[model_name](num_classes=1000, pretrained='imagenet')
print(model_name + ' model settings:')
for var in pretrainedmodels.pretrained_settings[model_name]['imagenet']:
print('\t' + var + ': '+ str(pretrainedmodels.pretrained_settings[model_name]['imagenet'][var]))
# Define last layer for fine-tuning
dim_feats = self.model.last_linear.in_features
nb_classes = 1
        # F.dropout2d was undefined here and cannot wrap a module; use a Dropout
        # layer followed by the new Linear head instead.
        self.model.last_linear = nn.Sequential(nn.Dropout(p=0.50),
                                               nn.Linear(dim_feats, nb_classes))
def forward(self, input):
return self.model(input)
def set_mode(self, mode):
self.mode = mode
if 'validation' in mode or 'test' in mode:
self.eval()
elif 'train' in mode:
self.train()
else:
raise NotImplementedError
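# A minimal usage sketch (assumptions: the Cadene `pretrainedmodels` package is
# installed; 'resnet34' is used here because 'resnet32' does not appear to be one
# of its model names; the input size follows the printed ImageNet settings):
#
#   import torch
#   net = classifier(model_name='resnet34')
#   net.set_mode('validation')
#   with torch.no_grad():
#       logits = net(torch.randn(2, 3, 224, 224))   # expected shape: (2, 1)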
| 31.333333
| 109
| 0.60461
| 1,074
| 0.952128
| 0
| 0
| 0
| 0
| 0
| 0
| 160
| 0.141844
|
6700a5bb5f070e2573ae2cc0040f1d1a36a7e4ca
| 13,050
|
py
|
Python
|
code/algorithm/assr.py
|
ShuhuaGao/bcn_opt_dc
|
93234f6b799670bc80daf83794c51841f1a24715
|
[
"MIT"
] | null | null | null |
code/algorithm/assr.py
|
ShuhuaGao/bcn_opt_dc
|
93234f6b799670bc80daf83794c51841f1a24715
|
[
"MIT"
] | null | null | null |
code/algorithm/assr.py
|
ShuhuaGao/bcn_opt_dc
|
93234f6b799670bc80daf83794c51841f1a24715
|
[
"MIT"
] | null | null | null |
"""
Given a Boolean function/network, get its algebraic state-space representation.
A logical vector `\delta_n^i` is represented by an integer `i` for space efficiency. Consequently, a logical matrix
is represented by a list, each element for one column, (also known as the "condensed form").
[1] Conversion from an infix expression to a postfix one:
https://runestone.academy/runestone/books/published/pythonds/BasicDS/InfixPrefixandPostfixExpressions.html
[2] Logical connectives: https://en.wikipedia.org/wiki/Logical_connective
Author: Gao Shuhua
"""
import operator
import os
from typing import List, Union, Tuple, Iterable, Dict
from .bcn import BooleanNetwork, BooleanControlNetwork
_COMMENT = '#'
_STATES = '[STATES]'
_CONTROLS = '[CONTROLS]'
class LogicalConnective:
"""
Represent a logical connective. https://en.wikipedia.org/wiki/Logical_connective
"""
def __init__(self, id: str, description: str, arity: int, precedence: int, function):
"""
Initialize a logical connective.
:param id: a unique description
:param description: a description text
:param arity: number of operands
:param precedence: operator precedence
:param function: callable, the underlying operation which accepts *arity* argments
"""
self.id = id
self.description = description
self.arity = arity
self.precedence = precedence # a smaller number means a higher precedence
self.function = function
def __str__(self):
return self.id
def __call__(self, *args):
return self.function(*args)
def _imply(a, b):
if a:
return b
return 1
def _xnor(a, b):
return a == b
LOGICAL_CONNECTIVES = {
'NOT': LogicalConnective('NOT', 'not', 1, 0, operator.not_),
'XOR': LogicalConnective('XOR', 'exclusive disjunction', 2, 1, operator.xor),
'AND': LogicalConnective('AND', 'and', 2, 2, operator.and_),
'OR': LogicalConnective('OR', 'or', 2, 3, operator.or_),
'IMPLY': LogicalConnective('IMPLY', 'implication', 2, 4, _imply),
'EQUIV': LogicalConnective('EQUIV', 'equivalent', 2, 5, _xnor)
}
def _infix_to_postfix(expression: str) -> List[Union[LogicalConnective, str]]:
"""
Convert an infix expression to its postfix form.
:param expression: infix, separated by spaces
:return: postfix expression, a list, whose element is an operator (LogicalConnective) or a variable (str)
"""
# parse tokens: handle ( and ) specially, which may not be separated by spaces, e.g., 'A OR (B AND C)'
items = expression.split()
tokens = []
for item in items:
token = ''
for c in item:
if c in '()':
if token:
tokens.append(token)
token = ''
tokens.append(c)
else:
token = token + c
if token:
tokens.append(token)
# conversion
op_stack = []
output = []
for token in tokens:
if token.upper() in LOGICAL_CONNECTIVES: # an operator
connective = LOGICAL_CONNECTIVES[token.upper()]
while op_stack and isinstance(op_stack[-1], LogicalConnective) and \
op_stack[-1].precedence < connective.precedence:
output.append(op_stack.pop())
op_stack.append(connective)
elif token == '(':
op_stack.append(token)
elif token == ')':
left_parenthesis_found = False
while op_stack:
top = op_stack.pop()
if top == '(':
left_parenthesis_found = True
break
else:
output.append(top)
if not left_parenthesis_found:
raise RuntimeError("Unmatched parentheses are encountered: an extra ')'!")
elif token.upper() in ['1', 'TRUE']:
output.append('TRUE')
elif token.upper() in ['0', 'FALSE']:
output.append('FALSE')
else: # a variable
output.append(token)
while op_stack:
top = op_stack.pop()
if top == '(':
raise RuntimeError("Unmatched parentheses are encountered: an extra '('!")
output.append(top)
return output
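# Example (a sketch): _infix_to_postfix('A OR (B AND NOT C)') returns
# ['A', 'B', 'C', NOT, AND, OR] (connectives shown by their ids), which
# _evaluate_postfix below reduces with a single operand stack.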
def _evaluate_postfix(expression, values: dict):
"""
Evaluate a postfix expression with the given parameter values.
:param expression: postfix
:param values: a dict: variable --> value (0/1 or False/True)
:return: a Boolean variable, or 0/1
"""
operand_stack = []
for token in expression:
if isinstance(token, str): # a variable
if token in values:
val = values[token]
operand_stack.append(val)
elif token == 'TRUE':
operand_stack.append(True)
elif token == 'FALSE':
operand_stack.append(False)
else:
raise RuntimeError(f"Unrecognized variable: '{token}'")
else: # a logical connective
arguments = []
for _ in range(token.arity):
arguments.append(operand_stack.pop())
result = token(*arguments[::-1])
operand_stack.append(result)
return operand_stack.pop()
def _assr_function(pf_expr: List[Union[LogicalConnective, str]], states: List[str], controls: List[str]) -> List[int]:
"""
Compute the ASSR for a Boolean function.
:param pf_expr: the postfix expression of a Boolean function
:param states: the state variables
:param controls: the control inputs. If `None`, then no inputs.
:return: the structure matrix, a list of length MN
"""
n = len(states)
m = len(controls)
N = 2 ** n
M = 2 ** m
MN = M * N
all_variables = controls + states
structure_matrix = [None] * MN
# enumerate the binary sequences to get the truth table
for h in range(MN):
bh = f'{h:0{m+n}b}'
values = {var: int(val) for var, val in zip(all_variables, bh)}
output = _evaluate_postfix(pf_expr, values)
k = MN - h
if output: # 1 (True)
structure_matrix[k - 1] = 1
else:
structure_matrix[k - 1] = 2
return structure_matrix
def _tokenize(state_to_expr: Dict[str, str], controls: Iterable[str]=None) -> Tuple[Dict[str, List[Union[LogicalConnective, str]]], List[str]]:
"""
(1) Parse the `exprs` into postfix forms
(2) Infer the control inputs, if `controls` is `None`
:return: the tokenized expressions and the controls
"""
state_to_pf_expr = {s: _infix_to_postfix(e) for s, e in state_to_expr.items()}
if controls is None:
# infer controls
controls = []
for pf_expr in state_to_pf_expr.values():
for t in pf_expr:
if isinstance(t, str): # t is a variable, or 'TRUE' or 'FALSE'
if t not in ['TRUE', 'FALSE'] and t not in state_to_pf_expr: # a control
if t not in controls:
controls.append(t)
else:
controls = list(controls)
# validate
for s, pf_expr in state_to_pf_expr.items():
for t in pf_expr:
if isinstance(t, str):
assert t in state_to_pf_expr or t in controls, f"Unrecognized variable: '{t}' in equation of {s}"
return state_to_pf_expr, controls
def _assr_network(state_to_pf_expr: Dict[str, List[Union[LogicalConnective, str]]], states: List[str],
controls: List[str], verbose: bool=True) -> List[int]:
"""
Get the ASSR of a Boolean (control) network.
:param state_to_pf_expr: state -> its postfix expression
:param states: state variables
:param controls: control inputs.
:return: network transition matrix, each column is represented by an integer
"""
    assert len(state_to_pf_expr) == len(states), 'The number of Boolean functions must be equal to the number of state variables'
# get the structure matrix of each state (i.e., its Boolean equation)
state_to_sms = {}
for s, pf_expr in state_to_pf_expr.items():
if verbose:
print(f'\tComputing the structure matrix for state {s} ...')
state_to_sms[s] = _assr_function(pf_expr, states, controls)
n = len(states)
m = len(controls)
transition_matrix = [None] * (2 ** m * 2 ** n)
stp = lambda i, j: (i - 1) * 2 + j
if verbose:
print('\tComposing the complete network transition matrix...')
for k in range(len(transition_matrix)): # k-th column
r = 1
for s in states:
sm = state_to_sms[s]
r = stp(r, sm[k])
transition_matrix[k] = r
return transition_matrix
def build_ASSR(source: Union[str, Iterable[str]], states: List[str]=None,
controls: List[str]=None, verbose: bool=True) -> Union[BooleanNetwork, BooleanControlNetwork]:
"""
Build the ASSR for a given Boolean network in a string form.
Each Boolean function is given by the form: state = f(states, controls).
If a text file is given, each Boolean function is provided per line, and '#' starts a comment line
:param source: str or a list of str. (1) str: a single Boolean function or a text file, which contains one or more
Boolean functions (i.e., a network), each per line; (2) a list of str: multiple Boolean functions
:param states: state variables. If `None`, then inferred automatically.
    :param controls: control inputs. If this is a Boolean network with no inputs, then give it an empty list.
If `None`, then inferred automatically.
:param verbose: whether to print more information
:return: a Boolean network if there are no inputs; otherwise, a Boolean control network
.. note::
If the states and controls are inferred, the order of states corresponds to the line order, whereas the order
        of controls depends on their appearance order in the equations. To precisely control the order (especially for
controls), two additional lines may be appended after the state equations that begin with "[STATES]" or "[CONTROLS]".
For example, line "[STATES] AKT MKK EGFR" specifies the state order (AKT, MKK, EGFR).
Of course, both "[STATES]" and "[CONTROLS]" lines are optional.
The non-None arguments `states` and `controls` have higher precedence than "[STATES]" and "[CONTROLS]" lines respectively.
"""
# get the strings of a network
net = []
if isinstance(source, str):
if os.path.isfile(source):
if verbose:
print(f'User provided a network file: {source}\nParsing...')
with open(source, 'r') as f:
for line in f:
line = line.strip()
if line.startswith(_COMMENT):
continue
elif line.startswith(_STATES):
if states is None:
words = line.split()
states = [w.strip() for w in words[1:]]
elif line.startswith(_CONTROLS):
if controls is None:
words = line.split()
controls = [w.strip() for w in words[1:]]
else:
if line: # skip empty lines if any
net.append(line)
else:
if verbose:
print(f'User provided a single Boolean equation.')
net.append(source)
else:
if verbose:
print(f'User provided a list of Boolean equations.')
net = list(source)
# extract the states and equations
state_to_expr = {}
inferred_states = []
for eq in net:
state, expr = eq.split('=')
state = state.strip()
expr = expr.strip()
if states is not None:
assert state in states, f'Unexpected state {state} is encountered!'
else:
inferred_states.append(state)
assert state not in state_to_expr, f'More than one equation is provided for state {state}'
state_to_expr[state] = expr
if states is not None:
for s in states:
assert s in state_to_expr, f'The equation for state {s} is missing'
else:
states = inferred_states
if verbose:
print('Tokenizing...')
# tokenize
state_to_pf_expr, controls = _tokenize(state_to_expr, controls)
assert set(states).isdisjoint(controls), 'States and controls should be disjoint'
if verbose:
print(f'States are {states}')
print(f'Controls are {controls}')
print('Computing...')
# get the ASSR the network
L = _assr_network(state_to_pf_expr, states, controls, verbose)
# wrap them into a Boolean (control) network
m = len(controls)
n = len(states)
if m == 0:
return BooleanNetwork(n, L, states)
return BooleanControlNetwork(n, m, L, states, controls)
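# A minimal usage sketch (a hypothetical two-state network with one control input):
#
#   bcn = build_ASSR(["x1 = x2 AND u1", "x2 = NOT x1"], controls=["u1"], verbose=False)
#   # `bcn` is a BooleanControlNetwork with n=2 states and m=1 control; its
#   # 8-column transition matrix was composed column by column as described above.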
| 39.071856
| 144
| 0.604828
| 865
| 0.066284
| 0
| 0
| 0
| 0
| 0
| 0
| 5,316
| 0.407356
|
6701184b0bdf306dd90792d6a104891f22b55364
| 4,953
|
py
|
Python
|
datasets/voc_dataset.py
|
ming71/DAL
|
48cd29fdbf5eeea1b5b642bd1f04bbf1863b31e3
|
[
"Apache-2.0"
] | 206
|
2020-09-12T06:17:00.000Z
|
2022-03-28T08:05:51.000Z
|
datasets/voc_dataset.py
|
JOOCHANN/DAL
|
0f379de70ba01c6c9162f4e980a8bd2491976e9c
|
[
"Apache-2.0"
] | 47
|
2020-10-21T06:14:18.000Z
|
2022-03-16T01:54:28.000Z
|
datasets/voc_dataset.py
|
JOOCHANN/DAL
|
0f379de70ba01c6c9162f4e980a8bd2491976e9c
|
[
"Apache-2.0"
] | 38
|
2020-10-22T10:39:51.000Z
|
2022-03-17T12:36:46.000Z
|
# --------------------------------------------------------
# Fast R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick
# Extended by Linjie Deng
# --------------------------------------------------------
import os
import cv2
import numpy as np
import torch
import torch.utils.data as data
import xml.etree.ElementTree as ET
from utils.bbox import quad_2_rbox
class VOCDataset(data.Dataset):
""""""
def __init__(self,
dataset='trainval.txt',
augment = False,
level = 1,
random_flip=True):
self.image_set = dataset
        # str.strip() removes a set of characters rather than a suffix; walk three
        # directory levels up from .../ImageSets/Main/<set>.txt to get the dataset root.
        self.data_path = os.path.dirname(os.path.dirname(os.path.dirname(self.image_set)))
self.image_ext = [".jpg"]
self.image_list = self._load_image_names()
self.classes = ('__background__', 'aeroplane','bicycle','bird','boat',
'bottle','bus','car','cat','chair','cow','diningtable',
'dog','horse','motorbike','person','pottedplant',
'sheep','sofa','train','tvmonitor')
self.num_classes = len(self.classes)
self.class_to_ind = dict(zip(self.classes, range(self.num_classes)))
self.random_flip = random_flip
def __len__(self):
return len(self.image_list)
def __getitem__(self, index):
im_path = self._image_path_from_index(self.image_list[index])
im = cv2.cvtColor(cv2.imread(im_path, cv2.IMREAD_COLOR), cv2.COLOR_BGR2RGB)
roidb = self._load_pascal_annotation(self.image_list[index])
gt_inds = np.where(roidb['gt_classes'] != 0)[0]
bboxes = roidb['boxes'][gt_inds, :]
classes = roidb['gt_classes'][gt_inds]
if self.random_flip and np.random.rand() >= 0.5:
im = cv2.flip(im, 1, None)
oldxs = bboxes[:, 0::2].copy()
bboxes[:, 0::2] = im.shape[1] - oldxs - 1
gt_boxes = np.empty((len(gt_inds), 6), dtype=np.float32)
for i, bbox in enumerate(bboxes):
gt_boxes[i, :5] = quad_2_rbox(np.array(bbox))
gt_boxes[i, 5] = classes[i]
return {'image': im, 'boxes': gt_boxes}
def _load_image_names(self):
"""
Load the names listed in this dataset's image set file.
"""
image_set_file = self.image_set
if not os.path.exists(image_set_file):
            print('Path does not exist: {}'.format(image_set_file))
image_names = []
else:
with open(image_set_file) as f:
image_names = [x.strip() for x in f.readlines()]
return image_names
def _image_path_from_index(self, index):
"""
Construct an image path from the image's "index" identifier.
"""
image_path = None
image_exist = False
for image_ext in self.image_ext:
image_path = os.path.join(self.data_path, 'JPEGImages', index + image_ext)
if os.path.exists(image_path):
image_exist = True
break
if not image_exist:
raise Exception('Image path does not exist: {}'.format(
os.path.join(self.data_path, 'JPEGImages', index))
)
return image_path
def _load_pascal_annotation(self, index):
"""
Load image and bounding boxes info from XML file in the PASCAL VOC format.
"""
filename = os.path.join(self.data_path, 'Annotations', index + '.xml')
tree = ET.parse(filename)
objs = tree.findall('object')
boxes, gt_classes = [], []
for _, obj in enumerate(objs):
difficult = int(obj.find('difficult').text)
is_latin = obj.find('language') is None or obj.find('language').text == 'Latin'
bnd_box = obj.find('bndbox')
box = [
float(bnd_box.find('xmin').text),
float(bnd_box.find('ymin').text),
float(bnd_box.find('xmax').text),
float(bnd_box.find('ymin').text),
float(bnd_box.find('xmax').text),
float(bnd_box.find('ymax').text),
float(bnd_box.find('xmin').text),
float(bnd_box.find('ymax').text),
]
label = self.class_to_ind[obj.find('name').text.lower().strip()]
if difficult:
continue
# if self.only_latin and not is_latin:
# continue
boxes.append(box)
gt_classes.append(label)
return {'boxes': np.array(boxes, dtype=np.int32), 'gt_classes': np.array(gt_classes)}
def image_path_at(self, i):
"""
Return the absolute path to image i in the image sequence.
"""
return self._image_path_from_index(self.image_list[i])
def return_class(self, id):
id = int(id)
return self.classes[id]
if __name__ == '__main__':
pass
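# A minimal usage sketch (assuming a standard VOC-style directory layout; batching
# would need a custom collate_fn because images and box counts vary per sample):
#
#   ds = VOCDataset('VOCdevkit/VOC2007/ImageSets/Main/trainval.txt')
#   sample = ds[0]
#   # sample['image'] is an HxWx3 RGB array; sample['boxes'] is an Nx6 float32
#   # array: five rotated-box parameters from quad_2_rbox plus the class index.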
| 37.240602
| 93
| 0.557238
| 4,484
| 0.90531
| 0
| 0
| 0
| 0
| 0
| 0
| 1,166
| 0.235413
|
67046e56ceee4d6e7815e597ff49d092a5c53d48
| 1,907
|
py
|
Python
|
neploid.py
|
GravityI/neploid
|
4b68e682fcda97a95d155bea288aa90740842b66
|
[
"MIT"
] | null | null | null |
neploid.py
|
GravityI/neploid
|
4b68e682fcda97a95d155bea288aa90740842b66
|
[
"MIT"
] | null | null | null |
neploid.py
|
GravityI/neploid
|
4b68e682fcda97a95d155bea288aa90740842b66
|
[
"MIT"
] | null | null | null |
import discord
import random
import asyncio
import logging
import urllib.request
from discord.ext import commands
bot = commands.Bot(command_prefix='nep ', description= "Nep Nep")
counter = 0
countTask = None
@bot.event
async def on_ready():
print('Logged in as')
print(bot.user.name)
# print(bot.user.id)
print('------')
@bot.command()
async def nep(ctx):
await ctx.send("NEP NEP")
@bot.command(pass_context = True)
async def guessWhat(ctx):
await ctx.send(str(ctx.message.author.display_name) + " officially learned how to code a Discord bot")
async def countdown(channel):
global counter
while not bot.is_closed():
counter += 1
await channel.send("Count is at " + str(counter))
await asyncio.sleep(3)
@bot.command(pass_context = True, aliases = ["collect"])
async def sc(ctx):
global countTask
await ctx.send("Countdown Started!")
countTask = bot.loop.create_task(countdown(ctx.message.channel))
@bot.command(pass_context = True, aliases = ["cancel", "stop"])
async def cc(ctx):
global countTask
await ctx.send("Countdown Cancelled!")
countTask.cancel()
@bot.command(pass_context = True)
async def pm(ctx, *content):
    # Ensure a DM channel exists, then send the joined arguments in both cases
    # (previously the raw argument tuple was sent when a DM channel already existed).
    if ctx.author.dm_channel is None:
        await ctx.author.create_dm()
    sendString = ' '.join(content)
    await ctx.author.dm_channel.send(sendString)
@bot.command(aliases = ['nh'])
async def nhentai(ctx):
rurl = "https://nhentai.net/random/"
headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36'}
accessHurl = urllib.request.urlopen(urllib.request.Request(rurl, headers = headers))
await ctx.send(accessHurl.geturl())
token = "insert token here"
bot.run(token)
| 28.893939
| 153
| 0.681699
| 0
| 0
| 0
| 0
| 1,448
| 0.759308
| 1,391
| 0.729418
| 387
| 0.202937
|
6706396f498d795e0d71e25c46fb2f83e80c424d
| 1,025
|
py
|
Python
|
odoo/base-addons/l10n_tr/__manifest__.py
|
LucasBorges-Santos/docker-odoo
|
53987bbd61f6119669b5f801ee2ad54695084a21
|
[
"MIT"
] | null | null | null |
odoo/base-addons/l10n_tr/__manifest__.py
|
LucasBorges-Santos/docker-odoo
|
53987bbd61f6119669b5f801ee2ad54695084a21
|
[
"MIT"
] | null | null | null |
odoo/base-addons/l10n_tr/__manifest__.py
|
LucasBorges-Santos/docker-odoo
|
53987bbd61f6119669b5f801ee2ad54695084a21
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Turkey - Accounting',
'version': '1.0',
'category': 'Localization',
'description': """
Türkiye için Tek düzen hesap planı şablonu Odoo Modülü.
==========================================================
Bu modül kurulduktan sonra, Muhasebe yapılandırma sihirbazı çalışır
* Sihirbaz sizden hesap planı şablonu, planın kurulacağı şirket, banka hesap
bilgileriniz, ilgili para birimi gibi bilgiler isteyecek.
""",
'author': 'Ahmet Altınışık, Can Tecim',
'maintainer':'https://launchpad.net/~openerp-turkey, http://www.cantecim.com',
'depends': [
'account',
],
'data': [
'data/l10n_tr_chart_data.xml',
'data/account.account.template.csv',
'data/l10n_tr_chart_post_data.xml',
'data/account_data.xml',
'data/account_tax_template_data.xml',
'data/account_chart_template_data.xml',
],
'license': 'LGPL-3',
}
| 33.064516
| 82
| 0.61561
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 888
| 0.845714
|
6706ffad81c03f382360a4810c2bf16d4cc561bb
| 4,364
|
py
|
Python
|
Source Codes/SMF_Python/smf_main.py
|
mmaher22/iCV-SBR
|
72effab621a9f8f5cee0d584b5a2f0e98524ffd6
|
[
"MIT"
] | 20
|
2020-08-25T06:10:14.000Z
|
2022-03-27T15:42:55.000Z
|
Source Codes/SMF_Python/smf_main.py
|
mmaher22/iCV-SBR
|
72effab621a9f8f5cee0d584b5a2f0e98524ffd6
|
[
"MIT"
] | null | null | null |
Source Codes/SMF_Python/smf_main.py
|
mmaher22/iCV-SBR
|
72effab621a9f8f5cee0d584b5a2f0e98524ffd6
|
[
"MIT"
] | 7
|
2020-09-25T15:12:53.000Z
|
2022-03-25T15:23:43.000Z
|
import os
import time
import argparse
import pandas as pd
from smf import SessionMF
parser = argparse.ArgumentParser()
parser.add_argument('--K', type=int, default=20, help="K items to be used in Recall@K and MRR@K")
parser.add_argument('--factors', type=int, default=100, help="Number of latent factors.")
parser.add_argument('--batch', type=int, default=32, help="Batch size for the training process")
parser.add_argument('--momentum', type=float, default=0.0, help="Momentum of the optimizer adagrad_sub")
parser.add_argument('--regularization', type=float, default=0.0001, help="Regularization Amount of the objective function")
parser.add_argument('--dropout', type=float, default=0.0, help="Share of items that are randomly discarded from the current session while training")
parser.add_argument('--skip', type=float, default=0.0, help="Probability that an item is skiped and the next one is used as the positive example")
parser.add_argument('--neg_samples', type=int, default=2048, help="Number of items that are sampled as negative examples")
parser.add_argument('--activation', type=str, default='linear', help="Final activation function (linear, sigmoid, uf_sigmoid, hard_sigmoid, relu, softmax, softsign, softplus, tanh)")
parser.add_argument('--objective', type=str, default='bpr_max', help="Loss Function (bpr_max, top1_max, bpr, top1)")
parser.add_argument('--epochs', type=int, default=10, help="Number of Epochs")
parser.add_argument('--lr', type=float, default=0.001, help="Learning Rate")
parser.add_argument('--itemid', default='ItemID', type=str)
parser.add_argument('--sessionid', default='SessionID', type=str)
parser.add_argument('--valid_data', default='recSys15Valid.txt', type=str)
parser.add_argument('--train_data', default='recSys15TrainOnly.txt', type=str)
parser.add_argument('--data_folder', default='/home/icvuser/Desktop/Recsys cleaned data/RecSys15 Dataset Splits', type=str)
# Get the arguments
args = parser.parse_args()
train_data = os.path.join(args.data_folder, args.train_data)
x_train = pd.read_csv(train_data)
x_train.sort_values(args.sessionid, inplace=True)
x_train = x_train.iloc[-int(len(x_train) / 64) :] #just take 1/64 last instances
valid_data = os.path.join(args.data_folder, args.valid_data)
x_valid = pd.read_csv(valid_data)
x_valid.sort_values(args.sessionid, inplace=True)
print('Finished Reading Data \nStart Model Fitting...')
# Fitting Model
t1 = time.time()
model = SessionMF(factors = args.factors, session_key = args.sessionid, item_key = args.itemid,
batch = args.batch, momentum = args.momentum, regularization = args.regularization,
dropout = args.dropout, skip = args.skip, samples = args.neg_samples,
activation = args.activation, objective = args.objective, epochs = args.epochs, learning_rate = args.lr)
model.fit(x_train)
t2 = time.time()
print('End Model Fitting with total time =', t2 - t1, '\n Start Predictions...')
# Test Set Evaluation
test_size = 0.0
hit = 0.0
MRR = 0.0
cur_length = 0
cur_session = -1
last_items = []
t1 = time.time()
index_item = x_valid.columns.get_loc(args.itemid)
index_session = x_valid.columns.get_loc(args.sessionid)
train_items = model.unique_items
counter = 0
for row in x_valid.itertuples( index=False ):
counter += 1
if counter % 10000 == 0:
print('Finished Prediction for ', counter, 'items.')
session_id, item_id = row[index_session], row[index_item]
if session_id != cur_session:
cur_session = session_id
last_items = []
cur_length = 0
if item_id in model.item_map.keys():
if len(last_items) > cur_length: #make prediction
cur_length += 1
test_size += 1
# Predict the most similar items to items
predictions = model.predict_next(last_items, K = args.K)
# Evaluation
rank = 0
for predicted_item in predictions:
#print(predicted_item, item_id, '###')
rank += 1
if int(predicted_item) == item_id:
hit += 1.0
MRR += 1/rank
break
last_items.append(item_id)
t2 = time.time()
print('Recall: {}'.format(hit / test_size))
print ('\nMRR: {}'.format(MRR / test_size))
print('End Model Predictions with total time =', t2 - t1)
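# For reference, the metrics printed above are the standard session-based ones:
# Recall@K = (#test steps whose true next item appears in the top-K predictions) / (#test steps),
# MRR@K    = mean over test steps of 1/rank of the true next item (0 if it is not in the top K).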
| 47.956044
| 182
| 0.695921
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,353
| 0.310037
|
6707397442e36941efca1b5ee8ee3696d4dcdf31
| 25,163
|
py
|
Python
|
sdks/python/appcenter_sdk/models/Device.py
|
Brantone/appcenter-sdks
|
eeb063ecf79908b6e341fb00196d2cd9dc8f3262
|
[
"MIT"
] | null | null | null |
sdks/python/appcenter_sdk/models/Device.py
|
Brantone/appcenter-sdks
|
eeb063ecf79908b6e341fb00196d2cd9dc8f3262
|
[
"MIT"
] | 6
|
2019-10-23T06:38:53.000Z
|
2022-01-22T07:57:58.000Z
|
sdks/python/appcenter_sdk/models/Device.py
|
Brantone/appcenter-sdks
|
eeb063ecf79908b6e341fb00196d2cd9dc8f3262
|
[
"MIT"
] | 2
|
2019-10-23T06:31:05.000Z
|
2021-08-21T17:32:47.000Z
|
# coding: utf-8
"""
App Center Client
Microsoft Visual Studio App Center API # noqa: E501
OpenAPI spec version: preview
Contact: benedetto.abbenanti@gmail.com
Project Repository: https://github.com/b3nab/appcenter-sdks
"""
import pprint
import re # noqa: F401
import six
class Device(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'sdk_name': 'string',
'sdk_version': 'string',
'wrapper_sdk_version': 'string',
'wrapper_sdk_name': 'string',
'model': 'string',
'oem_name': 'string',
'os_name': 'string',
'os_version': 'string',
'os_build': 'string',
'os_api_level': 'integer',
'locale': 'string',
'time_zone_offset': 'integer',
'screen_size': 'string',
'app_version': 'string',
'carrier_name': 'string',
'carrier_code': 'string',
'carrier_country': 'string',
'app_build': 'string',
'app_namespace': 'string',
'live_update_release_label': 'string',
'live_update_deployment_key': 'string',
'live_update_package_hash': 'string',
'wrapper_runtime_version': 'string'
}
attribute_map = {
'sdk_name': 'sdk_name',
'sdk_version': 'sdk_version',
'wrapper_sdk_version': 'wrapper_sdk_version',
'wrapper_sdk_name': 'wrapper_sdk_name',
'model': 'model',
'oem_name': 'oem_name',
'os_name': 'os_name',
'os_version': 'os_version',
'os_build': 'os_build',
'os_api_level': 'os_api_level',
'locale': 'locale',
'time_zone_offset': 'time_zone_offset',
'screen_size': 'screen_size',
'app_version': 'app_version',
'carrier_name': 'carrier_name',
'carrier_code': 'carrier_code',
'carrier_country': 'carrier_country',
'app_build': 'app_build',
'app_namespace': 'app_namespace',
'live_update_release_label': 'live_update_release_label',
'live_update_deployment_key': 'live_update_deployment_key',
'live_update_package_hash': 'live_update_package_hash',
'wrapper_runtime_version': 'wrapper_runtime_version'
}
def __init__(self, sdk_name=None, sdk_version=None, wrapper_sdk_version=None, wrapper_sdk_name=None, model=None, oem_name=None, os_name=None, os_version=None, os_build=None, os_api_level=None, locale=None, time_zone_offset=None, screen_size=None, app_version=None, carrier_name=None, carrier_code=None, carrier_country=None, app_build=None, app_namespace=None, live_update_release_label=None, live_update_deployment_key=None, live_update_package_hash=None, wrapper_runtime_version=None): # noqa: E501
"""Device - a model defined in Swagger""" # noqa: E501
self._sdk_name = None
self._sdk_version = None
self._wrapper_sdk_version = None
self._wrapper_sdk_name = None
self._model = None
self._oem_name = None
self._os_name = None
self._os_version = None
self._os_build = None
self._os_api_level = None
self._locale = None
self._time_zone_offset = None
self._screen_size = None
self._app_version = None
self._carrier_name = None
self._carrier_code = None
self._carrier_country = None
self._app_build = None
self._app_namespace = None
self._live_update_release_label = None
self._live_update_deployment_key = None
self._live_update_package_hash = None
self._wrapper_runtime_version = None
self.discriminator = None
self.sdk_name = sdk_name
self.sdk_version = sdk_version
if wrapper_sdk_version is not None:
self.wrapper_sdk_version = wrapper_sdk_version
if wrapper_sdk_name is not None:
self.wrapper_sdk_name = wrapper_sdk_name
if model is not None:
self.model = model
if oem_name is not None:
self.oem_name = oem_name
self.os_name = os_name
self.os_version = os_version
if os_build is not None:
self.os_build = os_build
if os_api_level is not None:
self.os_api_level = os_api_level
self.locale = locale
self.time_zone_offset = time_zone_offset
if screen_size is not None:
self.screen_size = screen_size
self.app_version = app_version
if carrier_name is not None:
self.carrier_name = carrier_name
if carrier_code is not None:
self.carrier_code = carrier_code
if carrier_country is not None:
self.carrier_country = carrier_country
self.app_build = app_build
if app_namespace is not None:
self.app_namespace = app_namespace
if live_update_release_label is not None:
self.live_update_release_label = live_update_release_label
if live_update_deployment_key is not None:
self.live_update_deployment_key = live_update_deployment_key
if live_update_package_hash is not None:
self.live_update_package_hash = live_update_package_hash
if wrapper_runtime_version is not None:
self.wrapper_runtime_version = wrapper_runtime_version
@property
def sdk_name(self):
"""Gets the sdk_name of this Device. # noqa: E501
Name of the SDK. Consists of the name of the SDK and the platform, e.g. "appcenter.ios", "hockeysdk.android".
# noqa: E501
:return: The sdk_name of this Device. # noqa: E501
:rtype: string
"""
return self._sdk_name
@sdk_name.setter
def sdk_name(self, sdk_name):
"""Sets the sdk_name of this Device.
Name of the SDK. Consists of the name of the SDK and the platform, e.g. "appcenter.ios", "hockeysdk.android".
# noqa: E501
:param sdk_name: The sdk_name of this Device. # noqa: E501
:type: string
"""
if sdk_name is None:
raise ValueError("Invalid value for `sdk_name`, must not be `None`") # noqa: E501
self._sdk_name = sdk_name
@property
def sdk_version(self):
"""Gets the sdk_version of this Device. # noqa: E501
Version of the SDK in semver format, e.g. "1.2.0" or "0.12.3-alpha.1".
# noqa: E501
:return: The sdk_version of this Device. # noqa: E501
:rtype: string
"""
return self._sdk_version
@sdk_version.setter
def sdk_version(self, sdk_version):
"""Sets the sdk_version of this Device.
Version of the SDK in semver format, e.g. "1.2.0" or "0.12.3-alpha.1".
# noqa: E501
:param sdk_version: The sdk_version of this Device. # noqa: E501
:type: string
"""
if sdk_version is None:
raise ValueError("Invalid value for `sdk_version`, must not be `None`") # noqa: E501
self._sdk_version = sdk_version
@property
def wrapper_sdk_version(self):
"""Gets the wrapper_sdk_version of this Device. # noqa: E501
Version of the wrapper SDK in semver format. When the SDK is embedding another base SDK (for example Xamarin.Android wraps Android), the Xamarin specific version is populated into this field while sdkVersion refers to the original Android SDK.
# noqa: E501
:return: The wrapper_sdk_version of this Device. # noqa: E501
:rtype: string
"""
return self._wrapper_sdk_version
@wrapper_sdk_version.setter
def wrapper_sdk_version(self, wrapper_sdk_version):
"""Sets the wrapper_sdk_version of this Device.
Version of the wrapper SDK in semver format. When the SDK is embedding another base SDK (for example Xamarin.Android wraps Android), the Xamarin specific version is populated into this field while sdkVersion refers to the original Android SDK.
# noqa: E501
:param wrapper_sdk_version: The wrapper_sdk_version of this Device. # noqa: E501
:type: string
"""
self._wrapper_sdk_version = wrapper_sdk_version
@property
def wrapper_sdk_name(self):
"""Gets the wrapper_sdk_name of this Device. # noqa: E501
Name of the wrapper SDK. Consists of the name of the SDK and the wrapper platform, e.g. "appcenter.xamarin", "hockeysdk.cordova".
# noqa: E501
:return: The wrapper_sdk_name of this Device. # noqa: E501
:rtype: string
"""
return self._wrapper_sdk_name
@wrapper_sdk_name.setter
def wrapper_sdk_name(self, wrapper_sdk_name):
"""Sets the wrapper_sdk_name of this Device.
Name of the wrapper SDK. Consists of the name of the SDK and the wrapper platform, e.g. "appcenter.xamarin", "hockeysdk.cordova".
# noqa: E501
:param wrapper_sdk_name: The wrapper_sdk_name of this Device. # noqa: E501
:type: string
"""
self._wrapper_sdk_name = wrapper_sdk_name
@property
def model(self):
"""Gets the model of this Device. # noqa: E501
Device model (example: iPad2,3).
# noqa: E501
:return: The model of this Device. # noqa: E501
:rtype: string
"""
return self._model
@model.setter
def model(self, model):
"""Sets the model of this Device.
Device model (example: iPad2,3).
# noqa: E501
:param model: The model of this Device. # noqa: E501
:type: string
"""
self._model = model
@property
def oem_name(self):
"""Gets the oem_name of this Device. # noqa: E501
Device manufacturer (example: HTC).
# noqa: E501
:return: The oem_name of this Device. # noqa: E501
:rtype: string
"""
return self._oem_name
@oem_name.setter
def oem_name(self, oem_name):
"""Sets the oem_name of this Device.
Device manufacturer (example: HTC).
# noqa: E501
:param oem_name: The oem_name of this Device. # noqa: E501
:type: string
"""
self._oem_name = oem_name
@property
def os_name(self):
"""Gets the os_name of this Device. # noqa: E501
OS name (example: iOS). The following OS names are standardized (non-exclusive): Android, iOS, macOS, tvOS, Windows.
# noqa: E501
:return: The os_name of this Device. # noqa: E501
:rtype: string
"""
return self._os_name
@os_name.setter
def os_name(self, os_name):
"""Sets the os_name of this Device.
OS name (example: iOS). The following OS names are standardized (non-exclusive): Android, iOS, macOS, tvOS, Windows.
# noqa: E501
:param os_name: The os_name of this Device. # noqa: E501
:type: string
"""
if os_name is None:
raise ValueError("Invalid value for `os_name`, must not be `None`") # noqa: E501
self._os_name = os_name
@property
def os_version(self):
"""Gets the os_version of this Device. # noqa: E501
OS version (example: 9.3.0).
# noqa: E501
:return: The os_version of this Device. # noqa: E501
:rtype: string
"""
return self._os_version
@os_version.setter
def os_version(self, os_version):
"""Sets the os_version of this Device.
OS version (example: 9.3.0).
# noqa: E501
:param os_version: The os_version of this Device. # noqa: E501
:type: string
"""
if os_version is None:
raise ValueError("Invalid value for `os_version`, must not be `None`") # noqa: E501
self._os_version = os_version
@property
def os_build(self):
"""Gets the os_build of this Device. # noqa: E501
OS build code (example: LMY47X).
# noqa: E501
:return: The os_build of this Device. # noqa: E501
:rtype: string
"""
return self._os_build
@os_build.setter
def os_build(self, os_build):
"""Sets the os_build of this Device.
OS build code (example: LMY47X).
# noqa: E501
:param os_build: The os_build of this Device. # noqa: E501
:type: string
"""
self._os_build = os_build
@property
def os_api_level(self):
"""Gets the os_api_level of this Device. # noqa: E501
API level when applicable like in Android (example: 15).
# noqa: E501
:return: The os_api_level of this Device. # noqa: E501
:rtype: integer
"""
return self._os_api_level
@os_api_level.setter
def os_api_level(self, os_api_level):
"""Sets the os_api_level of this Device.
API level when applicable like in Android (example: 15).
# noqa: E501
:param os_api_level: The os_api_level of this Device. # noqa: E501
:type: integer
"""
self._os_api_level = os_api_level
@property
def locale(self):
"""Gets the locale of this Device. # noqa: E501
Language code (example: en_US).
# noqa: E501
:return: The locale of this Device. # noqa: E501
:rtype: string
"""
return self._locale
@locale.setter
def locale(self, locale):
"""Sets the locale of this Device.
Language code (example: en_US).
# noqa: E501
:param locale: The locale of this Device. # noqa: E501
:type: string
"""
if locale is None:
raise ValueError("Invalid value for `locale`, must not be `None`") # noqa: E501
self._locale = locale
@property
def time_zone_offset(self):
"""Gets the time_zone_offset of this Device. # noqa: E501
The offset in minutes from UTC for the device time zone, including daylight savings time.
# noqa: E501
:return: The time_zone_offset of this Device. # noqa: E501
:rtype: integer
"""
return self._time_zone_offset
@time_zone_offset.setter
def time_zone_offset(self, time_zone_offset):
"""Sets the time_zone_offset of this Device.
The offset in minutes from UTC for the device time zone, including daylight savings time.
# noqa: E501
:param time_zone_offset: The time_zone_offset of this Device. # noqa: E501
:type: integer
"""
if time_zone_offset is None:
raise ValueError("Invalid value for `time_zone_offset`, must not be `None`") # noqa: E501
self._time_zone_offset = time_zone_offset
@property
def screen_size(self):
"""Gets the screen_size of this Device. # noqa: E501
Screen size of the device in pixels (example: 640x480).
# noqa: E501
:return: The screen_size of this Device. # noqa: E501
:rtype: string
"""
return self._screen_size
@screen_size.setter
def screen_size(self, screen_size):
"""Sets the screen_size of this Device.
Screen size of the device in pixels (example: 640x480).
# noqa: E501
:param screen_size: The screen_size of this Device. # noqa: E501
:type: string
"""
self._screen_size = screen_size
@property
def app_version(self):
"""Gets the app_version of this Device. # noqa: E501
Application version name, e.g. 1.1.0
# noqa: E501
:return: The app_version of this Device. # noqa: E501
:rtype: string
"""
return self._app_version
@app_version.setter
def app_version(self, app_version):
"""Sets the app_version of this Device.
Application version name, e.g. 1.1.0
# noqa: E501
:param app_version: The app_version of this Device. # noqa: E501
:type: string
"""
if app_version is None:
raise ValueError("Invalid value for `app_version`, must not be `None`") # noqa: E501
self._app_version = app_version
@property
def carrier_name(self):
"""Gets the carrier_name of this Device. # noqa: E501
Carrier name (for mobile devices).
# noqa: E501
:return: The carrier_name of this Device. # noqa: E501
:rtype: string
"""
return self._carrier_name
@carrier_name.setter
def carrier_name(self, carrier_name):
"""Sets the carrier_name of this Device.
Carrier name (for mobile devices).
# noqa: E501
:param carrier_name: The carrier_name of this Device. # noqa: E501
:type: string
"""
self._carrier_name = carrier_name
@property
def carrier_code(self):
"""Gets the carrier_code of this Device. # noqa: E501
Carrier country code (for mobile devices).
# noqa: E501
:return: The carrier_code of this Device. # noqa: E501
:rtype: string
"""
return self._carrier_code
@carrier_code.setter
def carrier_code(self, carrier_code):
"""Sets the carrier_code of this Device.
Carrier country code (for mobile devices).
# noqa: E501
:param carrier_code: The carrier_code of this Device. # noqa: E501
:type: string
"""
self._carrier_code = carrier_code
@property
def carrier_country(self):
"""Gets the carrier_country of this Device. # noqa: E501
Carrier country.
# noqa: E501
:return: The carrier_country of this Device. # noqa: E501
:rtype: string
"""
return self._carrier_country
@carrier_country.setter
def carrier_country(self, carrier_country):
"""Sets the carrier_country of this Device.
Carrier country.
# noqa: E501
:param carrier_country: The carrier_country of this Device. # noqa: E501
:type: string
"""
self._carrier_country = carrier_country
@property
def app_build(self):
"""Gets the app_build of this Device. # noqa: E501
The app's build number, e.g. 42.
# noqa: E501
:return: The app_build of this Device. # noqa: E501
:rtype: string
"""
return self._app_build
@app_build.setter
def app_build(self, app_build):
"""Sets the app_build of this Device.
The app's build number, e.g. 42.
# noqa: E501
:param app_build: The app_build of this Device. # noqa: E501
:type: string
"""
if app_build is None:
raise ValueError("Invalid value for `app_build`, must not be `None`") # noqa: E501
self._app_build = app_build
@property
def app_namespace(self):
"""Gets the app_namespace of this Device. # noqa: E501
        The bundle identifier, package identifier, or namespace, depending on what the individual platforms use, e.g. com.microsoft.example.
# noqa: E501
:return: The app_namespace of this Device. # noqa: E501
:rtype: string
"""
return self._app_namespace
@app_namespace.setter
def app_namespace(self, app_namespace):
"""Sets the app_namespace of this Device.
        The bundle identifier, package identifier, or namespace, depending on what the individual platforms use, e.g. com.microsoft.example.
# noqa: E501
:param app_namespace: The app_namespace of this Device. # noqa: E501
:type: string
"""
self._app_namespace = app_namespace
@property
def live_update_release_label(self):
"""Gets the live_update_release_label of this Device. # noqa: E501
Label that is used to identify application code 'version' released via Live Update beacon running on device
# noqa: E501
:return: The live_update_release_label of this Device. # noqa: E501
:rtype: string
"""
return self._live_update_release_label
@live_update_release_label.setter
def live_update_release_label(self, live_update_release_label):
"""Sets the live_update_release_label of this Device.
Label that is used to identify application code 'version' released via Live Update beacon running on device
# noqa: E501
:param live_update_release_label: The live_update_release_label of this Device. # noqa: E501
:type: string
"""
self._live_update_release_label = live_update_release_label
@property
def live_update_deployment_key(self):
"""Gets the live_update_deployment_key of this Device. # noqa: E501
Identifier of environment that current application release belongs to, deployment key then maps to environment like Production, Staging.
# noqa: E501
:return: The live_update_deployment_key of this Device. # noqa: E501
:rtype: string
"""
return self._live_update_deployment_key
@live_update_deployment_key.setter
def live_update_deployment_key(self, live_update_deployment_key):
"""Sets the live_update_deployment_key of this Device.
Identifier of environment that current application release belongs to, deployment key then maps to environment like Production, Staging.
# noqa: E501
:param live_update_deployment_key: The live_update_deployment_key of this Device. # noqa: E501
:type: string
"""
self._live_update_deployment_key = live_update_deployment_key
@property
def live_update_package_hash(self):
"""Gets the live_update_package_hash of this Device. # noqa: E501
Hash of all files (ReactNative or Cordova) deployed to device via LiveUpdate beacon. Helps identify the Release version on device or need to download updates in future.
# noqa: E501
:return: The live_update_package_hash of this Device. # noqa: E501
:rtype: string
"""
return self._live_update_package_hash
@live_update_package_hash.setter
def live_update_package_hash(self, live_update_package_hash):
"""Sets the live_update_package_hash of this Device.
Hash of all files (ReactNative or Cordova) deployed to device via LiveUpdate beacon. Helps identify the Release version on device or need to download updates in future.
# noqa: E501
:param live_update_package_hash: The live_update_package_hash of this Device. # noqa: E501
:type: string
"""
self._live_update_package_hash = live_update_package_hash
@property
def wrapper_runtime_version(self):
"""Gets the wrapper_runtime_version of this Device. # noqa: E501
Version of the wrapper technology framework (Xamarin runtime version or ReactNative or Cordova etc...). See wrapper_sdk_name to see if this version refers to Xamarin or ReactNative or other.
# noqa: E501
:return: The wrapper_runtime_version of this Device. # noqa: E501
:rtype: string
"""
return self._wrapper_runtime_version
@wrapper_runtime_version.setter
def wrapper_runtime_version(self, wrapper_runtime_version):
"""Sets the wrapper_runtime_version of this Device.
Version of the wrapper technology framework (Xamarin runtime version or ReactNative or Cordova etc...). See wrapper_sdk_name to see if this version refers to Xamarin or ReactNative or other.
# noqa: E501
:param wrapper_runtime_version: The wrapper_runtime_version of this Device. # noqa: E501
:type: string
"""
self._wrapper_runtime_version = wrapper_runtime_version
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, Device):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| 32.21895
| 505
| 0.632953
| 24,862
| 0.988038
| 0
| 0
| 17,863
| 0.709892
| 0
| 0
| 14,871
| 0.590987
|
6707b1d92879723bb590b117c8481d4a309bdf74
| 5,591
|
py
|
Python
|
src/providers/snmp.py
|
tcuthbert/napi
|
12ea1a4fb1075749b40b2d93c3d4ab7fb75db8b5
|
[
"MIT"
] | null | null | null |
src/providers/snmp.py
|
tcuthbert/napi
|
12ea1a4fb1075749b40b2d93c3d4ab7fb75db8b5
|
[
"MIT"
] | null | null | null |
src/providers/snmp.py
|
tcuthbert/napi
|
12ea1a4fb1075749b40b2d93c3d4ab7fb75db8b5
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# author : Thomas Cuthbert
import os, sys
from providers.provider import Provider
from config.config import Config
sys.path.append('../')
def _reverse_dict(d):
ret = {}
for key, val in d.items():
        if val in ret:
ret[val].append(key)
else:
ret[val] = [key]
return ret
def _parse_routes(routing_table):
ret = {}
for key, value in routing_table.items():
ret[key] = {}
routes = [i.split('.') for i in value]
for index, route in enumerate(routes):
subnet = ".".join(route[0:4])
ret[key][subnet] = {
"mask": ".".join(route[4:8]),
"next_hop": ".".join(route[9:])
}
return ret
def _strip_oid_from_list(oids, strip):
"""Iterates through list of oids and strips snmp tree off index.
Returns sorted list of indexes.
Keyword Arguments:
self --
oid -- Regular numeric oid index
strip -- Value to be stripped off index
"""
sorted_oids = []
for index in oids:
s = index[0].replace(strip, "")
sorted_oids.append((s, index[1]))
return sorted(sorted_oids)
def _get_snmp(oid, hostname, community):
"""SNMP Wrapper function. Returns tuple of oid, value
Keyword Arguments:
oid --
community --
"""
from pysnmp.entity.rfc3413.oneliner import cmdgen
cmd_gen = cmdgen.CommandGenerator()
error_indication, error_status, error_index, var_bind = cmd_gen.getCmd(
cmdgen.CommunityData(community),
cmdgen.UdpTransportTarget((hostname, 161)),
oid)
if error_indication:
print(error_indication)
else:
if error_status:
print ('%s at %s' % (
error_status.prettyPrint(),
error_index and var_bind[int(error_index)-1] or '?')
)
else:
for name, value in var_bind:
return (name.prettyPrint(), value.prettyPrint())
def _walk_snmp(oid, hostname, community):
"""SNMP getNext generator method. Yields each index to caller.
Keyword Arguments:
oid --
community --
"""
from pysnmp.entity.rfc3413.oneliner import cmdgen
cmd_gen = cmdgen.CommandGenerator()
error_indication, error_status, error_index, var_bind_table = cmd_gen.nextCmd(
cmdgen.CommunityData(community),
cmdgen.UdpTransportTarget((hostname, 161)),
oid)
if error_indication:
print(error_indication)
else:
if error_status:
print ('%s at %s' % (
error_status.prettyPrint(),
error_index and var_bind_table[int(error_index)-1] or '?')
)
else:
for var_bind_row in var_bind_table:
for name, val in var_bind_row:
yield name.prettyPrint(), val.prettyPrint()
class SNMP(Provider):
"""docstring"""
def __init__(self, *args, **kwargs):
"docstring"
self.snmp_params = Config.config_section_map("SNMP_PARAMS")
self.snmp_oids = Config.config_section_map("OIDS")
super(SNMP, self).__init__(*args, **kwargs)
def __resolve_community_string(self):
if self._device.device_type == "core":
return self.snmp_params["community_core"]
else:
return self.snmp_params["community_remote"]
def walk_tree_from_oid(self, oid):
"""Walks SNMP tree from rooted at oid.
Oid must exist in the netlib configuration file else an exception is raised.
:type oid: string
:param oid: An SNMP oid index
"""
try:
index = self.snmp_oids[oid]
except KeyError as e:
#TODO: Logging
print "oid not present in config file"
raise e
return dict(_strip_oid_from_list(list(_walk_snmp(index, self._device.hostname, self.__resolve_community_string())), index + "."))
def __get_ipcidrrouteifindex(self):
"""Get routing table for use by Layer 3 object.
This method gets the ipcidrrouteifindex routing table.
"""
return self.walk_tree_from_oid("ipcidrrouteifindex")
def _build_layer3_prop_routing_table(self):
"Build routing table from device"
return _parse_routes(_reverse_dict(self.__get_ipcidrrouteifindex()))
def _build_layer2_prop_cam_table(self):
"Build cam table from device"
return "ff-ff-ff-ff"
def _build_device_prop_interfaces(self):
intfs = self.__get_index("ifname")
for key, val in intfs.items():
# intfs[key] = [intfs[key], self.__get_index("ifdesc")[key], self.__get_index("ifspeed")[key]]
intfs[key] = {
"intf_name": intfs[key],
"intf_desc": self.__get_index("ifdesc")[key],
"intf_speed": self.__get_index("ifspeed")[key]
}
return intfs
def _wrapper_layer3_device_prop_interfaces(self, func):
res = func()
res.update({
"0": {"intf_name": "INTERNAL"}
})
for key, value in _reverse_dict(self.walk_tree_from_oid("ipaddressifindex")).items():
res[key].update({"intf_ip": value.pop()})
return res
def __get_index(self, index):
"Gather interfaces for upstream device."
oid = self.snmp_oids[index]
hostname = self._device.hostname
return dict(_strip_oid_from_list(list(_walk_snmp(oid, hostname, self.__resolve_community_string())), oid + "."))
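# A minimal usage sketch (assumptions: Provider.__init__ accepts a device object
# exposing `hostname` and `device_type`, and the configuration file defines the
# [SNMP_PARAMS] and [OIDS] sections referenced above):
#
#   snmp = SNMP(device)
#   routes = snmp._build_layer3_prop_routing_table()
#   # e.g. {'2': {'10.0.0.0': {'mask': '255.255.255.0', 'next_hop': '10.0.1.1'}}}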
| 31.587571
| 137
| 0.603291
| 2,652
| 0.474334
| 893
| 0.159721
| 0
| 0
| 0
| 0
| 1,411
| 0.25237
|