blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 2
616
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
69
| license_type
stringclasses 2
values | repo_name
stringlengths 5
118
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringlengths 4
63
| visit_date
timestamp[us] | revision_date
timestamp[us] | committer_date
timestamp[us] | github_id
int64 2.91k
686M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 23
values | gha_event_created_at
timestamp[us] | gha_created_at
timestamp[us] | gha_language
stringclasses 213
values | src_encoding
stringclasses 30
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 2
10.3M
| extension
stringclasses 246
values | content
stringlengths 2
10.3M
| authors
listlengths 1
1
| author_id
stringlengths 0
212
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
7c37487b442fdebece43b54771829b25f77308e7
|
1a25f1e9454227a5a3abff901fd2fda3fc5a7903
|
/nodes/strip_chart.py
|
355a770db8f8282d40ca37828a667a8bc4d8315e
|
[] |
no_license
|
lyshds/muscle_imager
|
7155546bc04396c01de03e38502ecb5eb7e77e96
|
66c52a7808b6cdb5641c2173913cfe70a9850acb
|
refs/heads/master
| 2020-09-27T05:22:39.719641
| 2017-05-20T04:32:43
| 2017-05-20T04:32:43
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,785
|
py
|
#!/usr/bin/env python
import pyqtgraph as pg
from pyqtgraph.Qt import QtCore, QtGui#QStringList,QString
import rospy
import rosparam
import rospkg
from muscle_imager.msg import MsgExtractedSignal
from phidgets_daq.msg import phidgetsDAQinterpreted
from Kinefly.msg import MsgFlystate
from std_msgs.msg import Header, String
import numpy as np
import os
from cv_bridge import CvBridge, CvBridgeError
# Upper bound (bytes) used to size ROS subscriber buffers for image-rate
# topics: 128-byte header + a 1024x1024 pixel payload.
sizeImage = 128+1024*1024 # Size of header + data.
# Period (ms) handed to QTimer.start() below, i.e. time between GUI redraws.
# NOTE(review): name says "freq" but it is used as a period - confirm.
qt_tick_freq = 5
app = pg.mkQApp()
## Define main window class from template
path = os.path.dirname(os.path.abspath(__file__))
uiFile = os.path.join(path, 'strip_chart.ui')
# Build the (form, base) class pair from the Qt Designer .ui file that
# lives next to this script.
WindowTemplate, TemplateBaseClass = pg.Qt.loadUiType(uiFile)
#from muscle_imager import muscle_model as mm
import muscle_model as mm
# Default reference-frame basis vectors (a1, a2) and origin (p).
# NOTE(review): not referenced anywhere in this file as far as visible
# here - confirm whether it is still needed.
default_rframe_data = {'a1': np.array([ 51.5848967 , -5.93928407]),
                       'a2': np.array([ -0.09151179, 88.42505672]),
                       'p': np.array([ 26.66908747, 34.43488385])}
def toNumpyND(np_ndarray):
    """Pack a numpy array into a MsgArrayNumpyND ROS message.

    The shape travels separately from the flattened float64 payload so the
    receiver can restore the original array.

    NOTE(review): MsgArrayNumpyND is not imported anywhere in this file,
    so calling this function raises NameError - confirm the missing import.
    """
    msg = MsgArrayNumpyND()
    msg.shape = np.array(np_ndarray.shape).astype(int)
    msg.data = np.ravel(np_ndarray).astype(np.float64)
    return msg
class MainWindow(TemplateBaseClass):
    """Strip-chart GUI plotting live muscle, DAQ and Kinefly signals.

    Each signal keeps a pair of ring buffers [timestamps, values] that ROS
    callbacks roll left by one sample; a Qt timer periodically re-plots
    every curve. While a redraw is in progress (self.lock is True) the
    callbacks skip - i.e. drop - incoming samples.
    """

    def __init__(self):
        TemplateBaseClass.__init__(self)
        self.setWindowTitle('strip chart browser')
        # Create the main window
        self.ui = WindowTemplate()
        #initialize the items created in designer
        self.ui.setupUi(self)
        #do some ros introspection
        rp = rospkg.RosPack()
        self.package_path = rp.get_path('muscle_imager')
        self.model_path = os.path.join(self.package_path,'models')
        # Visible history window for all strip charts.
        self.buffer_duration_sec = 30.0 #sec
        #Set up subscribers and buffers for the
        #signals coming in from the unmixer
        self.muscle_plots = dict()
        self.muscle_curves = dict()
        self.muscle_buffers = dict()
        self.muscle_subcribers = dict()
        self.muscle_update_period_sec = 30.0/1000#30ms
        self.muscle_buffer_samples = int(self.buffer_duration_sec/self.muscle_update_period_sec)
        for muscle in ['b1','b2','b3','i1','i2','iii1','iii3','iii24','hg1','hg2','hg3','hg4']:
            # One plot per muscle; left and right curves share it.
            self.muscle_plots[muscle] = pg.PlotItem()
            for side in ['left','right']:
                # Buffer layout: [0] timestamps, [1] values.
                self.muscle_buffers[(side,muscle)] = [np.arange(self.muscle_buffer_samples,dtype = float),
                                                      np.ones(self.muscle_buffer_samples,dtype = float)]
                self.ui.__dict__[muscle].setCentralItem(self.muscle_plots[muscle])
                # Pen color: white for left, red for right.
                c = {'left':'w','right':'r'}[side]
                self.muscle_curves[(side,muscle)] = self.muscle_plots[muscle].plot(self.muscle_buffers[(side,muscle)][0],
                                                                                   self.muscle_buffers[(side,muscle)][1],
                                                                                   pen = c)
                if side == 'left':
                    self.muscle_subcribers[(side,muscle)] = rospy.Subscriber('/unmixer_%s/%s'%(side,muscle),
                                                                             MsgExtractedSignal,
                                                                             self.muscle_signal_callback_left,
                                                                             queue_size=None,
                                                                             buff_size=2*sizeImage,
                                                                             tcp_nodelay=True)
                elif side == 'right':
                    self.muscle_subcribers[(side,muscle)] = rospy.Subscriber('/unmixer_%s/%s'%(side,muscle),
                                                                             MsgExtractedSignal,
                                                                             self.muscle_signal_callback_right,
                                                                             queue_size=None,
                                                                             buff_size=2*sizeImage,
                                                                             tcp_nodelay=True)
        #Signals coming in from the daq
        self.daq_buffers = dict()
        self.daq_plots = dict()
        self.daq_curves = dict()
        self.daq_subscribers = dict()
        # DAQ sample period comes from the phidgets_daq node's parameter.
        self.daq_update_period_sec = rospy.get_param('/phidgets_daq/update_rate_ms')/1000.0
        self.daq_buffer_samples = int(self.buffer_duration_sec/self.daq_update_period_sec)
        self.daq_buffers['freq'] = [np.arange(self.daq_buffer_samples,dtype = float),
                                    np.ones(self.daq_buffer_samples,dtype = float)]
        self.daq_plots['freq'] = pg.PlotItem()
        self.ui.freq.setCentralItem(self.daq_plots['freq'])
        self.daq_curves['freq'] = self.daq_plots['freq'].plot(self.daq_buffers['freq'][0],
                                                              self.daq_buffers['freq'][1])
        self.daq_subscribers['freq'] = rospy.Subscriber('/phidgets_daq/freq',
                                                        phidgetsDAQinterpreted,
                                                        self.daq_signal_callback,
                                                        queue_size=None,
                                                        buff_size=1000,
                                                        tcp_nodelay=True)
        #Signals produced by kinefly
        self.kinefly_buffers = dict()
        self.kinefly_plots = dict()
        self.kinefly_subscribers = dict()
        self.kinefly_curves = dict()
        self.kfly_update_period_sec = 15.0/1000.0
        self.kfly_buffer_samples = int(self.buffer_duration_sec/self.kfly_update_period_sec)
        # 'lmr' = left minus right wing-stroke angle.
        self.kinefly_buffers['lmr'] = [np.arange(self.kfly_buffer_samples,dtype = float),
                                       np.ones(self.kfly_buffer_samples,dtype = float)]
        self.kinefly_plots['lmr'] = pg.PlotItem()
        self.kinefly_curves['lmr'] = self.kinefly_plots['lmr'].plot(self.kinefly_buffers['lmr'][0],
                                                                    self.kinefly_buffers['lmr'][1])
        self.ui.lmr.setCentralItem(self.kinefly_plots['lmr'])
        self.kinefly_subscribers['flystate'] = rospy.Subscriber('/kinefly/flystate',
                                                                MsgFlystate,
                                                                self.kinefly_signal_callback,
                                                                queue_size=None,
                                                                buff_size=1000,
                                                                tcp_nodelay=True)
        # Cooperative flag, not a real mutex: callbacks simply skip writes
        # while qt_tick() is redrawing, so those samples are dropped.
        # NOTE(review): confirm dropping is acceptable vs. a real lock/queue.
        self.lock = False
        rospy.init_node('strip_chart')
        #update the gui with a Qt timer
        self.timer = QtCore.QTimer()
        self.timer.timeout.connect(self.qt_tick)
        self.timer.start(qt_tick_freq)
        self.show()

    def daq_signal_callback(self,msg):
        """Append one DAQ sample (msg.time, msg.value) to the 'freq' ring buffer."""
        if not(self.lock):
            self.daq_buffers['freq'][0] = np.roll(self.daq_buffers['freq'][0],-1)
            self.daq_buffers['freq'][1] = np.roll(self.daq_buffers['freq'][1],-1)
            self.daq_buffers['freq'][0][-1] = msg.time
            self.daq_buffers['freq'][1][-1] = msg.value

    def kinefly_signal_callback(self,msg):
        """Append one left-minus-right wing angle sample to the 'lmr' buffer."""
        if not(self.lock):
            self.kinefly_buffers['lmr'][0] = np.roll(self.kinefly_buffers['lmr'][0],-1)
            self.kinefly_buffers['lmr'][1] = np.roll(self.kinefly_buffers['lmr'][1],-1)
            self.kinefly_buffers['lmr'][0][-1] = msg.header.stamp.to_sec()
            # NaN marks frames where either wing was not tracked.
            if ((len(msg.left.angles) >0) and (len(msg.right.angles) >0)):
                self.kinefly_buffers['lmr'][1][-1] = msg.left.angles[0] - msg.right.angles[0]
            else:
                self.kinefly_buffers['lmr'][1][-1] = np.nan

    def muscle_signal_callback_left(self,msg):
        """Receive a MsgExtractedSignal message from the left unmixer."""
        if not(self.lock):
            self.muscle_buffers[('left',msg.muscle)][0] = np.roll(self.muscle_buffers[('left',msg.muscle)][0],-1)
            self.muscle_buffers[('left',msg.muscle)][1] = np.roll(self.muscle_buffers[('left',msg.muscle)][1],-1)
            self.muscle_buffers[('left',msg.muscle)][0][-1] = msg.header.stamp.to_sec()
            self.muscle_buffers[('left',msg.muscle)][1][-1] = msg.value

    def muscle_signal_callback_right(self,msg):
        """Receive a MsgExtractedSignal message from the right unmixer."""
        if not(self.lock):
            self.muscle_buffers[('right',msg.muscle)][0] = np.roll(self.muscle_buffers[('right',msg.muscle)][0],-1)
            self.muscle_buffers[('right',msg.muscle)][1] = np.roll(self.muscle_buffers[('right',msg.muscle)][1],-1)
            self.muscle_buffers[('right',msg.muscle)][0][-1] = msg.header.stamp.to_sec()
            self.muscle_buffers[('right',msg.muscle)][1][-1] = msg.value

    def qt_tick(self):
        self.lock = True
        """handle a qt timer tick"""
        # Re-plot every curve with time rebased to the buffer's first sample.
        for side,muscle in self.muscle_curves.keys():
            self.muscle_curves[(side,muscle)].setData(self.muscle_buffers[(side,muscle)][0]-
                                                      self.muscle_buffers[(side,muscle)][0][0],
                                                      self.muscle_buffers[(side,muscle)][1])
        self.daq_curves['freq'].setData((self.daq_buffers['freq'][0]-
                                         self.daq_buffers['freq'][0][0]),
                                        self.daq_buffers['freq'][1])
        self.kinefly_curves['lmr'].setData((self.kinefly_buffers['lmr'][0]-
                                            self.kinefly_buffers['lmr'][0][0]),
                                           self.kinefly_buffers['lmr'][1])
        self.lock = False
        app.processEvents()
# Window (and rospy node) are created at import time, not under the
# __main__ guard, so importing this module has side effects.
win = MainWindow()
## Start Qt event loop unless running in interactive mode or using pyside.
if __name__ == '__main__':
    import sys
    if (sys.flags.interactive != 1) or not hasattr(QtCore, 'PYQT_VERSION'):
        QtGui.QApplication.instance().exec_()
#fly_db.close()
|
[
"thlindsay1@gmail.com"
] |
thlindsay1@gmail.com
|
748c79d00d0dbb0cc6f31fe9dd59cd7d07273eb3
|
120383e3d0ed2ca32b637b54084fb7b6c38b649c
|
/cpm/tasks/models.py
|
a899143466b4bf9f5aa66ef402bae949dd8e048c
|
[] |
no_license
|
sacrac/django-cpm
|
781c0373eb66d9cb8bf55c0072327c5c09a09a28
|
5aff06bf03608e6ba62a6279b1907efbd6d09b9e
|
refs/heads/master
| 2021-01-15T20:48:03.273292
| 2013-08-31T21:38:06
| 2013-08-31T21:38:06
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,844
|
py
|
from django.db import models
from django.core.urlresolvers import reverse
from django.utils.http import urlquote
from django.utils.timesince import timesince, timeuntil
from django.utils.translation import ugettext_lazy as _
from core.models import Slugged, base_concrete_model, DateStamp
from projects.models import Project
import reversion
def get_sentinel_category():
    """Return the sentinel 'Deleted' category, creating it on first use.

    Used as the on_delete target for Task.category so tasks survive the
    deletion of their category.
    """
    category, _created = TaskCategory.objects.get_or_create(title='Deleted')
    return category
class Task(Slugged):
    """A unit of work within a Project, optionally grouped by TaskCategory."""

    project = models.ForeignKey(Project)
    # Deleting a category re-points its tasks at the sentinel 'Deleted' one.
    category = models.ForeignKey('TaskCategory', blank=True, null=True, on_delete=models.SET(get_sentinel_category))
    projected_completion_date = models.DateField(_("Projected Completion Date"),
                                                 blank=True, null=True)
    completion_date = models.DateField(_("Actual Completion Date"),
                                       blank=True, null=True)
    description = models.TextField(blank=True)
    expense = models.IntegerField(blank=True)
    price = models.IntegerField(blank=True, verbose_name=_('Markup'))

    class Meta:
        order_with_respect_to = 'project'

    def get_absolute_url(self):
        """URL of this task's detail view."""
        return reverse('tasks:task-detail', kwargs={'pk': self.pk})

    def get_update_url(self):
        """URL of this task's update view."""
        return reverse('tasks:task-update', kwargs={'pk': self.pk})

    def due_date_until(self):
        """Human-readable time remaining until the projected completion date,
        or None when no date is set."""
        if self.projected_completion_date:
            return timeuntil(self.projected_completion_date)

    def due_date_since(self):
        """Human-readable time elapsed since the projected completion date,
        or None when no date is set."""
        if self.projected_completion_date:
            return timesince(self.projected_completion_date)

    def get_status(self):
        """Return 0 (project not started), 1 (in progress) or 2 (completed)."""
        if self.project.start_time:
            if self.completion_date:
                result = 2
            else:
                result = 1
        else:
            result = 0
        return result

    def get_project_category_totals(self):
        """Map category slug -> {id, title, expense, price, total, tasks}
        aggregated over this task's project, in category 'order'."""
        result_dict = {}
        all_categories = TaskCategory.objects.all()
        all_tasks = Task.objects.filter(project=self.project)
        all_categories = all_categories.order_by('order')
        for cat in all_categories:
            cat_tasks = all_tasks.filter(category=cat)
            if cat_tasks:
                cat_exp_total = sum(cat_tasks.values_list('expense', flat=True))
                cat_price_total = sum(cat_tasks.values_list('price', flat=True))
                result_dict[cat.slug] = {
                    'id': cat.id,
                    'title': cat.title,
                    'expense': cat_exp_total,
                    'price': cat_price_total,
                    'total': sum([cat_exp_total, cat_price_total]),
                    'tasks': cat_tasks
                }
        return result_dict

    # Admin/list column labels for the two due-date helpers above.
    due_date_since.short_description = _("Late by")
    due_date_until.short_description = _("Due in")

# Track Task revisions with django-reversion.
reversion.register(Task)
class TaskCategory(Slugged):
    """Hierarchical task category; caches its ancestor chain in 'ascendants'."""

    parent = models.ForeignKey("TaskCategory", blank=True, null=True,
                               related_name="children", on_delete=models.SET_NULL)
    # Comma-separated ancestor ids ending with this category's own id;
    # maintained by update_descendants().
    ascendants = models.CharField(editable=False, max_length=100, null=True)
    order = models.IntegerField(blank=True, null=True)
    description = models.TextField(blank=True)

    class Meta:
        ordering = ('_order', 'order', 'ascendants')
        order_with_respect_to = 'parent'

    def save(self, *args, **kwargs):
        """Save, refusing to persist when this id appears among its own ancestors."""
        if self.parent is None:
            # Root categories mirror the manual 'order' into Django's _order.
            self._order = self.order
        if self.ascendants:
            # [:-1] drops the chain's final element, which is this
            # category's own id, leaving only the true ancestors.
            if not self.id in [int(ascendant) for ascendant in self.ascendants.split(',')[:-1]]:
                if self.update_descendants():
                    super(TaskCategory, self).save(*args, **kwargs)
            else:
                print 'error: self id in ascendants'
        else:
            super(TaskCategory, self).save(*args, **kwargs)
            self.update_descendants()

    def update_descendants(self):
        """Rebuild 'ascendants' for self and recurse into children.

        Returns True when the chain was rebuilt safely, False when a cycle
        through self was detected.
        """
        current_ascendants = self.ascendants
        print 'current: ' + str(current_ascendants)
        # Chain ends with this category's own id; ancestors are prepended.
        ascendants = [str(self.id)]
        parent = self.parent
        while parent is not None and parent is not self:
            ascendants.insert(0, str(parent.id))
            if parent.parent:
                parent = parent.parent
            else:
                #the while condition will set parent to None and we cant validate it so we end the loop before this
                #while the parent is not None
                break
            if parent == self:
                break
        if parent != self or parent is None:
            print 'parent safe'
            ascendants = ",".join(ascendants)
            self.ascendants = ascendants
            if ascendants != current_ascendants or ascendants is None:
                # Persist only the changed column; avoids recursing into save().
                super(TaskCategory, self).save(update_fields=['ascendants'])
                print 'new : ' + str(self.ascendants)
            children = self.children.all()
            if children:
                for child in children:
                    child.update_descendants()
            return True
        else:
            return False

    def get_update_url(self):
        """URL of this category's update view."""
        return reverse('tasks:task-category-update', kwargs={'pk': self.pk})

    def get_project_category_price(self, project):
        """Sum of 'price' over the given project's tasks in this category."""
        total = 0
        for p in project.task_set.filter(category=self):
            total += p.price
        return total

    def get_project_category_expense(self, project):
        """Sum of 'expense' over the given project's tasks in this category."""
        total = 0
        for p in project.task_set.filter(category=self):
            total += p.expense
        return total
reversion.register(TaskCategory, follow=['task_set'], exclude=["created, modified"])
class CategoryBundle(Slugged):
    """A named, sluggable grouping of TaskCategory objects."""

    # FIX: dropped null=True - it has no effect on ManyToManyField (the
    # relation lives in a join table) and Django warns about it.
    categories = models.ManyToManyField(TaskCategory, blank=True, related_name='bundles')

    def get_update_url(self):
        """URL of this bundle's update view."""
        return reverse('tasks:bundle-update', kwargs={'pk': self.pk})
|
[
"will@webpowerlabs.com"
] |
will@webpowerlabs.com
|
19c2e01da48ca921b8de69497ea25ed9c31f8ef2
|
1fe8d4133981e53e88abf633046060b56fae883e
|
/venv/lib/python3.8/site-packages/tensorflow_estimator/python/estimator/tools/checkpoint_converter 2.py
|
ae2bdfdfb98ef173118485a0bc3347b669d692ff
|
[] |
no_license
|
Akira331/flask-cifar10
|
6c49db8485038731ce67d23f0972b9574746c7a7
|
283e7a2867c77d4b6aba7aea9013bf241d35d76c
|
refs/heads/master
| 2023-06-14T16:35:06.384755
| 2021-07-05T14:09:15
| 2021-07-05T14:09:15
| 382,864,970
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 130
|
py
|
version https://git-lfs.github.com/spec/v1
oid sha256:8fa8fb8a28f4e0579f76bf4c69c422094fc5b48a2ceb3947d71dc3fd7baa9304
size 15165
|
[
"business030301@gmail.com"
] |
business030301@gmail.com
|
1655c1d3923c8fc1b564ad7b964b0bba8bdfb5d4
|
ac5e52a3fc52dde58d208746cddabef2e378119e
|
/exps-gsn-edf.0/gsn-edf_ut=3.5_rd=0.5_rw=0.04_rn=4_u=0.075-0.325_p=harmonic-2/sched=RUN_trial=4/params.py
|
179d934c3074c090dc131dbc794033281d0e7cef
|
[] |
no_license
|
ricardobtxr/experiment-scripts
|
1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1
|
7bcebff7ac2f2822423f211f1162cd017a18babb
|
refs/heads/master
| 2023-04-09T02:37:41.466794
| 2021-04-25T03:27:16
| 2021-04-25T03:27:16
| 358,926,457
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 253
|
py
|
{'cpus': 4,
'duration': 30,
'final_util': '3.522286',
'max_util': '3.5',
'periods': 'harmonic-2',
'release_master': False,
'res_distr': '0.5',
'res_nmb': '4',
'res_weight': '0.04',
'scheduler': 'GSN-EDF',
'trial': 4,
'utils': 'uni-medium-3'}
|
[
"ricardo.btxr@gmail.com"
] |
ricardo.btxr@gmail.com
|
9bc0748ad060a2c2c9643d3b9b3dca68e9b283ce
|
3f4a5a0a08bf04f30980926b247e2513ff1616a2
|
/删除列表重复的项.py
|
84271590f194695d40202b1da68ade8762fb6e57
|
[] |
no_license
|
king-huoye/leetcode-with-python
|
4c43f85cde7a8de838749d69bd84da495ed85f71
|
b84309c55df7476ca3ba58b915da02f35a6ba719
|
refs/heads/master
| 2022-12-06T09:11:53.743470
| 2020-09-02T12:53:39
| 2020-09-02T12:53:39
| 292,276,224
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 339
|
py
|
class List(object):
    """Minimal stand-in for typing.List so the annotation below resolves.

    BUG FIX: a plain class is not subscriptable, so the original
    ``List[int]`` annotation raised TypeError the moment the method was
    defined; ``__class_getitem__`` makes the subscription legal.
    """
    def __class_getitem__(cls, item):
        return cls


class Solution:
    def removeDuplicates(self, nums: List[int]) -> int:
        """Deduplicate nums in place (result is sorted ascending) and
        return the new length."""
        a = set(nums)    # drop duplicates
        nums.clear()     # empty the list in place so callers see the change
        a = list(a)      # set back to list
        nums.extend(a)   # refill with the unique values
        nums.sort()      # restore ascending order
        return len(nums)
|
[
"noreply@github.com"
] |
king-huoye.noreply@github.com
|
28d1d8724bec68d18128ae14f995f71a73966766
|
5373c570897992986b18c1f76b6a2e1482b9599d
|
/Exercícios/ex046ContagemRegresiva.py
|
6f57ac9aaa45a94abeae4dd4ecbee755e5fb0380
|
[
"MIT"
] |
permissive
|
CarolineSantosAlves/Exercicios-Python
|
37ebe049919608ad6cf7f2558734f2f9b16082a1
|
3bbdc1fd996948142951e043175efd9d8a6c34f2
|
refs/heads/master
| 2022-09-08T20:29:26.375417
| 2020-05-30T23:44:04
| 2020-05-30T23:44:04
| 268,178,149
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 144
|
py
|
from time import sleep
import emoji
# Countdown: print 10 down to 0, pausing one second between numbers,
# then finish with a collision emoji.
for count in reversed(range(11)):
    print(count)
    sleep(1)
print(emoji.emojize(':collision:', use_aliases=True))
|
[
"caroline.santos.wb@gmail.com"
] |
caroline.santos.wb@gmail.com
|
aeaf756b0555b08916480ad2a0aa828e910f0ae6
|
2de9c11528bf468612f83b845c02d570f8d271e0
|
/geese/controller/__init__.py
|
3ba55d9231c7646c6780fa7f263b7690f4bb8d9d
|
[] |
no_license
|
waderaku/hungry_geese_for_kaggle
|
4e7cfe2010820ecbab05cdbcadd8395995b4f932
|
811b76bf47571adc1aa0d0987c02b72bca4789ac
|
refs/heads/master
| 2023-06-07T14:20:43.483612
| 2021-07-10T06:54:38
| 2021-07-10T06:54:38
| 369,221,676
| 0
| 0
| null | 2021-07-10T06:54:39
| 2021-05-20T13:41:36
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 109
|
py
|
from geese.controller.controller import Controller
from geese.controller.ppo_controller import PPOController
|
[
"yosemat.beta@gmail.com"
] |
yosemat.beta@gmail.com
|
22a8669b42f146da8f8d47e2f3732c7bc96e8e70
|
d28356a4b01b70377d26d13475385a4df26a8d96
|
/onlinelab/service/processes.py
|
8e79a0cdd9813f39b7c97576133f2efd6fd06273
|
[] |
no_license
|
volterra-luo/femhub-online-lab
|
c5794561993cb62406c7f52ceba9d9499350bd4b
|
29aa77847d365bef6702796e54c6355f4b040909
|
refs/heads/master
| 2021-01-20T23:48:00.110811
| 2010-10-19T21:42:17
| 2010-10-19T21:43:57
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 18,996
|
py
|
"""Engine process manager for Online Lab services. """
import os
import re
import time
import signal
import shutil
import logging
import xmlrpclib
import functools
import subprocess
import collections
import psutil
import pyinotify
import tornado.ioloop
import tornado.httpclient
import utilities
from ..utils import settings
class UIDSpaceExhausted(Exception):
    """Signals that every UID in the configured pool is already in use."""
class ProcessManager(object):
    """Start and manage system processes for engines.

    Entry in self.processes per engine uuid: None = starting,
    False = died unexpectedly, EngineProcess = running.
    """

    # Parses the engine's single startup line, e.g. "... port=N, pid=M".
    _re = re.compile("^.*?port=(?P<port>\d+), pid=(?P<pid>\d+)")
    # Seconds an engine gets to print its startup line before being killed.
    _timeout = 20 # XXX: put this into config file

    _inotify_mask = pyinotify.IN_CREATE \
                  | pyinotify.IN_MODIFY \
                  | pyinotify.IN_DELETE

    def __init__(self):
        self.ioloop = tornado.ioloop.IOLoop.instance()
        self.settings = settings.Settings.instance()
        self.processes = {}
        # Watch each engine's data directory so files it creates during
        # evaluation can be reported back with the results.
        self.watches = pyinotify.WatchManager()
        self.notifier = pyinotify.Notifier(self.watches, timeout=0)
        self.ioloop.add_handler(self.watches.get_fd(),
            self._on_inotify, self.ioloop.READ)
        mask = self._inotify_mask
        self.watches.add_watch(self.settings.data_path, mask,
            self._process_events, rec=True, auto_add=True)
        # uid_map[uid] is True while that UID is lent to a process.
        self.uid_map = [False]*self.settings.uid_max

    def _on_inotify(self, fd, events):
        """Get executed when new inotify's events arrive. """
        while self.notifier.check_events():
            self.notifier.read_events()
            self.notifier.process_events()

    def _process_events(self, event):
        """Route a single inotify event to the owning engine's file list."""
        if event.dir:
            return
        user = self.settings.data_path
        path = event.pathname
        parts = []
        # Peel path components until the data root; parts[0] is then the
        # engine's uuid directory.
        while path != user:
            path, part = os.path.split(path)
            parts.insert(0, part)
        if len(parts) < 2:
            return
        uuid = parts[0]
        try:
            process = self.processes[uuid]
        except KeyError:
            return
        # Only track files touched while the engine is evaluating.
        if not (process and process.is_evaluating):
            return
        file = os.path.join(*parts[1:])
        if event.mask & (pyinotify.IN_CREATE | pyinotify.IN_MODIFY):
            self.processes[uuid].add_file(file)
        else:
            self.processes[uuid].rm_file(file)
        logging.info("Processed inotify event for '%s' (file='%s')" % (uuid, file))

    @classmethod
    def instance(cls):
        """Returns the global :class:`ProcessManager` instance. """
        if not hasattr(cls, '_instance'):
            cls._instance = cls()
        return cls._instance

    def build_env(self):
        """Build hardened environment for engine process. """
        if self.settings.environ is True:
            # environ=True means: inherit the service's full environment.
            env = dict(os.environ)
        else:
            env = {}
            for key, value in self.settings.environ.iteritems():
                if value is True:
                    # True means "pass this variable through from os.environ".
                    try:
                        value = os.environ[key]
                    except KeyError:
                        continue
                env[key] = value
        # Prepend the service's PYTHONPATH to whatever was configured.
        PYTHONPATH = self.settings.get_PYTHONPATH()
        try:
            path = env['PYTHONPATH']
        except KeyError:
            try:
                path = os.environ['PYTHONPATH']
            except KeyError:
                path = None
        if path:
            PYTHONPATH += os.pathsep + path
        env['PYTHONPATH'] = PYTHONPATH
        return env

    def alloc_uid_gid(self):
        """Find a spare UID and GID for a new process. """
        uid_min = self.settings.uid_min
        uid_max = self.settings.uid_max
        for uid in xrange(uid_min, uid_max+1):
            if not self.uid_map[uid]:
                self.uid_map[uid] = True
                break
        else:
            # for-else: loop finished without break, i.e. pool exhausted.
            raise UIDSpaceExhausted
        # GID mirrors UID by convention here.
        return uid, uid

    def purge_uid_gid(self, uid, gid):
        """Return UID and GID to the pool for reuse. """
        if uid is not None:
            self.uid_map[uid] = False

    def _run(self, uuid, args, okay, fail):
        """Take engine's configuration and start process for it. """
        # None marks "starting": init() answers 'starting' for this uuid
        # until the pipe handshake completes.
        self.processes[uuid] = None
        # XXX: this is temporary solution for development convenience
        try:
            command = args.command
        except AttributeError:
            from engine.python import boot
            command = ["python", "-c", "%s" % boot]
        env = self.build_env()
        # Create a directory for a process that we will spawn in a moment. If
        # it already exists, make sure it is empty (just remove it and create
        # once again).
        cwd = os.path.join(self.settings.data_path, uuid)
        if os.path.exists(cwd):
            shutil.rmtree(cwd)
        os.mkdir(cwd)
        # As we know the home directory for our engine, lets now hack Python's
        # site.py and tell it where is should look for extra modules (.local)
        # and make some other modules happy (e.g. matplotlib).
        env['HOME'] = env['PYTHONUSERBASE'] = cwd
        # In production environments we have to run every Online Lab user as
        # a different system user on a particular machine where a service is
        # running. For simplicity we run every process as a different user.
        # This gives us about 60 thousandth unique resources to be bind with
        # system processes. This should be way more than enough on any kind
        # of hardware we will use, however, on a modern Linux systems, this
        # limit can be pushed to several million, at least, so if we imagine
        # running a million of processes, it should be possible.
        preexec_fn = None
        if not self.settings.setuid:
            uid, gid = None, None
        else:
            uid, gid = self.alloc_uid_gid()
            try:
                os.chown(cwd, uid, gid)
            except OSError:
                logging.warning("Not enough privileges to set permissions (am I root?)")
                self.purge_uid_gid(uid, gid)
                uid, gid = None, None
            else:
                def preexec_fn():
                    # Runs in the child between fork and exec: drop privileges.
                    os.setgid(gid)
                    os.setuid(uid)
        # Lets start the engine's process. We must close all non-standard file
        # descriptors (via 'close_fds'), because otherwise IOLoop will hang.
        # When the process will be ready to handle requests from the core, it
        # will tell us this by sending a single line of well formatted output
        # (containing port numer and PID) via a pipe.
        proc = subprocess.Popen(command, preexec_fn=preexec_fn,
            cwd=cwd, env=env, close_fds=True, stdout=subprocess.PIPE)
        # File descriptor of the pipe (fd) is our connector the process, so
        # we will monitor this descriptor to see the change in status of the
        # process (ready for processing requests, unexpected death).
        fd = proc.stdout.fileno()
        params = uuid, proc, uid, gid, cwd, okay, fail
        # NOTE(review): _on_run_timeout takes (params_tuple, fd); passing the
        # extra leading 'uuid' here looks like one positional argument too
        # many and would raise TypeError when the timeout fires - confirm.
        timeout = functools.partial(self._on_run_timeout, uuid, params, fd)
        tm = self.ioloop.add_timeout(time.time() + self._timeout, timeout)
        handler = functools.partial(self._on_run_handler, params, tm)
        self.ioloop.add_handler(fd, handler, self.ioloop.READ | self.ioloop.ERROR)

    def cleanup(self, uuid, cwd, uid, gid):
        """Removed all data allocated for a process. """
        self.purge_uid_gid(uid, gid)
        del self.processes[uuid]
        shutil.rmtree(cwd)

    def _on_run_timeout(self, (uuid, proc, uid, gid, cwd, okay, fail), fd):
        """Hard deadline on engine's process startup (start or die). """
        self.ioloop.remove_handler(fd)
        # The process is running but takes too much time to start, e.g.
        # a deadlock occurred or whatever else. We don't know, so what
        # we can do is to remove process entry, kill the process and
        # gracefully fail. If engines are properly configured, then
        # this handler shouldn't be executed at all, unless e.g. we
        # are running out of memory.
        self.cleanup(uuid, cwd, uid, gid)
        proc.kill()
        proc.poll()
        fail('timeout')

    def _on_run_handler(self, (uuid, proc, uid, gid, cwd, okay, fail), tm, fd, events):
        """Startup handler that gets executed on pipe write or error. """
        self.ioloop.remove_timeout(tm)
        self.ioloop.remove_handler(fd)
        if events & self.ioloop.ERROR:
            logging.error("Newly created process died expectingly")
            self.cleanup(uuid, cwd, uid, gid)
            fail('died')
        else:
            # Connection was established, so lets get first output line
            # and check if it contains valid data (socket port numer and
            # process identifier).
            output = proc.stdout.readline()
            result = self._re.match(output)
            if result is not None:
                port = int(result.groupdict()['port'])
                process = EngineProcess(uuid, proc, cwd, port)
                self.processes[uuid] = process
                # Keep watching the pipe only for errors, i.e. engine death.
                handler = functools.partial(self._on_disconnect, uuid, cwd, uid, gid)
                self.ioloop.add_handler(fd, handler, self.ioloop.ERROR)
                logging.info("Started new child process (pid=%s)" % process.pid)
                okay('started')
            else:
                # We got invalid data from the engine process, so lets
                # clean up (remove process entry marker and kill the
                # process) and gracefully fail.
                logging.error("Newly created process didn't respond properly")
                self.cleanup(uuid, cwd, uid, gid)
                proc.kill()
                proc.poll()
                fail('invalid-output')

    def _on_disconnect(self, uuid, cwd, uid, gid, fd, events):
        """Handler that gets executed when a process dies. """
        self.ioloop.remove_handler(fd)
        try:
            process = self.processes[uuid]
        except KeyError:
            # We don't want to pass 'fd' everywhere so we don't
            # remove this handler on process kill. We remove it
            # here anyway.
            return
        logging.warning("Child process disconnected (pid=%s)" % process.pid)
        # The pipe that connects this service to some engine's stdout was
        # destroyed. Most likely engine's process was killed, but for the
        # sake of completeness (to avoid dead process and memory leaks)
        # lets make sure the process is really dead.
        if process.is_running:
            process.proc.kill() # XXX: we should use public API for this
        # 'False' value tells us that this process was running but was killed
        # unexpectedly. The next engine method invocation will take advantage
        # of this and tell the caller that the process died (we can't do it
        # here because we can't initiate communication with the core).
        self.cleanup(uuid, cwd, uid, gid)
        self.processes[uuid] = False

    def init(self, uuid, args, okay, fail):
        """Initialize new engine (start a process). """
        if uuid in self.processes:
            if self.processes[uuid] is None:
                fail('starting')
            else:
                fail('running')
        else:
            self._run(uuid, args, okay, fail)

    def _get_process(self, uuid, fail):
        """Resolve uuid to a live EngineProcess, or call fail and return None."""
        if uuid not in self.processes:
            fail('no-such-process')
        else:
            process = self.processes[uuid]
            if process is None:
                fail('starting')
            elif process is False:
                # Engine died earlier (see _on_disconnect); report it once.
                del self.processes[uuid]
                fail('died')
            else:
                return process

    def kill(self, uuid, args, okay, fail):
        """Stop an existing engine (kill a process). """
        process = self._get_process(uuid, fail)
        if process is not None:
            process.kill(args, okay, fail)
            # XXX: call cleanup() here
            del self.processes[uuid]

    def stat(self, uuid, args, okay, fail):
        """Gather data about a process. """
        process = self._get_process(uuid, fail)
        if process is not None:
            process.stat(args, okay, fail)

    def complete(self, uuid, args, okay, fail):
        """Complete a piece of source code. """
        process = self._get_process(uuid, fail)
        if process is not None:
            process.complete(args, okay, fail)

    def evaluate(self, uuid, args, okay, fail):
        """Evaluate a piece of source code. """
        process = self._get_process(uuid, fail)
        if process is not None:
            process.evaluate(args, okay, fail)

    def interrupt(self, uuid, args, okay, fail):
        """Stop evaluation of specified requests. """
        process = self._get_process(uuid, fail)
        if process is not None:
            process.interrupt(args, okay, fail)

    def killall(self):
        """Forcibly kill all processes that belong to this manager. """
        for uuid, process in self.processes.iteritems():
            if process is not None:
                logging.warning("Forced kill of %s (pid=%s)" % (uuid, process.pid))
                process.proc.kill()
                process.proc.poll()
class EngineProcess(object):
"""Bridge between a logical engine and a physical process. """
def __init__(self, uuid, proc, path, port):
"""Initialize an engine based on existing system process. """
self.uuid = uuid
self.proc = proc
self.port = port
self.path = path
self.util = psutil.Process(proc.pid)
self.evaluating = False
self.queue = collections.deque()
self.url = "http://localhost:%s" % port
self.files = []
@property
def pid(self):
return self.proc.pid
@property
def is_running(self):
return self.proc.poll() is None
@property
def is_evaluating(self):
return self.evaluating
def add_file(self, file):
"""Register a new or modified file. """
self.rm_file(file)
self.files.append(file)
def rm_file(self, file):
"""Remove file from registered files. """
try:
i = self.files.index(file)
except ValueError:
pass
else:
del self.files[i]
def kill(self, args, okay, fail):
"""Terminate this engine's process. """
# XXX: clear the queue?
self.proc.terminate()
self.proc.poll()
okay('killed')
def stat(self, args, okay, fail):
"""Gather data about this engine's process. """
cpu_percent = self.util.get_cpu_percent()
cpu_times = self.util.get_cpu_times()
memory_percent = self.util.get_memory_percent()
memory_info = self.util.get_memory_info()
user, system = cpu_times
rss, vms = memory_info
okay({
'cpu': { 'percent': cpu_percent, 'user': user, 'system': system },
'memory': { 'percent': memory_percent, 'rss': rss, 'vms': vms },
})
def complete(self, args, okay, fail):
"""Complete code in this engine's process. """
if self.evaluating:
fail('busy')
else:
self._schedule(args, okay, fail)
self._evaluate(method='complete')
def evaluate(self, args, okay, fail):
"""Evaluate code in this engine's process. """
self._schedule(args, okay, fail)
self._evaluate()
def interrupt(self, args, okay, fail):
"""Stop evaluation of a particular request or all requests. """
if not self.evaluating:
okay('not-evaluating')
return
if args.get('all', False):
self.queue.clear()
else:
try:
cellid = args['cellid']
except KeyError:
pass
else:
_args, _, _ = self.evaluating
if cellid != _args.cellid:
for i, (_args, _okay, _) in enumerate(self.queue):
if cellid == _args.cellid:
del self.queue[i]
okay('interrupted')
result = {
'source': _args.source,
'index': None,
'time': 0,
'out': u'',
'err': u'',
'files': [],
'plots': [],
'traceback': False,
'interrupted': True,
}
_okay(result)
return
# Now the most interesting part. To physically interrupt
# the interpreter associated with this engine, we send
# SIGINT to the engine's process. The process will catch
# this signal via KeyboardInterrupt exception and return
# partial output and information that the computation was
# interrupted. If there are any requests pending, then
# evaluation handler (_on_evaluate_handler) will schedule
# next request for evaluation. This way we have only one
# one path of data flow in all cases.
self.proc.send_signal(signal.SIGINT)
okay('interrupted')
def _schedule(self, args, okay, fail):
"""Push evaluation request at the end of the queue. """
self.queue.append((args, okay, fail))
def _evaluate(self, method='evaluate'):
"""Evaluate next pending request if engine not busy. """
if not self.evaluating and self.queue:
args, okay, fail = self.evaluating = self.queue.pop()
body = utilities.xml_encode(args.source, method)
http_client = tornado.httpclient.AsyncHTTPClient()
http_request = tornado.httpclient.HTTPRequest(self.url,
method='POST', body=body, request_timeout=0)
http_client.fetch(http_request, self._on_evaluate_handler)
    def _on_evaluate_timeout(self):
        """Timeout hook for evaluation requests; intentionally unimplemented
        because requests are issued with request_timeout=0 (no timeout)."""
        raise NotImplementedError
    def _on_evaluate_handler(self, response):
        """Handle the engine's HTTP response when evaluation finishes.

        The engine is freed and the next queued request is started
        *before* the current result is processed, keeping a single
        data-flow path whether evaluation succeeded, faulted or was
        interrupted.
        """
        _, okay, fail = self.evaluating
        self.evaluating = False
        self._evaluate()
        if response.code == 200:
            try:
                result = utilities.xml_decode(response.body)
            except xmlrpclib.Fault, exc:
                # Engine-side fault carried in the XML-RPC payload.
                fail('fault: %s' % exc)
            else:
                self._process_response(result, okay)
        else:
            fail('response-code: %s' % response.code)
    def _process_response(self, result, okay):
        """Attach this engine's file list (self.files) to the evaluation
        result and deliver it through the success callback."""
        result['files'] = self.files
        okay(result)
|
[
"mattpap@gmail.com"
] |
mattpap@gmail.com
|
c2d2cd84addaced551db5ed53b4908ae371e3b5e
|
5aa93f2b25f2dc804da89b0ce3cd4a73ddb866df
|
/sentinal_linear_searach.py
|
5b929cc04fe250dc2be142e5640b874af97132d6
|
[] |
no_license
|
usmankhan-bit/APS-2020
|
9f64ad458ef0f8344bcd0dc35e4b9943c08569b3
|
ecd7de64977506534abfa3f6ba6a943b2e33f91b
|
refs/heads/master
| 2020-12-21T02:21:25.210612
| 2020-10-24T05:37:43
| 2020-10-24T05:37:43
| 236,277,305
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 236
|
py
|
# Sentinel linear search: copy the target into the last slot so the scan
# loop needs no explicit bounds check, then restore the original element.
arr = [3, 5, 6, 4, 7, 35, 2]
searchelement = 2
n = len(arr)

last_value = arr[n - 1]       # remember the real last element
arr[n - 1] = searchelement    # install the sentinel

i = 0
while True:
    if arr[i] == searchelement:
        break
    i += 1

arr[n - 1] = last_value       # undo the sentinel write

# Hit before the last slot, or the genuine last element matches -> found.
if i < (n - 1) or arr[n - 1] == searchelement:
    print("Found at ", i)
else:
    print("Not Found")
|
[
"usman@pop-os.localdomain"
] |
usman@pop-os.localdomain
|
cb9932dd766a6df1de1a8b5c2d5dc46105c50b88
|
d308fffe3db53b034132fb1ea6242a509f966630
|
/pirates/leveleditor/worldData/tortuga_tunnel_jungle_1.py
|
2c46fa779686db2450b61ab626a791bf210c48c4
|
[
"BSD-3-Clause"
] |
permissive
|
rasheelprogrammer/pirates
|
83caac204965b77a1b9c630426588faa01a13391
|
6ca1e7d571c670b0d976f65e608235707b5737e3
|
refs/heads/master
| 2020-03-18T20:03:28.687123
| 2018-05-28T18:05:25
| 2018-05-28T18:05:25
| 135,193,362
| 3
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,163
|
py
|
# uncompyle6 version 3.2.0
# Python bytecode 2.4 (62061)
# Decompiled from: Python 2.7.14 (v2.7.14:84471935ed, Sep 16 2017, 20:19:30) [MSC v.1500 32 bit (Intel)]
# Embedded file name: pirates.leveleditor.worldData.tortuga_tunnel_jungle_1
from pandac.PandaModules import Point3, VBase3
objectStruct = {'Objects': {'1157132484.23sdnaik': {'Type': 'Connector Tunnel', 'Name': 'port_royal_tunnel_1', 'AdditionalData': ['tunnel_jungle'], 'File': '', 'Objects': {'1158969171.05sdnaik': {'Type': 'Locator Node', 'Name': 'portal_connector_1', 'Hpr': VBase3(-90.0, 0.0, 0.0), 'Pos': Point3(0.0, 0.0, 0.0), 'Scale': VBase3(1.0, 1.0, 1.0)}, '1158969171.08sdnaik': {'Type': 'Locator Node', 'Name': 'portal_connector_2', 'Hpr': VBase3(90.0, 0.0, 0.0), 'Pos': Point3(-94.385, 149.189, 0.0), 'Scale': VBase3(1.0, 1.0, 1.0)}}, 'Visual': {'Model': 'models/tunnels/tunnel_jungle'}}}, 'Node Links': [], 'Layers': {}, 'ObjectIds': {'1157132484.23sdnaik': '["Objects"]["1157132484.23sdnaik"]', '1158969171.05sdnaik': '["Objects"]["1157132484.23sdnaik"]["Objects"]["1158969171.05sdnaik"]', '1158969171.08sdnaik': '["Objects"]["1157132484.23sdnaik"]["Objects"]["1158969171.08sdnaik"]'}}
|
[
"33942724+itsyaboyrocket@users.noreply.github.com"
] |
33942724+itsyaboyrocket@users.noreply.github.com
|
9d58655de36afeae62b2789c6ebe451e82079978
|
5d156ae48b0634d5882952ed901ff0cd50b2c2d6
|
/build/gazebo_ros_demos/rrbot_description/catkin_generated/pkg.installspace.context.pc.py
|
4478c45f046262134c701da50bb17629b707cbfc
|
[] |
no_license
|
YiKangJ/catkin_ws
|
c5d710381de791b47a122b9c99ca25f4cfe63f9b
|
4d0bd825b295ffaf66225f94519cd0d7a67ec9ec
|
refs/heads/master
| 2020-03-21T01:57:04.807234
| 2018-11-08T08:34:39
| 2018-11-08T08:34:39
| 137,971,197
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 378
|
py
|
# generated from catkin/cmake/template/pkg.context.pc.in
# NOTE(review): auto-generated by catkin at configure time; change the
# template / CMake variables, not this file.
CATKIN_PACKAGE_PREFIX = ""
# The `"".split(';') if "" != "" else []` pattern yields [] whenever the
# CMake variable substituted at generation time was empty.
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "rrbot_description"
PROJECT_SPACE_DIR = "/home/jyk/catkin_ws/install"
PROJECT_VERSION = "0.0.0"
|
[
"jinyikangjyk@163.com"
] |
jinyikangjyk@163.com
|
86fb2849a77ef1adc3ae8260ed4da1ab9d596b63
|
9aad82d1fb2fc8a9c982332c935ba36544d74ee8
|
/Programs/DynamicProg__01KnapSack_Memoization.py
|
f38a8caf4456162d78864df22e95229b9daaa72a
|
[] |
no_license
|
anjalitwt009/Python
|
4773325fb37a21c839272a648fe5212573dc1adf
|
6c6f1fb92bcd34ca6f38f1766cae22db46b3f159
|
refs/heads/master
| 2023-02-28T05:26:15.831203
| 2021-02-08T05:36:03
| 2021-02-08T05:36:03
| 278,690,340
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,277
|
py
|
# -*- coding: utf-8 -*-
"""0/1 Knapsack solved top-down with memoization.

Given item weights wt[0..n-1], values val[0..n-1] and capacity W, return
the maximum total value of a subset of items whose weights sum to <= W.
Each item is either taken whole or skipped (0/1 property).

Example:
    val = [60, 100, 120]; wt = [10, 20, 30]; W = 50  ->  220
"""


def knap_01(wt, val, W, n, memo=None):
    """Return the best achievable value using the first n items.

    Parameters:
        wt   -- item weights
        val  -- item values (parallel to wt)
        W    -- remaining knapsack capacity
        n    -- number of items still available (prefix length)
        memo -- (n+1) x (W+1) memo table, created lazily on the first
                call so existing callers can keep the original
                knap_01(wt, val, W, n) signature.

    Fix: the original read/wrote a module-level global table `t`, so a
    second call with a different n or W reused stale entries (wrong
    answers) or indexed out of range (crash). The table is now owned by
    the call itself.
    """
    if memo is None:
        memo = [[-1] * (W + 1) for _ in range(n + 1)]
    if n == 0 or W == 0:
        return 0
    if memo[n][W] != -1:
        return memo[n][W]  # this (items, capacity) state already solved
    if wt[n - 1] <= W:
        # Best of: take item n-1 (pay its weight) vs. skip it.
        memo[n][W] = max(
            val[n - 1] + knap_01(wt, val, W - wt[n - 1], n - 1, memo),
            knap_01(wt, val, W, n - 1, memo),
        )
    else:
        # Item n-1 cannot fit in the remaining capacity; skip it.
        memo[n][W] = knap_01(wt, val, W, n - 1, memo)
    return memo[n][W]


val = [60, 100, 120]
wt = [10, 20, 30]
W = 50
n = len(val)
# Kept for backward compatibility with code that inspected the old global
# memo table; the function no longer reads or writes it.
t = [[-1 for i in range(W + 1)] for j in range(n + 1)]
ans = knap_01(wt, val, W, n)
ans
|
[
"noreply@github.com"
] |
anjalitwt009.noreply@github.com
|
bea60c370f5faa10bad2892986b423e548034e71
|
dd9befc554ff37c72a5462ab9240695729772623
|
/env/bin/wheel
|
1e0227ab4cc50ee966ef595517baa50e496400c8
|
[] |
no_license
|
rogermuffler/flask-hello-world
|
5c7093e14c10ec9fb7c985ae3296dc05f694e09e
|
b0421e59e76cd937b530b0b732e10c34e9a8ed21
|
refs/heads/master
| 2020-06-16T12:57:52.379070
| 2018-10-06T14:46:05
| 2018-10-06T14:46:05
| 75,099,705
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 270
|
#!/Users/rogermuffler/Documents/RealPython/flask-hello-world/env/bin/python3
# -*- coding: utf-8 -*-
# Console-script shim generated by pip/setuptools for the "wheel" package;
# it simply delegates to wheel.cli.main().
import re
import sys
from wheel.cli import main
if __name__ == '__main__':
    # Strip the "-script.pyw"/".exe" suffix that Windows launchers append
    # to argv[0] so the CLI reports a clean program name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
|
[
"roger@stammtischphilosoph.ch"
] |
roger@stammtischphilosoph.ch
|
|
45dde0ce52f8db4bbe493bf33a0ef9c5b0227384
|
11a93cbb194b7270f8b97cbda8c00d8c199eaef0
|
/backend/piazza_api/rpc.py
|
a7e4a73ca15a74cf56e22af9e770e0fb0a833694
|
[] |
no_license
|
gaosui/Biazza
|
3616643253fe68d65f2adac6c9468b06e800e4f6
|
465201dd1fb0ad1d86b08d9366468bb854fa3d72
|
refs/heads/master
| 2020-04-01T05:19:42.982336
| 2020-03-26T22:33:15
| 2020-03-26T22:33:15
| 152,898,802
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 15,561
|
py
|
import getpass
import json
import requests
import six.moves
from piazza_api.exceptions import AuthenticationError, NotAuthenticatedError, \
RequestError
from piazza_api.nonce import nonce as _piazza_nonce
class PiazzaRPC(object):
"""Unofficial Client for Piazza's Internal API
Example:
>>> p = PiazzaRPC("hl5qm84dl4t3x2")
>>> p.user_login()
Email: ...
Password: ...
>>> p.content_get(181)
...
:type network_id: str|None
:param network_id: This is the ID of the network (or class) from which
to query posts
"""
def __init__(self, network_id=None):
self._nid = network_id
self.base_api_urls = {
"logic": "https://piazza.com/logic/api",
"main": "https://piazza.com/main/api",
}
self.session = requests.Session()
def user_login(self, email=None, password=None):
"""Login with email, password and get back a session cookie
:type email: str
:param email: The email used for authentication
:type password: str
:param password: The password used for authentication
"""
email = six.moves.input("Email: ") if email is None else email
password = getpass.getpass() if password is None else password
login_data = {
"method": "user.login",
"params": {"email": email,
"pass": password}
}
# If the user/password match, the server respond will contain a
# session cookie that you can use to authenticate future requests.
r = self.session.post(
self.base_api_urls["logic"],
data=json.dumps(login_data),
)
if r.json()["result"] not in ["OK"]:
raise AuthenticationError("Could not authenticate.\n{}"
.format(r.json()))
def demo_login(self, auth=None, url=None):
"""Authenticate with a "Share Your Class" URL using a demo user.
You may provide either the entire ``url`` or simply the ``auth``
parameter.
:param url: Example - "https://piazza.com/demo_login?nid=hbj11a1gcvl1s6&auth=06c111b"
:param auth: Example - "06c111b"
"""
assert all([
auth or url, # Must provide at least one
not (auth and url) # Cannot provide more than one
])
if url is None:
url = "https://piazza.com/demo_login"
params = dict(nid=self._nid, auth=auth)
res = self.session.get(url, params=params)
else:
res = self.session.get(url)
def content_get(self, cid, nid=None):
"""Get data from post `cid` in network `nid`
:type nid: str
:param nid: This is the ID of the network (or class) from which
to query posts. This is optional and only to override the existing
`network_id` entered when created the class
:type cid: str|int
:param cid: This is the post ID which we grab
:returns: Python object containing returned data
"""
r = self.request(
method="content.get",
data={"cid": cid},
nid=nid
)
return self._handle_error(r, "Could not get post {}.".format(cid))
def content_create(self, params):
"""Create a post or followup.
:type params: dict
:param params: A dict of options to pass to the endpoint. Depends on
the specific type of content being created.
:returns: Python object containing returned data
"""
r = self.request(
method="content.create",
data=params
)
return self._handle_error(
r,
"Could not create object {}.".format(repr(params))
)
def content_instructor_answer(self, params):
"""Answer a post as an instructor.
:type params: dict
:param params: A dict of options to pass to the endpoint. Depends on
the specific type of content being created.
:returns: Python object containing returned data
"""
r = self.request(
method="content.answer",
data=params
)
return self._handle_error(r, "Could not create object {}.".format(
repr(params)))
def content_mark_duplicate(self, params):
"""Mark a post as duplicate to another.
:type params: dict
:param params: the parameters to be passed in
"""
r = self.request(
method="content.duplicate",
data=params
)
return self._handle_error(r, "Could not create object {}.".format(
repr(params)))
def add_students(self, student_emails, nid=None):
"""Enroll students in a network `nid`.
Piazza will email these students with instructions to
activate their account.
:type student_emails: list of str
:param student_emails: A listing of email addresses to enroll
in the network (or class). This can be a list of length one.
:type nid: str
:param nid: This is the ID of the network to add students
to. This is optional and only to override the existing
`network_id` entered when created the class
:returns: Python object containing returned data, a list
of dicts of user data of all of the users in the network
including the ones that were just added.
"""
r = self.request(
method="network.update",
data={
"from": "ClassSettingsPage",
"add_students": student_emails
},
nid=nid,
nid_key="id"
)
return self._handle_error(r, "Could not add users.")
def get_all_users(self, nid=None):
"""Get a listing of data for each user in a network `nid`
:type nid: str
:param nid: This is the ID of the network to get users
from. This is optional and only to override the existing
`network_id` entered when created the class
:returns: Python object containing returned data, a list
of dicts containing user data.
"""
r = self.request(
method="network.get_all_users",
nid=nid
)
return self._handle_error(r, "Could not get users.")
def get_users(self, user_ids, nid=None):
"""Get a listing of data for specific users `user_ids` in
a network `nid`
:type user_ids: list of str
:param user_ids: a list of user ids. These are the same
ids that are returned by get_all_users.
:type nid: str
:param nid: This is the ID of the network to get students
from. This is optional and only to override the existing
`network_id` entered when created the class
:returns: Python object containing returned data, a list
of dicts containing user data.
"""
r = self.request(
method="network.get_users",
data={"ids": user_ids},
nid=nid
)
return self._handle_error(r, "Could not get users.")
def remove_users(self, user_ids, nid=None):
"""Remove users from a network `nid`
:type user_ids: list of str
:param user_ids: a list of user ids. These are the same
ids that are returned by get_all_users.
:type nid: str
:param nid: This is the ID of the network to remove students
from. This is optional and only to override the existing
`network_id` entered when created the class
:returns: Python object containing returned data, a list
of dicts of user data of all of the users remaining in
the network after users are removed.
"""
r = self.request(
method="network.update",
data={"remove_users": user_ids},
nid=nid,
nid_key="id"
)
return self._handle_error(r, "Could not remove users.")
def get_my_feed(self, limit=150, offset=20, sort="updated", nid=None):
"""Get my feed
:type limit: int
:param limit: Number of posts from feed to get, starting from ``offset``
:type offset: int
:param offset: Offset starting from bottom of feed
:type sort: str
:param sort: How to sort feed that will be retrieved; only current
known value is "updated"
:type nid: str
:param nid: This is the ID of the network to get the feed
from. This is optional and only to override the existing
`network_id` entered when created the class
"""
r = self.request(
method="network.get_my_feed",
nid=nid,
data=dict(
limit=limit,
offset=offset,
sort=sort
)
)
return self._handle_error(r, "Could not retrieve your feed.")
def filter_feed(self, updated=False, following=False, folder=False,
filter_folder="", sort="updated", nid=None):
"""Get filtered feed
Only one filter type (updated, following, folder) is possible.
:type nid: str
:param nid: This is the ID of the network to get the feed
from. This is optional and only to override the existing
`network_id` entered when created the class
:type sort: str
:param sort: How to sort feed that will be retrieved; only current
known value is "updated"
:type updated: bool
:param updated: Set to filter through only posts which have been updated
since you last read them
:type following: bool
:param following: Set to filter through only posts which you are
following
:type folder: bool
:param folder: Set to filter through only posts which are in the
provided ``filter_folder``
:type filter_folder: str
:param filter_folder: Name of folder to show posts from; required
only if ``folder`` is set
"""
assert sum([updated, following, folder]) == 1
if folder:
assert filter_folder
if updated:
filter_type = dict(updated=1)
elif following:
filter_type = dict(following=1)
else:
filter_type = dict(folder=1, filter_folder=filter_folder)
r = self.request(
nid=nid,
method="network.filter_feed",
data=dict(
sort=sort,
**filter_type
)
)
return self._handle_error(r, "Could not retrieve filtered feed.")
def search(self, query, nid=None):
"""Search for posts with ``query``
:type nid: str
:param nid: This is the ID of the network to get the feed
from. This is optional and only to override the existing
`network_id` entered when created the class
:type query: str
:param query: The search query; should just be keywords for posts
that you are looking for
"""
r = self.request(
method="network.search",
nid=nid,
data=dict(query=query)
)
return self._handle_error(r, "Search with query '{}' failed."
.format(query))
def get_stats(self, nid=None):
"""Get statistics for class
:type nid: str
:param nid: This is the ID of the network to get stats
from. This is optional and only to override the existing
`network_id` entered when created the class
"""
r = self.request(
api_type="main",
method="network.get_stats",
nid=nid,
)
return self._handle_error(r, "Could not retrieve stats for class.")
def get_user_profile(self):
"""Get profile of the current user"""
r = self.request(method="user_profile.get_profile")
return self._handle_error(r, "Could not get user profile.")
def get_user_status(self):
"""
Get global status of the current user, which contains information on
the relationship of the user with respect to all their enrolled classes.
"""
r = self.request(method="user.status")
return self._handle_error(r, "Could not get user status.")
def request(self, method, data=None, nid=None, nid_key='nid',
api_type="logic", return_response=False):
"""Get data from arbitrary Piazza API endpoint `method` in network `nid`
:type method: str
:param method: An internal Piazza API method name like `content.get`
or `network.get_users`
:type data: dict
:param data: Key-value data to pass to Piazza in the request
:type nid: str
:param nid: This is the ID of the network to which the request
should be made. This is optional and only to override the
existing `network_id` entered when creating the class
:type nid_key: str
:param nid_key: Name expected by Piazza for `nid` when making request.
(Usually and by default "nid", but sometimes "id" is expected)
:returns: Python object containing returned data
:type return_response: bool
:param return_response: If set, returns whole :class:`requests.Response`
object rather than just the response body
"""
self._check_authenticated()
nid = nid if nid else self._nid
if data is None:
data = {}
headers = {}
if "session_id" in self.session.cookies:
headers["CSRF-Token"] = self.session.cookies["session_id"]
# Adding a nonce to the request
endpoint = self.base_api_urls[api_type]
if api_type == "logic":
endpoint += "?method={}&aid={}".format(
method,
_piazza_nonce()
)
response = self.session.post(
endpoint,
data=json.dumps({
"method": method,
"params": dict({nid_key: nid}, **data)
}),
headers=headers
)
return response if return_response else response.json()
###################
# Private Methods #
###################
def _check_authenticated(self):
"""Check that we're logged in and raise an exception if not.
:raises: NotAuthenticatedError
"""
if not self.session.cookies:
raise NotAuthenticatedError("You must authenticate before "
"making any other requests.")
def _handle_error(self, result, err_msg):
"""Check result for error
:type result: dict
:param result: response body
:type err_msg: str
:param err_msg: The message given to the :class:`RequestError` instance
raised
:returns: Actual result from result
:raises RequestError: If result has error
"""
if result.get(u'error'):
raise RequestError("{}\nResponse: {}".format(
err_msg,
json.dumps(result, indent=2)
))
else:
return result.get(u'result')
|
[
"gaosui2051@gmail.com"
] |
gaosui2051@gmail.com
|
9fcb817c884bf3d04263b42b730fb3770451107e
|
bab9426fb4882d033bbd8eb5bd783de77a5f540d
|
/train.py
|
dc746d3957d86d11dfcc9a3c77af956ea6a32df7
|
[] |
no_license
|
Meherdeep/faceID-verification
|
2ecac783c130b15a4ee530053a485bc48fd6621b
|
7756532bc74c6c3735a4472cf1f69bc1e28ff750
|
refs/heads/master
| 2020-08-17T02:55:26.115903
| 2019-10-16T16:37:13
| 2019-10-16T16:37:13
| 215,595,571
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 711
|
py
|
"""TAKES THE DATABASE OF IMAGES AS INPUT AND CREATES XML FILE FOR THAT USER"""
import cv2
import numpy as np
import os
from os.path import join
data_path = './faces/'
images_ = [_img for _img in os.listdir(data_path) if '.jpg' in join(data_path, _img)]
training_data, labels = [], []
for index, file_ in enumerate(images_):
image_path = data_path + file_
image_ = cv2.imread(image_path, cv2.IMREAD_GRAYSCALE)
training_data.append(np.asarray(image_, dtype=np.uint8))
labels.append(index)
labels = np.asarray(labels, dtype=np.int32)
model = cv2.face.LBPHFaceRecognizer_create()
model.train(np.asarray(training_data), np.asarray(labels))
model.save('model.xml')
print("XML File created")
|
[
"meherdeept@gmail.com"
] |
meherdeept@gmail.com
|
b2014fd44c6dfc441af3b54f51bd4d493d78fc87
|
8f7a80b471f70bfab2221603cdc41d29db607f61
|
/python_mods/local_publisher_for_test.py
|
798364cf283a73c6a7e7fca27d02cb3a6bcb12c2
|
[] |
no_license
|
108356037/mqtt-model-test
|
bc20065c6d3b677f49a7f6d18c83fd6f6614e724
|
a8aa32a338509d4813a379fd32fbcaeb5d1bfa92
|
refs/heads/master
| 2023-01-19T04:49:20.877390
| 2020-11-26T08:34:46
| 2020-11-26T08:34:46
| 308,276,650
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 656
|
py
|
import paho.mqtt.client as mqtt
import random
import json
import datetime
import time
# Timestamp display format (month/day hour:minute:second)
ISOTIMEFORMAT = '%m/%d %H:%M:%S'
# Connection setup
# Create the local MQTT client
client = mqtt.Client()
# Set login username/password (disabled for this local test broker)
# client.username_pw_set("try","xxxx")
# Connection info: host, port, keepalive (seconds)
client.connect("localhost", 1883, 60)
while True:
    # Fake a temperature reading for testing.
    t0 = random.randint(0,30)
    t = datetime.datetime.now().strftime(ISOTIMEFORMAT)
    payload = {'Temperature' : t0 , 'Time' : t}
    print (json.dumps(payload))
    # Topic and JSON payload to publish, every 5 seconds.
    client.publish("mota/report/test", json.dumps(payload))
    time.sleep(5)
|
[
"tsai_wei_che@caiweizhede-MacBook-Pro.local"
] |
tsai_wei_che@caiweizhede-MacBook-Pro.local
|
525890181c383359389b21654d83749b152dea6e
|
561fad4f925839f74044aad761942436d0c22f0b
|
/rango/models.py
|
9c07db9a11eb3b030ff54fc6f0a2f092540d61ba
|
[] |
no_license
|
ashokjain001/TangowithDjango
|
3cfe9aa4f2ff872d29ba221a4b7e8a0e804703e1
|
b45ff04d9f1b240ee74b18731b594fedcf387298
|
refs/heads/master
| 2021-01-01T19:28:08.034108
| 2017-07-28T03:18:24
| 2017-07-28T03:18:24
| 98,597,426
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,010
|
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.db import models
from django.forms import ModelForm
from django.contrib.auth.models import User
# Create your models here.
from django.db import models
from django.contrib.auth.models import User
class Category(models.Model):
    """A topic grouping for pages (Tango with Django tutorial model)."""
    name = models.CharField(max_length=128,unique = True)  # natural key; unique across categories
    views = models.IntegerField(default=0)  # view counter
    likes = models.IntegerField(default=0)  # like counter
    def __unicode__(self):
        # Python-2 display hook (file targets Django 1.x / Python 2).
        # NOTE(review): define __str__ too if this ever runs on Python 3.
        return self.name
class Page(models.Model):
    """A web link belonging to a Category."""
    category = models.ForeignKey(Category)  # owning category (pre-Django-2 FK: no on_delete)
    title = models.CharField(max_length=128)
    url = models.URLField()
    views = models.IntegerField(default=0)  # click/view counter
    def __unicode__(self):
        # Python-2 display hook; NOTE(review): add __str__ for Python 3.
        return self.title
class UserProfile(models.Model):
    """Extra per-user data layered on django.contrib.auth.models.User."""
    user = models.OneToOneField(User)  # one profile per auth user (pre-Django-2: no on_delete)
    website = models.URLField(blank=True)  # optional
    # NOTE(review): upload_to contains a space ('Profile Images') --
    # confirm that media sub-directory name is intentional.
    picture = models.ImageField(upload_to='Profile Images')
    def __unicode__(self):
        # Python-2 display hook; NOTE(review): add __str__ for Python 3.
        return self.user.username
|
[
"ashokjain001@gmail.com"
] |
ashokjain001@gmail.com
|
5fa7fbd20a59f5fa948bcb57d0d762df7c76d0d1
|
849a174efea976d4daed419b85668c2ba05fd2b9
|
/python/decorator_2.py
|
b83cae7c1c662a79254f0ac01b3badb114a86dde
|
[] |
no_license
|
samyuktahegde/Python
|
61e6fedbdd2a94b29e4475621afa6d5e98bf49b8
|
b02fa6e908661a918e0024f508df0192d5553411
|
refs/heads/master
| 2018-09-18T20:27:55.980689
| 2018-08-09T05:49:33
| 2018-08-09T05:49:33
| 116,491,078
| 0
| 0
| null | 2018-02-05T05:33:53
| 2018-01-06T14:52:16
| null |
UTF-8
|
Python
| false
| false
| 1,064
|
py
|
from functools import wraps
def my_logger(original_function):
    """Decorator: record every call's args/kwargs in <function name>.log."""
    import logging
    # One log file per decorated function, configured at decoration time.
    log_name = '{}.log'.format(original_function.__name__)
    logging.basicConfig(filename=log_name, level=logging.INFO)

    @wraps(original_function)
    def logged(*args, **kwargs):
        logging.info('Ran with args:{}, and kwargs:{}'.format(args, kwargs))
        return original_function(*args, **kwargs)

    return logged
def my_timer(original_function):
    """Decorator: print the wall-clock runtime of every call."""
    import time

    @wraps(original_function)
    def timed(*args, **kwargs):
        started = time.time()
        result = original_function(*args, **kwargs)
        elapsed = time.time() - started
        print('{} ran in : {} sec'.format(original_function.__name__, elapsed))
        return result

    return timed
import time
@my_logger
@my_timer
def display_info(name, age):
    """Demo target: sleeps 1s then prints its arguments.

    Stacking order means my_logger(my_timer(display_info)): timing runs
    on the inside, logging on the outside.
    """
    time.sleep(1)
    print('Display info ran with arguments ({}, {})'.format(name, age))
# ==> display_info = my_logger(my_timer(display_info))
# display_info = my_timer(display_info)
# print(display_info.__name__)
display_info('Hank', 28)
|
[
"noreply@github.com"
] |
samyuktahegde.noreply@github.com
|
c3b8f44043e2a35a9ba4f8776fe89b18dd6a4bc2
|
293d9cd472d76fa051e996a8b7cdf1f4fc2a90cc
|
/ex0.py
|
409288aec02bf472c69c5b2e8949863e676831cd
|
[] |
no_license
|
lizzyagibson/LPTHW
|
1c91cb6e4030400efa51e4e34a82c5d7ac7f56d9
|
012414d8e1fc0e0cdb238a237df31ed7c382ea1a
|
refs/heads/main
| 2023-02-22T21:00:58.657286
| 2021-01-19T14:56:53
| 2021-01-19T14:56:53
| 331,014,729
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 38
|
py
|
Learn Python the Hard Way
Exercise 0
|
[
"eag2186@cumc.columbia.edu"
] |
eag2186@cumc.columbia.edu
|
f50d93d6fa9e8a268df5323ef915dae73d6b09b8
|
296825de394dbb91f6e2181d9dbd5bcd57c03ced
|
/large_AE.py
|
e65bce8b324930a9e3ae319dce993aad17c1762f
|
[] |
no_license
|
satyam4u/IoT_Autoencoder
|
8d7c7436d5cee66dd4f7cbaf08f6844e66bef25f
|
7424efcbcacaf173cd565955839c316307ce65e9
|
refs/heads/master
| 2020-03-26T15:56:59.854001
| 2018-08-18T12:35:26
| 2018-08-18T12:35:26
| 145,073,350
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,133
|
py
|
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import tensorflow as tf
from sklearn.model_selection import train_test_split
from keras.models import Model
from keras.layers import Input, Dense
import time
"""
print("Reading IoT dataset")
X1 = pd.read_csv("bengin_traffic.csv",header=0).as_matrix()
X1_length = X1.shape[0]
m1 = pd.read_csv("syn.csv",header=0).as_matrix()
m1_length = m1.shape[0]
Xm1 = np.concatenate((X1, m1), axis=0)
X_train, X_test = Xm1[:40000, ...], Xm1[40000:, ...]
"""
df1 = pd.read_csv("bengin_traffic.csv")
df2 = pd.read_csv("syn.csv")
frames = [df1, df2]
df = pd.concat(frames)
#Normalize
normalized_df=(df-df.min())/(df.max()-df.min()) #But it is not an online way
#Also max min shouldnt change after training phase.Change above code and see result.
#
X_train, X_test = train_test_split(normalized_df,test_size=0.7 ,shuffle = False, stratify = None)
#X_train, X_test = train_test_split(X1, test_size=0.4, random_state=0)
# In[17]:
"""
input_layer = Input(shape=(input_dim, ))
encoder = Dense(encoding_dim, activation="tanh", )(input_layer)
encoder = Dense(int(encoding_dim / 2), activation="sigmoid")(encoder)
decoder = Dense(int(encoding_dim / 2), activation='tanh')(encoder)
decoder = Dense(input_dim, activation='sigmoid')(decoder)
autoencoder = Model(inputs=input_layer, outputs=decoder)
"""
input_layer = Input(shape=(115,))
encoded = Dense(86, activation='sigmoid')(input_layer)
encoded = Dense(57, activation='sigmoid')(encoded)
encoded = Dense(37, activation='sigmoid')(encoded)
encoded = Dense(28, activation='sigmoid')(encoded)
decoded = Dense(37, activation='sigmoid')(encoded)
decoded = Dense(57, activation='sigmoid')(decoded)
decoded = Dense(86, activation='sigmoid')(decoded)
decoded = Dense(115, activation='sigmoid')(decoded)
autoencoder = Model(input_layer, decoded)
#the problem with keras is it want whole dataset as an input parameter
start = time.time()
autoencoder.compile(optimizer='adam',
loss='mean_squared_logarithmic_error') #change loss to without log
autoencoder.fit(X_train, X_train,
epochs=2,
batch_size=1,
shuffle=True)
#https://stackoverflow.com/questions/46308374/what-is-validation-data-used-for-in-a-keras-sequential-model
predictions = autoencoder.predict(X_test)
stop = time.time()
print("Total time taken: "+ str(stop - start))
rmse = np.mean(np.power(X_test - predictions, 2), axis=1).as_matrix(columns=None)
###################
#Calculating threshold
threshold1 = 0
index =0
for i in range(0,10000):
if(rmse[i]> threshold1):
threshold1 = rmse[i]
index = i
#creating timestamps
observations = df.shape[0]
templist = []
for i in range(0,observations):
temp = i/25000
templist.append(temp)
temparr = np.array(templist)
print("Plotting results")
from matplotlib import cm
plt.figure(figsize=(10,5))
fig = plt.scatter(temparr[36274:],rmse[:],s=0.01,c=None,cmap=None)
#plt.yscale("log")
plt.title("Anomaly Scores for Execution Phase")
plt.ylabel("RMSE ")
plt.xlabel("Time elapsed [min]")
plt.show()
|
[
"noreply@github.com"
] |
satyam4u.noreply@github.com
|
e4e1e8907b6b80050289d575dc8654287d717733
|
28c58fe89e647293ce3bbd1804f3d601611d77c7
|
/events/models/excel_upload_log.py
|
3c5bd9491f3e48a228dfd403e5e914742a018840
|
[] |
no_license
|
rameshdhungana/erp-backend
|
a938efa559d145998c893b703c92588b4f168259
|
98693f8b54edddd72ab2897d723ebf9afa653542
|
refs/heads/main
| 2023-01-06T02:47:42.143250
| 2020-02-22T17:11:38
| 2020-02-22T17:11:38
| 305,904,318
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,603
|
py
|
import uuid as uuid
from django.db import models
from users.models import Base
ACCOMMODATION_ROOM_ALLOCATION = 'Accommodation-Room-Allocation'
ACCOMMODATION_ROOM_CREATION = 'Accommodation-Room-Creation'
EXCEL_UPLOAD_LOG_TYPE = (
(ACCOMMODATION_ROOM_ALLOCATION, ACCOMMODATION_ROOM_ALLOCATION),
(ACCOMMODATION_ROOM_CREATION, ACCOMMODATION_ROOM_CREATION)
)
class ExcelUploadLog(Base):
    """Audit record for one bulk Excel upload (room allocation/creation).

    Stores per-upload counters: how many rows were processed and how many
    succeeded/failed.
    """
    uuid = models.UUIDField(default=uuid.uuid4, editable=False, db_index=True)
    # One of EXCEL_UPLOAD_LOG_TYPE; `type` shadows the builtin but is kept
    # for DB/API compatibility.
    type = models.CharField(max_length=255, choices=EXCEL_UPLOAD_LOG_TYPE)
    total_processing = models.IntegerField()
    total_success = models.IntegerField()
    total_failure = models.IntegerField()

    def __str__(self):
        # Single source of truth for the human-readable summary (the
        # original duplicated this format string in __repr__ and __str__).
        return 'Type: {}- Total Processing: {}-Total-Failure: {}-Total success: {}'.format(
            self.type,
            self.total_processing,
            self.total_failure,
            self.total_success)

    def __repr__(self):
        # Same text as __str__, exactly as before the consolidation.
        return self.__str__()
|
[
"rdramesh2009@gmail.com"
] |
rdramesh2009@gmail.com
|
72315bcfbaff7fcb02a51349cfc671232a52314c
|
6b43695ca4a86d8f1a57268a7f8cb7c138ef5771
|
/166_Fraction_to_Recurring_Decimal.py
|
d36c19c48517090b682d97d964b0244569e9aa03
|
[] |
no_license
|
MarcelArthur/leetcode_collection
|
bd3a8a23f3095fbea79831e6a6ea4c515bddfd7c
|
a39280ab6bbbf3b688a024a71ef952be5010d98e
|
refs/heads/master
| 2021-11-23T15:40:21.874639
| 2021-11-20T16:15:17
| 2021-11-20T16:15:17
| 110,528,906
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 958
|
py
|
class Solution(object):
    """LeetCode 166: Fraction to Recurring Decimal."""

    def fractionToDecimal(self, numerator, denominator):
        """Return numerator/denominator as a decimal string, wrapping any
        repeating fractional part in parentheses, e.g. 2/3 -> "0.(6)".
        """
        negative = (numerator * denominator) < 0
        numerator = abs(numerator)
        denominator = abs(denominator)

        whole, remainder = divmod(numerator, denominator)
        result = ("-" if negative else "") + str(whole)
        if remainder == 0:
            return result

        digits = []   # fractional digits, in order of production
        seen = {}     # remainder after division -> [digit, index in digits]
        remainder *= 10
        while remainder != 0:
            digit, remainder = divmod(remainder, denominator)
            if remainder in seen and digit == seen[remainder][0]:
                break  # same (digit, remainder) state -> cycle detected
            seen[remainder] = [digit, len(digits)]
            digits.append(str(digit))
            remainder *= 10

        result += "."
        if remainder == 0:
            # Terminating decimal: loop drained the remainder.
            return result + "".join(digits)
        start = seen[remainder][1]  # index where the repeating part begins
        return (result + "".join(digits[:start]) +
                "(" + "".join(digits[start:]) + ")")
|
[
"854748548@qq.com"
] |
854748548@qq.com
|
839981522d1bb09e61d962a7abc94e7431c8ff39
|
3aa9ddf792a265ef23259c0d9eee16b7aecd382b
|
/main.py
|
688aceddbda0d1e0e9d50b2aee5e284a31dd1b67
|
[] |
no_license
|
kkh5770/mini_pro
|
6020a4877b0363164d89a908c9b6e0695d69da2f
|
fc4612ef180a0d0a39e84814ed7d3589065da6bd
|
refs/heads/master
| 2021-04-01T04:50:43.623222
| 2020-03-18T06:44:06
| 2020-03-18T06:44:06
| 248,157,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,793
|
py
|
import sys
from PyQt5.QtWidgets import *
from PyQt5 import QtCore
from PyQt5.QtCore import pyqtSlot,pyqtSignal,QUrl
from PyQt5 import uic
from lib.YouViewLayout import Ui_MainWindow
from lib.AuthDialog import AuthDialog
from PyQt5 import QtWebEngineWidgets
import re
import datetime
import sys
import io
from pytube import YouTube
import pytube
sys.stdout = io.TextIOWrapper(sys.stdout.detach(), encoding = 'utf-8')
sys.stderr = io.TextIOWrapper(sys.stderr.detach(), encoding = 'utf-8')
#from_class=uic.loadUiType('D:/mini_pro/ui/basic_mhp.ui')[0]
class Main(QMainWindow, Ui_MainWindow): #PyQt5.QtWidgets에서 상속됨
#생성자
def __init__(self):
super().__init__() #부모의 생성자 호출
#초기화
self.setupUi(self) # 함수 선언
#인증버튼 이벤트 후
self.initAuthUnlock()
#인증버튼 이벤트 전
self.initAuthLock()
#setupUI
#시그널 초기화
self.initSignal()
#로그인 관련 변수 선언(로그인 정보를 담을 변수)
self.user_id=None
self.user_pw=None
#재생 여부
self.is_play=False
#youtube 관련 변수 선언
self.youtb=None
self.youtb_fsize=0
#기본 UI 비활성화
def initAuthLock(self):
self.previewButton.setEnabled(False)
self.fileNavButton.setEnabled(False)
self.streamComboBox.setEnabled(False)
self.startButton.setEnabled(False)
self.calendarWidget.setEnabled(False)
self.urlTextEdit.setEnabled(False)
self.pathTextEdit.setEnabled(False)
self.showStatusMsg('인증안됨')
def initAuthUnlock(self):
self.previewButton.setEnabled(True)
self.fileNavButton.setEnabled(True)
self.streamComboBox.setEnabled(True)
self.startButton.setEnabled(True)
self.calendarWidget.setEnabled(True)
self.urlTextEdit.setEnabled(True)
self.pathTextEdit.setEnabled(True)
self.showStatusMsg('인증완료')
def showStatusMsg(self,msg):
self.statusbar.showMessage(msg)
#시그널 초기화
def initSignal(self):
self.loginButton.clicked.connect(self.authCheck)
self.previewButton.clicked.connect(self.load_url)
self.exitButton.clicked.connect(QtCore.QCoreApplication.instance().quit)
self.webEngineView.loadProgress.connect(self.showProgressBrowerLoading)
self.fileNavButton.clicked.connect(self.selectDownPath)
self.calendarWidget.clicked.connect(self.append_date)
self.startButton.clicked.connect(self.downloadYoutb)
@pyqtSlot() #명시적 표현(유지보수 때문에 슬롯과 시그널이 여러개일 때 시그널은 시그널끼리 슬롯은 슬롯끼리 모아 놓는 책깔피 개념)
def authCheck(self):
#print('test')
dlg=AuthDialog()
dlg.exec_()
self.user_id=dlg.user_id
self.user_pw=dlg.user_pw
#print("id : %s Password : %s" %(self.user_id,self.user_pw))
#이 부분에서 필요한 경우 실제 로컬 DB 또는 서버 연동 후
# 유저 정보 및 사용자 유효기간을 체크하는 코딩
if True: #강제로 아이디 비번 모두 인증완료
self.initAuthUnlock()#로그인후 모두 비활성화
self.loginButton.setText("인증완료")
self.loginButton.setEnabled(False) #로그인버튼 비활성화
self.urlTextEdit.setFocus(True) #커서이동
self.append_log_msg("login Success")
else:
QMessageBox.about(self, "인증오류","아이디 또는 비밀번호가 맞지 않습니다.")
def load_url(self):
url = self.urlTextEdit.text().strip()
v = re.compile('^https://www.youtube.com/?')
if self.is_play : # 재생중일 때 멈춤
self.append_log_msg('Stop Click')
self.webEngineView.load(QUrl('about:blank')) #about:blank:빈페이지로 초기화
self.previewButton.setText('Play')
self.is_play=False
self.urlTextEdit.clear()
self.urlTextEdit.setFocus(True)
self.startButton.setEnabled(False)
self.streamComboBox.clear() #저장 완료시 초기화
self.progressBar_2.setValue(0) #다운로드 완료시 초기화
self.showStatusMsg("인증완료")
else : #play 되지 않은 상태
if v.match(url) is not None :
self.append_log_msg('Play Click')
self.webEngineView.load(QUrl(url))
#상태표시줄
self.showStatusMsg(url + "재생중")
self.previewButton.setText("Stop")
self.is_play=True
self.startButton.setEnabled(True)
self.initialYouWork(url)
else:
QMessageBox.about(self,"URL 형식오류","Youtube 주소 형식이 아닙니다")
self.urlTextEdit.clear()
self.urlTextEdit.setFocus(True)
def initialYouWork(self,url):
video_list=pytube.YouTube(url)
#로딩바 계산
video_list.register_on_progress_callback(self.showProgressDownload)
self.youtb=video_list.streams.all()
self.streamComboBox.clear()
for q in self.youtb:
#print(q)
tmp_list,str_list=[],[]
tmp_list.append(str(q.mime_type or ''))
tmp_list.append(str(q.resolution or ''))
tmp_list.append(str(q.fps or ''))
tmp_list.append(str(q.abr or ''))
str_list=[x for x in tmp_list if x!='']
print('join',','.join(str_list))
self.streamComboBox.addItem(','.join(str_list))
def append_log_msg(self,act): #act:login Success
now=datetime.datetime.now()
nowDatetime=now.strftime('%Y-%m-%d %H:%M:%S')
app_msg=self.user_id +' : '+ act + " - ("+ nowDatetime +")"
print(app_msg)
self.plainTextEdit.appendPlainText(app_msg)
#활동 로그 저장(서버 DB를 사용)
with open('D:/mini_pro/log/log.text','a')as f:
f.write(app_msg+'\n')
@pyqtSlot(int)
def showProgressBrowerLoading(self,v):
self.loadProgress.setValue(v)
def selectDownPath(self):
print('test')
#파일선택
# fname=QFileDialog.getOpenFileName(self)
# self.pathTextEdit.setText(fname[0])
#경로 선택
fpath=QFileDialog.getExistingDirectory(self,'select Directory')
self.pathTextEdit.setText(fpath)
def append_date(self):
cur_date = self.calendarWidget.selectedDate()
print(str(cur_date.year())+'-'+str(cur_date.month())+'-'+str(cur_date.day()))
self.append_log_msg("Calendar Click")
# @pyqtSlot()
def downloadYoutb(self):
down_dir=self.pathTextEdit.text().strip()
if down_dir is None or down_dir=='' or not down_dir:
QMessageBox.about(self,"경로선택","다운로드 받을 경로를 다시 선택하세요")
return None
self.youtb_fsize=self.youtb[self.streamComboBox.currentIndex()].filesize
print('fsize: ',self.youtb_fsize)
self.youtb[self.streamComboBox.currentIndex()].download(down_dir)
self.append_log_msg('Download click')
def showProgressDownload(self, chunk, file_handler, bytes_remaining):
# print(int(self.youtb_fsize - bytes_remaining))
print('bytes_remaining',bytes_remaining)
self.progressBar_2.setValue(int(((self.youtb_fsize - bytes_remaining) / self.youtb_fsize) * 100))
if __name__ == "__main__": # 클래스 전체 호출
app=QApplication(sys.argv)
you_viewer_main=Main()
you_viewer_main.show()
app.exec_()
|
[
"kkh5770@hotmail.com"
] |
kkh5770@hotmail.com
|
228fa1246cb53752628e836e36d68ae9519dc9dd
|
2698b0148191078f36efe266c3572d9f30724255
|
/utils.py
|
0fb2026ff1cc785fba8764b506c719c189dab929
|
[] |
no_license
|
sugar-activities/4196-activity
|
8c81dc6c0aa0c7e7fd8bd08da79c5a1279d7d400
|
8eaefd6bc6429694e2d765b70f5bdd42b1a5286a
|
refs/heads/master
| 2021-01-19T23:15:28.535962
| 2017-04-21T05:45:34
| 2017-04-21T05:45:34
| 88,937,828
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 698
|
py
|
# -*- mode:python; tab-width:4; indent-tabs-mode:t; -*-
import os
import gtk
def getFileType(filename):
return os.path.basename(filename).split('.').pop()
def copy_file(src, dest):
f1 = open(src, "rb")
data = f1.read()
f1.close()
f2 = open(dest, "wb")
f2.write(data)
f2.close()
def run_dialog(header,msg):
"""Pops up a blocking dialog box with 'msg'"""
dialog = gtk.Dialog(str(header), None, gtk.DIALOG_MODAL,
(gtk.STOCK_OK, gtk.RESPONSE_ACCEPT))
hbox = gtk.HBox(False, 12)
hbox.set_border_width(12)
dialog.vbox.pack_start(hbox, True, True, 0)
hbox.show()
label = gtk.Label(str(msg))
hbox.pack_start(label, False, False, 0)
label.show()
dialog.run()
dialog.destroy()
|
[
"ignacio@sugarlabs.org"
] |
ignacio@sugarlabs.org
|
76517c04be701ac3ba21954ad04359b6db6d662f
|
fd00764e83f68a47babaeb85cb5310778b031984
|
/portfolio/python_django/CarPartsShop/settings.py
|
35774f582bc0d2d8cc59b3ae9818987a4bcdc7ec
|
[] |
no_license
|
Tungrus/portfolio
|
058115f6b35feac73b75ea06513cd3ec1a731fd2
|
6b7a1154979c0f7614bc1ee899a199393ff54cf3
|
refs/heads/master
| 2022-05-17T23:26:49.377395
| 2022-04-02T07:56:08
| 2022-04-02T07:56:08
| 138,946,295
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,605
|
py
|
"""
Django settings for CarPartsShop project.
Generated by 'django-admin startproject' using Django 2.0.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""
import os
from django.conf import settings
from yandex_money.api import Wallet, ExternalPayment
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'v*d6tg+_8*o%pzduursnw615@254_52$82w(jf7_rlk3+-*)xg'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
YANDEX_ID = '63C3080358EFF6A849E0C9BE3319B2A33DFC8381120618AC0F634A4E0573B237'
#site = Site()
#site.domain = 'example.com'
#site.name = 'example.com'
#site.save()
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'shop',
'CarPartsShop',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
#'django.contrib.sites',
#'django_payeer',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'CarPartsShop.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'CarPartsShop.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
|
[
"pervishkodenis@gmail.com"
] |
pervishkodenis@gmail.com
|
d5471815818dcde88127e490d527bdbb807d35f6
|
294578eee80bc89e0411e41b4a351561bd4fbbd2
|
/capture_packets_using_scapy.py
|
0d96b9cd0a6c362f24ed2c4a8cae12ffd1ec323e
|
[] |
no_license
|
HackerSpot2001/Voilent-Python-with-Python3
|
947a09dd7ba6bd07fecefef89beb443670987b99
|
bca716d9f88d3c2657a24476ee93860b7c59bc6a
|
refs/heads/master
| 2023-07-22T15:41:34.839458
| 2023-07-07T17:05:07
| 2023-07-07T17:05:07
| 375,223,373
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,478
|
py
|
#!/usr/bin/python3
from scapy.all import *
from scapy.layers.http import HTTPRequest
from colorama import init,Fore
init()
GREEN = Fore.GREEN
RED = Fore.RED
RESET = Fore.RESET
def process_packet(packet):
"""
This function is executed whenever a packet is sniffed
"""
if packet.haslayer(HTTPRequest) :
# if this packet is an HTTP Request
# get the requested URL
url = packet[HTTPRequest].Host.decode() + packet[HTTPRequest].Path.decode()
# get the requester's IP Address
ip = packet[IP].src
# get the request method
method = packet[HTTPRequest].Method.decode()
print(f"\n{GREEN}[+] {ip} Requested {url} with {method}{RESET}")
if show_raw and packet.haslayer(Raw) and method == "POST":
# if show_raw flag is enabled, has raw data, and the requested method is "POST"
# then show raw
print(f"\n{RED}[*] Some useful Raw data: {packet[Raw].load}{RESET}")
def sniff_packet(iface=None):
"""
Sniff 80 port packets with `iface`, if None (default), then the
Scapy's default interface is used
"""
if iface:
# port 80 for http (generally)
# `process_packet` is the callback
sniff(filter="port 80", prn=process_packet, iface=iface, store=False)
else:
# sniff with default interface
sniff(filter="port 80", prn=process_packet, store=False)
if __name__ == "__main__":
sniff_packet(iface='wlan0')
|
[
"abhisheksagar513@gmail.com"
] |
abhisheksagar513@gmail.com
|
f87ed5c5a94a52470b9b2c9a3c9b1b90f3da1007
|
df65807740836509ed0b6a1c735462244ae12dee
|
/rolo.py
|
842a5c531777ef37f6487f2183053e61dc375567
|
[] |
no_license
|
frimmy/LPTHW-exs
|
5c592aca45c92d992c6b745df8e86c9d8f2fd318
|
a3071e41ff57781f3dd07967215577c1dbaf0f1f
|
refs/heads/master
| 2020-06-04T09:08:48.543418
| 2014-01-26T04:38:58
| 2014-01-26T04:38:58
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,070
|
py
|
import random
from urllib import urlopen
import sys
WORD_URL = "http://learncodethehardway.org/words.txt"
WORDS = []
PHRASES = {
"class %%%(%%%):":
"Make a class named %%% that is-a %%%.",
"class %%%(object):\n\tdef __init__(self, ***)":
"class %%% has-a __init__ that takes self and *** parameters.",
"class %%%(object):\n\tdef ***(self, @@@)":
"class %%% has-a function named *** that takes self and @@@ parameters.",
"*** = %%%():":
"Set *** to an instance of class %%%.",
"***.***(@@@)":
"From *** get the *** function, and call it with parameters self, @@@.",
"***.*** = '***'":
"From *** get the *** attribute and set it to '***'."
}
# do they want to drill phrases first
PHRASE_FIRST = False
if len(sys.argv) == 2 and sys.argv[1] == "english":
PHRASE_FIRST = True
# load up the words from the website
for word in urlopen(WORD_URL).readlines():
WORDS.append(word.strip())
def convert(snippet, phrase):
class_names = [w.capitalize() for w in
random.sample(WORDS, snippet.count("%%%"))]
other_names = random.sample(WORDS, snippet.count("***"))
results = []
param_names = []
for i in range(0, snippet.count("@@@")):
param_count = random.randint(1,3)
param_names.append(', '.join(random.sample(WORDS, param_count)))
for sentence in snippet, phrase:
result = sentence[:]
#fake class names
for word in class_names:
result = result.replace("%%%", word, 1)
#fake other names
for word in other_names:
result = result.replace("***", word, 1)
#fake parameter lists
for word in param_names:
result = result.replace("@@@", word, 1)
results.append(result)
return results
# keep going until they hit CTRL+D
try:
while True:
snippets = PHRASES.keys()
random.shuffle(snippets)
for snippet in snippets:
phrase = PHRASES[snippet]
question, answer = convert(snippet, phrase)
print question, answer
if PHRASE_FIRST:
question, answer = answer, question
print question
raw_input("> ")
print "Answer : %s\n\n" % answer
except EOFError:
print "\nBye"
|
[
"adrian.frimpong@gmail.com"
] |
adrian.frimpong@gmail.com
|
15517a085b1db69287e5e8e185b60a3471a0294a
|
2389e5e647809d7f1d2da39b79700effbc69a9e9
|
/Estruturas de repetição/1323.py
|
aa026733c459fb8ac20d912d87b8b4060851866b
|
[] |
no_license
|
victorrgouvea/URI-Problems
|
8103fc6cc77372d42b804cfad16e6f9944d2e761
|
0539305aa06c317ab908f5840a3117a04dfaa426
|
refs/heads/main
| 2023-08-17T12:09:23.259978
| 2021-10-09T18:56:18
| 2021-10-09T18:56:18
| 397,400,716
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 147
|
py
|
while True:
n = int(input())
q = 0
if n == 0:
break
for x in range(n, 0, -1):
q += x**2
print("{}".format(q))
|
[
"victorgouvea28@gmail.com"
] |
victorgouvea28@gmail.com
|
ffd95d8d219bf085f14f77f14b366babe81b1f5a
|
17a02c7e45688817d9c67af57dba780836341e67
|
/Test1/test_modify_prod.py
|
24757e9582c2f3ea4d047fdfa7618fb7949b2ddb
|
[
"Apache-2.0"
] |
permissive
|
Dmitriy1978/Chebr
|
a24f8c21e5edb97b20eb9bd5b4c14e1c2b05bdd5
|
d524915645ed9e1a40a80350c0d694f09971b167
|
refs/heads/master
| 2021-01-01T06:35:53.999558
| 2017-08-04T14:11:53
| 2017-08-04T14:11:53
| 97,464,733
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 425
|
py
|
from Model.Text_prod import Modify_prod
from Model.Names_for_login import Name_log
def test_modify_prod(app):
app.session_.login (Name_log(emaillogin="dmitriy115@i.ua", password="00000" ))
app.session_.insure_login (firstname = "test", lastname = "tester")
app.Prodhelper.modify_prod(Modify_prod( firstname="Квартира new", price="20000", quantity="2", file="\kv2.jpg" ) )
#app.session.Logout_any ()
|
[
"dmitriy197819781978@gmail.com"
] |
dmitriy197819781978@gmail.com
|
8b8abd6362dccd63fc3626ca67abe79c13b09a7f
|
6ccb24f71bc526492fed72f5a3d9df3f7807ad35
|
/world.py
|
d3ba818fc5759c837908a0ead07b781ec10d35b3
|
[] |
no_license
|
msanden/simple-predator-prey-model
|
2993d38b358dc96d384a09f7fe3531a7dbf06d10
|
4ca9bf49a8ddf204d4b40eeed9eb5f595261dc70
|
refs/heads/master
| 2020-06-17T14:34:41.207004
| 2019-07-09T07:09:10
| 2019-07-09T07:09:10
| 195,951,881
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,708
|
py
|
import cTurtle
import random
class World:
def __init__(self, xDimension, yDimension):
'''
The world model consists of life-forms that live at specific
locations in a two-dimensional world grid.
The grid is a list of rows.
'''
self.xDimension = xDimension
self.yDimension = yDimension
self.lifeForms = []
self.grid = []
# ======================================
# Creating the list of lists implementation:
# Each entry in list is initialized as a None type
# ex. g = [ [None, None, None, None, None],
# [None, None, None, None, None],
# [None, None, None, None, None],
# [None, None, None, None, None] ]
# ======================================
for arow in range(self.xDimension):
row = []
for acol in range(self.yDimension):
row.append(None)
self.grid.append(row)
#Defining the bounds of our world model
self.wturtle = cTurtle.Turtle()
self.wturtle.setWorldCoordinates(0,0,self.xDimension-1,self.yDimension-1)
self.wturtle.addshape("img/bear.gif")
self.wturtle.addshape("img/fish.gif")
self.wturtle.hideturtle()
def draw(self):
'''
Method draws the grid system using x & y dimensions.
'''
self.wturtle.tracer(0)
self.wturtle.forward(self.xDimension-1)
self.wturtle.left(90)
self.wturtle.forward(self.yDimension-1)
self.wturtle.left(90)
self.wturtle.forward(self.xDimension-1)
self.wturtle.left(90)
self.wturtle.forward(self.yDimension-1)
self.wturtle.left(90)
for i in range(self.yDimension-1):
self.wturtle.forward(self.xDimension-1)
self.wturtle.backward(self.xDimension-1)
self.wturtle.left(90)
self.wturtle.forward(1)
self.wturtle.right(90)
self.wturtle.forward(1)
self.wturtle.right(90)
for i in range(self.xDimension-2):
self.wturtle.forward(self.yDimension-1)
self.wturtle.backward(self.yDimension-1)
self.wturtle.left(90)
self.wturtle.forward(1)
self.wturtle.right(90)
self.wturtle.tracer(1)
def freezeGraphic(self):
self.wturtle.exitOnClick()
def getXDimension(self):
return self.xDimension
def getYDimension(self):
return self.yDimension
def lookAtLocation(self, x, y):
return self.grid[y][x]
def addLifeForm(self, creature, x, y):
'''
Method adds the life-form to our list and the position where it should
be placed
'''
creature.setX(x)
creature.setY(y)
self.grid[y][x] = creature
creature.setWorld(self)
self.lifeForms.append(creature)
creature.appear()
def moveLifeForm(self, oldx, oldy, newx, newy):
self.grid[newy][newx] = self.grid[oldy][oldx]
self.grid[oldy][oldx] = None
def emptyLocation(self, x, y):
if self.grid[y][x] == None:
return True
else:
return False
def liveLife(self):
'''
A creature is selected at random, then allowed to live life
(breed).
'''
if self.lifeForms != [ ]:
creature = random.randrange(len(self.lifeForms))
randomCreature = self.lifeForms[creature]
randomCreature.liveLife()
def delLifeForm(self, creature):
creature.hide()
self.grid[creature.getY()][creature.getX()] = None
self.lifeForms.remove(creature)
|
[
"mnlsande22@gmail.com"
] |
mnlsande22@gmail.com
|
28e0ea5092719228d787b32347d50e4da9ddde4c
|
682581de9e3674d157877756d1a536f5b028c045
|
/script/Mapper4_NW.py
|
691858720ebeee5a25ee3bcf7479291ca2cc684a
|
[] |
no_license
|
wchnicholas/ProteinGFourMutants
|
3a81b9175e0e5bb864d5723fa59443a3ba07eda6
|
dbdd7639187e0b8f22359f404ce4d1d950fcc8a9
|
refs/heads/master
| 2023-08-16T19:53:02.475407
| 2023-08-03T12:24:44
| 2023-08-03T12:24:44
| 33,599,807
| 8
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,644
|
py
|
#!/usr/bin/python
import os
import sys
import operator
from string import atof
from itertools import imap
def hamming(str1, str2):
assert len(str1) == len(str2)
return sum(imap(operator.ne, str1, str2))
def hashin(sample, countfile, counthash):
print 'Reading sample: %s' % sample
infile = open(countfile,'r')
counthash[sample] = {}
for line in infile.xreadlines():
line = line.rstrip().rsplit(' ')
while '' in line: line.remove('')
count = line[0]
mutID = line[1]
counthash[sample][mutID] = count
infile.close()
return counthash
def output(WToutfile, Mutoutfile, counthash, samples, WT):
WToutfile = open(WToutfile,'w')
Mutoutfile = open(Mutoutfile,'w')
muts = []
[muts.extend(counthash[sample].keys()) for sample in samples]
muts = list(set(muts))
header = 'mut'+"\t"+'HD'+"\t"+"\t".join(samples)
WToutfile.write(header+"\n")
Mutoutfile.write(header+"\n")
for mut in muts:
out = [mut,str(hamming(WT,mut))]
for sample in samples:
if counthash[sample].has_key(mut): out.append(counthash[sample][mut])
else: out.append('0')
out = "\t".join(out)
Mutoutfile.write(out+"\n")
if mut == WT: WToutfile.write(out+"\n")
WToutfile.close()
Mutoutfile.close()
def main():
samples = ['Input','IGG10','IGG20','IGG90']
WT = 'VDGV'
WToutfile = 'result/WTcount'
Mutoutfile = 'result/Mutcount'
counthash = {}
for sample in samples:
countfile = 'count/'+sample+'.count'
counthash = hashin(sample, countfile, counthash)
output(WToutfile, Mutoutfile, counthash, samples, WT)
if __name__ == '__main__':
main()
|
[
"wchnicholas@Nicholass-MacBook-Pro.local"
] |
wchnicholas@Nicholass-MacBook-Pro.local
|
9d7fb25a0c53950b98f1ea28fbf02fc0814251e7
|
65a7ceb41fbb831e1b0670786498af450f80d12e
|
/calibrate.py
|
f8c0e54ff0c36f718e648b5a4570a7bcbbcb5eb7
|
[] |
no_license
|
mistletoe999/local_vol_model
|
ae4a6404c2a5a7c026d39c85964b785e996d1f35
|
a65efa6648ba6865ffb221bfcd209095cdcc5a50
|
refs/heads/master
| 2021-01-12T13:26:44.096888
| 2016-09-25T14:41:10
| 2016-09-25T14:41:10
| 69,168,821
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,018
|
py
|
# calibrate.py
import datetime as dt
import matplotlib.pyplot as plt
import numpy as np
import os
import bsm_formula
def cal_implied_vol(vix, vix_option_data):
vix_option_data['Mid'] = (vix_option_data['Bid']
+ vix_option_data['Ask']) / 2.0
vix_option_data['ImpliedMid'] = [ bsm_formula.bsm_call_imp_vol(
vix, item['Strike'], item['TTM'], item['ZeroRate'],
item['ZeroRate'], item['Mid'], 1.5)
for index, item in vix_option_data.iterrows() ]
vix_option_data['ImpliedBid'] = [ bsm_formula.bsm_call_imp_vol(
vix, item['Strike'], item['TTM'], item['ZeroRate'],
item['ZeroRate'], item['Bid'], 1.5)
for index, item in vix_option_data.iterrows() ]
vix_option_data['ImpliedAsk'] = [ bsm_formula.bsm_call_imp_vol(
vix, item['Strike'], item['TTM'], item['ZeroRate'],
item['ZeroRate'], item['Ask'], 1.5)
for index, item in vix_option_data.iterrows() ]
vix_option_data['ImpliedModel'] = [ bsm_formula.bsm_call_imp_vol(
vix, item['Strike'], item['TTM'], item['ZeroRate'],
item['ZeroRate'], item['Model'], 1.5)
for index, item in vix_option_data.iterrows() ]
MSE = np.average(((vix_option_data['Mid'] - vix_option_data['Model'])
/ vix_option_data['Mid']) ** 2)
#print MSE
return MSE
def plot_implied_vol(vix_option_data):
vix_option_data_grouped = vix_option_data.groupby('Maturity')
#print vix_option_data_grouped
nrows = (vix_option_data_grouped.ngroups + 1) / 2
fig, axes = plt.subplots(figsize=(12, 6 * nrows),
nrows = nrows, ncols = 2,
gridspec_kw = dict(hspace = 0.4))
volatilites = np.array(vix_option_data.iloc[:, -4:])
ylimits = [np.min(volatilites) - 0.1, np.max(volatilites) + 0.1]
targets = zip(vix_option_data_grouped.groups.keys(), axes.flatten())
for i, (key, ax) in enumerate(targets):
data = vix_option_data_grouped.get_group(key)
#print data
ax.plot(data['Strike'], data['ImpliedBid'], '-.')
ax.plot(data['Strike'], data['ImpliedAsk'], ':')
ax.plot(data['Strike'], data['ImpliedMid'], '--')
ax.plot(data['Strike'], data['ImpliedModel'], lw = 3)
xlimits = [np.min(data['Strike']) - 0.2, np.max(data['Strike']) + 0.2]
ax.set_xlim(xlimits)
ax.set_ylim(ylimits)
ax.set_xlabel('strike')
ax.set_ylabel('implied volatility')
ax.grid(True)
date = key.astype('M8[D]').astype(np.datetime64)
ax.set_title(date)
ax.legend(labels = ['Bid', 'Ask', 'Mid', 'Model'])
file_path = '../data_graphs/vix_implied_vol_skew.pdf'
if os.path.exists(file_path):
os.remove(file_path)
plt.savefig(file_path, bbox_inches='tight')
def cal_vix_options(vix_option_data, dynamic_local_vol):
vix_option_data['Model'] = [ dynamic_local_vol.cal_vix_option(
item['TTM'], item['Strike'])
for index, item in vix_option_data.iterrows() ]
print
|
[
"boyuwei@gmail.com"
] |
boyuwei@gmail.com
|
0f2a54986fe6556831c8a4e0c5945a14db03101c
|
f158c31eceb414be162a4580ad5b3b9db9601c5c
|
/graphql_mongodb/data.py
|
ad70470d52204caf31b7d479bff3de6ebea65e11
|
[] |
no_license
|
mubaxhir/Ninja_bootcamp
|
06bb1174a7c3d24cd5637440456b70260f638b0b
|
e997e2f5d58c557567aa82dd2b59a661a4453e5a
|
refs/heads/master
| 2020-09-30T02:58:29.344825
| 2020-02-26T12:29:36
| 2020-02-26T12:29:36
| 227,185,706
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 432
|
py
|
from mongoengine import connect
from model import Id,TITLE,Description,Done
connect(host="mongodb+srv://mubi:1234@cluster001-avto2.mongodb.net/test?retryWrites=true&w=majority")
def initData():
id = Id(name="32452343242")
id.save()
title = TITLE(name= "aman")
title.save()
description = Description(name="abcdefghijklmnopqrstuvwxyz")
description.save()
done = Done(name="true")
done.save()
|
[
"noreply@github.com"
] |
mubaxhir.noreply@github.com
|
7b2b6ff21d85d218afc25224cb18996a23d5b0d9
|
328ff8d36a8180943cb30d40061631a894b343ce
|
/15/ml_check_3k_2_right.py
|
2b8ad87b9677047b4e790173f214e7a844aa08b0
|
[] |
no_license
|
mihailselezniov/Deformation
|
8f9ed0176a49e292a9453b9eb020d422cbc6f1c6
|
024981f42c01f7be0037212759aa101575e9cffa
|
refs/heads/master
| 2022-02-17T08:37:11.676746
| 2022-02-12T17:41:31
| 2022-02-12T17:41:31
| 150,991,705
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 49,739
|
py
|
# -*- coding: utf-8 -*-
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score, confusion_matrix, classification_report, roc_auc_score, average_precision_score, f1_score
from sklearn.linear_model import LogisticRegression, SGDClassifier
from sklearn.svm import LinearSVC, SVC
from sklearn.neighbors import KNeighborsClassifier
from sklearn.naive_bayes import BernoulliNB
from sklearn.ensemble import RandomForestClassifier
from sklearn.neural_network import MLPClassifier
from sklearn.externals import joblib
from xgboost import XGBClassifier, XGBRegressor
import collections
par = {}
parts = 10
par['pressure_time'] = {'Min': 0.0, 'Max': 100.0}
par['pressure_radius'] = {'Min': 0.0, 'Max': 5.0}
par['pressure_amplitude'] = {'Min': 0.0, 'Max': 200.0}
par['length'] = {'Min': 10.0, 'Max': 100.0}
par['diameter'] = {'Min': 0.01, 'Max': 0.5}
par['young'] = {'Min': 60.0, 'Max': 300.0}
par['density'] = {'Min': 1000.0, 'Max': 2000.0}
par['strength'] = {'Min': 0.2, 'Max': 10.0}
def get_list(Min, Max):
return list(map(lambda x: round(x, 2), np.arange(Min, Max+0.01, (Max-Min)/(parts-1))))
e0 = tuple(enumerate(get_list(**par['length'])))
e1 = tuple(enumerate(get_list(**par['diameter'])))
e2 = tuple(enumerate(get_list(**par['young'])))
e3 = tuple(enumerate(get_list(**par['density'])))
e4 = tuple(enumerate(get_list(**par['pressure_time'])))
e5 = tuple(enumerate(get_list(**par['pressure_radius'])))
e6 = tuple(enumerate(get_list(**par['pressure_amplitude'])))
e7 = tuple(enumerate(get_list(**par['strength'])))
extreme_values = [[
par['length']['Min'],
par['diameter']['Min'],
par['young']['Min'],
par['density']['Min'],
par['pressure_time']['Min'],#get_list(**par['pressure_time'])[1],
par['pressure_radius']['Min'],#get_list(**par['pressure_radius'])[1],
par['pressure_amplitude']['Min'],#get_list(**par['pressure_amplitude'])[1],
par['strength']['Min'],
],
[
par['length']['Max'],
par['diameter']['Max'],
par['young']['Max'],
par['density']['Max'],
par['pressure_time']['Max'],
par['pressure_radius']['Max'],
par['pressure_amplitude']['Max'],
par['strength']['Max'],
]
]
extreme_values = np.array(extreme_values)
#x_train = (x_train - extreme_values.min(axis=0)) / (extreme_values.max(axis=0) - extreme_values.min(axis=0))
with open('data3k_2.txt', 'r') as f:
data_is_broken = f.readlines()
data_is_broken = list(map(int, data_is_broken))
Y_test = []
for i, val in enumerate(data_is_broken):
Y_test.extend([i%2]*val)
new_parts = 19
def get_new_list(Min, Max):
return list(map(lambda x: round(x, 2), np.arange(Min, Max+0.01, (Max-Min)/(new_parts-1))))[1::2]
e2_0 = tuple(enumerate(get_new_list(**par['length'])))
e2_1 = tuple(enumerate(get_new_list(**par['diameter'])))
e2_2 = tuple(enumerate(get_new_list(**par['young'])))
e2_3 = tuple(enumerate(get_new_list(**par['density'])))
e2_4 = tuple(enumerate(get_new_list(**par['pressure_time'])))
e2_5 = tuple(enumerate(get_new_list(**par['pressure_radius'])))
e2_6 = tuple(enumerate(get_new_list(**par['pressure_amplitude'])))
e2_7 = tuple(enumerate(get_new_list(**par['strength'])))
i = 0
x_test, y_test = [], []
for i0, l in e2_0:
for i1, di in e2_1:
for i2, y in e2_2:
for i3, de in e2_3:
for i4, pt in e2_4:
for i5, pr in e2_5:
for i6, pa in e2_6:
for i7, s in e2_7:
if 0 not in [i4, i5, i6]:
x_test.append([l, di, y, de, pt, pr, pa, s])
y_test.append(Y_test[i])
i += 1
print(i0)
x_test, y_test = np.array(x_test), np.array(y_test)
x_test = (x_test - extreme_values.min(axis=0)) / (extreme_values.max(axis=0) - extreme_values.min(axis=0))
def make_str(data):
return ''.join(map(str, data))
def make_set(data):
return {make_str(i) for i in data}
source_f = '../12/ml_threads/6_1.txt'
with open(source_f, 'r') as f:
threads = f.readlines()
# Accumulators: *_metrics collects one list per training-set size (cut);
# *_metric holds the per-model values for the current cut.
roc_metrics, pr_metrics, f1_metrics = [], [], []
roc_metric, pr_metric, f1_metric = [], [], []
# Benchmark every classifier while growing the labelled training set.
for cut in [100, 200, 300, 400, 500]:
    #cut = 200#100
    print('\n\n\n', '#'*10, cut, '#'*10)
    # Map "concatenated grid indices" -> label for the first `cut` samples.
    x_train_dict = {}
    for t in threads[:cut]:
        tr = list(map(int, t.replace('\n', '').split(',')))
        x_train_dict[make_str(tr[:-1])] = tr[-1]
    i = 0  # NOTE(review): incremented below but never read -- dead counter
    x_train, y_train = [], []
    # Walk the full parameter grid (e0..e7 are defined earlier in this file)
    # and keep only the combinations present in x_train_dict.
    for i0, l in e0:
        for i1, di in e1:
            for i2, y in e2:
                for i3, de in e3:
                    for i4, pt in e4:
                        for i5, pr in e5:
                            for i6, pa in e6:
                                for i7, s in e7:
                                    # Same pressure-axis filter as the test grid.
                                    if 0 not in [i4, i5, i6]:
                                        key = make_str([i0, i1, i2, i3, i4, i5, i6, i7])
                                        if key in x_train_dict:
                                            x_train.append([l, di, y, de, pt, pr, pa, s])
                                            y_train.append(x_train_dict[key])
                                    i += 1
        print(i0)  # progress indicator over the outermost axis
    x_train, y_train = np.array(x_train), np.array(y_train)
    # Normalise with the same global min/max used for the test set.
    x_train = (x_train - extreme_values.min(axis=0)) / (extreme_values.max(axis=0) - extreme_values.min(axis=0))
    #print(x_train)
    def fit_model(model):
        """Fit *model* on (x_train, y_train), evaluate on (x_test, y_test),
        print a full report and append rounded ROC-AUC / PR-AUC / class-0 F1
        to the module-level per-cut metric lists."""
        # Redefined inside the cut loop at module scope, so it sees the
        # current x_train/y_train; the metric lists are rebound each cut,
        # hence the explicit globals.
        global roc_metric
        global pr_metric
        global f1_metric
        print('\n', '-'*10, model.__class__.__name__, '-'*10)
        print(x_test.shape, y_test.shape)
        print('y_test', dict(collections.Counter(y_test)), 'y_train', dict(collections.Counter(y_train)))
        # fit model on training data
        model.fit(x_train, y_train)
        y_pred = model.predict(x_test)
        print('y_pred', dict(collections.Counter(y_pred)))
        # make predictions for test data
        y_pred = [round(value) for value in y_pred]
        # evaluate predictions
        accuracy = accuracy_score(y_test, y_pred)
        print('Accuracy: {}'.format(accuracy))
        cm = confusion_matrix(y_test, y_pred)
        print('Confusion matrix:\n{}'.format(cm))
        print('Precision, recall and f1-score:')
        print(classification_report(y_test, y_pred))
        try:
            roc = roc_auc_score(y_test, y_pred)
            print('ROC AUC: {}'.format(roc))
            pr = average_precision_score(y_test, y_pred)
            print('PR AUC: {}'.format(pr))
            roc_metric.append(round(float(roc), 4))
            pr_metric.append(round(float(pr), 4))
            # average=None yields per-class F1; keep class 0 only.
            f1 = f1_score(y_test, y_pred, average=None)
            f1_metric.append(round(float(f1[0]), 4))
        except Exception as e:
            # Metric computation can fail (e.g. degenerate predictions);
            # log and continue with the next model.
            print(e)
        print('-'*10, 'End', model.__class__.__name__, '-'*10)
    # Reset the per-cut lists, then evaluate every classifier.
    roc_metric, pr_metric, f1_metric = [], [], []
    fit_model(XGBClassifier(random_state=42))
    fit_model(LogisticRegression())
    fit_model(LinearSVC(random_state=42, tol=1e-5))
    fit_model(KNeighborsClassifier(n_neighbors=5))
    fit_model(SGDClassifier(random_state=42))
    fit_model(BernoulliNB())
    fit_model(RandomForestClassifier(random_state=42))
    fit_model(MLPClassifier())
    fit_model(SVC(random_state=42))
    # Snapshot (shallow-copy) this cut's results.
    roc_metrics.append(roc_metric[:])
    pr_metrics.append(pr_metric[:])
    f1_metrics.append(f1_metric[:])
# Dump the metric histories collected across all training-set sizes.
for label, history in (('roc_metrics', roc_metrics),
                       ('pr_metrics', pr_metrics),
                       ('f1_metrics', f1_metrics)):
    print(label)
    print(history)
print()
'''
########## 100 ##########
0
1
2
3
4
5
6
7
8
9
---------- XGBClassifier ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 33, 0: 67}
y_pred {1: 20872008, 0: 9361080}
Accuracy: 0.7713856751913665
Confusion matrix:
[[ 2612335 162972]
[ 6748745 20709036]]
Precision, recall and f1-score:
precision recall f1-score support
0 0.28 0.94 0.43 2775307
1 0.99 0.75 0.86 27457781
accuracy 0.77 30233088
macro avg 0.64 0.85 0.64 30233088
weighted avg 0.93 0.77 0.82 30233088
ROC AUC: 0.8477457993709515
PR AUC: 0.9715485354609068
---------- End XGBClassifier ----------
---------- LogisticRegression ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 33, 0: 67}
/home/mihailselezniov/anaconda3/lib/python3.7/site-packages/sklearn/linear_model/logistic.py:432: FutureWarning: Default solver will be changed to 'lbfgs' in 0.22. Specify a solver to silence this warning.
FutureWarning)
y_pred {1: 19350366, 0: 10882722}
Accuracy: 0.7314627933474741
Confusion matrix:
[[ 2769660 5647]
[ 8113062 19344719]]
Precision, recall and f1-score:
precision recall f1-score support
0 0.25 1.00 0.41 2775307
1 1.00 0.70 0.83 27457781
accuracy 0.73 30233088
macro avg 0.63 0.85 0.62 30233088
weighted avg 0.93 0.73 0.79 30233088
ROC AUC: 0.8512456056325731
PR AUC: 0.9726707644829715
---------- End LogisticRegression ----------
---------- LinearSVC ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 33, 0: 67}
y_pred {1: 25753857, 0: 4479231}
Accuracy: 0.9205157607453132
Confusion matrix:
[[ 2425742 349565]
[ 2053489 25404292]]
Precision, recall and f1-score:
precision recall f1-score support
0 0.54 0.87 0.67 2775307
1 0.99 0.93 0.95 27457781
accuracy 0.92 30233088
macro avg 0.76 0.90 0.81 30233088
weighted avg 0.95 0.92 0.93 30233088
ROC AUC: 0.8996287109726478
PR AUC: 0.980576566191755
---------- End LinearSVC ----------
---------- KNeighborsClassifier ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 33, 0: 67}
y_pred {0: 22963529, 1: 7269559}
Accuracy: 0.32497183218598114
Confusion matrix:
[[ 2665325 109982]
[20298204 7159577]]
Precision, recall and f1-score:
precision recall f1-score support
0 0.12 0.96 0.21 2775307
1 0.98 0.26 0.41 27457781
accuracy 0.32 30233088
macro avg 0.55 0.61 0.31 30233088
weighted avg 0.91 0.32 0.39 30233088
ROC AUC: 0.6105598981135527
PR AUC: 0.9281940354007876
---------- End KNeighborsClassifier ----------
---------- SGDClassifier ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 33, 0: 67}
y_pred {1: 26138479, 0: 4094609}
Accuracy: 0.9268699909185592
Confusion matrix:
[[ 2329485 445822]
[ 1765124 25692657]]
Precision, recall and f1-score:
precision recall f1-score support
0 0.57 0.84 0.68 2775307
1 0.98 0.94 0.96 27457781
accuracy 0.93 30233088
macro avg 0.78 0.89 0.82 30233088
weighted avg 0.94 0.93 0.93 30233088
ROC AUC: 0.8875380875113489
PR AUC: 0.9781391260013497
---------- End SGDClassifier ----------
---------- BernoulliNB ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 33, 0: 67}
y_pred {0: 30233088}
Accuracy: 0.0917970073053735
Confusion matrix:
[[ 2775307 0]
[27457781 0]]
Precision, recall and f1-score:
/home/mihailselezniov/anaconda3/lib/python3.7/site-packages/sklearn/metrics/classification.py:1437: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples.
'precision', 'predicted', average, warn_for)
precision recall f1-score support
0 0.09 1.00 0.17 2775307
1 0.00 0.00 0.00 27457781
accuracy 0.09 30233088
macro avg 0.05 0.50 0.08 30233088
weighted avg 0.01 0.09 0.02 30233088
ROC AUC: 0.5
PR AUC: 0.9082029926946265
/home/mihailselezniov/anaconda3/lib/python3.7/site-packages/sklearn/metrics/classification.py:1437: UndefinedMetricWarning: F-score is ill-defined and being set to 0.0 in labels with no predicted samples.
'precision', 'predicted', average, warn_for)
---------- End BernoulliNB ----------
---------- RandomForestClassifier ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 33, 0: 67}
/home/mihailselezniov/anaconda3/lib/python3.7/site-packages/sklearn/ensemble/forest.py:245: FutureWarning: The default value of n_estimators will change from 10 in version 0.20 to 100 in 0.22.
"10 in version 0.20 to 100 in 0.22.", FutureWarning)
y_pred {1: 6624964, 0: 23608124}
Accuracy: 0.30831124495122697
Confusion matrix:
[[ 2735772 39535]
[20872352 6585429]]
Precision, recall and f1-score:
precision recall f1-score support
0 0.12 0.99 0.21 2775307
1 0.99 0.24 0.39 27457781
accuracy 0.31 30233088
macro avg 0.55 0.61 0.30 30233088
weighted avg 0.91 0.31 0.37 30233088
ROC AUC: 0.6127965415667408
PR AUC: 0.928788181362327
---------- End RandomForestClassifier ----------
---------- MLPClassifier ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 33, 0: 67}
/home/mihailselezniov/anaconda3/lib/python3.7/site-packages/sklearn/neural_network/multilayer_perceptron.py:566: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn't converged yet.
% self.max_iter, ConvergenceWarning)
y_pred {1: 22218194, 0: 8014894}
Accuracy: 0.8235247090869448
Confusion matrix:
[[ 2727404 47903]
[ 5287490 22170291]]
Precision, recall and f1-score:
precision recall f1-score support
0 0.34 0.98 0.51 2775307
1 1.00 0.81 0.89 27457781
accuracy 0.82 30233088
macro avg 0.67 0.90 0.70 30233088
weighted avg 0.94 0.82 0.86 30233088
ROC AUC: 0.8950857830781003
PR AUC: 0.9805819895623722
---------- End MLPClassifier ----------
---------- SVC ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 33, 0: 67}
/home/mihailselezniov/anaconda3/lib/python3.7/site-packages/sklearn/svm/base.py:193: FutureWarning: The default value of gamma will change from 'auto' to 'scale' in version 0.22 to account better for unscaled features. Set gamma explicitly to 'auto' or 'scale' to avoid this warning.
"avoid this warning.", FutureWarning)
y_pred {0: 30233088}
Accuracy: 0.0917970073053735
Confusion matrix:
[[ 2775307 0]
[27457781 0]]
Precision, recall and f1-score:
/home/mihailselezniov/anaconda3/lib/python3.7/site-packages/sklearn/metrics/classification.py:1437: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples.
'precision', 'predicted', average, warn_for)
precision recall f1-score support
0 0.09 1.00 0.17 2775307
1 0.00 0.00 0.00 27457781
accuracy 0.09 30233088
macro avg 0.05 0.50 0.08 30233088
weighted avg 0.01 0.09 0.02 30233088
ROC AUC: 0.5
PR AUC: 0.9082029926946265
/home/mihailselezniov/anaconda3/lib/python3.7/site-packages/sklearn/metrics/classification.py:1437: UndefinedMetricWarning: F-score is ill-defined and being set to 0.0 in labels with no predicted samples.
'precision', 'predicted', average, warn_for)
---------- End SVC ----------
########## 200 ##########
0
1
2
3
4
5
6
7
8
9
---------- XGBClassifier ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 69, 0: 131}
y_pred {1: 23684942, 0: 6548146}
Accuracy: 0.8407594685663602
Confusion matrix:
[[ 2254560 520747]
[ 4293586 23164195]]
Precision, recall and f1-score:
precision recall f1-score support
0 0.34 0.81 0.48 2775307
1 0.98 0.84 0.91 27457781
accuracy 0.84 30233088
macro avg 0.66 0.83 0.69 30233088
weighted avg 0.92 0.84 0.87 30233088
ROC AUC: 0.8279968580336526
PR AUC: 0.9670972688956141
---------- End XGBClassifier ----------
---------- LogisticRegression ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 69, 0: 131}
/home/mihailselezniov/anaconda3/lib/python3.7/site-packages/sklearn/linear_model/logistic.py:432: FutureWarning: Default solver will be changed to 'lbfgs' in 0.22. Specify a solver to silence this warning.
FutureWarning)
y_pred {1: 23415015, 0: 6818073}
Accuracy: 0.859238593159918
Confusion matrix:
[[ 2668864 106443]
[ 4149209 23308572]]
Precision, recall and f1-score:
precision recall f1-score support
0 0.39 0.96 0.56 2775307
1 1.00 0.85 0.92 27457781
accuracy 0.86 30233088
macro avg 0.69 0.91 0.74 30233088
weighted avg 0.94 0.86 0.88 30233088
ROC AUC: 0.9052670424212677
PR AUC: 0.9822693579446738
---------- End LogisticRegression ----------
---------- LinearSVC ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 69, 0: 131}
y_pred {1: 26365483, 0: 3867605}
Accuracy: 0.9373318398702772
Confusion matrix:
[[ 2374130 401177]
[ 1493475 25964306]]
Precision, recall and f1-score:
precision recall f1-score support
0 0.61 0.86 0.71 2775307
1 0.98 0.95 0.96 27457781
accuracy 0.94 30233088
macro avg 0.80 0.90 0.84 30233088
weighted avg 0.95 0.94 0.94 30233088
ROC AUC: 0.9005280088555725
PR AUC: 0.980618636694123
---------- End LinearSVC ----------
---------- KNeighborsClassifier ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 69, 0: 131}
y_pred {0: 21187644, 1: 9045444}
Accuracy: 0.3782768402619011
Confusion matrix:
[[ 2583170 192137]
[18604474 8853307]]
Precision, recall and f1-score:
precision recall f1-score support
0 0.12 0.93 0.22 2775307
1 0.98 0.32 0.49 27457781
accuracy 0.38 30233088
macro avg 0.55 0.63 0.35 30233088
weighted avg 0.90 0.38 0.46 30233088
ROC AUC: 0.6266012742086223
PR AUC: 0.9309525123019805
---------- End KNeighborsClassifier ----------
---------- SGDClassifier ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 69, 0: 131}
y_pred {1: 25235667, 0: 4997421}
Accuracy: 0.904825798806923
Confusion matrix:
[[ 2447659 327648]
[ 2549762 24908019]]
Precision, recall and f1-score:
precision recall f1-score support
0 0.49 0.88 0.63 2775307
1 0.99 0.91 0.95 27457781
accuracy 0.90 30233088
macro avg 0.74 0.89 0.79 30233088
weighted avg 0.94 0.90 0.92 30233088
ROC AUC: 0.8945402637332561
PR AUC: 0.9796977587585388
---------- End SGDClassifier ----------
---------- BernoulliNB ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 69, 0: 131}
y_pred {0: 30233088}
Accuracy: 0.0917970073053735
Confusion matrix:
[[ 2775307 0]
[27457781 0]]
Precision, recall and f1-score:
/home/mihailselezniov/anaconda3/lib/python3.7/site-packages/sklearn/metrics/classification.py:1437: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples.
'precision', 'predicted', average, warn_for)
precision recall f1-score support
0 0.09 1.00 0.17 2775307
1 0.00 0.00 0.00 27457781
accuracy 0.09 30233088
macro avg 0.05 0.50 0.08 30233088
weighted avg 0.01 0.09 0.02 30233088
ROC AUC: 0.5
PR AUC: 0.9082029926946265
/home/mihailselezniov/anaconda3/lib/python3.7/site-packages/sklearn/metrics/classification.py:1437: UndefinedMetricWarning: F-score is ill-defined and being set to 0.0 in labels with no predicted samples.
'precision', 'predicted', average, warn_for)
---------- End BernoulliNB ----------
---------- RandomForestClassifier ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 69, 0: 131}
/home/mihailselezniov/anaconda3/lib/python3.7/site-packages/sklearn/ensemble/forest.py:245: FutureWarning: The default value of n_estimators will change from 10 in version 0.20 to 100 in 0.22.
"10 in version 0.20 to 100 in 0.22.", FutureWarning)
y_pred {1: 11363026, 0: 18870062}
Accuracy: 0.4573695879163915
Confusion matrix:
[[ 2619988 155319]
[16250074 11207707]]
Precision, recall and f1-score:
precision recall f1-score support
0 0.14 0.94 0.24 2775307
1 0.99 0.41 0.58 27457781
accuracy 0.46 30233088
macro avg 0.56 0.68 0.41 30233088
weighted avg 0.91 0.46 0.55 30233088
ROC AUC: 0.676107507312788
PR AUC: 0.9400933332382555
---------- End RandomForestClassifier ----------
---------- MLPClassifier ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 69, 0: 131}
/home/mihailselezniov/anaconda3/lib/python3.7/site-packages/sklearn/neural_network/multilayer_perceptron.py:566: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn't converged yet.
% self.max_iter, ConvergenceWarning)
y_pred {1: 24908587, 0: 5324501}
Accuracy: 0.906133571271317
Confusion matrix:
[[ 2630968 144339]
[ 2693533 24764248]]
Precision, recall and f1-score:
precision recall f1-score support
0 0.49 0.95 0.65 2775307
1 0.99 0.90 0.95 27457781
accuracy 0.91 30233088
macro avg 0.74 0.92 0.80 30233088
weighted avg 0.95 0.91 0.92 30233088
ROC AUC: 0.9249472213177019
PR AUC: 0.9857686657105307
---------- End MLPClassifier ----------
---------- SVC ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 69, 0: 131}
/home/mihailselezniov/anaconda3/lib/python3.7/site-packages/sklearn/svm/base.py:193: FutureWarning: The default value of gamma will change from 'auto' to 'scale' in version 0.22 to account better for unscaled features. Set gamma explicitly to 'auto' or 'scale' to avoid this warning.
"avoid this warning.", FutureWarning)
y_pred {0: 30233088}
Accuracy: 0.0917970073053735
Confusion matrix:
[[ 2775307 0]
[27457781 0]]
Precision, recall and f1-score:
/home/mihailselezniov/anaconda3/lib/python3.7/site-packages/sklearn/metrics/classification.py:1437: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples.
'precision', 'predicted', average, warn_for)
precision recall f1-score support
0 0.09 1.00 0.17 2775307
1 0.00 0.00 0.00 27457781
accuracy 0.09 30233088
macro avg 0.05 0.50 0.08 30233088
weighted avg 0.01 0.09 0.02 30233088
ROC AUC: 0.5
PR AUC: 0.9082029926946265
/home/mihailselezniov/anaconda3/lib/python3.7/site-packages/sklearn/metrics/classification.py:1437: UndefinedMetricWarning: F-score is ill-defined and being set to 0.0 in labels with no predicted samples.
'precision', 'predicted', average, warn_for)
---------- End SVC ----------
########## 300 ##########
0
1
2
3
4
5
6
7
8
9
---------- XGBClassifier ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 98, 0: 202}
y_pred {1: 24224987, 0: 6008101}
Accuracy: 0.8603185357711393
Confusion matrix:
[[ 2280203 495104]
[ 3727898 23729883]]
Precision, recall and f1-score:
precision recall f1-score support
0 0.38 0.82 0.52 2775307
1 0.98 0.86 0.92 27457781
accuracy 0.86 30233088
macro avg 0.68 0.84 0.72 30233088
weighted avg 0.92 0.86 0.88 30233088
ROC AUC: 0.842917758208755
PR AUC: 0.969873928529596
---------- End XGBClassifier ----------
---------- LogisticRegression ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 98, 0: 202}
/home/mihailselezniov/anaconda3/lib/python3.7/site-packages/sklearn/linear_model/logistic.py:432: FutureWarning: Default solver will be changed to 'lbfgs' in 0.22. Specify a solver to silence this warning.
FutureWarning)
y_pred {1: 23574421, 0: 6658667}
Accuracy: 0.86538477313333
Confusion matrix:
[[ 2682070 93237]
[ 3976597 23481184]]
Precision, recall and f1-score:
precision recall f1-score support
0 0.40 0.97 0.57 2775307
1 1.00 0.86 0.92 27457781
accuracy 0.87 30233088
macro avg 0.70 0.91 0.74 30233088
weighted avg 0.94 0.87 0.89 30233088
ROC AUC: 0.910789464262977
PR AUC: 0.9833231989550733
---------- End LogisticRegression ----------
---------- LinearSVC ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 98, 0: 202}
y_pred {1: 26123197, 0: 4109891}
Accuracy: 0.9355782644498637
Confusion matrix:
[[ 2468765 306542]
[ 1641126 25816655]]
Precision, recall and f1-score:
precision recall f1-score support
0 0.60 0.89 0.72 2775307
1 0.99 0.94 0.96 27457781
accuracy 0.94 30233088
macro avg 0.79 0.91 0.84 30233088
weighted avg 0.95 0.94 0.94 30233088
ROC AUC: 0.9148887826006498
PR AUC: 0.9834802620794294
---------- End LinearSVC ----------
---------- KNeighborsClassifier ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 98, 0: 202}
y_pred {0: 20824773, 1: 9408315}
Accuracy: 0.38924856104675776
Confusion matrix:
[[ 2567589 207718]
[18257184 9200597]]
Precision, recall and f1-score:
precision recall f1-score support
0 0.12 0.93 0.22 2775307
1 0.98 0.34 0.50 27457781
accuracy 0.39 30233088
macro avg 0.55 0.63 0.36 30233088
weighted avg 0.90 0.39 0.47 30233088
ROC AUC: 0.6301182699524746
PR AUC: 0.931564505228858
---------- End KNeighborsClassifier ----------
---------- SGDClassifier ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 98, 0: 202}
y_pred {1: 25569924, 0: 4663164}
Accuracy: 0.9221416945566394
Confusion matrix:
[[ 2542287 233020]
[ 2120877 25336904]]
Precision, recall and f1-score:
precision recall f1-score support
0 0.55 0.92 0.68 2775307
1 0.99 0.92 0.96 27457781
accuracy 0.92 30233088
macro avg 0.77 0.92 0.82 30233088
weighted avg 0.95 0.92 0.93 30233088
ROC AUC: 0.9193983669018354
PR AUC: 0.9845003265735672
---------- End SGDClassifier ----------
---------- BernoulliNB ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 98, 0: 202}
y_pred {0: 30233088}
Accuracy: 0.0917970073053735
Confusion matrix:
[[ 2775307 0]
[27457781 0]]
Precision, recall and f1-score:
/home/mihailselezniov/anaconda3/lib/python3.7/site-packages/sklearn/metrics/classification.py:1437: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples.
'precision', 'predicted', average, warn_for)
precision recall f1-score support
0 0.09 1.00 0.17 2775307
1 0.00 0.00 0.00 27457781
accuracy 0.09 30233088
macro avg 0.05 0.50 0.08 30233088
weighted avg 0.01 0.09 0.02 30233088
ROC AUC: 0.5
PR AUC: 0.9082029926946265
/home/mihailselezniov/anaconda3/lib/python3.7/site-packages/sklearn/metrics/classification.py:1437: UndefinedMetricWarning: F-score is ill-defined and being set to 0.0 in labels with no predicted samples.
'precision', 'predicted', average, warn_for)
---------- End BernoulliNB ----------
---------- RandomForestClassifier ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 98, 0: 202}
/home/mihailselezniov/anaconda3/lib/python3.7/site-packages/sklearn/ensemble/forest.py:245: FutureWarning: The default value of n_estimators will change from 10 in version 0.20 to 100 in 0.22.
"10 in version 0.20 to 100 in 0.22.", FutureWarning)
y_pred {1: 19269831, 0: 10963257}
Accuracy: 0.7119699449821335
Confusion matrix:
[[ 2515263 260044]
[ 8447994 19009787]]
Precision, recall and f1-score:
precision recall f1-score support
0 0.23 0.91 0.37 2775307
1 0.99 0.69 0.81 27457781
accuracy 0.71 30233088
macro avg 0.61 0.80 0.59 30233088
weighted avg 0.92 0.71 0.77 30233088
ROC AUC: 0.7993143425940217
PR AUC: 0.9624137396562463
---------- End RandomForestClassifier ----------
---------- MLPClassifier ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 98, 0: 202}
/home/mihailselezniov/anaconda3/lib/python3.7/site-packages/sklearn/neural_network/multilayer_perceptron.py:566: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn't converged yet.
% self.max_iter, ConvergenceWarning)
y_pred {1: 25514232, 0: 4718856}
Accuracy: 0.9237808258289726
Confusion matrix:
[[ 2594911 180396]
[ 2123945 25333836]]
Precision, recall and f1-score:
precision recall f1-score support
0 0.55 0.93 0.69 2775307
1 0.99 0.92 0.96 27457781
accuracy 0.92 30233088
macro avg 0.77 0.93 0.82 30233088
weighted avg 0.95 0.92 0.93 30233088
ROC AUC: 0.9288232522538259
PR AUC: 0.9863757263716217
---------- End MLPClassifier ----------
---------- SVC ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 98, 0: 202}
/home/mihailselezniov/anaconda3/lib/python3.7/site-packages/sklearn/svm/base.py:193: FutureWarning: The default value of gamma will change from 'auto' to 'scale' in version 0.22 to account better for unscaled features. Set gamma explicitly to 'auto' or 'scale' to avoid this warning.
"avoid this warning.", FutureWarning)
y_pred {0: 30233088}
Accuracy: 0.0917970073053735
Confusion matrix:
[[ 2775307 0]
[27457781 0]]
Precision, recall and f1-score:
/home/mihailselezniov/anaconda3/lib/python3.7/site-packages/sklearn/metrics/classification.py:1437: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples.
'precision', 'predicted', average, warn_for)
precision recall f1-score support
0 0.09 1.00 0.17 2775307
1 0.00 0.00 0.00 27457781
accuracy 0.09 30233088
macro avg 0.05 0.50 0.08 30233088
weighted avg 0.01 0.09 0.02 30233088
ROC AUC: 0.5
PR AUC: 0.9082029926946265
/home/mihailselezniov/anaconda3/lib/python3.7/site-packages/sklearn/metrics/classification.py:1437: UndefinedMetricWarning: F-score is ill-defined and being set to 0.0 in labels with no predicted samples.
'precision', 'predicted', average, warn_for)
---------- End SVC ----------
########## 400 ##########
0
1
2
3
4
5
6
7
8
9
---------- XGBClassifier ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 132, 0: 268}
y_pred {1: 24615480, 0: 5617608}
Accuracy: 0.8673912171988518
Confusion matrix:
[[ 2191871 583436]
[ 3425737 24032044]]
Precision, recall and f1-score:
precision recall f1-score support
0 0.39 0.79 0.52 2775307
1 0.98 0.88 0.92 27457781
accuracy 0.87 30233088
macro avg 0.68 0.83 0.72 30233088
weighted avg 0.92 0.87 0.89 30233088
ROC AUC: 0.8325061267691527
PR AUC: 0.9678022121715352
---------- End XGBClassifier ----------
---------- LogisticRegression ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 132, 0: 268}
/home/mihailselezniov/anaconda3/lib/python3.7/site-packages/sklearn/linear_model/logistic.py:432: FutureWarning: Default solver will be changed to 'lbfgs' in 0.22. Specify a solver to silence this warning.
FutureWarning)
y_pred {1: 24132505, 0: 6100583}
Accuracy: 0.883478525250216
Confusion matrix:
[[ 2676543 98764]
[ 3424040 24033741]]
Precision, recall and f1-score:
precision recall f1-score support
0 0.44 0.96 0.60 2775307
1 1.00 0.88 0.93 27457781
accuracy 0.88 30233088
macro avg 0.72 0.92 0.77 30233088
weighted avg 0.94 0.88 0.90 30233088
ROC AUC: 0.9198556569298495
PR AUC: 0.9849705106843407
---------- End LogisticRegression ----------
---------- LinearSVC ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 132, 0: 268}
y_pred {1: 26072928, 0: 4160160}
Accuracy: 0.9360626674986029
Confusion matrix:
[[ 2501222 274085]
[ 1658938 25798843]]
Precision, recall and f1-score:
precision recall f1-score support
0 0.60 0.90 0.72 2775307
1 0.99 0.94 0.96 27457781
accuracy 0.94 30233088
macro avg 0.80 0.92 0.84 30233088
weighted avg 0.95 0.94 0.94 30233088
ROC AUC: 0.9204118912972257
PR AUC: 0.9845767108744891
---------- End LinearSVC ----------
---------- KNeighborsClassifier ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 132, 0: 268}
y_pred {0: 20316831, 1: 9916257}
Accuracy: 0.40662270423715896
Confusion matrix:
[[ 2576255 199052]
[17740576 9717205]]
Precision, recall and f1-score:
precision recall f1-score support
0 0.13 0.93 0.22 2775307
1 0.98 0.35 0.52 27457781
accuracy 0.41 30233088
macro avg 0.55 0.64 0.37 30233088
weighted avg 0.90 0.41 0.49 30233088
ROC AUC: 0.6410868538493939
PR AUC: 0.9335857418997253
---------- End KNeighborsClassifier ----------
---------- SGDClassifier ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 132, 0: 268}
y_pred {1: 28081109, 0: 2151979}
Accuracy: 0.9563816306160985
Confusion matrix:
[[ 1804284 971023]
[ 347695 27110086]]
Precision, recall and f1-score:
precision recall f1-score support
0 0.84 0.65 0.73 2775307
1 0.97 0.99 0.98 27457781
accuracy 0.96 30233088
macro avg 0.90 0.82 0.85 30233088
weighted avg 0.95 0.96 0.95 30233088
ROC AUC: 0.8187288070540304
PR AUC: 0.9646962317462499
---------- End SGDClassifier ----------
---------- BernoulliNB ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 132, 0: 268}
y_pred {0: 30233088}
Accuracy: 0.0917970073053735
Confusion matrix:
[[ 2775307 0]
[27457781 0]]
Precision, recall and f1-score:
/home/mihailselezniov/anaconda3/lib/python3.7/site-packages/sklearn/metrics/classification.py:1437: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples.
'precision', 'predicted', average, warn_for)
precision recall f1-score support
0 0.09 1.00 0.17 2775307
1 0.00 0.00 0.00 27457781
accuracy 0.09 30233088
macro avg 0.05 0.50 0.08 30233088
weighted avg 0.01 0.09 0.02 30233088
ROC AUC: 0.5
PR AUC: 0.9082029926946265
/home/mihailselezniov/anaconda3/lib/python3.7/site-packages/sklearn/metrics/classification.py:1437: UndefinedMetricWarning: F-score is ill-defined and being set to 0.0 in labels with no predicted samples.
'precision', 'predicted', average, warn_for)
---------- End BernoulliNB ----------
---------- RandomForestClassifier ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 132, 0: 268}
/home/mihailselezniov/anaconda3/lib/python3.7/site-packages/sklearn/ensemble/forest.py:245: FutureWarning: The default value of n_estimators will change from 10 in version 0.20 to 100 in 0.22.
"10 in version 0.20 to 100 in 0.22.", FutureWarning)
y_pred {1: 12305957, 0: 17927131}
Accuracy: 0.4877263612635269
Confusion matrix:
[[ 2607412 167895]
[15319719 12138062]]
Precision, recall and f1-score:
precision recall f1-score support
0 0.15 0.94 0.25 2775307
1 0.99 0.44 0.61 27457781
accuracy 0.49 30233088
macro avg 0.57 0.69 0.43 30233088
weighted avg 0.91 0.49 0.58 30233088
ROC AUC: 0.690783366767588
PR AUC: 0.9427517941236724
---------- End RandomForestClassifier ----------
---------- MLPClassifier ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 132, 0: 268}
/home/mihailselezniov/anaconda3/lib/python3.7/site-packages/sklearn/neural_network/multilayer_perceptron.py:566: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn't converged yet.
% self.max_iter, ConvergenceWarning)
y_pred {1: 25221782, 0: 5011306}
Accuracy: 0.9160637841559552
Confusion matrix:
[[ 2624481 150826]
[ 2386825 25070956]]
Precision, recall and f1-score:
precision recall f1-score support
0 0.52 0.95 0.67 2775307
1 0.99 0.91 0.95 27457781
accuracy 0.92 30233088
macro avg 0.76 0.93 0.81 30233088
weighted avg 0.95 0.92 0.93 30233088
ROC AUC: 0.9293636052970538
PR AUC: 0.9865601865637038
---------- End MLPClassifier ----------
---------- SVC ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 132, 0: 268}
/home/mihailselezniov/anaconda3/lib/python3.7/site-packages/sklearn/svm/base.py:193: FutureWarning: The default value of gamma will change from 'auto' to 'scale' in version 0.22 to account better for unscaled features. Set gamma explicitly to 'auto' or 'scale' to avoid this warning.
"avoid this warning.", FutureWarning)
y_pred {0: 30233051, 1: 37}
Accuracy: 0.09179823113007841
Confusion matrix:
[[ 2775307 0]
[27457744 37]]
Precision, recall and f1-score:
precision recall f1-score support
0 0.09 1.00 0.17 2775307
1 1.00 0.00 0.00 27457781
accuracy 0.09 30233088
macro avg 0.55 0.50 0.08 30233088
weighted avg 0.92 0.09 0.02 30233088
ROC AUC: 0.5000006737616561
PR AUC: 0.9082031163932338
---------- End SVC ----------
########## 500 ##########
0
1
2
3
4
5
6
7
8
9
---------- XGBClassifier ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 167, 0: 333}
y_pred {1: 24053115, 0: 6179973}
Accuracy: 0.8682784901099088
Confusion matrix:
[[ 2486466 288841]
[ 3693507 23764274]]
Precision, recall and f1-score:
precision recall f1-score support
0 0.40 0.90 0.56 2775307
1 0.99 0.87 0.92 27457781
accuracy 0.87 30233088
macro avg 0.70 0.88 0.74 30233088
weighted avg 0.93 0.87 0.89 30233088
ROC AUC: 0.8807044039899787
PR AUC: 0.9772587103124715
---------- End XGBClassifier ----------
---------- LogisticRegression ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 167, 0: 333}
/home/mihailselezniov/anaconda3/lib/python3.7/site-packages/sklearn/linear_model/logistic.py:432: FutureWarning: Default solver will be changed to 'lbfgs' in 0.22. Specify a solver to silence this warning.
FutureWarning)
y_pred {1: 25090240, 0: 5142848}
Accuracy: 0.910800808703365
Confusion matrix:
[[ 2610694 164613]
[ 2532154 24925627]]
Precision, recall and f1-score:
precision recall f1-score support
0 0.51 0.94 0.66 2775307
1 0.99 0.91 0.95 27457781
accuracy 0.91 30233088
macro avg 0.75 0.92 0.80 30233088
weighted avg 0.95 0.91 0.92 30233088
ROC AUC: 0.9242333276281159
PR AUC: 0.9855786904261938
---------- End LogisticRegression ----------
---------- LinearSVC ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 167, 0: 333}
y_pred {1: 26485239, 0: 3747849}
Accuracy: 0.9444534412098426
Confusion matrix:
[[ 2421906 353401]
[ 1325943 26131838]]
Precision, recall and f1-score:
precision recall f1-score support
0 0.65 0.87 0.74 2775307
1 0.99 0.95 0.97 27457781
accuracy 0.94 30233088
macro avg 0.82 0.91 0.86 30233088
weighted avg 0.96 0.94 0.95 30233088
ROC AUC: 0.9121860648486685
PR AUC: 0.9828681328297034
---------- End LinearSVC ----------
---------- KNeighborsClassifier ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 167, 0: 333}
y_pred {0: 19780691, 1: 10452397}
Accuracy: 0.4255173007798608
Confusion matrix:
[[ 2593806 181501]
[17186885 10270896]]
Precision, recall and f1-score:
precision recall f1-score support
0 0.13 0.93 0.23 2775307
1 0.98 0.37 0.54 27457781
accuracy 0.43 30233088
macro avg 0.56 0.65 0.39 30233088
weighted avg 0.90 0.43 0.51 30233088
ROC AUC: 0.6543314346255341
PR AUC: 0.9360453079704546
---------- End KNeighborsClassifier ----------
---------- SGDClassifier ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 167, 0: 333}
y_pred {1: 26674770, 0: 3558318}
Accuracy: 0.9476769624062219
Confusion matrix:
[[ 2375869 399438]
[ 1182449 26275332]]
Precision, recall and f1-score:
precision recall f1-score support
0 0.67 0.86 0.75 2775307
1 0.99 0.96 0.97 27457781
accuracy 0.95 30233088
macro avg 0.83 0.91 0.86 30233088
weighted avg 0.96 0.95 0.95 30233088
ROC AUC: 0.906505020848937
PR AUC: 0.9817173155680339
---------- End SGDClassifier ----------
---------- BernoulliNB ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 167, 0: 333}
y_pred {0: 30233088}
Accuracy: 0.0917970073053735
Confusion matrix:
[[ 2775307 0]
[27457781 0]]
Precision, recall and f1-score:
/home/mihailselezniov/anaconda3/lib/python3.7/site-packages/sklearn/metrics/classification.py:1437: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples.
'precision', 'predicted', average, warn_for)
precision recall f1-score support
0 0.09 1.00 0.17 2775307
1 0.00 0.00 0.00 27457781
accuracy 0.09 30233088
macro avg 0.05 0.50 0.08 30233088
weighted avg 0.01 0.09 0.02 30233088
ROC AUC: 0.5
PR AUC: 0.9082029926946265
/home/mihailselezniov/anaconda3/lib/python3.7/site-packages/sklearn/metrics/classification.py:1437: UndefinedMetricWarning: F-score is ill-defined and being set to 0.0 in labels with no predicted samples.
'precision', 'predicted', average, warn_for)
---------- End BernoulliNB ----------
---------- RandomForestClassifier ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 167, 0: 333}
/home/mihailselezniov/anaconda3/lib/python3.7/site-packages/sklearn/ensemble/forest.py:245: FutureWarning: The default value of n_estimators will change from 10 in version 0.20 to 100 in 0.22.
"10 in version 0.20 to 100 in 0.22.", FutureWarning)
y_pred {1: 18905723, 0: 11327365}
Accuracy: 0.7023971219876712
Confusion matrix:
[[ 2552609 222698]
[ 8774756 18683025]]
Precision, recall and f1-score:
precision recall f1-score support
0 0.23 0.92 0.36 2775307
1 0.99 0.68 0.81 27457781
accuracy 0.70 30233088
macro avg 0.61 0.80 0.58 30233088
weighted avg 0.92 0.70 0.77 30233088
ROC AUC: 0.8000923449190362
PR AUC: 0.9626491633581195
---------- End RandomForestClassifier ----------
---------- MLPClassifier ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 167, 0: 333}
/home/mihailselezniov/anaconda3/lib/python3.7/site-packages/sklearn/neural_network/multilayer_perceptron.py:566: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn't converged yet.
% self.max_iter, ConvergenceWarning)
y_pred {1: 26322057, 0: 3911031}
Accuracy: 0.9467785758437908
Confusion matrix:
[[ 2538645 236662]
[ 1372386 26085395]]
Precision, recall and f1-score:
precision recall f1-score support
0 0.65 0.91 0.76 2775307
1 0.99 0.95 0.97 27457781
accuracy 0.95 30233088
macro avg 0.82 0.93 0.86 30233088
weighted avg 0.96 0.95 0.95 30233088
ROC AUC: 0.9323720755928387
PR AUC: 0.9868702027975371
---------- End MLPClassifier ----------
---------- SVC ----------
(30233088, 8) (30233088,)
y_test {1: 27457781, 0: 2775307} y_train {1: 167, 0: 333}
/home/mihailselezniov/anaconda3/lib/python3.7/site-packages/sklearn/svm/base.py:193: FutureWarning: The default value of gamma will change from 'auto' to 'scale' in version 0.22 to account better for unscaled features. Set gamma explicitly to 'auto' or 'scale' to avoid this warning.
"avoid this warning.", FutureWarning)
y_pred {0: 30202364, 1: 30724}
Accuracy: 0.09281324487925283
Confusion matrix:
[[ 2775307 0]
[27427057 30724]]
Precision, recall and f1-score:
precision recall f1-score support
0 0.09 1.00 0.17 2775307
1 1.00 0.00 0.00 27457781
accuracy 0.09 30233088
macro avg 0.55 0.50 0.09 30233088
weighted avg 0.92 0.09 0.02 30233088
ROC AUC: 0.5005594771114242
PR AUC: 0.9083057093435957
---------- End SVC ----------
roc_metrics
[[0.8477, 0.8512, 0.8996, 0.6106, 0.8875, 0.5, 0.6128, 0.8951, 0.5], [0.828, 0.9053, 0.9005, 0.6266, 0.8945, 0.5, 0.6761, 0.9249, 0.5], [0.8429, 0.9108, 0.9149, 0.6301, 0.9194, 0.5, 0.7993, 0.9288, 0.5], [0.8325, 0.9199, 0.9204, 0.6411, 0.8187, 0.5, 0.6908, 0.9294, 0.5], [0.8807, 0.9242, 0.9122, 0.6543, 0.9065, 0.5, 0.8001, 0.9324, 0.5006]]
pr_metrics
[[0.9715, 0.9727, 0.9806, 0.9282, 0.9781, 0.9082, 0.9288, 0.9806, 0.9082], [0.9671, 0.9823, 0.9806, 0.931, 0.9797, 0.9082, 0.9401, 0.9858, 0.9082], [0.9699, 0.9833, 0.9835, 0.9316, 0.9845, 0.9082, 0.9624, 0.9864, 0.9082], [0.9678, 0.985, 0.9846, 0.9336, 0.9647, 0.9082, 0.9428, 0.9866, 0.9082], [0.9773, 0.9856, 0.9829, 0.936, 0.9817, 0.9082, 0.9626, 0.9869, 0.9083]]
f1_metrics
[[0.4305, 0.4056, 0.6688, 0.2071, 0.6782, 0.1682, 0.2074, 0.5055, 0.1682], [0.4836, 0.5564, 0.7148, 0.2156, 0.6298, 0.1682, 0.2421, 0.6496, 0.1682], [0.5192, 0.5686, 0.7171, 0.2176, 0.6836, 0.1682, 0.3662, 0.6925, 0.1682], [0.5223, 0.6031, 0.7213, 0.2231, 0.7324, 0.1682, 0.2519, 0.6741, 0.1682], [0.5553, 0.6594, 0.7426, 0.23, 0.7502, 0.1682, 0.362, 0.7594, 0.1683]]
'''
|
[
"mihailselezniov@yandex.ru"
] |
mihailselezniov@yandex.ru
|
489623c85dd39d6e0b92b5fcd5b2f7171a9bca5b
|
3b981dfc835d36eb9bb86e4dbb0b1e332285d5cf
|
/actions/migrations/0004_auto_20150617_1350.py
|
55c9700935257cb7323ae886c6e430f876d95787
|
[] |
no_license
|
richraines/nuortenideat
|
d9ad5ff33e4231c7f9960b9e1a54be16395173a2
|
033f63575c52ce118f0deba1168afca743de6c26
|
refs/heads/master
| 2020-09-01T01:39:39.137935
| 2016-10-31T14:24:59
| 2016-10-31T14:24:59
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 450
|
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Alter Action.subtype to a CharField(max_length=40) with '' default."""

    dependencies = [
        ('actions', '0003_auto_20150616_1206'),
    ]

    operations = [
        migrations.AlterField(
            model_name='action',
            name='subtype',
            # default='' lets existing rows satisfy the field without a prompt
            field=models.CharField(default='', max_length=40),
            preserve_default=True,
        ),
    ]
|
[
"erno@fns.fi"
] |
erno@fns.fi
|
93bc359a0bb970033fc1ab5dc491508e504711f2
|
797118d008cebbe7d06c06b6a6b83fe786d526fe
|
/review/models.py
|
77dfd46660a70249d24bb786a9c08f55ccc9ac15
|
[] |
no_license
|
ellehs/mznak-master
|
0b6aee18c382bcb4b8344754d058348c0a08a9c5
|
e5b7517d77ecfa0a6bbc465c0a164df0ef4bf4ae
|
refs/heads/master
| 2016-09-05T23:37:33.165829
| 2015-02-07T09:32:06
| 2015-02-07T09:32:06
| 30,451,354
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 507
|
py
|
# -*- coding: utf-8 -*-
from django.db import models
from pytils import dt, translit
class Review(models.Model):
    """A site review/testimonial (verbose labels are in Russian)."""

    name = models.CharField(u'название', max_length=255)                # display name
    url = models.CharField(u'адрес сайта', blank=True, max_length=255)  # optional site URL
    text = models.TextField(u'текст')                                   # review body

    class Meta:
        verbose_name = u'отзыв'
        verbose_name_plural = u'отзывы'
        ordering = ['-name']  # list reviews by name, descending

    def __unicode__(self):
        # Python 2 string representation (used e.g. by the Django admin)
        return self.name
|
[
"work-for@bk.ru"
] |
work-for@bk.ru
|
c04262bc65754c407a3897d7621ab5cab6a52dba
|
9ef29fb299a732a005ded1b28dd2cedaccd11a2a
|
/Programacion-dinamica-estocastica/caminos-aleatorios/borracho.py
|
a7c1c2212cf24879b0c212e8625252da69ef2e7c
|
[] |
no_license
|
alanmonu12/AI_ML_platzi
|
9c77549fbc54608dd6c03d0307535b1f9434d506
|
b3e569350dcce4f8182fa159754463e485391b06
|
refs/heads/master
| 2022-11-19T04:24:58.612345
| 2020-07-16T04:20:06
| 2020-07-16T04:20:06
| 280,051,977
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 286
|
py
|
import random
class Borracho:
    """Base random-walker ("drunkard"), identified only by a name."""

    def __init__(self, nombre):
        """Remember this walker's name."""
        self.nombre = nombre
class Borracho_tradicional(Borracho):
    """Classic drunkard: each step is one unit in a random cardinal direction."""

    def __init__(self, nombre):
        super().__init__(nombre)

    def camina(self):
        """Return a uniformly random unit step as an (dx, dy) tuple."""
        pasos = [(0, 1), (0, -1), (1, 0), (-1, 0)]
        return random.choice(pasos)
|
[
"alan.9489@gmail.com"
] |
alan.9489@gmail.com
|
062b351bf693088e13a6e64f364ba46c80f90d4e
|
4f8cec7b9e2d751cc32ca322725b1990ad68d1d8
|
/mt/data/fscore.py
|
88ce724d04a400c9ab7df78d91900a383ff46db8
|
[] |
no_license
|
Makalania/NLP
|
bf00b1403d7320eec92786cd2ccb8277086ed8a4
|
c051ddb4cf03a25d495b9f409f0bcf3edbda1eae
|
refs/heads/master
| 2020-05-30T19:52:55.476989
| 2015-03-25T22:55:10
| 2015-03-25T22:55:10
| 32,892,968
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,016
|
py
|
#!/usr/bin/env python
import sys
# Python 2 script: compare program output against a gold-standard file and
# print the word-level F1 score (2PR / (P + R)).
#   usage: fscore.py <program_output_file> <correct_file>
progoutputfile = sys.argv[1]
correctfile = sys.argv[2]
##
##
##fsock_progoutput = open(progoutputfile,'r')
##fsock_correct = open(correctfile,'r')
fsock_progoutput = open(progoutputfile,'r')
fsock_correct = open(correctfile,'r')
tokens = 0.0        # total predicted words (precision denominator)
correctwords = 0.0  # predicted words that also appear in the gold line
shouldreturn = 0.0  # total gold words (recall denominator)
##original = [x for x in fsock_original]
progoutput = [x for x in fsock_progoutput]
correct = [x for x in fsock_correct]
a = 0  # count of over-long (>10 token) output lines, which are skipped
for i in range(0,len(progoutput)):
    ## line1 = original[i].split()
    line2 = progoutput[i].split()
    line3 = correct[i].split()
    if len(line2)>10:
        a= a+1
    else:
        tokens = tokens + len(line2)
        shouldreturn = shouldreturn + len(line3)
        for j in range(0,len(line2)):
            if line2[j] in line3:
                correctwords = correctwords + 1
# F1 = 2*P*R/(P+R) with P = correctwords/tokens, R = correctwords/shouldreturn
correctness = 2*(correctwords/tokens*correctwords/shouldreturn/(correctwords/tokens+correctwords/shouldreturn))
print correctness
|
[
"yz102@cs.brown.edu"
] |
yz102@cs.brown.edu
|
6a95a78a19676624942e0fa3f19b0f11fb130db1
|
20cc3d4f4be442047d896fcc0e71ff88ba1b4d99
|
/app/email.py
|
e2846a0434ad65e2390ec6dd49b5cae37e60980f
|
[] |
no_license
|
sahilsingh99/blog_app
|
94a01c4fbfceb5deca2c75e2e6bd39edc44b64c6
|
1c756ceb1920bbdfe7445dd748a108f34a660ffb
|
refs/heads/master
| 2022-11-27T13:26:15.061096
| 2020-07-22T18:05:53
| 2020-07-22T18:05:53
| 281,745,906
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 816
|
py
|
from flask_mail import Message
from app import mail,app
from flask import render_template
from threading import Thread
def send_password_reset_email(user):
    """Email a password-reset link to *user*.

    The token comes from the user model and is embedded in both the
    plain-text and HTML bodies rendered from the email/ templates.
    """
    token = user.get_reset_password_token()
    send_email('[Microblog] Reset Your Password',
               sender=app.config['ADMINS'][0],
               recipients=[user.email],
               text_body=render_template('email/reset_password.txt',
                                         user=user, token=token),
               html_body=render_template('email/reset_password.html', user=user, token=token))
def send_async_email(app, msg):
    """Send *msg* from a worker thread; Flask-Mail needs an app context."""
    with app.app_context():
        mail.send(msg)
def send_email(subject, sender, recipients, text_body, html_body):
    """Build a mail message and dispatch it on a background thread."""
    msg = Message(subject, sender=sender, recipients=recipients)
    msg.body = text_body
    msg.html = html_body
    worker = Thread(target=send_async_email, args=(app, msg))
    worker.start()
|
[
"sahilkundu.1234@gmail.com"
] |
sahilkundu.1234@gmail.com
|
54623a912dddf4029450072dc23900b3ced7a8d3
|
19ddab74600f71700a6b693281d0180d5271f295
|
/leetcode_hot/hot2_两数相加.py
|
a8ebbf9cd632f3116ba0a584393749db6cd7385f
|
[] |
no_license
|
zhulf0804/Coding.Python
|
4d55a430da1a8077c81feba65c13ac654aaf094a
|
46ab03e23d15ebd5434ef4dd5ae99130000b00a5
|
refs/heads/master
| 2022-09-14T18:40:59.880941
| 2022-08-20T08:25:51
| 2022-08-20T08:25:51
| 213,113,482
| 3
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,009
|
py
|
class ListNode:
    """Singly linked-list node holding one decimal digit.

    val/next now have defaults (0 / None): ``Solution.addTwoNumbers`` in
    this file grows the result list with bare ``ListNode()`` calls, which
    raised TypeError with the previous mandatory parameters.
    """

    def __init__(self, val=0, next=None) -> None:
        self.val = val    # payload digit
        self.next = next  # following node, or None at the tail
class Solution:
    def addTwoNumbers(self, l1, l2):
        """LeetCode 2: add two numbers stored as reversed-digit linked lists.

        Each node holds one decimal digit, least significant first; the
        returned list holds the digit-wise sum with carry propagation.

        NOTE(review): ``ListNode()`` is called with no arguments below,
        which requires ``ListNode.__init__`` to provide defaults — the
        class defined above takes mandatory (val, next); confirm.
        """
        l = ListNode()
        p, q, r = l1, l2, l  # p/q walk the inputs, r builds the result
        add = 0              # carry from the previous digit
        # Phase 1: both lists still have digits.
        while p and q:
            cur = p.val + q.val
            r.val = (cur + add) % 10
            add = (cur + add) // 10
            # Allocate the next result node only if more digits or a carry remain.
            if add > 0 or p.next or q.next:
                r.next = ListNode()
                r = r.next
            p = p.next
            q = q.next
        # Phase 2: drain whichever input is longer, still propagating carry.
        while p:
            r.val = (p.val + add) % 10
            add = (p.val + add) // 10
            if add > 0 or p.next:
                r.next = ListNode()
                r = r.next
            p = p.next
        while q:
            r.val = (q.val + add) % 10
            add = (q.val + add) // 10
            if add > 0 or q.next:
                r.next = ListNode()
                r = r.next
            q = q.next
        # Phase 3: a final carry becomes a new most-significant digit.
        if add > 0:
            r.val = add
        return l
|
[
"zhulf0804@gmail.com"
] |
zhulf0804@gmail.com
|
983aedf01189675dae4469cf64111363e4119acc
|
c39e466c2b6fdffbc410f24669f214e13fb87781
|
/PYTHON/EJERCICIOS/TEMA 5/COMPIS/015_G5-Ruben Gomez-Gordo Diaz_638463_assignsubmission_file_/Script5_G5.py
|
1b49bb424875ac04bdc63a8b5cd13b564ba64db6
|
[] |
no_license
|
enanibus/biopython
|
3a58efbcc92f1ce60285a115c620de9295b7d281
|
613d334a5c0502059930d9381a9464ef533cca1c
|
refs/heads/master
| 2021-01-12T17:27:39.516793
| 2017-01-02T18:30:09
| 2017-01-02T18:30:09
| 71,573,732
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,185
|
py
|
#Group 5: Daniel Díaz, Rubén Gómez-Gordo, Alicia Roig
def GetGroups(Regex, Str):
    """Search *Str* with *Regex* and return the tuple of captured groups,
    or the whole matched string when the pattern defines no groups.

    Raises AttributeError when nothing matches (search() returns None).
    """
    match = re.compile(Regex).search(Str)
    captured = match.groups()
    return captured if captured else match.group()
def GetAccNum(InputN):
    """Return the RefSeq accession number found in the FASTA header of *InputN*."""
    MyFile=open(InputN,"r")
    for Line in MyFile:
        if ">" in Line:  # only search header lines for the accession
            RegEx=r"[NX]M_\d+\.\d"  # RefSeq mRNA pattern, e.g. NM_000546.5
            AccNum=GetGroups(RegEx,Line)
    MyFile.close()
    return(AccNum)
def GetDNA(InputN):
    """Read FASTA file *InputN* and return its concatenated sequence, uppercased."""
    with open(InputN, "r") as handle:
        # Skip header lines (those containing '>'); strip line endings.
        pieces = [line.strip() for line in handle if ">" not in line]
    return "".join(pieces).upper()
def GetCode(InputN):
    """Parse a tab-separated genetic-code file *InputN* into a dict.

    Each line like ``TTT<TAB>F<TAB>Phe`` becomes an entry mapping the
    codon to its (one-letter, three-letter) amino-acid codes.
    """
    GenCode={}
    MyFile=open(InputN,"r")
    RegEx=r"([ACTG]{3})\t([^BJUXZ])\t([A-Z][a-z]{2})"  # codon, 1-letter aa, 3-letter aa
    for Line in MyFile:
        Code=GetGroups(RegEx,Line)
        GenCode[Code[0]]=Code[1:]  # e.g. 'TTT': ('F', 'Phe')
    MyFile.close()
    return(GenCode)
def Get_rcDNA(DNA):
    """Return the reverse complement of uppercase DNA string *DNA*.

    Raises KeyError for any character outside A/C/G/T.
    """
    pairs = {"A": "T", "T": "A", "C": "G", "G": "C"}
    return "".join(pairs[base] for base in reversed(DNA))
def Translate(DNAs,GenCode,OutputN):
    """Write all translations of the strands in *DNAs* to file *OutputN*.

    DNAs is a list of sequences (here: plus strand, then minus strand);
    GenCode maps codon -> (1-letter aa, 3-letter aa); each strand is
    translated in reading frames +1, +2 and +3.
    """
    MyFile=open(OutputN,"w")
    s="+"  # strand label; flipped to "-" after the first strand
    for Strand in DNAs:
        MyFile.write('## STRAND ('+s+')\n')
        for Frame in [0,1,2]:
            Protein=""
            # Step through the strand codon by codon from the frame offset.
            for Position in range(Frame,len(Strand),3):
                CurrentCodon=Strand[Position:Position+3]
                if len(CurrentCodon)==3:  # skip a trailing 1-2 nt outside the frame
                    Protein+=GenCode[CurrentCodon][0]  # append the 1-letter code
            MyFile.write("# Frame +"+str(Frame+1)+":\n"+Protein+"\n\n")
        s="-"
        MyFile.write("\n")
    MyFile.close()
def main():
    """Run the pipeline: read accession + sequence from the FASTA in argv[1],
    then write all six-frame translations to '<accession>.txt'."""
    Acc_Num=GetAccNum(sys.argv[1])
    Seq=GetDNA(sys.argv[1])
    Gen_Code=GetCode("GeneticCode_standard.csv")  # standard genetic code table
    Sequences=[Seq,Get_rcDNA(Seq)]  # plus strand and its reverse complement
    Translate(Sequences,Gen_Code,Acc_Num+".txt")
if __name__=="__main__":  # do not run when imported as a module
    import sys
    import re
    try:
        if len(sys.argv)==2:  # expects exactly one argument: the FASTA file name
            main()
        else:
            print("(!) Type the name of your FASTA file after the name of the script (e.g., 'python program.py DNA.fasta'")
    except AttributeError:  # no RefSeq accession matched in the file
        print("Oops! Your file does not contain any RefSeq number. Please, try with another file.")
    except FileNotFoundError:  # the named FASTA file could not be opened
        print("Oops! Your file was not found. Please, check you're typing the full name, including the extension.")
    except:  # NOTE(review): bare except — hides unrelated bugs behind a generic message
        print("Oops! There is something wrong with your input. Please, check you're introducing a FASTA file.")
|
[
"juanenriqueztraba@gmail.com"
] |
juanenriqueztraba@gmail.com
|
25a37a5bba8360a0c2bb9e1c42712a4cd2b816b3
|
8cb4fa5bb9de49eb07c2d805c510317f0192c88b
|
/virtual/bin/flask
|
1cbd1be13fc0e8389dca51bc2c50af578d407c8e
|
[] |
no_license
|
hope-mbuthye38/pomodoro
|
e6384268acda4c358e0999f7b6bef109ad047a1a
|
a81811b7f230f6e625e083af96e410d0ef082ac2
|
refs/heads/master
| 2023-08-04T18:19:01.318023
| 2021-09-21T08:00:20
| 2021-09-21T08:00:20
| 408,374,341
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 241
|
#!/home/hope/Desktop/pomodoro/virtual/bin/python3.8
# -*- coding: utf-8 -*-
# Auto-generated console-script shim for the `flask` CLI entry point.
import re
import sys
from flask.cli import main
if __name__ == '__main__':
    # Strip a trailing '-script.pyw' / '.exe' from argv[0] (Windows launchers)
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
|
[
"-C"
] |
-C
|
|
50fbef3d1cc683d9dcda3179bf322ba15a070b04
|
55cfa8c52e9b8ebcdaad9152e16d74eb9667fd64
|
/image_resizer.py
|
01882e896e31eb4c820b9591f8a8b745d6e6017b
|
[] |
no_license
|
szkm330/LrcToAss48
|
792b329a3fdf619b53203b23c27a7238b622103b
|
51a54231ce518ffc24a29855a1fd2ce47c834d24
|
refs/heads/main
| 2023-08-14T19:44:02.916790
| 2023-08-02T06:15:20
| 2023-08-02T06:15:20
| 340,286,885
| 1
| 2
| null | 2021-02-21T11:22:40
| 2021-02-19T07:09:02
|
Python
|
UTF-8
|
Python
| false
| false
| 1,327
|
py
|
import sys
from PIL import Image, ImageOps
def resize_and_fill(input_image_path, output_image_path, target_width=405, target_height=720):
    """Letterbox an image into a target_width x target_height canvas.

    The image is scaled (aspect ratio preserved) to fit inside the target
    box, centred on a black RGB background, and saved to *output_image_path*.
    """
    # Open the original image
    original_image = Image.open(input_image_path)
    # Original dimensions
    original_width, original_height = original_image.size
    # The smaller of the two ratios scales without cropping either axis
    width_ratio = target_width / original_width
    height_ratio = target_height / original_height
    scale_ratio = min(width_ratio, height_ratio)
    # Dimensions after scaling
    new_width = int(original_width * scale_ratio)
    new_height = int(original_height * scale_ratio)
    # Resize, then centre on the black canvas
    resized_image = original_image.resize((new_width, new_height), Image.LANCZOS)
    offset_x = (target_width - new_width) // 2
    offset_y = (target_height - new_height) // 2
    new_image = Image.new("RGB", (target_width, target_height), color="black")
    new_image.paste(resized_image, (offset_x, offset_y))
    # Save the result
    new_image.save(output_image_path)
if __name__ == "__main__":
    # CLI entry point: python image_resizer.py <input> <output>
    if len(sys.argv) != 3:
        # Fix: usage message previously named a stale 'demo.py' script
        print("Usage: python image_resizer.py input_image_path output_image_path")
        sys.exit(1)
    input_image_path = sys.argv[1]
    output_image_path = sys.argv[2]
    resize_and_fill(input_image_path, output_image_path)
|
[
"skxyue98@outlook.com"
] |
skxyue98@outlook.com
|
b260aa45d7254666897e517d887e60cb7d106589
|
b6a9e74b33dab9045dad84e45b20fd7ab53a3c14
|
/input/kinetics/families/2+2_cycloaddition_CCO/depository.py
|
e0df5777904093168df99f5d58e589c3ead7fb49
|
[] |
no_license
|
jwallen/RMG-database
|
ae5c22530db3566ffc753bd6dea32335d327dbd8
|
4e813e167a5a32f7bc511aeca09907c2823ee13b
|
refs/heads/master
| 2021-01-23T20:47:23.139929
| 2013-05-02T19:24:55
| 2013-05-02T19:24:55
| 1,837,845
| 1
| 6
| null | 2020-10-01T14:10:49
| 2011-06-02T15:43:15
|
Python
|
UTF-8
|
Python
| false
| false
| 141
|
py
|
#!/usr/bin/env python
# encoding: utf-8

# RMG kinetics depository stub for the '2+2_cycloaddition_CCO' reaction family.
name = "2+2_cycloaddition_CCO/depository"
shortDesc = u""
longDesc = u"""
"""
recommended = False  # depository data is not recommended for estimation
|
[
"jwallen@mit.edu"
] |
jwallen@mit.edu
|
3baf66f6be77e137549b98a6a8f0f95aade16cb4
|
def31ba2459712ae6247f3c81533fd190711b3ed
|
/DeviceTest/bin/Debug/projects/Tuya/tool.py
|
4a065cf4c9a24835d66d4d09776561071970a9cd
|
[] |
no_license
|
EtingC/DeviceTest
|
bfc217d1d8ab7125d29c2dcd3140f3f0a124b1da
|
19073c51856bb747857a5132b6e04dfa4fe418b4
|
refs/heads/master
| 2020-04-07T07:30:48.449128
| 2018-11-19T07:49:45
| 2018-11-19T07:49:45
| 158,179,225
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,769
|
py
|
# encoding=utf8
# ***********************************************
#
# Filename: tool.py
#
# Author: Benson - zjxucb@gmail.com
# Description: ---
# Create: 2018-04-11 09:39:36
# Last Modified: 2018-04-11 09:39:39
# ***********************************************
import codecs
import sys
import os
from utils import *
class QRCode(ItemObject):
    """Payload serialised into the label's QR code, built from a device record."""

    def __init__(self,device):
        ItemObject.__init__(self)
        self.MAC = device.deviceId
        self.PTYPE = device.name
        self.MODELID = device.modelId
        self.ZIGBEEVER = device.softwareVersion
        self.MCUVER = device.mcuBinVersion
        self.DATE = NowDateToStr()  # label production date (today)
        self.SN = device.SN
def process():
    """Read argument.txt next to this script, build the label's key/value
    items (including a QR-code JSON payload) and return the result as JSON.

    Failures are reported via LabelContent.ErrorCode/Message in the
    returned JSON rather than raised.
    """
    path = os.path.dirname(os.path.realpath(__file__))
    labelContent = LabelContent()
    try:
        try:
            argumentFile = codecs.open("%s\\argument.txt" % path)#,encoding="utf-8")
            argument = argumentFile.read()
            argumentFile.close()
        except Exception as er:
            # Message (zh): "failed to load file [argument.txt]"
            labelContent.Message= u"载入文件 [argument.txt]错误"
            labelContent.ErrorCode =ERROR_CODE_LOAD_FILE_FAIL
            return labelContent.dumpToJSON()
        try:
            device = ArgumentDevice()
            device.assign(argument)
        except Exception as er:
            # Message (zh): "invalid device argument file"
            labelContent.Message = u"无效的设备参数文件"
            labelContent.ErrorCode = ERROR_CODE_FAIL
            return labelContent.dumpToJSON()
        # One KeyValuePair per label field, in print order.
        pair = KeyValuePair("MAC",device.deviceId)
        labelContent.Items.append(pair)
        pair = KeyValuePair("PTYPE",device.name)
        labelContent.Items.append(pair)
        pair = KeyValuePair("MODELID", device.modelId)
        labelContent.Items.append(pair)
        pair = KeyValuePair("ZIGBEEVER", device.softwareVersion)
        labelContent.Items.append(pair)
        pair = KeyValuePair("MCUVER", device.mcuBinVersion)
        labelContent.Items.append(pair)
        pair = KeyValuePair("DATE", NowDateToStr())
        labelContent.Items.append(pair)
        pair = KeyValuePair("SN", device.SN)
        labelContent.Items.append(pair)
        # The QRCODE value is the JSON dump of the QRCode item above.
        QRCodeItem =QRCode(device)
        pair = KeyValuePair("QRCODE", QRCodeItem.dumpToJSON())
        labelContent.Items.append(pair)
        labelContent.FileURL = device.labelFilePathName
        labelContent.Message= u'生成标签成功'  # (zh) "label generated successfully"
        labelContent.ErrorCode = ERROR_CODE_OK
        return labelContent.dumpToJSON()
    except Exception as err:
        labelContent.Message = u'未知的错误'  # (zh) "unknown error"
        labelContent.ErrorCode = ERROR_CODE_FAIL
        return labelContent.dumpToJSON()
if __name__ == '__main__':
    result = process()
    # Write the JSON result to stdout as GBK-encoded bytes (Python 2 style).
    sys.stdout.write(result.encode(encoding='gbk'))
    sys.exit()
|
[
"zjxucb@gmail.com"
] |
zjxucb@gmail.com
|
211d4f33542e2edd772964cd97c386295777429e
|
d4bd6aadc9ccc33e31c19c04593774818c45a308
|
/Python/Platzi/POO/polimorfismoPersona.py
|
0aa2e850b678705bdd36259946067fed0ef89cee
|
[] |
no_license
|
gilbertog07/CodeSamplesGilbertoG
|
2bbc3360f38594730c3c889e3809eb7b1457b77a
|
78a9353c4ebb51ece84fff2d77c5916db0a23eb9
|
refs/heads/master
| 2023-05-26T09:00:10.621901
| 2022-03-08T20:27:26
| 2022-03-08T20:27:26
| 248,544,362
| 0
| 0
| null | 2023-05-22T22:46:26
| 2020-03-19T15:55:24
|
Python
|
UTF-8
|
Python
| false
| false
| 562
|
py
|
class Persona:
    """A person that can advance by walking."""

    def __init__(self, nombre):
        """Remember this person's name."""
        self.nombre = nombre

    def avanza(self):
        """Announce how this person moves."""
        print('Ando caminando')
class Ciclista(Persona):
    """A cyclist: overrides avanza() to demonstrate polymorphism."""

    def __init__(self, nombre):
        # Delegate name storage to the Persona base class.
        super().__init__(nombre)

    def avanza(self):
        """Announce how this cyclist moves."""
        print('Ando moviendome en mi bicicleta')
def main():
    """Create a Persona and a Ciclista and let each one avanza()."""
    for viajero in (Persona('Gilberto'), Ciclista('Gilberto')):
        viajero.avanza()
if __name__ == '__main__':
    main()  # run the demo only when executed as a script
|
[
"gilbertoaguerrero@gmail.com"
] |
gilbertoaguerrero@gmail.com
|
4f8d860491fb98c8fa0456eb9247c12bfbd38a98
|
f12ec86db250a8b2d628aabaff87fc3d0639c9d8
|
/venv/lib/python3.6/site-packages/phonenumbers/data/region_MP.py
|
7a493a7ac3a93d56b2f4b5eff5804e4eba6077fd
|
[] |
no_license
|
LawreenL/cse442_debate_app
|
f70f5e57dd51030836bd8b06ce93eb3d80a4a631
|
e041a8951147c713231fc60cf366bd9af9fcb48a
|
refs/heads/master
| 2020-04-29T05:33:58.338443
| 2019-03-15T18:00:59
| 2019-03-15T18:00:59
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,449
|
py
|
"""Auto-generated file, do not edit by hand. MP metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata

# Numbering-plan metadata for MP (country_code 1, NANP-style 10-digit numbers).
PHONE_METADATA_MP = PhoneMetadata(id='MP', country_code=1, international_prefix='011',
    general_desc=PhoneNumberDesc(national_number_pattern='(?:[58]\\d\\d|(?:67|90)0)\\d{7}', possible_length=(10,), possible_length_local_only=(7,)),
    fixed_line=PhoneNumberDesc(national_number_pattern='670(?:2(?:3[3-7]|56|8[5-8])|32[1-38]|4(?:33|8[348])|5(?:32|55|88)|6(?:64|70|82)|78[3589]|8[3-9]8|989)\\d{4}', example_number='6702345678', possible_length=(10,), possible_length_local_only=(7,)),
    mobile=PhoneNumberDesc(national_number_pattern='670(?:2(?:3[3-7]|56|8[5-8])|32[1-38]|4(?:33|8[348])|5(?:32|55|88)|6(?:64|70|82)|78[3589]|8[3-9]8|989)\\d{4}', example_number='6702345678', possible_length=(10,), possible_length_local_only=(7,)),
    toll_free=PhoneNumberDesc(national_number_pattern='8(?:00|33|44|55|66|77|88)[2-9]\\d{6}', example_number='8002123456', possible_length=(10,)),
    premium_rate=PhoneNumberDesc(national_number_pattern='900[2-9]\\d{6}', example_number='9002123456', possible_length=(10,)),
    personal_number=PhoneNumberDesc(national_number_pattern='5(?:00|2[12]|33|44|66|77|88)[2-9]\\d{6}', example_number='5002345678', possible_length=(10,)),
    national_prefix='1',
    national_prefix_for_parsing='1|([2-9]\\d{6})$',
    national_prefix_transform_rule='670\\1',
    leading_digits='670')
|
[
"tjpeyton@buffalo.edu"
] |
tjpeyton@buffalo.edu
|
d41002e677cea00809127c02bdf251b6dfd2a57c
|
8171f442a81f4a70f9f825947d529e0cc410ac65
|
/dbconnect.py
|
344cc4793241909ae00a5ac6ccc33156e4946770
|
[] |
no_license
|
omkarcpatilgithub/Moneytor
|
f0b8c726b1c7eec4fdad1ded8a9018016e6f9d4c
|
6e567745a1c1758e1264a9eb14a2142da709219a
|
refs/heads/master
| 2022-04-13T20:38:32.192512
| 2020-04-02T16:30:38
| 2020-04-02T16:30:38
| 250,233,712
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,407
|
py
|
import mysql.connector
from datetime import date, timedelta,datetime

# Module-level MySQL connection and cursor shared by every function below.
mydb = mysql.connector.connect(
    host="localhost", # name of host on which your database is stored
    user="root", # username for the database
    passwd="", # password for the database
    database='moneytor' #name of data base
    ### before proceding further you may execute queries which are stored in sqls.txt file.
)
#print(mydb)
mycursor = mydb.cursor()
def selectall():
    """Return every row of the hotel table."""
    mycursor.execute('select * from hotel')
    return mycursor.fetchall()
def insert(roomid, date, name):
    """Insert a booking row (booked=1) for *roomid* on *date* under *name*.

    Uses a parameterised query instead of string concatenation so
    user-supplied values cannot inject SQL; duplicates for the same
    (roomid, date) are collapsed afterwards and the change committed.
    """
    query = "INSERT INTO hotel VALUES (NULL, %s, %s, '1', %s)"
    mycursor.execute(query, (roomid, date, name))
    clear_duplicates()
    mycursor.execute('commit')
    return
# If duplicates exist for a (roomid, book_date) pair, keep only the newest
# row (highest auto-increment ID) and delete the rest.
def clear_duplicates():
    """Remove all but the latest row per (roomid, book_date)."""
    query = '''DELETE FROM hotel
    WHERE ID NOT IN
    (
    SELECT MAX(ID) AS MaxRecordID
    FROM hotel
    GROUP BY roomid, book_date
    )'''
    mycursor.execute(query)
    mycursor.execute('commit')
def specific_date():
    """Prompt for a date (YYYY-MM-DD) and return all booking rows on it.

    Parameterised query: the user-typed date can no longer inject SQL.
    """
    date = str(input())
    mycursor.execute("select * from hotel where book_date = %s", (date,))
    m = mycursor.fetchall()
    return m
def specific_room():
    """Prompt for a room id and return all rows for that room.

    Parameterised query: the user-typed room id can no longer inject SQL.
    """
    roomid = str(input())
    mycursor.execute("select * from hotel where roomid = %s", (roomid,))
    m = mycursor.fetchall()
    return m
def showresult(result):
    """Pretty-print booking rows as roomid / date / booked-flag / name columns."""
    print('roomid\t booking date\t booked\t Name')
    for row in result:
        print(row[1], '\t', row[2], '\t', row[3], '\t', row[4])
def cancelBooking():
    """Prompt for a room id and date and mark that booking as free.

    Returns the number of rows affected (0 when nothing matched).
    Parameterised query: user input can no longer inject SQL.
    """
    print('enter room you want to cancel')
    roomid = str(input())
    print('for which date want to cancel')
    date = str(input())
    query = "UPDATE hotel SET booked = '0', name = '' WHERE roomid = %s and book_date = %s"
    mycursor.execute(query, (roomid, date))
    affected_rows = mycursor.rowcount
    clear_duplicates()
    mycursor.execute('commit')
    return affected_rows
def check_result():
    """Prompt for a room id and a date and report whether such a row exists.

    Parameterised query: user input can no longer inject SQL.
    """
    roomid = str(input())
    date = str(input())
    mycursor.execute("select * from hotel where roomid = %s and book_date = %s",
                     (roomid, date))
    m = mycursor.fetchall()
    if not m:  # idiomatic empty-check instead of `m == []`
        print('seems wrong input')
    else:
        print('input is valid..... ')
def room_avaibility():
    """Prompt for a start and end date and print rooms free for the whole span.

    A room qualifies when it has zero booked rows with
    startdate <= book_date < enddate. Parameterised query: user input
    can no longer inject SQL.
    """
    print('select from date in format YYYY-MM-DD')
    startdate = str(input())
    print('select end date in format YYYY-MM-DD')
    endate = str(input())
    query = ("SELECT roomid FROM hotel WHERE book_date >= %s AND book_date < %s "
             "GROUP BY roomid HAVING sum(booked=1)=0")
    mycursor.execute(query, (startdate, endate))
    m = mycursor.fetchall()
    print()
    print('following rooms are available from', startdate, 'to', endate)
    for i in m:
        print(i[0])
def show_all_bookings():
    """Print every row currently marked as booked (booked = '1')."""
    query = "SELECT * FROM hotel WHERE booked = '1'"
    mycursor.execute(query)
    m = mycursor.fetchall()
    showresult(m)
def booking_for_specific_time_period():
    """Prompt for a room, guest name and date range, then book each day.

    Both endpoints of the range are booked (inclusive), matching the
    original `delta.days + 1` loop.
    """
    print('which room you want to book between 101-110')
    roomid = str(input())
    print('for whom?....')
    name = str(input())
    print('select from date in format YYYY-MM-DD')
    start_text = str(input())
    print('select end date in format YYYY-MM-DD')
    end_text = str(input())
    # Parse both endpoints straight to date objects (yyyy-mm-dd).
    sdate = datetime.strptime(start_text, "%Y-%m-%d").date()
    edate = datetime.strptime(end_text, "%Y-%m-%d").date()
    span = edate - sdate
    for offset in range(span.days + 1):
        insert(roomid, str(sdate + timedelta(days=offset)), name)
|
[
"noreply@github.com"
] |
omkarcpatilgithub.noreply@github.com
|
27d8dee494284aaf4ec8afde8592bd0151cb9870
|
7cf6555e871df5c21f4aefdfa9c32e151e4b0c5f
|
/mtp_bank_admin/apps/bank_admin/management/commands/create_adi_file.py
|
58dfe90de8f3712a3f43e28bcffc2b83228862c0
|
[
"MIT",
"LicenseRef-scancode-proprietary-license"
] |
permissive
|
ministryofjustice/money-to-prisoners-bank-admin
|
05af78e0bdd9303f71f05e0c470a67ac15e81cf1
|
64d2f0e0402a9b5c70924d40f39d77aa156f214c
|
refs/heads/main
| 2023-08-17T03:01:54.164015
| 2023-08-15T15:33:22
| 2023-08-15T15:33:22
| 40,470,925
| 4
| 2
|
MIT
| 2023-09-05T17:04:56
| 2015-08-10T08:13:03
|
Python
|
UTF-8
|
Python
| false
| false
| 160
|
py
|
from bank_admin.adi import get_adi_journal_file
from . import FileGenerationCommand
class Command(FileGenerationCommand):
    """Management command that runs get_adi_journal_file through the shared
    FileGenerationCommand harness."""
    function = get_adi_journal_file
|
[
"ian.brechin@gmail.com"
] |
ian.brechin@gmail.com
|
db70ac00a2f0896c3d813c7f035ad5f03774b1a6
|
172492be54a536fb045fe58bfe6e273204b109a6
|
/02两数相加.py
|
d26179d8cb683045b2c6c8b7657c9a0906b19bb0
|
[] |
no_license
|
z382556318/LeetCode
|
394f6fcc2c96a7dd34731322bb08d217963fe412
|
233128a863d6aa9424c2bff79a4f24cf2e753ab7
|
refs/heads/master
| 2020-05-30T11:38:32.257350
| 2019-06-01T08:35:12
| 2019-06-01T08:35:12
| 189,710,409
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,795
|
py
|
# Definition for singly-linked list.
class ListNode:
    """A single node of a singly-linked list."""
    def __init__(self, x):
        self.val = x      # payload digit/value
        self.next = None  # successor node; None at the tail
class Solution:
    """LeetCode 2: add two numbers stored as reversed-digit linked lists."""

    def add(self, x, y, carry):
        """Add two decimal digits plus a carry.

        Returns (digit, new_carry) where digit is in [0, 9] and new_carry
        is 0 or 1.
        """
        total = x + y + carry
        if total >= 10:
            return total - 10, 1
        return total, 0

    def addTwoNumbers(self, l1: ListNode, l2: ListNode) -> ListNode:
        """Return the digit list of l1 + l2 (least-significant digit first).

        Simplification of the original: the four duplicated tail-handling
        branches are replaced by one loop that treats an exhausted list as
        contributing 0, producing the same digit sequence. Input lists are
        not mutated (the original spliced leftover nodes into the result).
        """
        if l1 is None:
            return l2
        if l2 is None:
            return l1
        dummy = cur = ListNode(0)  # dummy head simplifies appending
        carry = 0
        while l1 or l2 or carry:
            a = l1.val if l1 else 0
            b = l2.val if l2 else 0
            digit, carry = self.add(a, b, carry)
            cur.next = ListNode(digit)
            cur = cur.next
            if l1:
                l1 = l1.next
            if l2:
                l2 = l2.next
        return dummy.next
if __name__ == '__main__':
    # Manual smoke test: 89 + 1 = 90, printed least-significant digit first
    # (expected output: 0 then 9). Removed the unused `a` and `b` lists.
    l1 = ListNode(9)
    l1.next = ListNode(8)
    l2 = ListNode(1)
    head = Solution().addTwoNumbers(l1, l2)
    while head:
        print(head.val)
        head = head.next
|
[
"382556318@qq.com"
] |
382556318@qq.com
|
ca02187744195787ae69fa40e50b040a1de233f3
|
072402eb476b38ef22017c7b930c912da6c60ea6
|
/lib/_stubs/TDFunctions.py
|
56efd2ff7229f316a774fac02ae6981a8c0aeb32
|
[] |
no_license
|
t3kt/pattern-mapper
|
46e055bf5484f49d8e2ff1223e2db60f5e8858df
|
913c90213cf9eee42bdf6a681226c865c9d90e28
|
refs/heads/master
| 2022-08-24T20:56:53.344580
| 2019-12-01T02:51:48
| 2019-12-01T02:51:48
| 178,997,169
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 22,891
|
py
|
# This file and all related intellectual property rights are
# owned by Derivative Inc. ("Derivative"). The use and modification
# of this file is governed by, and only permitted under, the terms
# of the Derivative [End-User License Agreement]
# [https://www.derivative.ca/Agreements/UsageAgreementTouchDesigner.asp]
# (the "License Agreement"). Among other terms, this file can only
# be used, and/or modified for use, with Derivative's TouchDesigner
# software, and only by employees of the organization that has licensed
# Derivative's TouchDesigner software by [accepting] the License Agreement.
# Any redistribution or sharing of this file, with or without modification,
# to or with any other person is strictly prohibited [(except as expressly
# permitted by the License Agreement)].
#
# Version: 099.2017.30440.28Sep
#
# _END_HEADER_
import shlex
import inspect
import collections
import TDStoreTools
import warnings
def createInternalParameters(comp, pane):
    """Delegate internal-parameter creation to the shared utility module."""
    utils = op('TDCreateInternalParameters/utils').module
    utils.createInternalParameters(comp, pane)
def clamp(value, inMin, inMax):
    """Return *value* limited to the range [inMin, inMax].

    Note: when inMin > inMax the min/max composition yields inMax, exactly
    as the original one-liner did.
    """
    lower_bounded = max(inMin, value)
    return min(inMax, lower_bounded)
def parentLevel(parentOp, childOp):
    """
    Determine if parentOp is an ancestor of childOp at any depth.

    Returns the depth such that childOp.parent(depth) is parentOp, 0 when
    the two operators are equal, or None when parentOp is not an ancestor
    (so that childOp.parent(returnValue) errors in that case).
    """
    if parentOp == childOp:
        return 0
    level = 1
    while True:
        ancestor = childOp.parent(level)
        if ancestor is None:
            return None
        if ancestor is parentOp:
            return level
        level += 1
def sharedParent(op1, op2):
    """
    Return the nearest shared parent of op1 and op2, or None when the only
    shared ancestor would be root.
    """
    if op1 == root or op2 == root:
        return None
    # Walk op1's ancestor paths from deepest to shallowest; the first one
    # that prefixes op2's path is the nearest common parent.
    candidate = op1.path.rpartition('/')[0]
    while candidate != '':
        if op2.path.startswith(candidate):
            return op(candidate)
        candidate = candidate.rpartition('/')[0]
def getShortcutPath(fromOp, toOp, toParName=None):
    """
    Return a shortcut path expression from fromOp to toOp or a parameter on
    toOp. This expression is suitable for use in any OP parameter on fromOp.
    OP portion of shortcut will be the first of:
        me
        op('/')
        op('./<path>') - Direct child
        op('<toOp.name>') - Direct sibling
        parent.<parentshortcut>
        parent.<parentshortcut>.op('<path>')
        op.<opshortcut>
        op.<opshortcut>.op('<path>')
        parent(#)
        parent(#).op('<path>')
        op('<toOp.path>')
    toParName: (string) if provided, the shortcut will be to that parameter on
        toOp. If possible, the 'ipar' shortcut will be used.
    """
    # Helper: build "<rootExpr>op('<path of toOp relative to rootOp>')".
    def rootChild(rootOp, rootExpr=''):
        if rootOp == op('/'):
            fromRoot = toOp.path
        else:
            fromRoot = toOp.path[len(rootOp.path) + 1:]
        return rootExpr + "op('" + fromRoot + "')"
    # Helper: append ".par.<toParName>" when a parameter shortcut was asked for.
    def parCheck(shortcutPath):
        if toParName:
            return shortcutPath +'.par.' + toParName
        else:
            return shortcutPath
    if fromOp is None:
        raise ValueError("getShortcutPath: invalid fromOp")
    if toOp is None:
        raise ValueError("getShortcutPath: invalid toOp")
    # me
    if fromOp == toOp:
        return parCheck('me')
    # NOTE(review): the iop/ipar shortcut search below is disabled
    # (commented out) despite the docstring's 'ipar' promise — kept as-is.
    # parent.<parentshortcut>.op('<path>')
    # search for iop/ipars
    # sanity = 100
    # searchOp = fromOp
    # while searchOp != op('/'):
    #     sanity -= 1  # reduce sanity
    #     if sanity == 0:
    #         raise Exception("parentLevel search exceeded max depth",
    #                         fromOp, toOp)
    #     searchOp = searchOp.parent()
    #     for iopPar in searchOp.pars('iop*'):
    #         if iopPar.eval() == toOp:
    #             shortcutParName = 'iopshortcut' + iopPar.name[3:]
    #             shortcut = getattr(searchOp.par, shortcutParName).eval()
    #             if toParName:
    #                 return'ipar.' + shortcut + '.' + toParName
    #             else:
    #                 return 'iop.' + shortcut
    # op('/')
    if toOp == op('/'):
        return parCheck("op('/')")
    # op('./<path>') - Direct child
    if parentLevel(fromOp, toOp) and fromOp != op('/'):
        return parCheck("op('." + toOp.path[len(fromOp.path):] + "')")
    # op('<toOp.name>') - Direct sibling
    if fromOp.parent() == toOp.parent():
        return parCheck("op('" + toOp.name + "')")
    # parent.<parentshortcut>
    if parentLevel(toOp, fromOp) and getattr(toOp.par, 'parentshortcut') \
            and toOp.par.parentshortcut.eval().strip():
        return parCheck('parent.' + toOp.par.parentshortcut)
    # # parent.<parentshortcut>.op('<path>')
    # # search for common shortcut parents
    # sanity = 100
    # searchOp = toOp
    # while searchOp != op('/'):
    #     sanity -= 1  # reduce sanity
    #     if sanity == 0:
    #         raise Exception("parentLevel search exceeded max depth",
    #                         fromOp, toOp)
    #     searchOp = searchOp.parent()
    #     parentShortcut = searchOp.par.parentshortcut.eval().strip()
    #     if parentShortcut:
    #         if getattr(fromOp.parent, parentShortcut, None) == searchOp:
    #             rootExpr = 'parent.' + parentShortcut + '.'
    #             return parCheck(rootChild(searchOp, rootExpr))
    # op.<opshortcut>
    if getattr(toOp.par, 'opshortcut', None) \
            and toOp.par.opshortcut.eval().strip():
        return parCheck('op.' + toOp.par.opshortcut.eval())
    # op.<opshortcut>.op('<path>')
    # search ancestors of toOp for one with a global op shortcut; the sanity
    # counter guards against a pathological/cyclic parent chain.
    sanity = 100
    searchOp = toOp
    while searchOp != op('/'):
        sanity -= 1  # reduce sanity
        if sanity == 0:
            raise Exception("parentLevel search exceeded max depth",
                            fromOp, toOp)
        searchOp = searchOp.parent()
        opShortcut = searchOp.par.opshortcut.eval().strip()
        if opShortcut:
            rootExpr = 'op.' + opShortcut + '.'
            return parCheck(rootChild(searchOp, rootExpr))
    # parent(#)
    # NOTE(review): condition disabled — branch is always entered; the
    # original test is preserved in the trailing comment.
    if True: # toOp.parent() != op('/'):
        if parentLevel(toOp, fromOp) and toOp != root:
            level = parentLevel(toOp, fromOp)
            return parCheck('parent(' + (str(level) if level > 1 else '') + ')')
    # parent(#).op('<path>')
    # search for common parents
    sanity = 100
    searchOp = toOp
    while searchOp != op('/'):
        sanity -= 1  # reduce sanity
        if sanity == 0:
            raise Exception("parentLevel search exceeded max depth",
                            fromOp, toOp)
        searchOp = searchOp.parent()
        level = parentLevel(searchOp, fromOp)
        if level:
            if level > 1 and searchOp != root:
                rootExpr = 'parent(' + \
                           (str(level) if level > 1 else '') + ').'
            else:
                rootExpr = ''
            return parCheck(rootChild(searchOp, rootExpr))
    # op('<toOp.path>')
    # Last resort: absolute path.
    # debug('!', toOp.path)
    # debug(parCheck("op('" + toOp.path + "')"))
    return parCheck("op('" + toOp.path + "')")
# Lightweight record matching the menuSource protocol (menuNames/menuLabels).
menuObject = collections.namedtuple('menuObject', ['menuNames', 'menuLabels'])
def parMenu(menuNames, menuLabels=None):
    """
    Build an object suitable for the menuSource property of parameters.

    menuNames: a collection of strings.
    menuLabels: display labels; defaults to menuNames when omitted.
    """
    labels = menuNames if menuLabels is None else menuLabels
    return menuObject(menuNames, labels)
def incrementStringDigits(string, min=1):
    """
    Increment the trailing digit group of *string*, or append *min* when the
    string contains no digits — mimicking how duplicated operators are
    auto-named. (`min` shadows the builtin but is kept for API stability.)
    """
    digits = tdu.digits(string)  # assumes tdu.digits returns the last digit group or None
    if digits is None:
        return string + str(min)
    digitText = str(digits)
    cut = string.rfind(digitText)
    prefix = string[:cut]
    suffix = string[cut + len(digitText):]
    return prefix + str(digits + 1) + suffix
def findNetworkEdges(comp, ignoreNodes=None):
    """
    Find the nodes at the extremes of a network.

    Returns {'nodes': {...}, 'positions': {...}} keyed by 'top', 'left',
    'right', 'bottom', or None when comp has no (non-ignored) children.
    ignoreNodes: a list of nodes in the network to skip.
    Raises ValueError when comp is not a COMP.
    """
    if ignoreNodes is None:
        ignoreNodes = []
    if not isinstance(comp, COMP):
        raise ValueError('findNetworkEdges requires COMP to search', comp)
    children = comp.findChildren(depth=1, key=lambda c: c not in ignoreNodes)
    if not children:
        return None
    nodes = dict.fromkeys(('top', 'left', 'right', 'bottom'))
    positions = dict.fromkeys(('top', 'left', 'right', 'bottom'))
    for child in children:
        x = child.nodeX
        y = child.nodeY
        right_edge = x + child.nodeWidth
        top_edge = y + child.nodeHeight
        # >= keeps the last node encountered on ties, like the original.
        if positions['right'] is None or right_edge >= positions['right']:
            nodes['right'], positions['right'] = child, right_edge
        if positions['top'] is None or top_edge >= positions['top']:
            nodes['top'], positions['top'] = child, top_edge
        if positions['left'] is None or x < positions['left']:
            nodes['left'], positions['left'] = child, x
        if positions['bottom'] is None or y < positions['bottom']:
            nodes['bottom'], positions['bottom'] = child, y
    return {'nodes': nodes, 'positions': positions}
def arrangeNode(node, position='bottom', spacing=20):
    """
    Position *node* relative to the other nodes in its network.

    position: 'bottom', 'top', 'left' or 'right'.
    left/right placements align with the network's top edge; top/bottom
    placements align with its left edge.
    """
    edges = findNetworkEdges(node.parent(), [node])
    if edges is None:
        # Empty network: park the node at the origin.
        node.nodeX = node.nodeY = 0
        return
    ext = edges['positions']
    if position == 'bottom':
        new_x = ext['left']
        new_y = ext['bottom'] - node.nodeHeight - spacing
    elif position == 'top':
        new_x = ext['left']
        new_y = ext['top'] + spacing
    elif position == 'right':
        new_x = ext['right'] + spacing
        new_y = ext['top'] - node.nodeHeight
    elif position == 'left':
        new_x = ext['left'] - node.nodeWidth - spacing
        new_y = ext['top'] - node.nodeHeight
    else:
        raise ValueError('Invalid arrangeNode position', position)
    node.nodeX = new_x
    node.nodeY = new_y
def createProperty(classInstance, name, value=None, attributeName=None,
                   readOnly=False, dependable=True):
    """
    Use this method to add a property (called name) that accesses
    an attribute (called attributeName). attributeName defaults to '_' + name.
    The attribute will be set to value argument.
    If dependable is True, the attribute will store value in a dependency obj.
    If dependable is 'deep', lists, sets, or dicts will be created as
    dependable collections from TDStoreTools
    If readonly is True, the property will be read only
    WARNING: the property is added to the CLASS of instance, so all objects of
    instance's class will now have this property.
    """
    # Normalize the alternate spelling of the deep-dependable flag.
    if dependable == 'Deep':
        dependable = 'deep'
    if attributeName is None:
        attributeName = '_' + name
    if dependable:
        if dependable == 'deep':
            depValue = makeDeepDependable(value)
        else:
            depValue = tdu.Dependency(value)
        # Backing value lives on the INSTANCE; the property goes on the class.
        setattr(classInstance, attributeName, depValue)
        # These closures capture attributeName/dependable/readOnly from the
        # enclosing call, so each created property is bound to its own names.
        def getter(self):
            if dependable == 'deep' and readOnly:
                try:
                    return getattr(self, attributeName).val.getRaw()
                except:
                    return getattr(self, attributeName).val
            else:
                return getattr(self, attributeName).val
        def setter(self, val):
            getattr(self, attributeName).val = val
        def deleter(self):
            # Signal dependents before removing the backing attribute.
            getattr(self, attributeName).modified()
            delattr(self, attributeName)
    else:
        setattr(classInstance, attributeName, value)
        def getter(self):
            return getattr(self, attributeName)
        def setter(self, val):
            setattr(self, attributeName, val)
        def deleter(self):
            # NOTE(review): this non-dependable deleter still calls
            # .modified(), which plain values don't have — confirm intended.
            getattr(self, attributeName).modified()
            delattr(self, attributeName)
    setattr(classInstance.__class__, name, property(getter,
                                                    None if readOnly else setter,
                                                    deleter))
def makeDeepDependable(value):
    """
    Wrap *value* in a deeply dependable object. Deeply dependable
    collections cause cooks when their contents change.
    """
    # Check dict before list/set, mirroring the original dispatch order.
    if isinstance(value, dict):
        return TDStoreTools.DependDict(value)
    if isinstance(value, list):
        return TDStoreTools.DependList(value)
    if isinstance(value, set):
        return TDStoreTools.DependSet(value)
    return tdu.Dependency(value)
def forceCookNonDatOps(comp):
    """
    Recursively force-cook *comp* and its descendants. Only COMPs are
    recursed into and cooked (non-COMP operators are skipped entirely).
    """
    if isinstance(comp, COMP):
        for child in comp.children:
            forceCookNonDatOps(child)
        # Cook the parent after its children, matching the original order.
        comp.cook(force=True)
def showInPane(operator, pane='Floating', inside=False):
    """
    Open an operator for viewing in a chosen editor pane. The pane will be
    focused on the chosen operator unless inside is True, in which case it
    will show the inside if possible.
    operator: the operator to view
    pane: a ui.pane or 'Floating' for a new floating pane.
    inside: if inside is True, try to show view inside comps
    Returns the pane used (or None when pane is None).
    """
    if isinstance(operator, COMP) and inside:
        # Viewing the inside of a comp: no single node to make current.
        homeViewOp = None
    else:
        homeViewOp = operator
        operator = operator.parent()
    for o in operator.children:
        o.selected = False
    if homeViewOp is not None:
        # BUGFIX: the original set homeViewOp.current unconditionally, which
        # raised AttributeError whenever inside=True (homeViewOp is None).
        homeViewOp.current = True
    if pane is None:
        return
    # check for closed pane...
    # BUGFIX: was `pane is not 'Floating'` — identity comparison against a
    # string literal is not guaranteed; use equality.
    if pane != 'Floating' and pane.id not in [p.id for p in ui.panes]:
        print('showInPane: Target pane not found, creating floating pane.')
        pane = 'Floating'
    if pane == 'Floating':
        targetPane = ui.panes.createFloating(type=PaneType.NETWORKEDITOR)
        targetPane.owner = operator
        try:
            targetPane.name = operator.name
        except:
            pass
    else:
        try:
            pane.owner = operator
        except:
            print('Unable to open ' + str(operator) + ' in ' + str(pane))
            raise
        targetPane = pane # for name set
    # Home the view one frame later so the pane exists when the script runs.
    if homeViewOp:
        run("ui.panes['" + targetPane.name + "'].homeSelected(zoom=True)",
            delayFrames=1)
    else:
        run("ui.panes['" + targetPane.name + "'].home(zoom=True)",
            delayFrames=1)
    return targetPane
def tScript(cmd):
    """
    Run a tscript command. Use at your own risk — this is a hack, and slow:
    it creates and destroys a DAT per call. Build a dedicated tscript DAT
    network if you need speed.
    """
    runner = op('/').create(textDAT, 'tScripter')
    runner.python = False  # switch the DAT to tscript mode
    runner.text = cmd
    runner.run()
    runner.destroy()
def parStringToIntList(parString):
    """
    Convert a space-delimited string into a list of ints.
    """
    return list(map(int, parString.split()))
def listToParString(l):
    """
    Convert a list into a space-delimited string.
    """
    return ' '.join(map(str, l))
def replaceOp(dest, source=None):
    """
    Replace dest with an exact copy of source. If source is None and dest is a
    comp, try to use dest's clone parameter.
    """
    # check dest and source
    if not isinstance(dest, OP):
        raise ValueError('replaceOp: invalid dest', dest)
    if source is None and hasattr(dest.par,'clone'):
        source = op(dest.par.clone.eval())
        clonePar = dest.par.clone
    else:
        clonePar = None
    if not isinstance(source, OP):
        raise AttributeError('replaceOp: invalid source ' + str(source) + ' '
                             'for ' + str(dest))
    # save dest info
    destName = dest.name
    # attributes (layout/appearance) to carry over onto the replacement
    destAttrs = ['nodeX', 'nodeY', 'nodeWidth', 'nodeHeight', 'color', 'dock']
    destAttrDict = {}
    for attr in destAttrs:
        destAttrDict[attr] = getattr(dest, attr)
    # connections — stored as paths (not references) so they survive the
    # copy/destroy cycle below
    if hasattr(dest, 'inputCOMPs'):
        destInputCOMPs = [c.path for c in dest.inputCOMPs]
    else:
        destInputCOMPs = None
    if hasattr(dest, 'outputCOMPs'):
        destOutputCOMPs = [c.path for c in dest.outputCOMPs]
    else:
        destOutputCOMPs = None
    destInputs = [o.path for o in dest.inputs]
    destOutputs = [o.path for o in dest.outputs]
    # do copy
    parent = dest.parent()
    newDest = parent.copy(source)
    try:
        # attrs
        for attr, value in destAttrDict.items():
            setattr(newDest, attr, value)
        # clone — replicate value, expression and mode of dest's clone par
        if clonePar:
            newDest.par.clone.val = clonePar.val
            newDest.par.clone.expr = clonePar.expr
            newDest.par.clone.mode = clonePar.mode
        # connections
        if destInputCOMPs:
            newInput = newDest.inputCOMPConnectors[0]
            for path in destInputCOMPs:
                newInput.connect(op(path))
        if destOutputCOMPs:
            newOutput = newDest.outputCOMPConnectors[0]
            for path in destOutputCOMPs:
                newOutput.connect(op(path))
        for i, path in enumerate(destInputs):
            newDest.inputConnectors[i].connect(op(path))
        for i, path in enumerate(destOutputs):
            newDest.outputConnectors[i].connect(op(path))
    except:
        # Roll back the half-built copy, then re-raise.
        newDest.destroy()
        raise
    # Destroy the original BEFORE renaming so the name is free to reuse.
    dest.destroy()
    newDest.name = destName
    return newDest
def getParInfo(sourceOp, pattern='*', names=None,
               includeCustom=True, includeNonCustom=True):
    """
    Return a parInfo dict for sourceOp, filtered in order by:
    pattern (a pattern-match string), names (list of names to include; None
    includes all), includeCustom, includeNonCustom.
    parInfo is {<parName>: [par.val, par.expr, par.mode string, par.bindExpr,
    par.default], ...}
    """
    parInfo = {}
    for par in sourceOp.pars(pattern):
        # Guard clauses replace the original compound condition.
        if names is not None and par.name not in names:
            continue
        if par.isCustom and not includeCustom:
            continue
        if not par.isCustom and not includeNonCustom:
            continue
        parInfo[par.name] = [par.val, par.expr if par.expr else '',
                             str(par.mode), par.bindExpr, par.default]
    return parInfo
def applyParDefaults(targetOp, parInfo):
    """
    Apply stored par defaults from parInfo to matching pars on targetOp.
    Failed assignments are swallowed silently — no exception is raised.
    parInfo is {<parName>: [par.val, par.expr, par.mode string, par.bindExpr,
    par.default], ...}
    """
    for par in targetOp.pars():
        info = parInfo.get(par.name)
        if info is None:
            continue
        default = info[4] if len(info) > 4 else None
        if default is None:
            continue
        try:
            par.default = default
        except:
            # Best-effort: some pars reject default assignment.
            pass
def applyParInfo(targetOp, parInfo, setDefaults=False):
    """
    Attempt to apply par values, expressions, and modes from parInfo dict to
    targetOp. If application fails, no exception will be raised!
    parInfo is {<parName>: [par.val, par.expr, par.mode string, par.bindExpr,
    par.default], ...}
    setDefaults: if True, set the par.default as well

    Refactor: the four near-identical try/except cascades of the original
    are collapsed into one ordered-assignment loop; per-mode assignment
    order is preserved exactly so the governing field still wins.
    """
    def _assign(par, attr, value):
        # Mirror the original best-effort behavior: a failed assignment
        # (read-only par, bad expression, ...) is silently skipped.
        try:
            setattr(par, attr, value)
        except:
            pass
    if setDefaults:
        applyParDefaults(targetOp, parInfo)
    for p in targetOp.pars():
        if p.name not in parInfo:
            continue
        info = parInfo[p.name]
        val = info[0]
        expr = info[1] if info[1] is not None else ''
        mode = info[2]
        bindExpr = info[3] if len(info) > 3 else ''
        if isinstance(mode, str):  # stored modes may be strings
            mode = getattr(ParMode, mode)
        # Set the non-governing fields first so the mode's governing field
        # is written last, then switch mode. This maintains value priority
        # even when an expression evaluates to a constant.
        if mode == ParMode.CONSTANT:
            order = (('expr', expr), ('bindExpr', bindExpr), ('val', val))
        elif mode == ParMode.EXPRESSION:
            order = (('val', val), ('bindExpr', bindExpr), ('expr', expr))
        elif mode == ParMode.BIND:
            order = (('val', val), ('expr', expr), ('bindExpr', bindExpr))
        else:
            order = (('val', val), ('bindExpr', bindExpr), ('expr', expr))
        for attr, value in order:
            _assign(p, attr, value)
        p.mode = mode
def panelParentShortcut(panel, parentShortcut):
    """
    Walk up *panel*'s panelParent chain and return the first ancestor whose
    parentshortcut matches. Returns None when no match is found.
    """
    current = panel
    while hasattr(current, 'panelParent'):
        current = current.panelParent()
        if current is None:
            return None
        if current.par.parentshortcut == parentShortcut:
            return current
def getMenuLabel(menuPar):
    """
    Return the label of menuPar's currently selected menu item.
    Raises IndexError for an out-of-range index, TypeError for a non-menu par.
    """
    try:
        labels = menuPar.menuLabels
        return labels[menuPar.menuIndex]
    except IndexError:
        raise
    except:
        raise TypeError("getMenuLabel: invalid menu par " + repr(menuPar))
def setMenuLabel(menuPar, label):
    """
    Point menuPar at the menu item whose menuLabel equals *label*.
    Raises ValueError for an unknown label, TypeError for a non-menu par.
    """
    try:
        index = menuPar.menuLabels.index(label)
        menuPar.menuIndex = index
    except ValueError:
        raise ValueError("setMenuLabel: invalid label " + label + " - " +
                         repr(menuPar))
    except:
        raise TypeError("setMenuLabel: invalid menu par " + repr(menuPar))
def validChannelName(name):
    """
    Return a valid channel name based on *name*.

    Spaces become underscores, then legalization is delegated to the
    'constant1' CHOP by assigning the candidate and reading back the
    resulting channel name.
    """
    candidate = name.replace(' ', '_')
    chop = op('constant1')
    chop.par.name0 = candidate
    legal = chop[0].name
    chop.par.name0 = ''  # leave the helper CHOP clean
    return legal
def errorDialog(text, title):
    """
    Open a popup dialog (after one frame delay), with just an OK button
    text: text of dialog
    title: title of dialog
    """
    # repr() quotes/escapes text and title safely into the generated script;
    # the one-frame delay defers execution to the TDResources context.
    run("""op.TDResources.op('popDialog').Open(
            text=""" + repr(text) + ", title=" + repr(title) +
        """,
            buttons=['OK'],
            callback=None,
            details=None,
            textEntry=False,
            escButton=1,
            escOnClickAway=True,
            enterButton=1)
        """, delayFrames=1, delayRef=op.TDResources
        )
def extensionOPFromPar(comp, userIndex):
    """
    Return the extension module DAT, internal parameter comp, or best guess
    derived from the extension parameter's text.
    :param comp: the component holding the extension
    :param userIndex: the 1-based index of the extension parameter
    """
    extText = getattr(comp.par, 'extension' + str(userIndex)).eval()
    extension = comp.extensions[userIndex - 1]
    eop = extensionOP(extension) if extension else None
    # Best guess: parse "op(...).module.Class"-style expressions and
    # evaluate the operator part. This overrides the lookup above, as in
    # the original.
    if '.module.' in extText:
        try:
            eop = comp.evalExpression(extText.split('.module.')[0])
        except:
            eop = None
    return eop
def extensionOP(extension):
    """
    Get an extension's associated operator.
    :param extension: the extension
    :return: the DAT source of extension, the internal parameters' comp, or
        None if undetermined
    """
    if isinstance(extension, ParCollection):
        # Internal parameters: recover the owning comp via a known member.
        return extension.stdswitcher1.owner
    if not extension:
        return None
    try:
        # The extension class's module name is the defining DAT's path.
        return op(extension.__class__.__module__)
    except:
        return None
def editExtensionOP(extOP):
    """
    Open the appropriate editor for the given extension operator: external
    editor for text DATs, viewer for other DATs, CompEditor for COMPs
    (internal parameters).
    """
    # Order matters: textDAT is a DAT subtype, so test it first.
    if isinstance(extOP, textDAT):
        extOP.par.edit.pulse()
    elif isinstance(extOP, DAT):
        extOP.openViewer()
    elif isinstance(extOP, COMP):
        editor = op.TDDialogs.op('CompEditor')
        editor.Connect(extOP)
        editor.openViewer()
def bindChain(par):
    """
    Return [par, par's bind master, that master's master, ...] — following
    bindExpr as far as it resolves.
    :param par: the parameter to start the chain search
    """
    def _master(p):
        # Evaluate the bind expression; any failure means "no master".
        try:
            return p.owner.evalExpression(p.bindExpr)
        except:
            return None
    chain = [par]
    node = _master(par)
    while node:
        chain.append(node)
        node = _master(node)
    return chain
def unbindReferences(par, modeOnly=False):
    """
    Release all bind references of a bind-master parameter.
    :param par: the bindMaster parameter
    :param modeOnly: if True, only restore each reference's previous mode
        (leave its bind expression intact)
    :return: the references that were changed
    """
    refs = par.bindReferences
    for ref in refs:
        ref.mode = ref.prevMode
        if not modeOnly:
            ref.bindExpr = ''
    return refs
|
[
"tekt@optexture.com"
] |
tekt@optexture.com
|
2762f8da39354dd3400e6370c75459e1b8b738f3
|
9368aedf7682f351db718e59888e846a3b6012cd
|
/src/v8.gyp
|
c5b261f2cddc8742235bd105a6689c6ecdd624b7
|
[
"BSD-3-Clause",
"bzip2-1.0.6"
] |
permissive
|
rangerch/v8
|
2aca52249b8b8b4d21aedcd7f5a1d91bb000e11e
|
aee1824adbcadb97bd7295e87e1e6765997bd8f9
|
refs/heads/master
| 2020-12-11T05:34:33.514474
| 2016-05-09T08:52:05
| 2016-05-09T08:53:48
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 77,341
|
gyp
|
# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
{
'variables': {
'icu_use_data_file_flag%': 0,
'v8_code': 1,
'v8_random_seed%': 314159265,
'v8_vector_stores%': 0,
'embed_script%': "",
'warmup_script%': "",
'v8_extra_library_files%': [],
'v8_experimental_extra_library_files%': [],
'mksnapshot_exec': '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)mksnapshot<(EXECUTABLE_SUFFIX)',
},
'includes': ['../gypfiles/toolchain.gypi', '../gypfiles/features.gypi'],
'targets': [
{
'target_name': 'v8',
'dependencies_traverse': 1,
'dependencies': ['v8_maybe_snapshot'],
'conditions': [
['want_separate_host_toolset==1', {
'toolsets': ['host', 'target'],
}, {
'toolsets': ['target'],
}],
['component=="shared_library"', {
'type': '<(component)',
'sources': [
# Note: on non-Windows we still build this file so that gyp
# has some sources to link into the component.
'v8dll-main.cc',
],
'include_dirs': [
'..',
],
'defines': [
'V8_SHARED',
'BUILDING_V8_SHARED',
],
'direct_dependent_settings': {
'defines': [
'V8_SHARED',
'USING_V8_SHARED',
],
},
'target_conditions': [
['OS=="android" and _toolset=="target"', {
'libraries': [
'-llog',
],
'include_dirs': [
'src/common/android/include',
],
}],
],
'conditions': [
['OS=="mac"', {
'xcode_settings': {
'OTHER_LDFLAGS': ['-dynamiclib', '-all_load']
},
}],
['soname_version!=""', {
'product_extension': 'so.<(soname_version)',
}],
],
},
{
'type': 'none',
}],
],
'direct_dependent_settings': {
'include_dirs': [
'../include',
],
},
},
{
# This rule delegates to either v8_snapshot, v8_nosnapshot, or
# v8_external_snapshot, depending on the current variables.
# The intention is to make the 'calling' rules a bit simpler.
'target_name': 'v8_maybe_snapshot',
'type': 'none',
'conditions': [
['v8_use_snapshot!="true"', {
# The dependency on v8_base should come from a transitive
# dependency however the Android toolchain requires libv8_base.a
# to appear before libv8_snapshot.a so it's listed explicitly.
'dependencies': ['v8_base', 'v8_nosnapshot'],
}],
['v8_use_snapshot=="true" and v8_use_external_startup_data==0', {
# The dependency on v8_base should come from a transitive
# dependency however the Android toolchain requires libv8_base.a
# to appear before libv8_snapshot.a so it's listed explicitly.
'dependencies': ['v8_base', 'v8_snapshot'],
}],
['v8_use_snapshot=="true" and v8_use_external_startup_data==1 and want_separate_host_toolset==0', {
'dependencies': ['v8_base', 'v8_external_snapshot'],
'inputs': [ '<(PRODUCT_DIR)/snapshot_blob.bin', ],
}],
['v8_use_snapshot=="true" and v8_use_external_startup_data==1 and want_separate_host_toolset==1', {
'dependencies': ['v8_base', 'v8_external_snapshot'],
'target_conditions': [
['_toolset=="host"', {
'inputs': [
'<(PRODUCT_DIR)/snapshot_blob_host.bin',
],
}, {
'inputs': [
'<(PRODUCT_DIR)/snapshot_blob.bin',
],
}],
],
}],
['want_separate_host_toolset==1', {
'toolsets': ['host', 'target'],
}, {
'toolsets': ['target'],
}],
]
},
{
'target_name': 'v8_snapshot',
'type': 'static_library',
'conditions': [
['want_separate_host_toolset==1', {
'toolsets': ['host', 'target'],
'dependencies': [
'mksnapshot#host',
'js2c#host',
],
}, {
'toolsets': ['target'],
'dependencies': [
'mksnapshot',
'js2c',
],
}],
['component=="shared_library"', {
'defines': [
'V8_SHARED',
'BUILDING_V8_SHARED',
],
'direct_dependent_settings': {
'defines': [
'V8_SHARED',
'USING_V8_SHARED',
],
},
}],
],
'dependencies': [
'v8_base',
],
'include_dirs+': [
'..',
],
'sources': [
'<(SHARED_INTERMEDIATE_DIR)/libraries.cc',
'<(SHARED_INTERMEDIATE_DIR)/experimental-libraries.cc',
'<(SHARED_INTERMEDIATE_DIR)/extras-libraries.cc',
'<(SHARED_INTERMEDIATE_DIR)/experimental-extras-libraries.cc',
'<(INTERMEDIATE_DIR)/snapshot.cc',
],
'actions': [
{
'action_name': 'run_mksnapshot',
'inputs': [
'<(mksnapshot_exec)',
],
'conditions': [
['embed_script!=""', {
'inputs': [
'<(embed_script)',
],
}],
['warmup_script!=""', {
'inputs': [
'<(warmup_script)',
],
}],
],
'outputs': [
'<(INTERMEDIATE_DIR)/snapshot.cc',
],
'variables': {
'mksnapshot_flags': [],
'conditions': [
['v8_random_seed!=0', {
'mksnapshot_flags': ['--random-seed', '<(v8_random_seed)'],
}],
['v8_vector_stores!=0', {
'mksnapshot_flags': ['--vector-stores'],
}],
],
},
'action': [
'<(mksnapshot_exec)',
'<@(mksnapshot_flags)',
'--startup_src', '<@(INTERMEDIATE_DIR)/snapshot.cc',
'<(embed_script)',
'<(warmup_script)',
],
},
],
},
    {
      # Variant of the V8 library that compiles in the generated JS libraries
      # but links an empty snapshot stub (snapshot/snapshot-empty.cc) instead
      # of a snapshot produced by mksnapshot.
      'target_name': 'v8_nosnapshot',
      'type': 'static_library',
      'dependencies': [
        'v8_base',
      ],
      'include_dirs+': [
        '..',
      ],
      'sources': [
        # The four libraries.cc files below are generated by the js2c step
        # (declared as a dependency in 'conditions' so the correct toolset
        # variant is used).
        '<(SHARED_INTERMEDIATE_DIR)/libraries.cc',
        '<(SHARED_INTERMEDIATE_DIR)/experimental-libraries.cc',
        '<(SHARED_INTERMEDIATE_DIR)/extras-libraries.cc',
        '<(SHARED_INTERMEDIATE_DIR)/experimental-extras-libraries.cc',
        # Empty-snapshot stub: no mksnapshot run is needed for this target.
        'snapshot/snapshot-empty.cc',
      ],
      'conditions': [
        # Pick the js2c dependency for the right toolset(s).
        ['want_separate_host_toolset==1', {
          'toolsets': ['host', 'target'],
          'dependencies': ['js2c#host'],
        }, {
          'toolsets': ['target'],
          'dependencies': ['js2c'],
        }],
        ['component=="shared_library"', {
          'defines': [
            'BUILDING_V8_SHARED',
            'V8_SHARED',
          ],
        }],
      ]
    },
    {
      # Variant of the V8 library that reads its startup snapshot from an
      # external file (snapshot_blob.bin) at runtime rather than linking the
      # snapshot data into the binary.  The target is only populated when
      # v8_use_external_startup_data==1 (outer condition below); otherwise it
      # is an empty static library.
      'target_name': 'v8_external_snapshot',
      'type': 'static_library',
      'conditions': [
        [ 'v8_use_external_startup_data==1', {
          'conditions': [
            ['want_separate_host_toolset==1', {
              'toolsets': ['host', 'target'],
              'dependencies': [
                'mksnapshot#host',
                'js2c#host',
                'natives_blob',
              ]}, {
              'toolsets': ['target'],
              'dependencies': [
                'mksnapshot',
                'js2c',
                'natives_blob',
              ],
            }],
            ['component=="shared_library"', {
              'defines': [
                'V8_SHARED',
                'BUILDING_V8_SHARED',
              ],
              'direct_dependent_settings': {
                'defines': [
                  'V8_SHARED',
                  'USING_V8_SHARED',
                ],
              },
            }],
          ],
          'dependencies': [
            'v8_base',
          ],
          'include_dirs+': [
            '..',
          ],
          'sources': [
            # Loaders for the externally shipped natives and snapshot blobs.
            'snapshot/natives-external.cc',
            'snapshot/snapshot-external.cc',
          ],
          'actions': [
            {
              # Runs the mksnapshot tool to generate the external startup
              # blob consumed by snapshot-external.cc.
              'action_name': 'run_mksnapshot (external)',
              'inputs': [
                '<(mksnapshot_exec)',
              ],
              'variables': {
                # Extra mksnapshot command-line flags, assembled from gyp
                # variables below.
                'mksnapshot_flags': [],
                'conditions': [
                  ['v8_random_seed!=0', {
                    'mksnapshot_flags': ['--random-seed', '<(v8_random_seed)'],
                  }],
                  ['v8_vector_stores!=0', {
                    'mksnapshot_flags': ['--vector-stores'],
                  }],
                ],
              },
              'conditions': [
                # Optional scripts are added as action inputs only when set,
                # so the action re-runs if they change.
                ['embed_script!=""', {
                  'inputs': [
                    '<(embed_script)',
                  ],
                }],
                ['warmup_script!=""', {
                  'inputs': [
                    '<(warmup_script)',
                  ],
                }],
                ['want_separate_host_toolset==1', {
                  'target_conditions': [
                    # The host toolset writes a separately named blob so it
                    # does not collide with the target toolset's output.
                    ['_toolset=="host"', {
                      'outputs': [
                        '<(PRODUCT_DIR)/snapshot_blob_host.bin',
                      ],
                      'action': [
                        '<(mksnapshot_exec)',
                        '<@(mksnapshot_flags)',
                        '--startup_blob', '<(PRODUCT_DIR)/snapshot_blob_host.bin',
                        '<(embed_script)',
                        '<(warmup_script)',
                      ],
                    }, {
                      'outputs': [
                        '<(PRODUCT_DIR)/snapshot_blob.bin',
                      ],
                      'action': [
                        '<(mksnapshot_exec)',
                        '<@(mksnapshot_flags)',
                        '--startup_blob', '<(PRODUCT_DIR)/snapshot_blob.bin',
                        '<(embed_script)',
                        '<(warmup_script)',
                      ],
                    }],
                  ],
                }, {
                  'outputs': [
                    '<(PRODUCT_DIR)/snapshot_blob.bin',
                  ],
                  'action': [
                    '<(mksnapshot_exec)',
                    '<@(mksnapshot_flags)',
                    '--startup_blob', '<(PRODUCT_DIR)/snapshot_blob.bin',
                    '<(embed_script)',
                    '<(warmup_script)',
                  ],
                }],
              ],
            },
          ],
        }],
      ],
    },
{
'target_name': 'v8_base',
'type': 'static_library',
'dependencies': [
'v8_libbase',
],
'variables': {
'optimize': 'max',
},
'include_dirs+': [
'..',
# To be able to find base/trace_event/common/trace_event_common.h
'../..',
],
'sources': [ ### gcmole(all) ###
'../include/v8-debug.h',
'../include/v8-experimental.h',
'../include/v8-platform.h',
'../include/v8-profiler.h',
'../include/v8-testing.h',
'../include/v8-util.h',
'../include/v8-version.h',
'../include/v8.h',
'../include/v8config.h',
'accessors.cc',
'accessors.h',
'address-map.cc',
'address-map.h',
'allocation.cc',
'allocation.h',
'allocation-site-scopes.cc',
'allocation-site-scopes.h',
'api-experimental.cc',
'api-experimental.h',
'api.cc',
'api.h',
'api-arguments.cc',
'api-arguments.h',
'api-natives.cc',
'api-natives.h',
'arguments.cc',
'arguments.h',
'assembler.cc',
'assembler.h',
'assert-scope.h',
'assert-scope.cc',
'ast/ast-expression-rewriter.cc',
'ast/ast-expression-rewriter.h',
'ast/ast-expression-visitor.cc',
'ast/ast-expression-visitor.h',
'ast/ast-literal-reindexer.cc',
'ast/ast-literal-reindexer.h',
'ast/ast-numbering.cc',
'ast/ast-numbering.h',
'ast/ast-value-factory.cc',
'ast/ast-value-factory.h',
'ast/ast.cc',
'ast/ast.h',
'ast/modules.cc',
'ast/modules.h',
'ast/prettyprinter.cc',
'ast/prettyprinter.h',
'ast/scopeinfo.cc',
'ast/scopeinfo.h',
'ast/scopes.cc',
'ast/scopes.h',
'ast/variables.cc',
'ast/variables.h',
'atomic-utils.h',
'background-parsing-task.cc',
'background-parsing-task.h',
'bailout-reason.cc',
'bailout-reason.h',
'basic-block-profiler.cc',
'basic-block-profiler.h',
'bignum-dtoa.cc',
'bignum-dtoa.h',
'bignum.cc',
'bignum.h',
'bit-vector.cc',
'bit-vector.h',
'bootstrapper.cc',
'bootstrapper.h',
'builtins.cc',
'builtins.h',
'cached-powers.cc',
'cached-powers.h',
'cancelable-task.cc',
'cancelable-task.h',
'char-predicates.cc',
'char-predicates-inl.h',
'char-predicates.h',
'checks.h',
'code-factory.cc',
'code-factory.h',
'code-stub-assembler.cc',
'code-stub-assembler.h',
'code-stubs.cc',
'code-stubs.h',
'code-stubs-hydrogen.cc',
'codegen.cc',
'codegen.h',
'collector.h',
'compilation-cache.cc',
'compilation-cache.h',
'compilation-dependencies.cc',
'compilation-dependencies.h',
'compilation-statistics.cc',
'compilation-statistics.h',
'compiler/access-builder.cc',
'compiler/access-builder.h',
'compiler/access-info.cc',
'compiler/access-info.h',
'compiler/all-nodes.cc',
'compiler/all-nodes.h',
'compiler/ast-graph-builder.cc',
'compiler/ast-graph-builder.h',
'compiler/ast-loop-assignment-analyzer.cc',
'compiler/ast-loop-assignment-analyzer.h',
'compiler/basic-block-instrumentor.cc',
'compiler/basic-block-instrumentor.h',
'compiler/branch-elimination.cc',
'compiler/branch-elimination.h',
'compiler/bytecode-branch-analysis.cc',
'compiler/bytecode-branch-analysis.h',
'compiler/bytecode-graph-builder.cc',
'compiler/bytecode-graph-builder.h',
'compiler/change-lowering.cc',
'compiler/change-lowering.h',
'compiler/c-linkage.cc',
'compiler/coalesced-live-ranges.cc',
'compiler/coalesced-live-ranges.h',
'compiler/code-generator-impl.h',
'compiler/code-generator.cc',
'compiler/code-generator.h',
'compiler/code-assembler.cc',
'compiler/code-assembler.h',
'compiler/common-node-cache.cc',
'compiler/common-node-cache.h',
'compiler/common-operator-reducer.cc',
'compiler/common-operator-reducer.h',
'compiler/common-operator.cc',
'compiler/common-operator.h',
'compiler/control-builders.cc',
'compiler/control-builders.h',
'compiler/control-equivalence.cc',
'compiler/control-equivalence.h',
'compiler/control-flow-optimizer.cc',
'compiler/control-flow-optimizer.h',
'compiler/dead-code-elimination.cc',
'compiler/dead-code-elimination.h',
'compiler/diamond.h',
'compiler/effect-control-linearizer.cc',
'compiler/effect-control-linearizer.h',
'compiler/escape-analysis.cc',
'compiler/escape-analysis.h',
"compiler/escape-analysis-reducer.cc",
"compiler/escape-analysis-reducer.h",
'compiler/frame.cc',
'compiler/frame.h',
'compiler/frame-elider.cc',
'compiler/frame-elider.h',
"compiler/frame-states.cc",
"compiler/frame-states.h",
'compiler/gap-resolver.cc',
'compiler/gap-resolver.h',
'compiler/graph-reducer.cc',
'compiler/graph-reducer.h',
'compiler/graph-replay.cc',
'compiler/graph-replay.h',
'compiler/graph-trimmer.cc',
'compiler/graph-trimmer.h',
'compiler/graph-visualizer.cc',
'compiler/graph-visualizer.h',
'compiler/graph.cc',
'compiler/graph.h',
'compiler/greedy-allocator.cc',
'compiler/greedy-allocator.h',
'compiler/instruction-codes.h',
'compiler/instruction-selector-impl.h',
'compiler/instruction-selector.cc',
'compiler/instruction-selector.h',
'compiler/instruction-scheduler.cc',
'compiler/instruction-scheduler.h',
'compiler/instruction.cc',
'compiler/instruction.h',
'compiler/int64-lowering.cc',
'compiler/int64-lowering.h',
'compiler/js-builtin-reducer.cc',
'compiler/js-builtin-reducer.h',
'compiler/js-call-reducer.cc',
'compiler/js-call-reducer.h',
'compiler/js-context-specialization.cc',
'compiler/js-context-specialization.h',
'compiler/js-create-lowering.cc',
'compiler/js-create-lowering.h',
'compiler/js-frame-specialization.cc',
'compiler/js-frame-specialization.h',
'compiler/js-generic-lowering.cc',
'compiler/js-generic-lowering.h',
'compiler/js-global-object-specialization.cc',
'compiler/js-global-object-specialization.h',
'compiler/js-graph.cc',
'compiler/js-graph.h',
'compiler/js-inlining.cc',
'compiler/js-inlining.h',
'compiler/js-inlining-heuristic.cc',
'compiler/js-inlining-heuristic.h',
'compiler/js-intrinsic-lowering.cc',
'compiler/js-intrinsic-lowering.h',
'compiler/js-native-context-specialization.cc',
'compiler/js-native-context-specialization.h',
'compiler/js-operator.cc',
'compiler/js-operator.h',
'compiler/js-typed-lowering.cc',
'compiler/js-typed-lowering.h',
'compiler/jump-threading.cc',
'compiler/jump-threading.h',
'compiler/linkage.cc',
'compiler/linkage.h',
'compiler/liveness-analyzer.cc',
'compiler/liveness-analyzer.h',
'compiler/live-range-separator.cc',
'compiler/live-range-separator.h',
'compiler/load-elimination.cc',
'compiler/load-elimination.h',
'compiler/loop-analysis.cc',
'compiler/loop-analysis.h',
'compiler/loop-peeling.cc',
'compiler/loop-peeling.h',
'compiler/machine-operator-reducer.cc',
'compiler/machine-operator-reducer.h',
'compiler/machine-operator.cc',
'compiler/machine-operator.h',
'compiler/move-optimizer.cc',
'compiler/move-optimizer.h',
'compiler/node-aux-data.h',
'compiler/node-cache.cc',
'compiler/node-cache.h',
'compiler/node-marker.cc',
'compiler/node-marker.h',
'compiler/node-matchers.cc',
'compiler/node-matchers.h',
'compiler/node-properties.cc',
'compiler/node-properties.h',
'compiler/node.cc',
'compiler/node.h',
'compiler/opcodes.cc',
'compiler/opcodes.h',
'compiler/operator-properties.cc',
'compiler/operator-properties.h',
'compiler/operator.cc',
'compiler/operator.h',
'compiler/osr.cc',
'compiler/osr.h',
'compiler/pipeline.cc',
'compiler/pipeline.h',
'compiler/pipeline-statistics.cc',
'compiler/pipeline-statistics.h',
'compiler/raw-machine-assembler.cc',
'compiler/raw-machine-assembler.h',
'compiler/register-allocator.cc',
'compiler/register-allocator.h',
'compiler/register-allocator-verifier.cc',
'compiler/register-allocator-verifier.h',
'compiler/representation-change.cc',
'compiler/representation-change.h',
'compiler/schedule.cc',
'compiler/schedule.h',
'compiler/scheduler.cc',
'compiler/scheduler.h',
'compiler/select-lowering.cc',
'compiler/select-lowering.h',
'compiler/simplified-lowering.cc',
'compiler/simplified-lowering.h',
'compiler/simplified-operator-reducer.cc',
'compiler/simplified-operator-reducer.h',
'compiler/simplified-operator.cc',
'compiler/simplified-operator.h',
'compiler/source-position.cc',
'compiler/source-position.h',
'compiler/state-values-utils.cc',
'compiler/state-values-utils.h',
'compiler/tail-call-optimization.cc',
'compiler/tail-call-optimization.h',
'compiler/type-hint-analyzer.cc',
'compiler/type-hint-analyzer.h',
'compiler/type-hints.cc',
'compiler/type-hints.h',
'compiler/typer.cc',
'compiler/typer.h',
'compiler/value-numbering-reducer.cc',
'compiler/value-numbering-reducer.h',
'compiler/verifier.cc',
'compiler/verifier.h',
'compiler/wasm-compiler.cc',
'compiler/wasm-compiler.h',
'compiler/wasm-linkage.cc',
'compiler/zone-pool.cc',
'compiler/zone-pool.h',
'compiler.cc',
'compiler.h',
'context-measure.cc',
'context-measure.h',
'contexts-inl.h',
'contexts.cc',
'contexts.h',
'conversions-inl.h',
'conversions.cc',
'conversions.h',
'counters.cc',
'counters.h',
'crankshaft/compilation-phase.cc',
'crankshaft/compilation-phase.h',
'crankshaft/hydrogen-alias-analysis.h',
'crankshaft/hydrogen-bce.cc',
'crankshaft/hydrogen-bce.h',
'crankshaft/hydrogen-canonicalize.cc',
'crankshaft/hydrogen-canonicalize.h',
'crankshaft/hydrogen-check-elimination.cc',
'crankshaft/hydrogen-check-elimination.h',
'crankshaft/hydrogen-dce.cc',
'crankshaft/hydrogen-dce.h',
'crankshaft/hydrogen-dehoist.cc',
'crankshaft/hydrogen-dehoist.h',
'crankshaft/hydrogen-environment-liveness.cc',
'crankshaft/hydrogen-environment-liveness.h',
'crankshaft/hydrogen-escape-analysis.cc',
'crankshaft/hydrogen-escape-analysis.h',
'crankshaft/hydrogen-flow-engine.h',
'crankshaft/hydrogen-gvn.cc',
'crankshaft/hydrogen-gvn.h',
'crankshaft/hydrogen-infer-representation.cc',
'crankshaft/hydrogen-infer-representation.h',
'crankshaft/hydrogen-infer-types.cc',
'crankshaft/hydrogen-infer-types.h',
'crankshaft/hydrogen-instructions.cc',
'crankshaft/hydrogen-instructions.h',
'crankshaft/hydrogen-load-elimination.cc',
'crankshaft/hydrogen-load-elimination.h',
'crankshaft/hydrogen-mark-deoptimize.cc',
'crankshaft/hydrogen-mark-deoptimize.h',
'crankshaft/hydrogen-mark-unreachable.cc',
'crankshaft/hydrogen-mark-unreachable.h',
'crankshaft/hydrogen-osr.cc',
'crankshaft/hydrogen-osr.h',
'crankshaft/hydrogen-range-analysis.cc',
'crankshaft/hydrogen-range-analysis.h',
'crankshaft/hydrogen-redundant-phi.cc',
'crankshaft/hydrogen-redundant-phi.h',
'crankshaft/hydrogen-removable-simulates.cc',
'crankshaft/hydrogen-removable-simulates.h',
'crankshaft/hydrogen-representation-changes.cc',
'crankshaft/hydrogen-representation-changes.h',
'crankshaft/hydrogen-sce.cc',
'crankshaft/hydrogen-sce.h',
'crankshaft/hydrogen-store-elimination.cc',
'crankshaft/hydrogen-store-elimination.h',
'crankshaft/hydrogen-types.cc',
'crankshaft/hydrogen-types.h',
'crankshaft/hydrogen-uint32-analysis.cc',
'crankshaft/hydrogen-uint32-analysis.h',
'crankshaft/hydrogen.cc',
'crankshaft/hydrogen.h',
'crankshaft/lithium-allocator-inl.h',
'crankshaft/lithium-allocator.cc',
'crankshaft/lithium-allocator.h',
'crankshaft/lithium-codegen.cc',
'crankshaft/lithium-codegen.h',
'crankshaft/lithium.cc',
'crankshaft/lithium.h',
'crankshaft/lithium-inl.h',
'crankshaft/typing.cc',
'crankshaft/typing.h',
'crankshaft/unique.h',
'date.cc',
'date.h',
'dateparser-inl.h',
'dateparser.cc',
'dateparser.h',
'debug/debug-evaluate.cc',
'debug/debug-evaluate.h',
'debug/debug-frames.cc',
'debug/debug-frames.h',
'debug/debug-scopes.cc',
'debug/debug-scopes.h',
'debug/debug.cc',
'debug/debug.h',
'debug/liveedit.cc',
'debug/liveedit.h',
'deoptimizer.cc',
'deoptimizer.h',
'disasm.h',
'disassembler.cc',
'disassembler.h',
'diy-fp.cc',
'diy-fp.h',
'double.h',
'dtoa.cc',
'dtoa.h',
'effects.h',
'elements-kind.cc',
'elements-kind.h',
'elements.cc',
'elements.h',
'execution.cc',
'execution.h',
'extensions/externalize-string-extension.cc',
'extensions/externalize-string-extension.h',
'extensions/free-buffer-extension.cc',
'extensions/free-buffer-extension.h',
'extensions/gc-extension.cc',
'extensions/gc-extension.h',
'extensions/ignition-statistics-extension.cc',
'extensions/ignition-statistics-extension.h',
'extensions/statistics-extension.cc',
'extensions/statistics-extension.h',
'extensions/trigger-failure-extension.cc',
'extensions/trigger-failure-extension.h',
'external-reference-table.cc',
'external-reference-table.h',
'factory.cc',
'factory.h',
'fast-accessor-assembler.cc',
'fast-accessor-assembler.h',
'fast-dtoa.cc',
'fast-dtoa.h',
'field-index.h',
'field-index-inl.h',
'field-type.cc',
'field-type.h',
'fixed-dtoa.cc',
'fixed-dtoa.h',
'flag-definitions.h',
'flags.cc',
'flags.h',
'frames-inl.h',
'frames.cc',
'frames.h',
'full-codegen/full-codegen.cc',
'full-codegen/full-codegen.h',
'futex-emulation.cc',
'futex-emulation.h',
'gdb-jit.cc',
'gdb-jit.h',
'global-handles.cc',
'global-handles.h',
'globals.h',
'handles-inl.h',
'handles.cc',
'handles.h',
'hashmap.h',
'heap-symbols.h',
'heap/array-buffer-tracker.cc',
'heap/array-buffer-tracker.h',
'heap/memory-reducer.cc',
'heap/memory-reducer.h',
'heap/gc-idle-time-handler.cc',
'heap/gc-idle-time-handler.h',
'heap/gc-tracer.cc',
'heap/gc-tracer.h',
'heap/heap-inl.h',
'heap/heap.cc',
'heap/heap.h',
'heap/incremental-marking-inl.h',
'heap/incremental-marking-job.cc',
'heap/incremental-marking-job.h',
'heap/incremental-marking.cc',
'heap/incremental-marking.h',
'heap/mark-compact-inl.h',
'heap/mark-compact.cc',
'heap/mark-compact.h',
'heap/object-stats.cc',
'heap/object-stats.h',
'heap/objects-visiting-inl.h',
'heap/objects-visiting.cc',
'heap/objects-visiting.h',
'heap/page-parallel-job.h',
'heap/remembered-set.cc',
'heap/remembered-set.h',
'heap/scavenge-job.h',
'heap/scavenge-job.cc',
'heap/scavenger-inl.h',
'heap/scavenger.cc',
'heap/scavenger.h',
'heap/slot-set.h',
'heap/spaces-inl.h',
'heap/spaces.cc',
'heap/spaces.h',
'heap/store-buffer.cc',
'heap/store-buffer.h',
'i18n.cc',
'i18n.h',
'icu_util.cc',
'icu_util.h',
'ic/access-compiler.cc',
'ic/access-compiler.h',
'ic/call-optimization.cc',
'ic/call-optimization.h',
'ic/handler-compiler.cc',
'ic/handler-compiler.h',
'ic/ic-inl.h',
'ic/ic-state.cc',
'ic/ic-state.h',
'ic/ic.cc',
'ic/ic.h',
'ic/ic-compiler.cc',
'ic/ic-compiler.h',
'identity-map.cc',
'identity-map.h',
'interface-descriptors.cc',
'interface-descriptors.h',
'interpreter/bytecodes.cc',
'interpreter/bytecodes.h',
'interpreter/bytecode-array-builder.cc',
'interpreter/bytecode-array-builder.h',
'interpreter/bytecode-array-iterator.cc',
'interpreter/bytecode-array-iterator.h',
'interpreter/bytecode-register-allocator.cc',
'interpreter/bytecode-register-allocator.h',
'interpreter/bytecode-generator.cc',
'interpreter/bytecode-generator.h',
'interpreter/bytecode-traits.h',
'interpreter/constant-array-builder.cc',
'interpreter/constant-array-builder.h',
'interpreter/control-flow-builders.cc',
'interpreter/control-flow-builders.h',
'interpreter/handler-table-builder.cc',
'interpreter/handler-table-builder.h',
'interpreter/interpreter.cc',
'interpreter/interpreter.h',
'interpreter/interpreter-assembler.cc',
'interpreter/interpreter-assembler.h',
'interpreter/interpreter-intrinsics.cc',
'interpreter/interpreter-intrinsics.h',
'interpreter/source-position-table.cc',
'interpreter/source-position-table.h',
'isolate-inl.h',
'isolate.cc',
'isolate.h',
'json-parser.h',
'json-stringifier.h',
'keys.h',
'keys.cc',
'layout-descriptor-inl.h',
'layout-descriptor.cc',
'layout-descriptor.h',
'list-inl.h',
'list.h',
'locked-queue-inl.h',
'locked-queue.h',
'log-inl.h',
'log-utils.cc',
'log-utils.h',
'log.cc',
'log.h',
'lookup.cc',
'lookup.h',
'macro-assembler.h',
'machine-type.cc',
'machine-type.h',
'messages.cc',
'messages.h',
'msan.h',
'objects-body-descriptors-inl.h',
'objects-body-descriptors.h',
'objects-debug.cc',
'objects-inl.h',
'objects-printer.cc',
'objects.cc',
'objects.h',
'optimizing-compile-dispatcher.cc',
'optimizing-compile-dispatcher.h',
'ostreams.cc',
'ostreams.h',
'parsing/expression-classifier.h',
'parsing/func-name-inferrer.cc',
'parsing/func-name-inferrer.h',
'parsing/parameter-initializer-rewriter.cc',
'parsing/parameter-initializer-rewriter.h',
'parsing/parser-base.h',
'parsing/parser.cc',
'parsing/parser.h',
'parsing/pattern-rewriter.cc',
'parsing/preparse-data-format.h',
'parsing/preparse-data.cc',
'parsing/preparse-data.h',
'parsing/preparser.cc',
'parsing/preparser.h',
'parsing/rewriter.cc',
'parsing/rewriter.h',
'parsing/scanner-character-streams.cc',
'parsing/scanner-character-streams.h',
'parsing/scanner.cc',
'parsing/scanner.h',
'parsing/token.cc',
'parsing/token.h',
'pending-compilation-error-handler.cc',
'pending-compilation-error-handler.h',
'perf-jit.cc',
'perf-jit.h',
'profiler/allocation-tracker.cc',
'profiler/allocation-tracker.h',
'profiler/circular-queue-inl.h',
'profiler/circular-queue.h',
'profiler/cpu-profiler-inl.h',
'profiler/cpu-profiler.cc',
'profiler/cpu-profiler.h',
'profiler/heap-profiler.cc',
'profiler/heap-profiler.h',
'profiler/heap-snapshot-generator-inl.h',
'profiler/heap-snapshot-generator.cc',
'profiler/heap-snapshot-generator.h',
'profiler/profile-generator-inl.h',
'profiler/profile-generator.cc',
'profiler/profile-generator.h',
'profiler/sampler.cc',
'profiler/sampler.h',
'profiler/sampling-heap-profiler.cc',
'profiler/sampling-heap-profiler.h',
'profiler/strings-storage.cc',
'profiler/strings-storage.h',
'profiler/unbound-queue-inl.h',
'profiler/unbound-queue.h',
'property-descriptor.cc',
'property-descriptor.h',
'property-details.h',
'property.cc',
'property.h',
'prototype.h',
'regexp/bytecodes-irregexp.h',
'regexp/interpreter-irregexp.cc',
'regexp/interpreter-irregexp.h',
'regexp/jsregexp-inl.h',
'regexp/jsregexp.cc',
'regexp/jsregexp.h',
'regexp/regexp-ast.cc',
'regexp/regexp-ast.h',
'regexp/regexp-macro-assembler-irregexp-inl.h',
'regexp/regexp-macro-assembler-irregexp.cc',
'regexp/regexp-macro-assembler-irregexp.h',
'regexp/regexp-macro-assembler-tracer.cc',
'regexp/regexp-macro-assembler-tracer.h',
'regexp/regexp-macro-assembler.cc',
'regexp/regexp-macro-assembler.h',
'regexp/regexp-parser.cc',
'regexp/regexp-parser.h',
'regexp/regexp-stack.cc',
'regexp/regexp-stack.h',
'register-configuration.cc',
'register-configuration.h',
'runtime-profiler.cc',
'runtime-profiler.h',
'runtime/runtime-array.cc',
'runtime/runtime-atomics.cc',
'runtime/runtime-classes.cc',
'runtime/runtime-collections.cc',
'runtime/runtime-compiler.cc',
'runtime/runtime-date.cc',
'runtime/runtime-debug.cc',
'runtime/runtime-forin.cc',
'runtime/runtime-function.cc',
'runtime/runtime-futex.cc',
'runtime/runtime-generator.cc',
'runtime/runtime-i18n.cc',
'runtime/runtime-internal.cc',
'runtime/runtime-interpreter.cc',
'runtime/runtime-json.cc',
'runtime/runtime-literals.cc',
'runtime/runtime-liveedit.cc',
'runtime/runtime-maths.cc',
'runtime/runtime-numbers.cc',
'runtime/runtime-object.cc',
'runtime/runtime-operators.cc',
'runtime/runtime-proxy.cc',
'runtime/runtime-regexp.cc',
'runtime/runtime-scopes.cc',
'runtime/runtime-simd.cc',
'runtime/runtime-strings.cc',
'runtime/runtime-symbol.cc',
'runtime/runtime-test.cc',
'runtime/runtime-typedarray.cc',
'runtime/runtime-uri.cc',
'runtime/runtime-utils.h',
'runtime/runtime.cc',
'runtime/runtime.h',
'safepoint-table.cc',
'safepoint-table.h',
'signature.h',
'simulator.h',
'small-pointer-list.h',
'snapshot/code-serializer.cc',
'snapshot/code-serializer.h',
'snapshot/deserializer.cc',
'snapshot/deserializer.h',
'snapshot/natives.h',
'snapshot/natives-common.cc',
'snapshot/partial-serializer.cc',
'snapshot/partial-serializer.h',
'snapshot/serializer.cc',
'snapshot/serializer.h',
'snapshot/serializer-common.cc',
'snapshot/serializer-common.h',
'snapshot/snapshot.h',
'snapshot/snapshot-common.cc',
'snapshot/snapshot-source-sink.cc',
'snapshot/snapshot-source-sink.h',
'snapshot/startup-serializer.cc',
'snapshot/startup-serializer.h',
'source-position.h',
'splay-tree.h',
'splay-tree-inl.h',
'startup-data-util.cc',
'startup-data-util.h',
'string-builder.cc',
'string-builder.h',
'string-search.h',
'string-stream.cc',
'string-stream.h',
'strtod.cc',
'strtod.h',
'ic/stub-cache.cc',
'ic/stub-cache.h',
'tracing/trace-event.cc',
'tracing/trace-event.h',
'transitions-inl.h',
'transitions.cc',
'transitions.h',
'type-cache.cc',
'type-cache.h',
'type-feedback-vector-inl.h',
'type-feedback-vector.cc',
'type-feedback-vector.h',
'type-info.cc',
'type-info.h',
'types.cc',
'types.h',
'typing-asm.cc',
'typing-asm.h',
'typing-reset.cc',
'typing-reset.h',
'unicode-inl.h',
'unicode.cc',
'unicode.h',
'unicode-cache-inl.h',
'unicode-cache.h',
'unicode-decoder.cc',
'unicode-decoder.h',
'utils-inl.h',
'utils.cc',
'utils.h',
'v8.cc',
'v8.h',
'v8memory.h',
'v8threads.cc',
'v8threads.h',
'vector.h',
'version.cc',
'version.h',
'vm-state-inl.h',
'vm-state.h',
'wasm/asm-wasm-builder.cc',
'wasm/asm-wasm-builder.h',
'wasm/ast-decoder.cc',
'wasm/ast-decoder.h',
'wasm/decoder.h',
'wasm/encoder.cc',
'wasm/encoder.h',
'wasm/leb-helper.h',
'wasm/module-decoder.cc',
'wasm/module-decoder.h',
'wasm/switch-logic.h',
'wasm/switch-logic.cc',
'wasm/wasm-external-refs.cc',
'wasm/wasm-external-refs.h',
'wasm/wasm-function-name-table.cc',
'wasm/wasm-function-name-table.h',
'wasm/wasm-js.cc',
'wasm/wasm-js.h',
'wasm/wasm-macro-gen.h',
'wasm/wasm-module.cc',
'wasm/wasm-module.h',
'wasm/wasm-opcodes.cc',
'wasm/wasm-opcodes.h',
'wasm/wasm-result.cc',
'wasm/wasm-result.h',
'zone.cc',
'zone.h',
'zone-allocator.h',
'zone-containers.h',
'third_party/fdlibm/fdlibm.cc',
'third_party/fdlibm/fdlibm.h',
],
'conditions': [
['want_separate_host_toolset==1', {
'toolsets': ['host', 'target'],
}, {
'toolsets': ['target'],
}],
['v8_target_arch=="arm"', {
'sources': [ ### gcmole(arch:arm) ###
'arm/assembler-arm-inl.h',
'arm/assembler-arm.cc',
'arm/assembler-arm.h',
'arm/builtins-arm.cc',
'arm/code-stubs-arm.cc',
'arm/code-stubs-arm.h',
'arm/codegen-arm.cc',
'arm/codegen-arm.h',
'arm/constants-arm.h',
'arm/constants-arm.cc',
'arm/cpu-arm.cc',
'arm/deoptimizer-arm.cc',
'arm/disasm-arm.cc',
'arm/frames-arm.cc',
'arm/frames-arm.h',
'arm/interface-descriptors-arm.cc',
'arm/interface-descriptors-arm.h',
'arm/macro-assembler-arm.cc',
'arm/macro-assembler-arm.h',
'arm/simulator-arm.cc',
'arm/simulator-arm.h',
'compiler/arm/code-generator-arm.cc',
'compiler/arm/instruction-codes-arm.h',
'compiler/arm/instruction-scheduler-arm.cc',
'compiler/arm/instruction-selector-arm.cc',
'crankshaft/arm/lithium-arm.cc',
'crankshaft/arm/lithium-arm.h',
'crankshaft/arm/lithium-codegen-arm.cc',
'crankshaft/arm/lithium-codegen-arm.h',
'crankshaft/arm/lithium-gap-resolver-arm.cc',
'crankshaft/arm/lithium-gap-resolver-arm.h',
'debug/arm/debug-arm.cc',
'full-codegen/arm/full-codegen-arm.cc',
'ic/arm/access-compiler-arm.cc',
'ic/arm/handler-compiler-arm.cc',
'ic/arm/ic-arm.cc',
'ic/arm/ic-compiler-arm.cc',
'ic/arm/stub-cache-arm.cc',
'regexp/arm/regexp-macro-assembler-arm.cc',
'regexp/arm/regexp-macro-assembler-arm.h',
],
}],
['v8_target_arch=="arm64"', {
'sources': [ ### gcmole(arch:arm64) ###
'arm64/assembler-arm64.cc',
'arm64/assembler-arm64.h',
'arm64/assembler-arm64-inl.h',
'arm64/builtins-arm64.cc',
'arm64/codegen-arm64.cc',
'arm64/codegen-arm64.h',
'arm64/code-stubs-arm64.cc',
'arm64/code-stubs-arm64.h',
'arm64/constants-arm64.h',
'arm64/cpu-arm64.cc',
'arm64/decoder-arm64.cc',
'arm64/decoder-arm64.h',
'arm64/decoder-arm64-inl.h',
'arm64/deoptimizer-arm64.cc',
'arm64/disasm-arm64.cc',
'arm64/disasm-arm64.h',
'arm64/frames-arm64.cc',
'arm64/frames-arm64.h',
'arm64/instructions-arm64.cc',
'arm64/instructions-arm64.h',
'arm64/instrument-arm64.cc',
'arm64/instrument-arm64.h',
'arm64/interface-descriptors-arm64.cc',
'arm64/interface-descriptors-arm64.h',
'arm64/macro-assembler-arm64.cc',
'arm64/macro-assembler-arm64.h',
'arm64/macro-assembler-arm64-inl.h',
'arm64/simulator-arm64.cc',
'arm64/simulator-arm64.h',
'arm64/utils-arm64.cc',
'arm64/utils-arm64.h',
'compiler/arm64/code-generator-arm64.cc',
'compiler/arm64/instruction-codes-arm64.h',
'compiler/arm64/instruction-scheduler-arm64.cc',
'compiler/arm64/instruction-selector-arm64.cc',
'crankshaft/arm64/delayed-masm-arm64.cc',
'crankshaft/arm64/delayed-masm-arm64.h',
'crankshaft/arm64/delayed-masm-arm64-inl.h',
'crankshaft/arm64/lithium-arm64.cc',
'crankshaft/arm64/lithium-arm64.h',
'crankshaft/arm64/lithium-codegen-arm64.cc',
'crankshaft/arm64/lithium-codegen-arm64.h',
'crankshaft/arm64/lithium-gap-resolver-arm64.cc',
'crankshaft/arm64/lithium-gap-resolver-arm64.h',
'debug/arm64/debug-arm64.cc',
'full-codegen/arm64/full-codegen-arm64.cc',
'ic/arm64/access-compiler-arm64.cc',
'ic/arm64/handler-compiler-arm64.cc',
'ic/arm64/ic-arm64.cc',
'ic/arm64/ic-compiler-arm64.cc',
'ic/arm64/stub-cache-arm64.cc',
'regexp/arm64/regexp-macro-assembler-arm64.cc',
'regexp/arm64/regexp-macro-assembler-arm64.h',
],
}],
['v8_target_arch=="ia32"', {
'sources': [ ### gcmole(arch:ia32) ###
'ia32/assembler-ia32-inl.h',
'ia32/assembler-ia32.cc',
'ia32/assembler-ia32.h',
'ia32/builtins-ia32.cc',
'ia32/code-stubs-ia32.cc',
'ia32/code-stubs-ia32.h',
'ia32/codegen-ia32.cc',
'ia32/codegen-ia32.h',
'ia32/cpu-ia32.cc',
'ia32/deoptimizer-ia32.cc',
'ia32/disasm-ia32.cc',
'ia32/frames-ia32.cc',
'ia32/frames-ia32.h',
'ia32/interface-descriptors-ia32.cc',
'ia32/macro-assembler-ia32.cc',
'ia32/macro-assembler-ia32.h',
'compiler/ia32/code-generator-ia32.cc',
'compiler/ia32/instruction-codes-ia32.h',
'compiler/ia32/instruction-scheduler-ia32.cc',
'compiler/ia32/instruction-selector-ia32.cc',
'crankshaft/ia32/lithium-codegen-ia32.cc',
'crankshaft/ia32/lithium-codegen-ia32.h',
'crankshaft/ia32/lithium-gap-resolver-ia32.cc',
'crankshaft/ia32/lithium-gap-resolver-ia32.h',
'crankshaft/ia32/lithium-ia32.cc',
'crankshaft/ia32/lithium-ia32.h',
'debug/ia32/debug-ia32.cc',
'full-codegen/ia32/full-codegen-ia32.cc',
'ic/ia32/access-compiler-ia32.cc',
'ic/ia32/handler-compiler-ia32.cc',
'ic/ia32/ic-ia32.cc',
'ic/ia32/ic-compiler-ia32.cc',
'ic/ia32/stub-cache-ia32.cc',
'regexp/ia32/regexp-macro-assembler-ia32.cc',
'regexp/ia32/regexp-macro-assembler-ia32.h',
],
}],
['v8_target_arch=="x87"', {
'sources': [ ### gcmole(arch:x87) ###
'x87/assembler-x87-inl.h',
'x87/assembler-x87.cc',
'x87/assembler-x87.h',
'x87/builtins-x87.cc',
'x87/code-stubs-x87.cc',
'x87/code-stubs-x87.h',
'x87/codegen-x87.cc',
'x87/codegen-x87.h',
'x87/cpu-x87.cc',
'x87/deoptimizer-x87.cc',
'x87/disasm-x87.cc',
'x87/frames-x87.cc',
'x87/frames-x87.h',
'x87/interface-descriptors-x87.cc',
'x87/macro-assembler-x87.cc',
'x87/macro-assembler-x87.h',
'compiler/x87/code-generator-x87.cc',
'compiler/x87/instruction-codes-x87.h',
'compiler/x87/instruction-scheduler-x87.cc',
'compiler/x87/instruction-selector-x87.cc',
'crankshaft/x87/lithium-codegen-x87.cc',
'crankshaft/x87/lithium-codegen-x87.h',
'crankshaft/x87/lithium-gap-resolver-x87.cc',
'crankshaft/x87/lithium-gap-resolver-x87.h',
'crankshaft/x87/lithium-x87.cc',
'crankshaft/x87/lithium-x87.h',
'debug/x87/debug-x87.cc',
'full-codegen/x87/full-codegen-x87.cc',
'ic/x87/access-compiler-x87.cc',
'ic/x87/handler-compiler-x87.cc',
'ic/x87/ic-x87.cc',
'ic/x87/ic-compiler-x87.cc',
'ic/x87/stub-cache-x87.cc',
'regexp/x87/regexp-macro-assembler-x87.cc',
'regexp/x87/regexp-macro-assembler-x87.h',
],
}],
['v8_target_arch=="mips" or v8_target_arch=="mipsel"', {
'sources': [ ### gcmole(arch:mipsel) ###
'mips/assembler-mips.cc',
'mips/assembler-mips.h',
'mips/assembler-mips-inl.h',
'mips/builtins-mips.cc',
'mips/codegen-mips.cc',
'mips/codegen-mips.h',
'mips/code-stubs-mips.cc',
'mips/code-stubs-mips.h',
'mips/constants-mips.cc',
'mips/constants-mips.h',
'mips/cpu-mips.cc',
'mips/deoptimizer-mips.cc',
'mips/disasm-mips.cc',
'mips/frames-mips.cc',
'mips/frames-mips.h',
'mips/interface-descriptors-mips.cc',
'mips/macro-assembler-mips.cc',
'mips/macro-assembler-mips.h',
'mips/simulator-mips.cc',
'mips/simulator-mips.h',
'compiler/mips/code-generator-mips.cc',
'compiler/mips/instruction-codes-mips.h',
'compiler/mips/instruction-scheduler-mips.cc',
'compiler/mips/instruction-selector-mips.cc',
'crankshaft/mips/lithium-codegen-mips.cc',
'crankshaft/mips/lithium-codegen-mips.h',
'crankshaft/mips/lithium-gap-resolver-mips.cc',
'crankshaft/mips/lithium-gap-resolver-mips.h',
'crankshaft/mips/lithium-mips.cc',
'crankshaft/mips/lithium-mips.h',
'full-codegen/mips/full-codegen-mips.cc',
'debug/mips/debug-mips.cc',
'ic/mips/access-compiler-mips.cc',
'ic/mips/handler-compiler-mips.cc',
'ic/mips/ic-mips.cc',
'ic/mips/ic-compiler-mips.cc',
'ic/mips/stub-cache-mips.cc',
'regexp/mips/regexp-macro-assembler-mips.cc',
'regexp/mips/regexp-macro-assembler-mips.h',
],
}],
['v8_target_arch=="mips64" or v8_target_arch=="mips64el"', {
'sources': [ ### gcmole(arch:mips64el) ###
'mips64/assembler-mips64.cc',
'mips64/assembler-mips64.h',
'mips64/assembler-mips64-inl.h',
'mips64/builtins-mips64.cc',
'mips64/codegen-mips64.cc',
'mips64/codegen-mips64.h',
'mips64/code-stubs-mips64.cc',
'mips64/code-stubs-mips64.h',
'mips64/constants-mips64.cc',
'mips64/constants-mips64.h',
'mips64/cpu-mips64.cc',
'mips64/deoptimizer-mips64.cc',
'mips64/disasm-mips64.cc',
'mips64/frames-mips64.cc',
'mips64/frames-mips64.h',
'mips64/interface-descriptors-mips64.cc',
'mips64/macro-assembler-mips64.cc',
'mips64/macro-assembler-mips64.h',
'mips64/simulator-mips64.cc',
'mips64/simulator-mips64.h',
'compiler/mips64/code-generator-mips64.cc',
'compiler/mips64/instruction-codes-mips64.h',
'compiler/mips64/instruction-scheduler-mips64.cc',
'compiler/mips64/instruction-selector-mips64.cc',
'crankshaft/mips64/lithium-codegen-mips64.cc',
'crankshaft/mips64/lithium-codegen-mips64.h',
'crankshaft/mips64/lithium-gap-resolver-mips64.cc',
'crankshaft/mips64/lithium-gap-resolver-mips64.h',
'crankshaft/mips64/lithium-mips64.cc',
'crankshaft/mips64/lithium-mips64.h',
'debug/mips64/debug-mips64.cc',
'full-codegen/mips64/full-codegen-mips64.cc',
'ic/mips64/access-compiler-mips64.cc',
'ic/mips64/handler-compiler-mips64.cc',
'ic/mips64/ic-mips64.cc',
'ic/mips64/ic-compiler-mips64.cc',
'ic/mips64/stub-cache-mips64.cc',
'regexp/mips64/regexp-macro-assembler-mips64.cc',
'regexp/mips64/regexp-macro-assembler-mips64.h',
],
}],
['v8_target_arch=="x64" or v8_target_arch=="x32"', {
'sources': [ ### gcmole(arch:x64) ###
'crankshaft/x64/lithium-codegen-x64.cc',
'crankshaft/x64/lithium-codegen-x64.h',
'crankshaft/x64/lithium-gap-resolver-x64.cc',
'crankshaft/x64/lithium-gap-resolver-x64.h',
'crankshaft/x64/lithium-x64.cc',
'crankshaft/x64/lithium-x64.h',
'x64/assembler-x64-inl.h',
'x64/assembler-x64.cc',
'x64/assembler-x64.h',
'x64/builtins-x64.cc',
'x64/code-stubs-x64.cc',
'x64/code-stubs-x64.h',
'x64/codegen-x64.cc',
'x64/codegen-x64.h',
'x64/cpu-x64.cc',
'x64/deoptimizer-x64.cc',
'x64/disasm-x64.cc',
'x64/frames-x64.cc',
'x64/frames-x64.h',
'x64/interface-descriptors-x64.cc',
'x64/macro-assembler-x64.cc',
'x64/macro-assembler-x64.h',
'debug/x64/debug-x64.cc',
'full-codegen/x64/full-codegen-x64.cc',
'ic/x64/access-compiler-x64.cc',
'ic/x64/handler-compiler-x64.cc',
'ic/x64/ic-x64.cc',
'ic/x64/ic-compiler-x64.cc',
'ic/x64/stub-cache-x64.cc',
'regexp/x64/regexp-macro-assembler-x64.cc',
'regexp/x64/regexp-macro-assembler-x64.h',
],
}],
['v8_target_arch=="x64"', {
'sources': [
'compiler/x64/code-generator-x64.cc',
'compiler/x64/instruction-codes-x64.h',
'compiler/x64/instruction-scheduler-x64.cc',
'compiler/x64/instruction-selector-x64.cc',
],
}],
['v8_target_arch=="ppc" or v8_target_arch=="ppc64"', {
'sources': [ ### gcmole(arch:ppc) ###
'compiler/ppc/code-generator-ppc.cc',
'compiler/ppc/instruction-codes-ppc.h',
'compiler/ppc/instruction-scheduler-ppc.cc',
'compiler/ppc/instruction-selector-ppc.cc',
'crankshaft/ppc/lithium-ppc.cc',
'crankshaft/ppc/lithium-ppc.h',
'crankshaft/ppc/lithium-codegen-ppc.cc',
'crankshaft/ppc/lithium-codegen-ppc.h',
'crankshaft/ppc/lithium-gap-resolver-ppc.cc',
'crankshaft/ppc/lithium-gap-resolver-ppc.h',
'debug/ppc/debug-ppc.cc',
'full-codegen/ppc/full-codegen-ppc.cc',
'ic/ppc/access-compiler-ppc.cc',
'ic/ppc/handler-compiler-ppc.cc',
'ic/ppc/ic-ppc.cc',
'ic/ppc/ic-compiler-ppc.cc',
'ic/ppc/stub-cache-ppc.cc',
'ppc/assembler-ppc-inl.h',
'ppc/assembler-ppc.cc',
'ppc/assembler-ppc.h',
'ppc/builtins-ppc.cc',
'ppc/code-stubs-ppc.cc',
'ppc/code-stubs-ppc.h',
'ppc/codegen-ppc.cc',
'ppc/codegen-ppc.h',
'ppc/constants-ppc.h',
'ppc/constants-ppc.cc',
'ppc/cpu-ppc.cc',
'ppc/deoptimizer-ppc.cc',
'ppc/disasm-ppc.cc',
'ppc/frames-ppc.cc',
'ppc/frames-ppc.h',
'ppc/interface-descriptors-ppc.cc',
'ppc/macro-assembler-ppc.cc',
'ppc/macro-assembler-ppc.h',
'ppc/simulator-ppc.cc',
'ppc/simulator-ppc.h',
'regexp/ppc/regexp-macro-assembler-ppc.cc',
'regexp/ppc/regexp-macro-assembler-ppc.h',
],
}],
['v8_target_arch=="s390" or v8_target_arch=="s390x"', {
'sources': [ ### gcmole(arch:s390) ###
'compiler/s390/code-generator-s390.cc',
'compiler/s390/instruction-codes-s390.h',
'compiler/s390/instruction-scheduler-s390.cc',
'compiler/s390/instruction-selector-s390.cc',
'crankshaft/s390/lithium-codegen-s390.cc',
'crankshaft/s390/lithium-codegen-s390.h',
'crankshaft/s390/lithium-gap-resolver-s390.cc',
'crankshaft/s390/lithium-gap-resolver-s390.h',
'crankshaft/s390/lithium-s390.cc',
'crankshaft/s390/lithium-s390.h',
'debug/s390/debug-s390.cc',
'full-codegen/s390/full-codegen-s390.cc',
'ic/s390/access-compiler-s390.cc',
'ic/s390/handler-compiler-s390.cc',
'ic/s390/ic-compiler-s390.cc',
'ic/s390/ic-s390.cc',
'ic/s390/stub-cache-s390.cc',
'regexp/s390/regexp-macro-assembler-s390.cc',
'regexp/s390/regexp-macro-assembler-s390.h',
's390/assembler-s390.cc',
's390/assembler-s390.h',
's390/assembler-s390-inl.h',
's390/builtins-s390.cc',
's390/codegen-s390.cc',
's390/codegen-s390.h',
's390/code-stubs-s390.cc',
's390/code-stubs-s390.h',
's390/constants-s390.cc',
's390/constants-s390.h',
's390/cpu-s390.cc',
's390/deoptimizer-s390.cc',
's390/disasm-s390.cc',
's390/frames-s390.cc',
's390/frames-s390.h',
's390/interface-descriptors-s390.cc',
's390/macro-assembler-s390.cc',
's390/macro-assembler-s390.h',
's390/simulator-s390.cc',
's390/simulator-s390.h',
],
}],
['OS=="win"', {
'variables': {
'gyp_generators': '<!(echo $GYP_GENERATORS)',
},
'msvs_disabled_warnings': [4351, 4355, 4800],
# When building Official, the .lib is too large and exceeds the 2G
# limit. This breaks it into multiple pieces to avoid the limit.
# See http://crbug.com/485155.
'msvs_shard': 4,
}],
['component=="shared_library"', {
'defines': [
'BUILDING_V8_SHARED',
'V8_SHARED',
],
}],
['v8_postmortem_support=="true"', {
'sources': [
'<(SHARED_INTERMEDIATE_DIR)/debug-support.cc',
]
}],
['v8_enable_i18n_support==1', {
'dependencies': [
'<(icu_gyp_path):icui18n',
'<(icu_gyp_path):icuuc',
]
}, { # v8_enable_i18n_support==0
'sources!': [
'i18n.cc',
'i18n.h',
],
}],
['OS=="win" and v8_enable_i18n_support==1', {
'dependencies': [
'<(icu_gyp_path):icudata',
],
}],
['icu_use_data_file_flag==1', {
'defines': ['ICU_UTIL_DATA_IMPL=ICU_UTIL_DATA_FILE'],
}, { # else icu_use_data_file_flag !=1
'conditions': [
['OS=="win"', {
'defines': ['ICU_UTIL_DATA_IMPL=ICU_UTIL_DATA_SHARED'],
}, {
'defines': ['ICU_UTIL_DATA_IMPL=ICU_UTIL_DATA_STATIC'],
}],
],
}],
],
},
{
'target_name': 'v8_libbase',
'type': 'static_library',
'variables': {
'optimize': 'max',
},
'include_dirs+': [
'..',
],
'sources': [
'base/accounting-allocator.cc',
'base/accounting-allocator.h',
'base/adapters.h',
'base/atomicops.h',
'base/atomicops_internals_arm64_gcc.h',
'base/atomicops_internals_arm_gcc.h',
'base/atomicops_internals_atomicword_compat.h',
'base/atomicops_internals_mac.h',
'base/atomicops_internals_mips_gcc.h',
'base/atomicops_internals_mips64_gcc.h',
'base/atomicops_internals_portable.h',
'base/atomicops_internals_ppc_gcc.h',
'base/atomicops_internals_s390_gcc.h',
'base/atomicops_internals_tsan.h',
'base/atomicops_internals_x86_gcc.cc',
'base/atomicops_internals_x86_gcc.h',
'base/atomicops_internals_x86_msvc.h',
'base/bits.cc',
'base/bits.h',
'base/build_config.h',
'base/compiler-specific.h',
'base/cpu.cc',
'base/cpu.h',
'base/division-by-constant.cc',
'base/division-by-constant.h',
'base/flags.h',
'base/format-macros.h',
'base/functional.cc',
'base/functional.h',
'base/iterator.h',
'base/lazy-instance.h',
'base/logging.cc',
'base/logging.h',
'base/macros.h',
'base/once.cc',
'base/once.h',
'base/platform/elapsed-timer.h',
'base/platform/time.cc',
'base/platform/time.h',
'base/platform/condition-variable.cc',
'base/platform/condition-variable.h',
'base/platform/mutex.cc',
'base/platform/mutex.h',
'base/platform/platform.h',
'base/platform/semaphore.cc',
'base/platform/semaphore.h',
'base/safe_conversions.h',
'base/safe_conversions_impl.h',
'base/safe_math.h',
'base/safe_math_impl.h',
'base/smart-pointers.h',
'base/sys-info.cc',
'base/sys-info.h',
'base/utils/random-number-generator.cc',
'base/utils/random-number-generator.h',
],
'conditions': [
['want_separate_host_toolset==1', {
'toolsets': ['host', 'target'],
}, {
'toolsets': ['target'],
}],
['OS=="linux"', {
'conditions': [
['nacl_target_arch=="none"', {
'link_settings': {
'libraries': [
'-ldl',
'-lrt'
],
},
}, {
'defines': [
'V8_LIBRT_NOT_AVAILABLE=1',
],
}],
],
'sources': [
'base/platform/platform-linux.cc',
'base/platform/platform-posix.cc'
],
}
],
['OS=="android"', {
'sources': [
'base/platform/platform-posix.cc'
],
'link_settings': {
'target_conditions': [
['_toolset=="host" and host_os!="mac"', {
# Only include libdl and librt on host builds because they
# are included by default on Android target builds, and we
# don't want to re-include them here since this will change
# library order and break (see crbug.com/469973).
# These libraries do not exist on Mac hosted builds.
'libraries': [
'-ldl',
'-lrt'
]
}]
]
},
'conditions': [
['host_os=="mac"', {
'target_conditions': [
['_toolset=="host"', {
'sources': [
'base/platform/platform-macos.cc'
]
}, {
'sources': [
'base/platform/platform-linux.cc'
]
}],
],
}, {
'sources': [
'base/platform/platform-linux.cc'
]
}],
],
},
],
['OS=="qnx"', {
'link_settings': {
'target_conditions': [
['_toolset=="host" and host_os=="linux"', {
'libraries': [
'-lrt'
],
}],
['_toolset=="target"', {
'libraries': [
'-lbacktrace'
],
}],
],
},
'sources': [
'base/platform/platform-posix.cc',
'base/qnx-math.h',
],
'target_conditions': [
['_toolset=="host" and host_os=="linux"', {
'sources': [
'base/platform/platform-linux.cc'
],
}],
['_toolset=="host" and host_os=="mac"', {
'sources': [
'base/platform/platform-macos.cc'
],
}],
['_toolset=="target"', {
'sources': [
'base/platform/platform-qnx.cc'
],
}],
],
},
],
['OS=="freebsd"', {
'link_settings': {
'libraries': [
'-L/usr/local/lib -lexecinfo',
]},
'sources': [
'base/platform/platform-freebsd.cc',
'base/platform/platform-posix.cc'
],
}
],
['OS=="openbsd"', {
'link_settings': {
'libraries': [
'-L/usr/local/lib -lexecinfo',
]},
'sources': [
'base/platform/platform-openbsd.cc',
'base/platform/platform-posix.cc'
],
}
],
['OS=="netbsd"', {
'link_settings': {
'libraries': [
'-L/usr/pkg/lib -Wl,-R/usr/pkg/lib -lexecinfo',
]},
'sources': [
'base/platform/platform-openbsd.cc',
'base/platform/platform-posix.cc'
],
}
],
['OS=="aix"', {
'sources': [
'base/platform/platform-aix.cc',
'base/platform/platform-posix.cc'
]},
],
['OS=="solaris"', {
'link_settings': {
'libraries': [
'-lnsl -lrt',
]},
'sources': [
'base/platform/platform-solaris.cc',
'base/platform/platform-posix.cc'
],
}
],
['OS=="mac"', {
'sources': [
'base/platform/platform-macos.cc',
'base/platform/platform-posix.cc'
]},
],
['OS=="win"', {
'defines': [
'_CRT_RAND_S' # for rand_s()
],
'variables': {
'gyp_generators': '<!(echo $GYP_GENERATORS)',
},
'conditions': [
['gyp_generators=="make"', {
'variables': {
'build_env': '<!(uname -o)',
},
'conditions': [
['build_env=="Cygwin"', {
'sources': [
'base/platform/platform-cygwin.cc',
'base/platform/platform-posix.cc'
],
}, {
'sources': [
'base/platform/platform-win32.cc',
'base/win32-headers.h',
],
}],
],
'link_settings': {
'libraries': [ '-lwinmm', '-lws2_32' ],
},
}, {
'sources': [
'base/platform/platform-win32.cc',
'base/win32-headers.h',
],
'msvs_disabled_warnings': [4351, 4355, 4800],
'link_settings': {
'libraries': [ '-lwinmm.lib', '-lws2_32.lib' ],
},
}],
],
}],
],
},
{
'target_name': 'v8_libplatform',
'type': 'static_library',
'variables': {
'optimize': 'max',
},
'dependencies': [
'v8_libbase',
],
'include_dirs+': [
'..',
],
'sources': [
'../include/libplatform/libplatform.h',
'libplatform/default-platform.cc',
'libplatform/default-platform.h',
'libplatform/task-queue.cc',
'libplatform/task-queue.h',
'libplatform/worker-thread.cc',
'libplatform/worker-thread.h',
],
'conditions': [
['want_separate_host_toolset==1', {
'toolsets': ['host', 'target'],
}, {
'toolsets': ['target'],
}],
],
'direct_dependent_settings': {
'include_dirs': [
'../include',
],
},
},
{
'target_name': 'natives_blob',
'type': 'none',
'conditions': [
[ 'v8_use_external_startup_data==1', {
'conditions': [
['want_separate_host_toolset==1', {
'dependencies': ['js2c#host'],
}, {
'dependencies': ['js2c'],
}],
],
'actions': [{
'action_name': 'concatenate_natives_blob',
'inputs': [
'../tools/concatenate-files.py',
'<(SHARED_INTERMEDIATE_DIR)/libraries.bin',
'<(SHARED_INTERMEDIATE_DIR)/libraries-experimental.bin',
'<(SHARED_INTERMEDIATE_DIR)/libraries-extras.bin',
'<(SHARED_INTERMEDIATE_DIR)/libraries-experimental-extras.bin',
],
'conditions': [
['want_separate_host_toolset==1', {
'target_conditions': [
['_toolset=="host"', {
'outputs': [
'<(PRODUCT_DIR)/natives_blob_host.bin',
],
'action': [
'python', '<@(_inputs)', '<(PRODUCT_DIR)/natives_blob_host.bin'
],
}, {
'outputs': [
'<(PRODUCT_DIR)/natives_blob.bin',
],
'action': [
'python', '<@(_inputs)', '<(PRODUCT_DIR)/natives_blob.bin'
],
}],
],
}, {
'outputs': [
'<(PRODUCT_DIR)/natives_blob.bin',
],
'action': [
'python', '<@(_inputs)', '<(PRODUCT_DIR)/natives_blob.bin'
],
}],
],
}],
}],
['want_separate_host_toolset==1', {
'toolsets': ['host', 'target'],
}, {
'toolsets': ['target'],
}],
]
},
{
'target_name': 'js2c',
'type': 'none',
'conditions': [
['want_separate_host_toolset==1', {
'toolsets': ['host'],
}, {
'toolsets': ['target'],
}],
['v8_enable_i18n_support==1', {
'variables': {
'i18n_library_files': [
'js/i18n.js',
],
},
}, {
'variables': {
'i18n_library_files': [],
},
}],
],
'variables': {
'library_files': [
'js/macros.py',
'messages.h',
'js/prologue.js',
'js/runtime.js',
'js/v8natives.js',
'js/symbol.js',
'js/array.js',
'js/string.js',
'js/uri.js',
'js/math.js',
'third_party/fdlibm/fdlibm.js',
'js/regexp.js',
'js/arraybuffer.js',
'js/typedarray.js',
'js/iterator-prototype.js',
'js/collection.js',
'js/weak-collection.js',
'js/collection-iterator.js',
'js/promise.js',
'js/messages.js',
'js/json.js',
'js/array-iterator.js',
'js/string-iterator.js',
'js/templates.js',
'js/spread.js',
'js/proxy.js',
'debug/mirrors.js',
'debug/debug.js',
'debug/liveedit.js',
],
'experimental_library_files': [
'js/macros.py',
'messages.h',
'js/harmony-atomics.js',
'js/harmony-regexp-exec.js',
'js/harmony-sharedarraybuffer.js',
'js/harmony-simd.js',
'js/harmony-species.js',
'js/harmony-unicode-regexps.js',
'js/harmony-string-padding.js',
'js/promise-extra.js',
],
'libraries_bin_file': '<(SHARED_INTERMEDIATE_DIR)/libraries.bin',
'libraries_experimental_bin_file': '<(SHARED_INTERMEDIATE_DIR)/libraries-experimental.bin',
'libraries_extras_bin_file': '<(SHARED_INTERMEDIATE_DIR)/libraries-extras.bin',
'libraries_experimental_extras_bin_file': '<(SHARED_INTERMEDIATE_DIR)/libraries-experimental-extras.bin',
},
'actions': [
{
'action_name': 'js2c',
'inputs': [
'../tools/js2c.py',
'<@(library_files)',
'<@(i18n_library_files)'
],
'outputs': ['<(SHARED_INTERMEDIATE_DIR)/libraries.cc'],
'action': [
'python',
'../tools/js2c.py',
'<(SHARED_INTERMEDIATE_DIR)/libraries.cc',
'CORE',
'<@(library_files)',
'<@(i18n_library_files)'
],
},
{
'action_name': 'js2c_bin',
'inputs': [
'../tools/js2c.py',
'<@(library_files)',
'<@(i18n_library_files)'
],
'outputs': ['<@(libraries_bin_file)'],
'action': [
'python',
'../tools/js2c.py',
'<(SHARED_INTERMEDIATE_DIR)/libraries.cc',
'CORE',
'<@(library_files)',
'<@(i18n_library_files)',
'--startup_blob', '<@(libraries_bin_file)',
'--nojs',
],
},
{
'action_name': 'js2c_experimental',
'inputs': [
'../tools/js2c.py',
'<@(experimental_library_files)',
],
'outputs': ['<(SHARED_INTERMEDIATE_DIR)/experimental-libraries.cc'],
'action': [
'python',
'../tools/js2c.py',
'<(SHARED_INTERMEDIATE_DIR)/experimental-libraries.cc',
'EXPERIMENTAL',
'<@(experimental_library_files)'
],
},
{
'action_name': 'js2c_experimental_bin',
'inputs': [
'../tools/js2c.py',
'<@(experimental_library_files)',
],
'outputs': ['<@(libraries_experimental_bin_file)'],
'action': [
'python',
'../tools/js2c.py',
'<(SHARED_INTERMEDIATE_DIR)/experimental-libraries.cc',
'EXPERIMENTAL',
'<@(experimental_library_files)',
'--startup_blob', '<@(libraries_experimental_bin_file)',
'--nojs',
],
},
{
'action_name': 'js2c_extras',
'inputs': [
'../tools/js2c.py',
'<@(v8_extra_library_files)',
],
'outputs': ['<(SHARED_INTERMEDIATE_DIR)/extras-libraries.cc'],
'action': [
'python',
'../tools/js2c.py',
'<(SHARED_INTERMEDIATE_DIR)/extras-libraries.cc',
'EXTRAS',
'<@(v8_extra_library_files)',
],
},
{
'action_name': 'js2c_extras_bin',
'inputs': [
'../tools/js2c.py',
'<@(v8_extra_library_files)',
],
'outputs': ['<@(libraries_extras_bin_file)'],
'action': [
'python',
'../tools/js2c.py',
'<(SHARED_INTERMEDIATE_DIR)/extras-libraries.cc',
'EXTRAS',
'<@(v8_extra_library_files)',
'--startup_blob', '<@(libraries_extras_bin_file)',
'--nojs',
],
},
{
'action_name': 'js2c_experimental_extras',
'inputs': [
'../tools/js2c.py',
'<@(v8_experimental_extra_library_files)',
],
'outputs': [
'<(SHARED_INTERMEDIATE_DIR)/experimental-extras-libraries.cc',
],
'action': [
'python',
'../tools/js2c.py',
'<(SHARED_INTERMEDIATE_DIR)/experimental-extras-libraries.cc',
'EXPERIMENTAL_EXTRAS',
'<@(v8_experimental_extra_library_files)',
],
},
{
'action_name': 'js2c_experimental_extras_bin',
'inputs': [
'../tools/js2c.py',
'<@(v8_experimental_extra_library_files)',
],
'outputs': ['<@(libraries_experimental_extras_bin_file)'],
'action': [
'python',
'../tools/js2c.py',
'<(SHARED_INTERMEDIATE_DIR)/experimental-extras-libraries.cc',
'EXPERIMENTAL_EXTRAS',
'<@(v8_experimental_extra_library_files)',
'--startup_blob', '<@(libraries_experimental_extras_bin_file)',
'--nojs',
],
},
],
},
{
'target_name': 'postmortem-metadata',
'type': 'none',
'variables': {
'heapobject_files': [
'objects.h',
'objects-inl.h',
],
},
'actions': [
{
'action_name': 'gen-postmortem-metadata',
'inputs': [
'../tools/gen-postmortem-metadata.py',
'<@(heapobject_files)',
],
'outputs': [
'<(SHARED_INTERMEDIATE_DIR)/debug-support.cc',
],
'action': [
'python',
'../tools/gen-postmortem-metadata.py',
'<@(_outputs)',
'<@(heapobject_files)'
]
}
]
},
{
'target_name': 'mksnapshot',
'type': 'executable',
'dependencies': ['v8_base', 'v8_nosnapshot', 'v8_libplatform'],
'include_dirs+': [
'..',
],
'sources': [
'snapshot/mksnapshot.cc',
],
'conditions': [
['v8_enable_i18n_support==1', {
'dependencies': [
'<(icu_gyp_path):icui18n',
'<(icu_gyp_path):icuuc',
]
}],
['want_separate_host_toolset==1', {
'toolsets': ['host'],
}, {
'toolsets': ['target'],
}],
],
},
],
}
|
[
"commit-bot@chromium.org"
] |
commit-bot@chromium.org
|
399ed94f93338a15edd22605ebee69e2a38d265c
|
4ff6e4c835eaad152e79d1e8f4477f3641daadcc
|
/scripts/lbm/k_torr.py
|
da62aa3698818a05f636799fb0edfd0a577dfefe
|
[] |
no_license
|
jtvanlew/vanlew-ucla-phd-dissertation
|
eb00d164127a495d198f45826353b6335991974a
|
6d99f803a6fb772fa8c9da0fdea18c77919f2bd3
|
refs/heads/master
| 2021-04-30T22:39:47.597365
| 2016-08-30T23:15:24
| 2016-08-30T23:15:24
| 27,930,030
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 124
|
py
|
# Effective thermal conductivity of a packed bed (Torquato-style two-point
# bound), evaluated for a single solid/fluid pair and printed to stdout.
ks = 2.4    # solid-phase thermal conductivity
kf = 0.34   # fluid-phase thermal conductivity
kappa = ks / kf        # conductivity contrast
phi = 0.64             # solid packing fraction
# assumes phi**0.5 enters as the structural parameter -- TODO confirm source
sqrt_phi = phi ** 0.5
k_torr = kf * (kappa - 1) * (phi / (sqrt_phi + kappa * (1 - sqrt_phi)))
print(k_torr)
|
[
"jonvanlew@gmail.com"
] |
jonvanlew@gmail.com
|
7216bffe4596549755b96a71080cb7e480e83c65
|
1f60ca0673ea1b74dd9a74862e023bf0ac705e08
|
/evaluations/control_model.py
|
d4c7f0c35c0c4a74d986090b191cc13d9d3c0bb6
|
[] |
no_license
|
codeslord/supervise-thyself
|
17c9e00ea71a3d7a90a73914a9098f4b504674dd
|
7c40f1ad9e1425c5f85c2aa123cc38095b61e0ce
|
refs/heads/master
| 2020-07-22T18:50:25.178724
| 2019-09-05T00:05:01
| 2019-09-05T00:05:01
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,620
|
py
|
import torch
from torch import nn
import cma
import gym
import numpy as np
class ControlModel(nn.Module):
    """Linear policy head on top of a (frozen) feature encoder.

    Maps an encoder embedding to ``num_actions`` raw scores and selects the
    argmax as a discrete action.  When ``parameters`` is given as a flat
    vector (presumably produced by an evolutionary optimizer such as the
    module-level ``cma`` import -- confirm with callers), it is unpacked
    into the linear layer's weight and bias, which are then frozen.
    """
    def __init__(self,encoder, parameters=None, num_actions=3, **kwargs):
        # encoder: module exposing ``embed_len`` and mapping observations to
        # embeddings.  parameters: optional flat 1-D array laid out as
        # [weight values ..., bias values].  Extra kwargs are ignored.
        super(ControlModel,self).__init__()
        self.encoder = encoder
        self.num_actions = num_actions
        self.fc = nn.Linear(in_features=encoder.embed_len,out_features=num_actions)
        if parameters is not None:
            weight_len = np.prod(self.fc.weight.size())
            weight = parameters[:weight_len]
            # np.resize repeats/truncates the data to fit the layer shape.
            weight = np.resize(weight,self.fc.weight.size())
            bias = parameters[weight_len:]
            bias = np.resize(bias,self.fc.bias.size())
            # Disable gradients *before* the in-place set_() so autograd
            # does not object to mutating a leaf tensor.
            self.fc.weight.requires_grad_(False)
            self.fc.bias.requires_grad_(False)
            self.fc.weight.set_(torch.from_numpy(weight).float())
            self.fc.bias.set_(torch.from_numpy(bias).float())
    def postprocess_output(self, raw_output):
        """Select the index of the highest-scoring action."""
        action = torch.argmax(raw_output)
        return action
    def forward(self,x):
        """Return a discrete action index for observation ``x``."""
        z = self.encoder(x)
        # detach: no gradients ever flow back into the encoder.
        z = z.detach()
        raw_output = self.fc(z)
        action = self.postprocess_output(raw_output)
        return action
|
[
"ejracah@gmail.com"
] |
ejracah@gmail.com
|
e042cf44f85853e4a46cd97a8a9ab2065be61038
|
c366f5d83f0b958c405de688af5de3c29d46bca9
|
/src/raid_coordinator/_version.py
|
5f5a7e95ae1f4a6a2365de81e29b60e9f25a849c
|
[
"LicenseRef-scancode-public-domain",
"Unlicense"
] |
permissive
|
spoofproof/discord-raid-coordinator
|
16b9168e5dccf73dd08bcbcddafbbe94ff504f21
|
18e31e31dbdf84373146d8d90247f50eaf5e793a
|
refs/heads/master
| 2020-03-23T23:00:24.555648
| 2018-03-04T20:51:41
| 2018-03-04T20:51:41
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 18,459
|
py
|
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.
# This file is released into the public domain. Generated by
# versioneer-0.18 (https://github.com/warner/python-versioneer)
"""Git implementation of _version.py."""
import errno
import os
import re
import subprocess
import sys
def get_keywords():
    """Return the raw git-archive keyword values for this source tree.

    The "$Format:...$" placeholders are substituted by git during
    ``git archive``.  setup.py/versioneer.py greps for the variable names,
    so each assignment below must stay on its own line.
    """
    git_refnames = "$Format:%d$"
    git_full = "$Format:%H$"
    git_date = "$Format:%ci$"
    return {"refnames": git_refnames, "full": git_full, "date": git_date}
class VersioneerConfig:
    """Container for Versioneer configuration parameters.

    Deliberately empty: instances are populated attribute-by-attribute in
    get_config().
    """
def get_config():
    """Create, populate and return the VersioneerConfig() object.

    The values below are filled in when 'setup.py versioneer' generates
    _version.py for this project.
    """
    cfg = VersioneerConfig()
    for attr, value in (
        ("VCS", "git"),
        ("style", "pep440"),
        ("tag_prefix", ""),
        ("parentdir_prefix", "''"),
        ("versionfile_source", "src/raid_coordinator/_version.py"),
        ("verbose", False),
    ):
        setattr(cfg, attr, value)
    return cfg
class NotThisMethod(Exception):
    """Raised when one version-discovery strategy does not apply.

    Callers try the available strategies in turn and fall through to the
    next one on this exception.
    """
# Template registry used by versioneer's setup-time machinery; left empty in
# the generated module itself.
LONG_VERSION_PY = {}
# HANDLERS[vcs][method] -> callable; populated by @register_vcs_handler.
HANDLERS = {}
def register_vcs_handler(vcs, method):  # decorator
    """Return a decorator that registers a function in HANDLERS[vcs][method]."""
    def decorate(func):
        """Record *func* as the handler and hand it back unchanged."""
        HANDLERS.setdefault(vcs, {})[method] = func
        return func
    return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
                env=None):
    """Run the first of *commands* that exists, passing it *args*.

    Returns ``(stdout, returncode)``.  ``stdout`` is None when the command
    could not be run at all; ``returncode`` is None when no candidate was
    found or a non-ENOENT OS error occurred.
    """
    assert isinstance(commands, list)
    p = None
    for c in commands:
        try:
            dispcmd = str([c] + args)
            # remember shell=False, so use git.cmd on windows, not just git
            p = subprocess.Popen([c] + args, cwd=cwd, env=env,
                                 stdout=subprocess.PIPE,
                                 stderr=(subprocess.PIPE if hide_stderr
                                         else None))
            break
        except EnvironmentError:
            e = sys.exc_info()[1]
            # ENOENT: this candidate binary doesn't exist; try the next one.
            if e.errno == errno.ENOENT:
                continue
            if verbose:
                print("unable to run %s" % dispcmd)
                print(e)
            return None, None
    else:
        # for/else: loop exhausted without a successful Popen.
        if verbose:
            print("unable to find command, tried %s" % (commands,))
        return None, None
    stdout = p.communicate()[0].strip()
    # Python 3 returns bytes from a PIPE; normalize to str.
    if sys.version_info[0] >= 3:
        stdout = stdout.decode()
    if p.returncode != 0:
        if verbose:
            print("unable to run %s (error)" % dispcmd)
            print("stdout was %s" % stdout)
        return None, p.returncode
    return stdout, p.returncode
def versions_from_parentdir(parentdir_prefix, root, verbose):
    """Try to determine the version from the parent directory name.

    Source tarballs conventionally unpack into a directory that includes
    both the project name and a version string.  The directory *root* and
    up to two of its ancestors are checked for *parentdir_prefix*; raises
    NotThisMethod when none match.
    """
    rootdirs = []
    for _ in range(3):
        dirname = os.path.basename(root)
        if dirname.startswith(parentdir_prefix):
            return {
                "version": dirname[len(parentdir_prefix):],
                "full-revisionid": None,
                "dirty": False,
                "error": None,
                "date": None,
            }
        rootdirs.append(root)
        root = os.path.dirname(root)  # up a level
    if verbose:
        print("Tried directories %s but none started with prefix %s" %
              (str(rootdirs), parentdir_prefix))
    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
    """Extract the git-archive keyword values from *versionfile_abs*.

    The file is parsed textually with a regexp rather than imported, so
    setup.py can use it without importing _version.py.  Returns a dict
    containing whichever of "refnames", "full" and "date" were found; an
    unreadable file yields an empty dict.
    """
    markers = (
        ("refnames", "git_refnames ="),
        ("full", "git_full ="),
        ("date", "git_date ="),
    )
    keywords = {}
    try:
        with open(versionfile_abs, "r") as fobj:
            for line in fobj:
                for name, prefix in markers:
                    if line.strip().startswith(prefix):
                        mo = re.search(r'=\s*"(.*)"', line)
                        if mo:
                            keywords[name] = mo.group(1)
    except EnvironmentError:
        pass
    return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
    """Derive a version dict from expanded git-archive keywords.

    *keywords* comes from get_keywords()/git_get_keywords().  Raises
    NotThisMethod when the keywords are missing or still unexpanded (i.e.
    we are not inside a git-archive tarball).
    """
    if not keywords:
        raise NotThisMethod("no keywords at all, weird")
    date = keywords.get("date")
    if date is not None:
        # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
        # datestamp. However we prefer "%ci" (which expands to an "ISO-8601
        # -like" string, which we must then edit to make compliant), because
        # it's been around since git-1.5.3, and it's too difficult to
        # discover which version we're using, or to work around using an
        # older one.
        date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
    refnames = keywords["refnames"].strip()
    if refnames.startswith("$Format"):
        if verbose:
            print("keywords are unexpanded, not using")
        raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
    refs = set([r.strip() for r in refnames.strip("()").split(",")])
    # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
    # just "foo-1.0". If we see a "tag: " prefix, prefer those.
    TAG = "tag: "
    tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
    if not tags:
        # Either we're using git < 1.8.3, or there really are no tags. We use
        # a heuristic: assume all version tags have a digit. The old git %d
        # expansion behaves like git log --decorate=short and strips out the
        # refs/heads/ and refs/tags/ prefixes that would let us distinguish
        # between branches and tags. By ignoring refnames without digits, we
        # filter out many common branch names like "release" and
        # "stabilization", as well as "HEAD" and "master".
        tags = set([r for r in refs if re.search(r'\d', r)])
        if verbose:
            print("discarding '%s', no digits" % ",".join(refs - tags))
    if verbose:
        print("likely tags: %s" % ",".join(sorted(tags)))
    for ref in sorted(tags):
        # sorting will prefer e.g. "2.0" over "2.0rc1"
        if ref.startswith(tag_prefix):
            r = ref[len(tag_prefix):]
            if verbose:
                print("picking %s" % r)
            return {"version": r,
                    "full-revisionid": keywords["full"].strip(),
                    "dirty": False, "error": None,
                    "date": date}
    # no suitable tags, so version is "0+unknown", but full hex is still there
    if verbose:
        print("no suitable tags, using unknown + full revision id")
    return {"version": "0+unknown",
            "full-revisionid": keywords["full"].strip(),
            "dirty": False, "error": "no suitable tags", "date": None}
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
    """Get version from 'git describe' in the root of the source tree.

    This only gets called if the git-archive 'subst' keywords were *not*
    expanded, and _version.py hasn't already been rewritten with a short
    version string, meaning we're inside a checked out source tree.

    Returns a "pieces" dict with keys: long, short, error, dirty,
    closest-tag (None when untagged), distance, date.  Raises NotThisMethod
    when *root* is not under git control or git cannot be queried.
    """
    GITS = ["git"]
    if sys.platform == "win32":
        GITS = ["git.cmd", "git.exe"]
    out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
                          hide_stderr=True)
    if rc != 0:
        if verbose:
            print("Directory %s not under git control" % root)
        raise NotThisMethod("'git rev-parse --git-dir' returned error")
    # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
    # if there isn't one, this yields HEX[-dirty] (no NUM)
    describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
                                          "--always", "--long",
                                          "--match", "%s*" % tag_prefix],
                                   cwd=root)
    # --long was added in git-1.5.5
    if describe_out is None:
        raise NotThisMethod("'git describe' failed")
    describe_out = describe_out.strip()
    full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
    if full_out is None:
        raise NotThisMethod("'git rev-parse' failed")
    full_out = full_out.strip()
    pieces = {}
    pieces["long"] = full_out
    pieces["short"] = full_out[:7]  # maybe improved later
    pieces["error"] = None
    # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
    # TAG might have hyphens.
    git_describe = describe_out
    # look for -dirty suffix
    dirty = git_describe.endswith("-dirty")
    pieces["dirty"] = dirty
    if dirty:
        git_describe = git_describe[:git_describe.rindex("-dirty")]
    # now we have TAG-NUM-gHEX or HEX
    if "-" in git_describe:
        # TAG-NUM-gHEX
        mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
        if not mo:
            # unparseable. Maybe git-describe is misbehaving?
            pieces["error"] = ("unable to parse git-describe output: '%s'"
                               % describe_out)
            return pieces
        # tag
        full_tag = mo.group(1)
        if not full_tag.startswith(tag_prefix):
            if verbose:
                fmt = "tag '%s' doesn't start with prefix '%s'"
                print(fmt % (full_tag, tag_prefix))
            pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
                               % (full_tag, tag_prefix))
            return pieces
        pieces["closest-tag"] = full_tag[len(tag_prefix):]
        # distance: number of commits since tag
        pieces["distance"] = int(mo.group(2))
        # commit: short hex revision ID
        pieces["short"] = mo.group(3)
    else:
        # HEX: no tags
        pieces["closest-tag"] = None
        count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
                                    cwd=root)
        pieces["distance"] = int(count_out)  # total number of commits
    # commit date: see ISO-8601 comment in git_versions_from_keywords()
    # NOTE(review): run_command may return (None, rc) if git fails here, in
    # which case .strip() would raise AttributeError — confirm callers never
    # reach this with a broken git.
    date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
                       cwd=root)[0].strip()
    pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
    return pieces
def plus_or_dot(pieces):
    """Return "." when the closest tag already contains a "+", else "+"."""
    return "." if "+" in pieces.get("closest-tag", "") else "+"


def render_pep440(pieces):
    """Build a PEP 440 string: TAG[+DISTANCE.gHEX[.dirty]].

    A clean, zero-distance tagged build renders as just TAG; dirtying a
    tagged build yields TAG+0.gHEX.dirty.  With no tag at all the fallback
    is 0+untagged.DISTANCE.gHEX[.dirty].
    """
    tag = pieces["closest-tag"]
    if not tag:
        # untagged tree: synthesize a local-version-only identifier
        rendered = "0+untagged.%d.g%s" % (pieces["distance"],
                                          pieces["short"])
        if pieces["dirty"]:
            rendered += ".dirty"
        return rendered
    rendered = tag
    if pieces["distance"] or pieces["dirty"]:
        rendered += plus_or_dot(pieces)
        rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
        if pieces["dirty"]:
            rendered += ".dirty"
    return rendered
def render_pep440_pre(pieces):
    """Render TAG[.post.devDISTANCE] (never carries a -dirty marker).

    With no tag available the fallback is 0.post.devDISTANCE.
    """
    tag = pieces["closest-tag"]
    if not tag:
        return "0.post.dev%d" % pieces["distance"]
    if pieces["distance"]:
        return tag + ".post.dev%d" % pieces["distance"]
    return tag
def render_pep440_post(pieces):
    """Render TAG[.postDISTANCE[.dev0]+gHEX].

    ".dev0" flags a dirty tree; note it sorts *before* the corresponding
    clean build, but releasing dirty trees is discouraged anyway.  With no
    tag the fallback is 0.postDISTANCE[.dev0]+gHEX.
    """
    tag = pieces["closest-tag"]
    if tag:
        rendered = tag
        if pieces["distance"] or pieces["dirty"]:
            rendered += ".post%d" % pieces["distance"]
            if pieces["dirty"]:
                rendered += ".dev0"
            rendered += plus_or_dot(pieces)
            rendered += "g%s" % pieces["short"]
        return rendered
    rendered = "0.post%d" % pieces["distance"]
    if pieces["dirty"]:
        rendered += ".dev0"
    return rendered + "+g%s" % pieces["short"]
def render_pep440_old(pieces):
    """Render TAG[.postDISTANCE[.dev0]] (".dev0" marks a dirty tree).

    With no tag available the fallback is 0.postDISTANCE[.dev0].
    """
    tag = pieces["closest-tag"]
    if tag:
        rendered = tag
        if pieces["distance"] or pieces["dirty"]:
            rendered += ".post%d" % pieces["distance"]
            if pieces["dirty"]:
                rendered += ".dev0"
        return rendered
    rendered = "0.post%d" % pieces["distance"]
    if pieces["dirty"]:
        rendered += ".dev0"
    return rendered
def render_git_describe(pieces):
    """Render TAG[-DISTANCE-gHEX][-dirty].

    Mirrors 'git describe --tags --dirty --always'; with no tag the short
    hash is used alone (note: no 'g' prefix in that case).
    """
    if pieces["closest-tag"]:
        bits = [pieces["closest-tag"]]
        if pieces["distance"]:
            bits.append("%d" % pieces["distance"])
            bits.append("g%s" % pieces["short"])
        rendered = "-".join(bits)
    else:
        rendered = pieces["short"]
    if pieces["dirty"]:
        rendered += "-dirty"
    return rendered
def render_git_describe_long(pieces):
"""TAG-DISTANCE-gHEX[-dirty].
Like 'git describe --tags --dirty --always -long'.
The distance/hash is unconditional.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render(pieces, style):
"""Render the given version pieces into the requested style."""
if pieces["error"]:
return {"version": "unknown",
"full-revisionid": pieces.get("long"),
"dirty": None,
"error": pieces["error"],
"date": None}
if not style or style == "default":
style = "pep440" # the default
if style == "pep440":
rendered = render_pep440(pieces)
elif style == "pep440-pre":
rendered = render_pep440_pre(pieces)
elif style == "pep440-post":
rendered = render_pep440_post(pieces)
elif style == "pep440-old":
rendered = render_pep440_old(pieces)
elif style == "git-describe":
rendered = render_git_describe(pieces)
elif style == "git-describe-long":
rendered = render_git_describe_long(pieces)
else:
raise ValueError("unknown style '%s'" % style)
return {"version": rendered, "full-revisionid": pieces["long"],
"dirty": pieces["dirty"], "error": None,
"date": pieces.get("date")}
def get_versions():
"""Get version information or return default if unable to do so."""
# I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
# __file__, we can work backwards from there to the root. Some
# py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
# case we can only use expanded keywords.
cfg = get_config()
verbose = cfg.verbose
try:
return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
verbose)
except NotThisMethod:
pass
try:
root = os.path.realpath(__file__)
# versionfile_source is the relative path from the top of the source
# tree (where the .git directory might live) to this file. Invert
# this to find the root from __file__.
for i in cfg.versionfile_source.split('/'):
root = os.path.dirname(root)
except NameError:
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to find root of source tree",
"date": None}
try:
pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
return render(pieces, cfg.style)
except NotThisMethod:
pass
try:
if cfg.parentdir_prefix:
return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
except NotThisMethod:
pass
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to compute version", "date": None}
|
[
"github@daniel.desousa.cc"
] |
github@daniel.desousa.cc
|
3c1e628805d5f62f8306bf0d7df527a0e6a3d473
|
8420ff28c4cb72085e87e23112de63f114af64b5
|
/reverse_map.py
|
52ef20462dc870758315b86ca0b68f279bbfab42
|
[] |
no_license
|
BnetButter/rolla-quest-3d
|
783bce935fa04808918a2ba11e63a607a939489e
|
6f5bc043dfb94bcc311271660abc2badc463f8bb
|
refs/heads/master
| 2023-07-03T12:28:42.577164
| 2021-08-04T00:51:13
| 2021-08-04T00:51:13
| 392,497,508
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 259
|
py
|
lines = []
with open("maps/mst_campus.txt", "r") as fp:
data = fp.read().split("\n")
for string in data:
string = string[::-1]
lines.append(string)
with open("maps/reversed_mst_campus.txt", "w") as fp:
fp.write("\n".join(lines))
|
[
"zlnh4@umsystem.edu"
] |
zlnh4@umsystem.edu
|
5d0bb8fe302eb7ee3a4f6a38835b1ead2cbd977e
|
026261a974e20bedd33b40b97a52300b928936f0
|
/typeidea/typeidea/custom_site.py
|
74011a69a2c6727e98ab040c4ebd3f7652f79d4f
|
[] |
no_license
|
Desperoid/typeidea
|
3acaa5ad31360ef579ce6c31d6abe87787be1b71
|
e0d427002625b2200ac69c7643add511e4ec4eec
|
refs/heads/master
| 2020-05-28T03:14:13.661456
| 2019-06-18T16:31:10
| 2019-06-18T16:31:10
| 188,864,250
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 217
|
py
|
from django.contrib.admin import AdminSite
class CustomSite(AdminSite):
site_header = 'Typeidea'
site_title = 'Typeidea后台管理系统'
index_title ='首页'
custom_site = CustomSite(name='cus_admin')
|
[
"623016526@qq.com"
] |
623016526@qq.com
|
e087d3ba04828b183b3d9de2207edc6af01079bb
|
35b07f594de1bc52168ed5b54360150acdd1d9e6
|
/sudokuVisualController/SudokuVisualController.py
|
048d0cfea36f38f0d1e11f2ca13dbe71bc089f84
|
[] |
no_license
|
Admi99/SudokuSolver
|
3ba1c1623601c77f0ea1771aa47291f5e7ed0e2c
|
8bb3025d4a83e8a61f0cc6e03483d4ff01bc8f98
|
refs/heads/master
| 2022-04-07T12:57:03.774346
| 2020-03-11T14:37:14
| 2020-03-11T14:37:14
| 242,588,475
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,054
|
py
|
from sudokuSolver.SudokuOperationProvider import SudokuOperationProvider
from sudokuSolver.SudokuDifficultyEnum import SudokuDifficultyEnum
from graphics import *
from sudokuVisualController.Button import Button
def main():
difficulty = [SudokuDifficultyEnum.pathetic,
SudokuDifficultyEnum.easy,
SudokuDifficultyEnum.medium,
SudokuDifficultyEnum.hard,
SudokuDifficultyEnum.evil]
sudoku = SudokuOperationProvider()
sudoku.generateSudoku(difficulty[0], 100, 2)
gridLenght = 9
win = GraphWin("Sudoku", 500, 550)
win.setBackground("azure1")
btnCheck = Button(win, Point(67, 515), 70, 40, "Check")
btnReset = Button(win, Point(150, 515), 70, 40, "Reset")
btnGen = Button(win, Point(234, 515), 70, 40, "Generate")
btnDiff = Button(win, Point(323, 515), 80, 40, "Difficulty")
btnCheck.activate()
btnReset.activate()
btnGen.activate()
btnDiff.activate()
diffText = Text(Point(415, 515), difficulty[0].name)
diffText.setStyle("bold")
diffText.draw(win)
setHorLinesForGrid(win)
setVerLinesForGrid(win)
entryText = []
labelText = []
generateGraphics(win, sudoku, entryText,labelText, gridLenght)
difficultyIndex = 0
while True:
sudokuOrigin = sudoku.getSudokuGridDeepCopy()
clickPoint = win.getMouse()
if btnCheck.clicked(clickPoint):
if not fillSudokuFromInput(sudokuOrigin, entryText):
continue
if sudoku.checkIfSolved(sudokuOrigin):
win.setBackground("Green")
else:
win.setBackground("Red")
time.sleep(1)
win.setBackground("azure1")
if btnReset.clicked(clickPoint):
cleareEntryPoints(entryText)
win.setBackground("azure1")
if btnGen.clicked(clickPoint):
sudoku.generateSudoku(difficulty[difficultyIndex], 100, 2)
clearChangingGraphics(win, labelText, entryText)
win.setBackground("azure1")
generateGraphics(win, sudoku, entryText,labelText, gridLenght)
if btnDiff.clicked(clickPoint):
if difficultyIndex <= 3:
difficultyIndex += 1
else:
difficultyIndex = 0
diffText.setText(difficulty[difficultyIndex].name)
win.getMouse()
win.close()
def generateGraphics(win, sudoku, entryText,labelText, gridLength):
counterEntry = 0
counterLabel = 0
for x in range(1, gridLength + 1):
for y in range(1, gridLength + 1):
if sudoku.getSudokuGrid()[x - 1][y - 1] != 0:
labelText.append(Text(Point(y * 50, x * 50), sudoku.getSudokuGrid()[x - 1][y - 1]))
labelText[counterLabel].setTextColor("Blue")
labelText[counterLabel].setStyle("bold")
labelText[counterLabel].draw(win)
counterLabel += 1
else:
entryText.append(Entry(Point(y * 50, x * 50), 1))
entryText[counterEntry].draw(win)
counterEntry += 1
def clearChangingGraphics(win, labelText, entryText):
for item in labelText[:]:
item.undraw()
for item in entryText[:]:
item.undraw()
labelText.clear()
entryText.clear()
win.update()
def fillSudokuFromInput(sudoku, entryPoint):
counter = 0
for i in range(0, 9):
for j in range(0, 9):
if sudoku[i][j] == 0:
cislo = entryPoint[counter].getText()
if cislo == "" or not limitNumberEntryCount(cislo, 1):
return False
sudoku[i][j] = int(entryPoint[counter].getText())
counter += 1
return True
def limitNumberEntryCount(number, n):
return len(number) == n;
def cleareEntryPoints(entryPoint):
for i in range(0, len(entryPoint)):
entryPoint[i].setText("")
def setHorLinesForGrid(win):
line1Hor = Line(Point(30, 25), Point(468, 25))
line2Hor = Line(Point(30, 175), Point(468, 175))
line3Hor = Line(Point(30, 325), Point(468, 325))
line4Hor = Line(Point(30, 475), Point(468, 475))
line5Hor = Line(Point(30, 75), Point(468, 75))
line6Hor = Line(Point(30, 125), Point(468, 125))
line7Hor = Line(Point(30, 225), Point(468, 225))
line8Hor = Line(Point(30, 275), Point(468, 275))
line9Hor = Line(Point(30, 375), Point(468, 375))
line10Hor = Line(Point(30, 425), Point(468, 425))
line1Hor.setWidth(5)
line2Hor.setWidth(5)
line3Hor.setWidth(5)
line4Hor.setWidth(5)
line5Hor.setWidth(3)
line6Hor.setWidth(3)
line7Hor.setWidth(3)
line8Hor.setWidth(3)
line9Hor.setWidth(3)
line10Hor.setWidth(3)
line1Hor.draw(win)
line2Hor.draw(win)
line3Hor.draw(win)
line4Hor.draw(win)
line5Hor.draw(win)
line6Hor.draw(win)
line7Hor.draw(win)
line9Hor.draw(win)
line8Hor.draw(win)
line10Hor.draw(win)
def setVerLinesForGrid(win):
line1Ver = Line(Point(32, 25), Point(32, 475))
line2Ver = Line(Point(175, 25), Point(175, 475))
line3Ver = Line(Point(325, 25), Point(325, 475))
line4Ver = Line(Point(465, 25), Point(465, 475))
line5Ver = Line(Point(75, 25), Point(75, 475))
line6Ver = Line(Point(122, 25), Point(122, 475))
line7Ver = Line(Point(223, 25), Point(223, 475))
line8Ver = Line(Point(275, 25), Point(275, 475))
line9Ver = Line(Point(375, 25), Point(375, 475))
line10Ver = Line(Point(426, 25), Point(426, 475))
line1Ver.setWidth(5)
line2Ver.setWidth(5)
line3Ver.setWidth(5)
line4Ver.setWidth(5)
line5Ver.setWidth(3)
line6Ver.setWidth(3)
line7Ver.setWidth(3)
line8Ver.setWidth(3)
line9Ver.setWidth(3)
line10Ver.setWidth(3)
line1Ver.draw(win)
line2Ver.draw(win)
line3Ver.draw(win)
line4Ver.draw(win)
line5Ver.draw(win)
line6Ver.draw(win)
line7Ver.draw(win)
line8Ver.draw(win)
line9Ver.draw(win)
line10Ver.draw(win)
main()
|
[
"a_michalek@utb.cz"
] |
a_michalek@utb.cz
|
271c63e30c938dbc27f78a8307a0c62af1b5bd44
|
e152344168d3ff2765696354d020a41ae00c8b17
|
/Tree/check_symmetric_tree.py
|
25a6e423a8abeaae752c0d33b7678794d03b2c32
|
[] |
no_license
|
Sasha2508/Python-Codes
|
298e176aee2ff4a737676b62e02bdf6df9063ced
|
d0da8ecd6d6045be29fe449011e8c69c2882cac3
|
refs/heads/master
| 2023-04-29T23:28:24.383711
| 2021-05-10T18:00:12
| 2021-05-10T18:00:12
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,235
|
py
|
# return true/false denoting whether the tree is Symmetric or not
def condition(root1, root2):
if root1 == root2 is None:
return True
if root1 is not None and root2 is not None:
if root1.data == root2.data:
return (condition(root1.left, root2.right) and condition(root1.right, root2.left))
return False
def isSymmetric(root):
return condition(root, root)
# {
# Driver Code Starts
# Initial Template for Python 3
# Contributed by Sudarshan Sharma
from collections import deque
# Tree Node
class Node:
def __init__(self, val):
self.right = None
self.data = val
self.left = None
# Function to Build Tree
def buildTree(s):
# Corner Case
if(len(s)==0 or s[0]=="N"):
return None
# Creating list of strings from input
# string after spliting by space
ip =list(map(str ,s.split()))
# Create the root of the tree
root =Node(int(ip[0]))
size =0
q=deque()
# Push the root to the queue
q.append(root)
size=size +1
# Starting from the second element
i=1
while(size>0 and i<len(ip)):
# Get and remove the front of the queue
currNode=q[0 ]
q.popleft()
size=size -1
# Get the current node's value from the string
currVal=ip[ i ]
# If the left child is not null
if(currVal!="N ") :
# Create the left child for the current node
currNode.left=Node (int(currVal))
# Push it to the queue
q.append(currNode.left)
size=size +1
# For the right child
i=i+1
if(i>= len(ip)):
break
currVal=ip[ i ]
# If the right child is not null
if(currVal!="N ") :
# Create the right child for the current node
currNode.right=Node(int(currVal))
# Push it to the queue
q.append(currNode.right)
size=size +1
i=i+1
return root
if __name__=="_ _m ain__":
t=int ( input())
for _ in range(0,t):
s=input()
root=buildTree(s)
if isSymmetric(root):
print("True")
else:
print("False")
# } Driver Code Ends
|
[
"tuhinmitra190221@gmail.com"
] |
tuhinmitra190221@gmail.com
|
fcfd4720b7337fbb7693d474198644705bf10bd6
|
77311ad9622a7d8b88707d7cee3f44de7c8860cb
|
/res/scripts/client/gui/shared/gui_items/tankman.py
|
9dea8175e40fc303f9677f51081a9961da83f88f
|
[] |
no_license
|
webiumsk/WOT-0.9.14-CT
|
9b193191505a4560df4e872e022eebf59308057e
|
cfe0b03e511d02c36ce185f308eb48f13ecc05ca
|
refs/heads/master
| 2021-01-10T02:14:10.830715
| 2016-02-14T11:59:59
| 2016-02-14T11:59:59
| 51,606,676
| 0
| 0
| null | null | null | null |
WINDOWS-1250
|
Python
| false
| false
| 16,364
|
py
|
# 2016.02.14 12:41:32 Střední Evropa (běžný čas)
# Embedded file name: scripts/client/gui/shared/gui_items/Tankman.py
from helpers import i18n
from items import tankmen, vehicles, ITEM_TYPE_NAMES
from gui import nationCompareByIndex
from gui.shared.utils.functions import getShortDescr
from gui.shared.gui_items import HasStrCD, GUIItem, ItemsCollection
class TankmenCollection(ItemsCollection):
def _filterItem(self, item, nation = None, role = None, isInTank = None):
if role is not None and item.descriptor.role != role:
return False
elif isInTank is not None and item.isInTank != isInTank:
return False
else:
return ItemsCollection._filterItem(self, item, nation)
class TankmenComparator(object):
def __init__(self, vehicleGetter = None):
self._vehicleGetter = vehicleGetter
def __call__(self, first, second):
if first is None or second is None:
return 1
else:
res = nationCompareByIndex(first.nationID, second.nationID)
if res:
return res
if first.isInTank and not second.isInTank:
return -1
if not first.isInTank and second.isInTank:
return 1
if first.isInTank and second.isInTank:
if self._vehicleGetter is not None:
tman1vehicle = self._vehicleGetter(first.vehicleInvID)
tman2vehicle = self._vehicleGetter(second.vehicleInvID)
if tman1vehicle is not None and tman2vehicle is not None:
res = tman1vehicle.__cmp__(tman2vehicle)
if res:
return res
TANKMEN_ROLES_ORDER = Tankman.TANKMEN_ROLES_ORDER
if TANKMEN_ROLES_ORDER[first.descriptor.role] < TANKMEN_ROLES_ORDER[second.descriptor.role]:
return -1
if TANKMEN_ROLES_ORDER[first.descriptor.role] > TANKMEN_ROLES_ORDER[second.descriptor.role]:
return 1
return cmp(first.lastUserName, second.lastUserName) or 1
class Tankman(GUIItem, HasStrCD):
class ROLES:
COMMANDER = 'commander'
RADIOMAN = 'radioman'
DRIVER = 'driver'
GUNNER = 'gunner'
LOADER = 'loader'
TANKMEN_ROLES_ORDER = {ROLES.COMMANDER: 0,
ROLES.GUNNER: 1,
ROLES.DRIVER: 2,
ROLES.RADIOMAN: 3,
ROLES.LOADER: 4}
def __init__(self, strCompactDescr, inventoryID = -1, vehicle = None, proxy = None):
GUIItem.__init__(self, proxy)
HasStrCD.__init__(self, strCompactDescr)
self.__descriptor = None
self.invID = inventoryID
self.nationID = self.descriptor.nationID
self.itemTypeID = vehicles._TANKMAN
self.itemTypeName = ITEM_TYPE_NAMES[self.itemTypeID]
self.combinedRoles = (self.descriptor.role,)
self.vehicleNativeDescr = vehicles.VehicleDescr(typeID=(self.nationID, self.descriptor.vehicleTypeID))
self.vehicleInvID = -1
self.vehicleDescr = None
self.vehicleBonuses = dict()
self.vehicleSlotIdx = -1
if vehicle is not None:
self.vehicleInvID = vehicle.invID
self.vehicleDescr = vehicle.descriptor
self.vehicleBonuses = dict(vehicle.bonuses)
self.vehicleSlotIdx = vehicle.crewIndices.get(inventoryID, -1)
crewRoles = self.vehicleDescr.type.crewRoles
if -1 < self.vehicleSlotIdx < len(crewRoles):
self.combinedRoles = crewRoles[self.vehicleSlotIdx]
self.skills = self._buildSkills(proxy)
self.skillsMap = self._buildSkillsMap()
if proxy is not None:
pass
self.__cmp__ = TankmenComparator()
return
def _buildSkills(self, proxy):
return [ TankmanSkill(skill, self, proxy) for skill in self.descriptor.skills ]
def _buildSkillsMap(self):
return dict([ (skill.name, skill) for skill in self.skills ])
@property
def realRoleLevel(self):
effRoleLevel = self.efficiencyRoleLevel
penalty = effRoleLevel - self.roleLevel
commBonus = self.vehicleBonuses.get('commander', 0)
if self.descriptor.role == self.ROLES.COMMANDER:
commBonus = 0
brothersBonus = self.vehicleBonuses.get('brotherhood', 0)
eqsBonus = self.vehicleBonuses.get('equipment', 0)
optDevsBonus = self.vehicleBonuses.get('optDevices', 0)
realRoleLevel = effRoleLevel + commBonus + brothersBonus + eqsBonus + optDevsBonus
return (realRoleLevel, (commBonus,
brothersBonus,
eqsBonus,
optDevsBonus,
penalty))
@property
def descriptor(self):
if self.__descriptor is None or self.__descriptor.dossierCompactDescr != self.strCompactDescr:
self.__descriptor = tankmen.TankmanDescr(compactDescr=self.strCompactDescr)
return self.__descriptor
@property
def isInTank(self):
return self.vehicleDescr is not None
@property
def roleLevel(self):
return self.descriptor.roleLevel
@property
def isFemale(self):
return self.descriptor.isFemale
@property
def icon(self):
return getIconName(self.nationID, self.descriptor.iconID)
@property
def iconRank(self):
return getRankIconName(self.nationID, self.descriptor.rankID)
@property
def iconRole(self):
return getRoleIconName(self.descriptor.role)
@property
def firstUserName(self):
return getFirstUserName(self.nationID, self.descriptor.firstNameID)
@property
def lastUserName(self):
return getLastUserName(self.nationID, self.descriptor.lastNameID)
@property
def fullUserName(self):
return getFullUserName(self.nationID, self.descriptor.firstNameID, self.descriptor.lastNameID)
@property
def rankUserName(self):
return getRankUserName(self.nationID, self.descriptor.rankID)
@property
def roleUserName(self):
return getRoleUserName(self.descriptor.role)
@property
def hasNewSkill(self):
return self.roleLevel == tankmen.MAX_SKILL_LEVEL and (self.descriptor.lastSkillLevel == tankmen.MAX_SKILL_LEVEL or not len(self.skills))
@property
def newSkillCount(self):
if self.hasNewSkill:
tmanDescr = tankmen.TankmanDescr(self.strCD)
i = 0
skills_list = list(tankmen.ACTIVE_SKILLS)
while tmanDescr.roleLevel == 100 and (tmanDescr.lastSkillLevel == 100 or len(tmanDescr.skills) == 0) and len(skills_list) > 0:
skillname = skills_list.pop()
if skillname not in tmanDescr.skills:
tmanDescr.addSkill(skillname)
i += 1
return (i, tmanDescr.lastSkillLevel)
return (0, 0)
@property
def efficiencyRoleLevel(self):
factor, addition = (1, 0)
if self.isInTank:
factor, addition = self.descriptor.efficiencyOnVehicle(self.vehicleDescr)
return round(self.roleLevel * factor)
def getNextLevelXpCost(self):
if self.roleLevel != tankmen.MAX_SKILL_LEVEL or not self.hasNewSkill:
descr = self.descriptor
lastSkillNumValue = descr.lastSkillNumber - descr.freeSkillsNumber
if lastSkillNumValue == 0 or self.roleLevel != tankmen.MAX_SKILL_LEVEL:
nextSkillLevel = self.roleLevel
else:
nextSkillLevel = descr.lastSkillLevel
skillSeqNum = 0
if self.roleLevel == tankmen.MAX_SKILL_LEVEL:
skillSeqNum = lastSkillNumValue
return descr.levelUpXpCost(nextSkillLevel, skillSeqNum) - descr.freeXP
return 0
def getNextSkillXpCost(self):
if self.roleLevel != tankmen.MAX_SKILL_LEVEL or not self.hasNewSkill:
descr = self.descriptor
lastSkillNumValue = descr.lastSkillNumber - descr.freeSkillsNumber
if lastSkillNumValue == 0 or self.roleLevel != tankmen.MAX_SKILL_LEVEL:
nextSkillLevel = self.roleLevel
else:
nextSkillLevel = descr.lastSkillLevel
skillSeqNum = 0
if self.roleLevel == tankmen.MAX_SKILL_LEVEL:
skillSeqNum = lastSkillNumValue
needXp = 0
for level in range(nextSkillLevel, tankmen.MAX_SKILL_LEVEL):
needXp += descr.levelUpXpCost(level, skillSeqNum)
return needXp - descr.freeXP
return 0
@property
def vehicleNativeType(self):
for tag in vehicles.VEHICLE_CLASS_TAGS.intersection(self.vehicleNativeDescr.type.tags):
return tag
def __cmp__(self, other):
if other is None:
return -1
res = nationCompareByIndex(self.nationID, other.nationID)
if res:
return res
elif self.isInTank and not other.isInTank:
return -1
elif not self.isInTank and other.isInTank:
return 1
if self.isInTank and other.isInTank:
if self.vehicleInvID != other.vehicleInvID:
return -1
res = self.TANKMEN_ROLES_ORDER[self.descriptor.role] - self.TANKMEN_ROLES_ORDER[other.descriptor.role]
if res:
return res
if self.lastUserName < other.lastUserName:
return -1
elif self.lastUserName > other.lastUserName:
return 1
else:
return 0
def __eq__(self, other):
if other is None or not isinstance(other, Tankman):
return False
else:
return self.invID == other.invID
def __repr__(self):
return 'Tankman<id:%d, nation:%d, vehicleID:%d>' % (self.invID, self.nationID, self.vehicleInvID)
class TankmanSkill(GUIItem):
def __init__(self, skillName, tankman = None, proxy = None):
super(TankmanSkill, self).__init__(proxy)
self.name = skillName
self.isPerk = self.name in tankmen.PERKS
self.level = 0
self.type = self.__getSkillType()
self.roleType = None
self.isActive = False
self.isEnable = False
self.isFemale = False
self.isPermanent = False
if tankman is not None:
tdescr = tankman.descriptor
skills = tdescr.skills
self.isFemale = tankman.isFemale
self.level = tdescr.lastSkillLevel if skills.index(self.name) == len(skills) - 1 else tankmen.MAX_SKILL_LEVEL
self.roleType = self.__getSkillRoleType(skillName)
self.isActive = self.__getSkillActivity(tankman)
self.isEnable = self.__getEnabledSkill(tankman)
self.isPermanent = skills.index(self.name) < tdescr.freeSkillsNumber
return
def __getEnabledSkill(self, tankman):
for role in tankman.combinedRoles:
roleSkills = tankmen.SKILLS_BY_ROLES.get(role, tuple())
if self.name in roleSkills:
return True
return False
@classmethod
def __getSkillRoleType(cls, skillName):
if skillName in tankmen.COMMON_SKILLS:
return 'common'
else:
for role, skills in tankmen.SKILLS_BY_ROLES.iteritems():
if skillName in skills:
return role
return None
def __getSkillActivity(self, tankman):
if tankman is None:
return True
else:
isBrotherhood = tankman.vehicleBonuses.get('brotherhood', 0) > 0
return not self.isPerk or self.name == 'brotherhood' and isBrotherhood or self.name != 'brotherhood' and self.level == tankmen.MAX_SKILL_LEVEL
def __getSkillType(self):
if self.isPerk:
if self.name == 'brotherhood':
return 'perk_common'
else:
return 'perk'
return 'skill'
@property
def userName(self):
if self.name == 'brotherhood':
if self.isFemale:
return i18n.makeString('#item_types:tankman/skills/brotherhood_female')
if self.isPermanent:
return i18n.makeString('#item_types:tankman/skills/brotherhood_permanent')
return getSkillUserName(self.name)
@property
def description(self):
if self.name == 'brotherhood':
if self.isFemale:
return i18n.makeString('#item_types:tankman/skills/brotherhood_female_descr')
if self.isPermanent:
return i18n.makeString('#item_types:tankman/skills/brotherhood_permanent_descr')
return getSkillUserDescription(self.name)
@property
def shortDescription(self):
return getShortDescr(self.description)
@property
def icon(self):
return getSkillIconName(self.name)
def __repr__(self):
return 'TankmanSkill<name:%s, level:%d, isActive:%s>' % (self.name, self.level, str(self.isActive))
def getFirstUserName(nationID, firstNameID):
return i18n.convert(tankmen.getNationConfig(nationID)['firstNames'][firstNameID])
def getLastUserName(nationID, lastNameID):
return i18n.convert(tankmen.getNationConfig(nationID)['lastNames'][lastNameID])
def getFullUserName(nationID, firstNameID, lastNameID):
return '%s %s' % (getFirstUserName(nationID, firstNameID), getLastUserName(nationID, lastNameID))
def getRoleUserName(role):
return i18n.convert(tankmen.getSkillsConfig()[role]['userString'])
def getRoleIconName(role):
return tankmen.getSkillsConfig()[role]['icon']
def getRoleBigIconPath(role):
return '../maps/icons/tankmen/roles/big/%s' % getRoleIconName(role)
def getRoleMediumIconPath(role):
return '../maps/icons/tankmen/roles/medium/%s' % getRoleIconName(role)
def getRoleSmallIconPath(role):
return '../maps/icons/tankmen/roles/small/%s' % getRoleIconName(role)
def getRankUserName(nationID, rankID):
return i18n.convert(tankmen.getNationConfig(nationID)['ranks'][rankID]['userString'])
def getIconName(nationID, iconID):
return tankmen.getNationConfig(nationID)['icons'][iconID]
def getBarracksIconPath(nationID, iconID):
return '../maps/icons/tankmen/icons/barracks/%s' % getIconName(nationID, iconID)
def getBigIconPath(nationID, iconID):
return '../maps/icons/tankmen/icons/big/%s' % getIconName(nationID, iconID)
def getSmallIconPath(nationID, iconID):
return '../maps/icons/tankmen/icons/small/%s' % getIconName(nationID, iconID)
def getRankIconName(nationID, rankID):
return tankmen.getNationConfig(nationID)['ranks'][rankID]['icon']
def getRankBigIconPath(nationID, rankID):
return '../maps/icons/tankmen/ranks/big/%s' % getRankIconName(nationID, rankID)
def getRankSmallIconPath(nationID, rankID):
return '../maps/icons/tankmen/ranks/small/%s' % getRankIconName(nationID, rankID)
def getSkillIconName(skillName):
return i18n.convert(tankmen.getSkillsConfig()[skillName]['icon'])
def getSkillBigIconPath(skillName):
return '../maps/icons/tankmen/skills/big/%s' % getSkillIconName(skillName)
def getSkillSmallIconPath(skillName):
return '../maps/icons/tankmen/skills/small/%s' % getSkillIconName(skillName)
def getSkillUserName(skillName):
return tankmen.getSkillsConfig()[skillName]['userString']
def getSkillUserDescription(skillName):
return tankmen.getSkillsConfig()[skillName]['description']
def calculateRoleLevel(startRoleLevel, freeXpValue = 0, typeID = (0, 0)):
return __makeFakeTankmanDescr(startRoleLevel, freeXpValue, typeID).roleLevel
def calculateRankID(startRoleLevel, freeXpValue = 0, typeID = (0, 0)):
return __makeFakeTankmanDescr(startRoleLevel, freeXpValue, typeID).rankID
def __makeFakeTankmanDescr(startRoleLevel, freeXpValue, typeID):
vehType = vehicles.VehicleDescr(typeID=typeID).type
tmanDescr = tankmen.TankmanDescr(tankmen.generateCompactDescr(tankmen.generatePassport(vehType.id[0], False), vehType.id[1], vehType.crewRoles[0][0], startRoleLevel))
tmanDescr.addXP(freeXpValue)
return tmanDescr
# okay decompyling c:\Users\PC\wotsources\files\originals\res\scripts\client\gui\shared\gui_items\tankman.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2016.02.14 12:41:33 Střední Evropa (běžný čas)
|
[
"info@webium.sk"
] |
info@webium.sk
|
12c70fb3da4c5fcd84f5b1c2debf9d5401231aa9
|
d694368d9ec045d7377fe245814c7f09c78b2b50
|
/Face_Enhancement/data/pix2pix_dataset.py
|
9e70a8cbc627a9de5f8c2a8a1fdb396c878b76fe
|
[
"MIT"
] |
permissive
|
ThunderboltLei/bringing-old-photos-back-to-life
|
c47e2086e30d9557ee82839218d1010c064099bf
|
65b547a166f53afd8b5cc9d339a688a3a2735183
|
refs/heads/master
| 2023-08-24T15:04:19.412541
| 2021-10-04T19:12:30
| 2021-10-04T19:12:30
| 413,504,539
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,920
|
py
|
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
import os
import util.util as util
from PIL import Image
from data.base_dataset import BaseDataset, get_params, get_transform
class Pix2pixDataset(BaseDataset):
@staticmethod
def modify_commandline_options(parser, is_train):
parser.add_argument(
"--no_pairing_check",
action="store_true",
help="If specified, skip sanity check of correct label-image file pairing",
)
return parser
def initialize(self, opt):
self.opt = opt
label_paths, image_paths, instance_paths = self.get_paths(opt)
util.natural_sort(label_paths)
util.natural_sort(image_paths)
if not opt.no_instance:
util.natural_sort(instance_paths)
label_paths = label_paths[: opt.max_dataset_size]
image_paths = image_paths[: opt.max_dataset_size]
instance_paths = instance_paths[: opt.max_dataset_size]
if not opt.no_pairing_check:
for path1, path2 in zip(label_paths, image_paths):
assert self.paths_match(path1, path2), (
"The label-image pair (%s, %s) do not look like the right pair because the filenames are quite different. Are you sure about the pairing? Please see data/pix2pix_dataset.py to see what is going on, and use --no_pairing_check to bypass this."
% (path1, path2)
)
self.label_paths = label_paths
self.image_paths = image_paths
self.instance_paths = instance_paths
size = len(self.label_paths)
self.dataset_size = size
def get_paths(self, opt):
label_paths = []
image_paths = []
instance_paths = []
assert False, "A subclass of Pix2pixDataset must override self.get_paths(self, opt)"
return label_paths, image_paths, instance_paths
def paths_match(self, path1, path2):
filename1_without_ext = os.path.splitext(os.path.basename(path1))[0]
filename2_without_ext = os.path.splitext(os.path.basename(path2))[0]
return filename1_without_ext == filename2_without_ext
def __getitem__(self, index):
# Label Image
label_path = self.label_paths[index]
label = Image.open(label_path)
params = get_params(self.opt, label.size)
transform_label = get_transform(self.opt, params, method=Image.NEAREST, normalize=False)
label_tensor = transform_label(label) * 255.0
label_tensor[label_tensor == 255] = self.opt.label_nc # 'unknown' is opt.label_nc
# input image (real images)
image_path = self.image_paths[index]
assert self.paths_match(
label_path, image_path
), "The label_path %s and image_path %s don't match." % (label_path, image_path)
image = Image.open(image_path)
image = image.convert("RGB")
transform_image = get_transform(self.opt, params)
image_tensor = transform_image(image)
# if using instance maps
if self.opt.no_instance:
instance_tensor = 0
else:
instance_path = self.instance_paths[index]
instance = Image.open(instance_path)
if instance.mode == "L":
instance_tensor = transform_label(instance) * 255
instance_tensor = instance_tensor.long()
else:
instance_tensor = transform_label(instance)
input_dict = {
"label": label_tensor,
"instance": instance_tensor,
"image": image_tensor,
"path": image_path,
}
# Give subclasses a chance to modify the final output
self.postprocess(input_dict)
return input_dict
def postprocess(self, input_dict):
return input_dict
def __len__(self):
return self.dataset_size
|
[
"zhangboknight@gmail.com"
] |
zhangboknight@gmail.com
|
2be2d28e944086e83a47b6b161ea7972f6d369bf
|
c357270b3697fd2843b7d5f15567d1697603bb39
|
/Python/guessinggame.py
|
390a40104330283dda1a25d9ca5b828018d98f48
|
[] |
no_license
|
emehrawn/mehran.github.io
|
9862b75f49142db22cfc2e6e76caee1ea779d121
|
3d881c78ccd87e52d86f91ee509bdba648d5a46d
|
refs/heads/main
| 2023-05-13T09:43:44.973158
| 2021-06-07T11:01:40
| 2021-06-07T11:01:40
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 545
|
py
|
#this is a guessing game based on the concept of random modules(randint)
import random
comguess=random.randint(0,100) #computer chooses any random number between 0-100
while True: #until the condition is true code written below will be repeated
userguess=int(input("Enter your Number"))
if comguess>userguess:
print("Guess higher")
elif comguess<userguess:
print("Guess lower")
else:
print("You got it. that's the right number")
break
|
[
"noreply@github.com"
] |
emehrawn.noreply@github.com
|
46a1a817febf4a34a1754def8c4892926a080d5a
|
6ef0269a07cc77dcae6e0ee9b24e5f387a86b8c3
|
/test.py
|
55701f9978928e9fd75584706483db0de20f3eb1
|
[] |
no_license
|
sub-14-00/dog-shooter
|
94fd156bd3a677b6a7cbebcf627f9798eeae813a
|
4adead1fe6ca8e6708bea1779969f8c2142cb226
|
refs/heads/main
| 2023-04-09T00:55:30.783586
| 2021-04-24T12:20:03
| 2021-04-24T12:20:03
| 361,154,238
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 516
|
py
|
import pygame
pygame.init()
pygame.font.init()
shrift = pygame.font.Font(None, 70 )
text = shrift.render("привет!",False, (0,0,255) )
win = pygame.display.set_mode((500,500))
win.blit(text, (0,0) )
x = 0
timer = pygame.time.Clock()
while True:
for i in pygame.event.get():
if i.type == 12:
exit()
x += 1
text = shrift.render(str(x),False, (0,0,255) )
win.fill( (255,255,255) )
win.blit(text, (x,0) )
timer.tick(60)
pygame.display.update()
|
[
"noreply@github.com"
] |
sub-14-00.noreply@github.com
|
d5b6e8b783974452c7dcfadc43862db0ee48653d
|
485e5647b171fb04f22aa86929015c182f0bdae1
|
/talos_api/apps.py
|
5235d7373c80851954033e68b034ffbf5f1ca87a
|
[] |
no_license
|
mmcdevitt1997/talos_backend
|
92ae247479e623aa98ad4e4d7fe42b47feb6a930
|
e948ba70b14ca022653cd203cb54f638966995f8
|
refs/heads/main
| 2022-12-26T04:55:39.001386
| 2020-10-11T19:41:41
| 2020-10-11T19:41:41
| 303,190,753
| 0
| 0
| null | 2020-10-11T19:41:42
| 2020-10-11T18:53:17
|
Python
|
UTF-8
|
Python
| false
| false
| 92
|
py
|
from django.apps import AppConfig
class TalosApiConfig(AppConfig):
name = 'talos_api'
|
[
"mmcdevitt1997@gmail.com"
] |
mmcdevitt1997@gmail.com
|
186c1778897b62e7a6d3e0e90339358b7dae923b
|
7cad80770718e655766dd038bc654ebe1ad2ab3e
|
/network/superbitcoin.py
|
9350634aab0e8bbefd2288848919a768e369921e
|
[] |
no_license
|
kindanew/clove_bounty
|
d0e1f917dff5b3a23eae6a05d6449af068fb4dc9
|
e707eebde301ac3728a5835d10d927aa83d27ab5
|
refs/heads/master
| 2021-04-29T00:29:37.698092
| 2018-02-24T02:34:33
| 2018-02-24T02:34:33
| 121,830,043
| 0
| 0
| null | 2018-02-17T05:34:54
| 2018-02-17T05:26:51
|
Python
|
UTF-8
|
Python
| false
| false
| 840
|
py
|
from clove.network.bitcoin import Bitcoin
class SuperBitcoin(Bitcoin):
"""
Class with all the necessary SuperBitcoin (SBTC) network information based on
https://github.com/superbitcoin/SuperBitcoin/blob/master/src/config/chainparams.cpp
(date of access: 02/17/2018)
"""
name = 'superbitcoin'
symbols = ('SBTC', )
seeds = ('seed.superbtca.com', 'seed.superbtca.info', 'seed.superbtc.org')
port = 8334
class SuperBitcoinTestNet(SuperBitcoin):
"""
Class with all the necessary SuperBitcoin (SBTC) network information based on
https://github.com/superbitcoin/SuperBitcoin/blob/master/src/config/chainparams.cpp
(date of access: 02/17/2018)
"""
name = 'test-superbitcoin'
symbols = ('SBTC', )
seeds = ('seedtest.superbtc.org')
port = 18334
|
[
"noreply@github.com"
] |
kindanew.noreply@github.com
|
09f712430ef72b8ff171cc36e8fd09c4f7191c12
|
c189be45235b38e632ebf2b302e5797cd8c580b4
|
/doggo_proj/manage.py
|
41de38d8a4f9e41ca55d54bcf5ea815a9a7501ed
|
[] |
no_license
|
LukeBM21/doggo_proj
|
7c352a1f138ee0d0e558ada089533e392d5613a5
|
67678d035918a288c7a706b63328d1d97d9620be
|
refs/heads/master
| 2021-05-21T01:23:22.936193
| 2020-04-02T15:49:18
| 2020-04-02T15:49:18
| 252,480,737
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 630
|
py
|
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'doggo_proj.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
|
[
"luke.mckinley21@gmail.com"
] |
luke.mckinley21@gmail.com
|
ccab650069a4e826fbc58b2db1ddb23515fed9af
|
46f7e1b8845dcf5cf25ed79a9b4c6513c5576c27
|
/FSSD_OoD_Detection/lib/training/mnist_oe_training/resnet.py
|
b27e608393647f8d8fc61a706f748b2e45821faf
|
[
"MIT"
] |
permissive
|
kartikeya013/OOD_Baselines
|
15ec79baecd4b11d4eb05c0e7e3174e5dbdd68e7
|
6a83c367028462966f34cea8fb7a6f15dd0a5fd8
|
refs/heads/main
| 2023-08-18T04:16:50.355657
| 2021-10-10T13:50:24
| 2021-10-10T13:50:24
| 387,860,505
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,013
|
py
|
'''ResNet in PyTorch.
For Pre-activation ResNet, see 'preact_resnet.py'.
Reference:
[1] Kaiming He, Xiangyu Zhang, Shaoqing Ren, Jian Sun
Deep Residual Learning for Image Recognition. arXiv:1512.03385
'''
import torch
import torch.nn as nn
import torch.nn.functional as F
class BasicBlock(nn.Module):
expansion = 1
def __init__(self, in_planes, planes, stride=1):
super(BasicBlock, self).__init__()
self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
self.bn1 = nn.BatchNorm2d(planes)
self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=1, padding=1, bias=False)
self.bn2 = nn.BatchNorm2d(planes)
self.shortcut = nn.Sequential()
if stride != 1 or in_planes != self.expansion*planes:
self.shortcut = nn.Sequential(
nn.Conv2d(in_planes, self.expansion*planes, kernel_size=1, stride=stride, bias=False),
nn.BatchNorm2d(self.expansion*planes)
)
def forward(self, x):
out = F.relu(self.bn1(self.conv1(x)))
out = self.bn2(self.conv2(out))
out += self.shortcut(x)
out = F.relu(out)
return out
class Bottleneck(nn.Module):
expansion = 4
def __init__(self, in_planes, planes, stride=1):
super(Bottleneck, self).__init__()
self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=1, bias=False)
self.bn1 = nn.BatchNorm2d(planes)
self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
self.bn2 = nn.BatchNorm2d(planes)
self.conv3 = nn.Conv2d(planes, self.expansion*planes, kernel_size=1, bias=False)
self.bn3 = nn.BatchNorm2d(self.expansion*planes)
self.shortcut = nn.Sequential()
if stride != 1 or in_planes != self.expansion*planes:
self.shortcut = nn.Sequential(
nn.Conv2d(in_planes, self.expansion*planes, kernel_size=1, stride=stride, bias=False),
nn.BatchNorm2d(self.expansion*planes)
)
def forward(self, x):
out = F.relu(self.bn1(self.conv1(x)))
out = F.relu(self.bn2(self.conv2(out)))
out = self.bn3(self.conv3(out))
out += self.shortcut(x)
out = F.relu(out)
return out
class ResNet(nn.Module):
def __init__(self, block, num_blocks, num_classes=10):
super(ResNet, self).__init__()
self.in_planes = 64
self.conv1 = nn.Conv2d(1, 64, kernel_size=3, stride=1, padding=1, bias=False)
self.bn1 = nn.BatchNorm2d(64)
self.layer1 = self._make_layer(block, 64, num_blocks[0], stride=1)
self.layer2 = self._make_layer(block, 128, num_blocks[1], stride=2)
self.layer3 = self._make_layer(block, 256, num_blocks[2], stride=2)
self.layer4 = self._make_layer(block, 512, num_blocks[3], stride=2)
self.linear = nn.Linear(512*block.expansion, num_classes)
def _make_layer(self, block, planes, num_blocks, stride):
strides = [stride] + [1]*(num_blocks-1)
layers = []
for stride in strides:
layers.append(block(self.in_planes, planes, stride))
self.in_planes = planes * block.expansion
return nn.Sequential(*layers)
def forward(self, x):
out = F.relu(self.bn1(self.conv1(x)))
out = self.layer1(out)
out = self.layer2(out)
out = self.layer3(out)
out = self.layer4(out)
out = F.avg_pool2d(out, 4)
out = out.view(out.size(0), -1)
out = self.linear(out)
return out
def ResNet18():
return ResNet(BasicBlock, [2,2,2,2])
def ResNet34():
return ResNet(BasicBlock, [3,4,6,3])
def ResNet50():
return ResNet(Bottleneck, [3,4,6,3])
def ResNet101():
return ResNet(Bottleneck, [3,4,23,3])
def ResNet152():
return ResNet(Bottleneck, [3,8,36,3])
# def test():
# net = ResNet18()
# y = net(torch.randn(1,3,32,32))
# print(y.size())
# test()
|
[
"kartikeyagupta2000@gmail.com"
] |
kartikeyagupta2000@gmail.com
|
47a6e7a126dfc1accb2bcdd8f120fc7be23acf0b
|
f2f034087591b1ca704b892d7528a985563b8460
|
/cforce/785/A/25503070.py
|
ccb86d8a0cdb7197cb00e97a6cb4cb11a8036821
|
[] |
no_license
|
Custom-Tech-Enterprize/sportProgrammingSubmissionsOfmoghya
|
cd45c5a5ef1db3ba7dcd78bfcd2d12ad50524f57
|
9f71c9ee1538ef9d8de2b519f9eff223ed5258ed
|
refs/heads/master
| 2022-02-21T08:57:34.950245
| 2017-06-09T11:39:49
| 2017-06-09T11:39:49
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 218
|
py
|
data = {
'Tetrahedron':4,
'Cube':6,
'Octahedron':8,
'Dodecahedron':12,
'Icosahedron':20
}
n = int(input())
ans = 0
for _ in range(0,n):
s = input()
ans = ans + data[s]
print(ans)
|
[
"shubham.sawant@walchandsangli.ac.in"
] |
shubham.sawant@walchandsangli.ac.in
|
fba2c5cf38bb2f6744f411ed83d03e8a501360c2
|
3906bcfc1ea7686140745f72a74f1c7517b6c149
|
/CHS/complaints/forms.py
|
2c88c1b9ff111d48e9bfaaa4d6096184752f6f4a
|
[] |
no_license
|
eaakashmaurya/Bits_Please_Hack30
|
46e26cde157cc8e016cf584178cb3c0e3ae41385
|
c6500846c92fef7e3b3ebe56942ae70ad17edae1
|
refs/heads/main
| 2023-03-11T07:33:56.145716
| 2021-03-01T16:10:54
| 2021-03-01T16:10:54
| 343,316,432
| 0
| 1
| null | 2021-03-01T16:11:12
| 2021-03-01T06:43:23
|
Python
|
UTF-8
|
Python
| false
| false
| 244
|
py
|
from django import forms
# Models
from posts.models import Post
class ComplaintForm(forms.ModelForm):
"""Post model form"""
class Meta:
"""Form settings."""
model = Post
fields = ('userprofile', 'category', 'complaint_face_picture')
|
[
"eaakashmaurya@gmail.com"
] |
eaakashmaurya@gmail.com
|
a243fa0e0fe1b5fe678851230432173dda92bb6e
|
08eb86a756a89f26759b0fad2e2f776b5e8fd5e7
|
/BigGAN-PyTorch-master/datasets.py
|
59ae4aa681588e1383ef98ea6767400c9fc80dbc
|
[
"MIT"
] |
permissive
|
RuiLiFeng/biggan-torch
|
61064d08efb59ded24e1063ed3fdaf8b608b00e4
|
ade6c5d3c67c391bec4a7bc6600e2e1250bbcf2c
|
refs/heads/master
| 2020-07-16T11:18:54.762398
| 2019-09-19T07:22:16
| 2019-09-19T07:22:16
| 205,779,229
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 11,418
|
py
|
''' Datasets
This file contains definitions for our CIFAR, ImageFolder, and HDF5 datasets
'''
import os
import os.path
import sys
from PIL import Image
import numpy as np
from tqdm import tqdm, trange
import torchvision.datasets as dset
import torchvision.transforms as transforms
from torchvision.datasets.utils import download_url, check_integrity
import torch.utils.data as data
from torch.utils.data import DataLoader
IMG_EXTENSIONS = ['.jpg', '.jpeg', '.png', '.ppm', '.bmp', '.pgm']
def is_image_file(filename):
"""Checks if a file is an image.
Args:
filename (string): path to a file
Returns:
bool: True if the filename ends with a known image extension
"""
filename_lower = filename.lower()
return any(filename_lower.endswith(ext) for ext in IMG_EXTENSIONS)
def find_classes(dir):
classes = [d for d in os.listdir(dir) if os.path.isdir(os.path.join(dir, d))]
classes.sort()
class_to_idx = {classes[i]: i for i in range(len(classes))}
return classes, class_to_idx
def make_dataset(dir, class_to_idx):
images = []
dir = os.path.expanduser(dir)
for target in tqdm(sorted(os.listdir(dir))):
d = os.path.join(dir, target)
if not os.path.isdir(d):
continue
for root, _, fnames in sorted(os.walk(d)):
for fname in sorted(fnames):
if is_image_file(fname):
path = os.path.join(root, fname)
item = (path, class_to_idx[target])
images.append(item)
return images
def pil_loader(path):
# open path as file to avoid ResourceWarning (https://github.com/python-pillow/Pillow/issues/835)
with open(path, 'rb') as f:
img = Image.open(f)
return img.convert('RGB')
def accimage_loader(path):
import accimage
try:
return accimage.Image(path)
except IOError:
# Potentially a decoding problem, fall back to PIL.Image
return pil_loader(path)
def default_loader(path):
from torchvision import get_image_backend
if get_image_backend() == 'accimage':
return accimage_loader(path)
else:
return pil_loader(path)
class ImageFolder(data.Dataset):
"""A generic data loader where the images are arranged in this way: ::
root/dogball/xxx.png
root/dogball/xxy.png
root/dogball/xxz.png
root/cat/123.png
root/cat/nsdf3.png
root/cat/asd932_.png
Args:
root (string): Root directory path.
transform (callable, optional): A function/transform that takes in an PIL image
and returns a transformed version. E.g, ``transforms.RandomCrop``
target_transform (callable, optional): A function/transform that takes in the
target and transforms it.
loader (callable, optional): A function to load an image given its path.
Attributes:
classes (list): List of the class names.
class_to_idx (dict): Dict with items (class_name, class_index).
imgs (list): List of (image path, class_index) tuples
"""
def __init__(self, root, transform=None, target_transform=None,
loader=default_loader, load_in_mem=False,
index_filename='imagenet_imgs.npz', **kwargs):
classes, class_to_idx = find_classes(root)
# Load pre-computed image directory walk
if os.path.exists(index_filename):
print('Loading pre-saved Index file %s...' % index_filename)
imgs = np.load(index_filename)['imgs']
# If first time, walk the folder directory and save the
# results to a pre-computed file.
else:
print('Generating Index file %s...' % index_filename)
imgs = make_dataset(root, class_to_idx)
np.savez_compressed(index_filename, **{'imgs' : imgs})
if len(imgs) == 0:
raise(RuntimeError("Found 0 images in subfolders of: " + root + "\n"
"Supported image extensions are: " + ",".join(IMG_EXTENSIONS)))
self.root = root
self.imgs = imgs
self.classes = classes
self.class_to_idx = class_to_idx
self.transform = transform
self.target_transform = target_transform
self.loader = loader
self.load_in_mem = load_in_mem
if self.load_in_mem:
print('Loading all images into memory...')
self.data, self.labels = [], []
for index in tqdm(range(len(self.imgs))):
path, target = imgs[index][0], imgs[index][1]
self.data.append(self.transform(self.loader(path)))
self.labels.append(target)
def __getitem__(self, index):
"""
Args:
index (int): Index
Returns:
tuple: (image, target) where target is class_index of the target class.
"""
if self.load_in_mem:
img = self.data[index]
target = self.labels[index]
else:
path, target = self.imgs[index]
img = self.loader(str(path))
if self.transform is not None:
img = self.transform(img)
if self.target_transform is not None:
target = self.target_transform(target)
# print(img.size(), target)
return img, int(target)
def __len__(self):
return len(self.imgs)
def __repr__(self):
fmt_str = 'Dataset ' + self.__class__.__name__ + '\n'
fmt_str += ' Number of datapoints: {}\n'.format(self.__len__())
fmt_str += ' Root Location: {}\n'.format(self.root)
tmp = ' Transforms (if any): '
fmt_str += '{0}{1}\n'.format(tmp, self.transform.__repr__().replace('\n', '\n' + ' ' * len(tmp)))
tmp = ' Target Transforms (if any): '
fmt_str += '{0}{1}'.format(tmp, self.target_transform.__repr__().replace('\n', '\n' + ' ' * len(tmp)))
return fmt_str
''' ILSVRC_HDF5: A dataset to support I/O from an HDF5 to avoid
having to load individual images all the time. '''
import h5py as h5
import torch
class ILSVRC_HDF5(data.Dataset):
def __init__(self, root, transform=None, target_transform=None,
load_in_mem=False, train=True, download=False, validate_seed=0,
val_split=0, **kwargs): # last four are dummies
self.root = root
self.num_imgs = len(h5.File(root, 'r')['labels'])
# self.transform = transform
self.target_transform = target_transform
# Set the transform here
self.transform = transform
# load the entire dataset into memory?
self.load_in_mem = load_in_mem
# If loading into memory, do so now
if self.load_in_mem:
print('Loading %s into memory...' % root)
with h5.File(root,'r') as f:
self.data = f['imgs'][:]
self.labels = f['labels'][:]
def __getitem__(self, index):
"""
Args:
index (int): Index
Returns:
tuple: (image, target) where target is class_index of the target class.
"""
# If loaded the entire dataset in RAM, get image from memory
if self.load_in_mem:
img = self.data[index]
target = self.labels[index]
# Else load it from disk
else:
with h5.File(self.root,'r') as f:
img = f['imgs'][index]
target = f['labels'][index]
# if self.transform is not None:
# img = self.transform(img)
# Apply my own transform
img = ((torch.from_numpy(img).float() / 255) - 0.5) * 2
if self.target_transform is not None:
target = self.target_transform(target)
return img, int(target)
def __len__(self):
return self.num_imgs
# return len(self.f['imgs'])
import pickle
class CIFAR10(dset.CIFAR10):
def __init__(self, root, train=True,
transform=None, target_transform=None,
download=True, validate_seed=0,
val_split=0, load_in_mem=True, **kwargs):
self.root = os.path.expanduser(root)
self.transform = transform
self.target_transform = target_transform
self.train = train # training set or test set
self.val_split = val_split
if download:
self.download()
if not self._check_integrity():
raise RuntimeError('Dataset not found or corrupted.' +
' You can use download=True to download it')
# now load the picked numpy arrays
self.data = []
self.labels= []
for fentry in self.train_list:
f = fentry[0]
file = os.path.join(self.root, self.base_folder, f)
fo = open(file, 'rb')
if sys.version_info[0] == 2:
entry = pickle.load(fo)
else:
entry = pickle.load(fo, encoding='latin1')
self.data.append(entry['data'])
if 'labels' in entry:
self.labels += entry['labels']
else:
self.labels += entry['fine_labels']
fo.close()
self.data = np.concatenate(self.data)
# Randomly select indices for validation
if self.val_split > 0:
label_indices = [[] for _ in range(max(self.labels)+1)]
for i,l in enumerate(self.labels):
label_indices[l] += [i]
label_indices = np.asarray(label_indices)
# randomly grab 500 elements of each class
np.random.seed(validate_seed)
self.val_indices = []
for l_i in label_indices:
self.val_indices += list(l_i[np.random.choice(len(l_i), int(len(self.data) * val_split) // (max(self.labels) + 1) ,replace=False)])
if self.train=='validate':
self.data = self.data[self.val_indices]
self.labels = list(np.asarray(self.labels)[self.val_indices])
self.data = self.data.reshape((int(50e3 * self.val_split), 3, 32, 32))
self.data = self.data.transpose((0, 2, 3, 1)) # convert to HWC
elif self.train:
print(np.shape(self.data))
if self.val_split > 0:
self.data = np.delete(self.data,self.val_indices,axis=0)
self.labels = list(np.delete(np.asarray(self.labels),self.val_indices,axis=0))
self.data = self.data.reshape((int(50e3 * (1.-self.val_split)), 3, 32, 32))
self.data = self.data.transpose((0, 2, 3, 1)) # convert to HWC
else:
f = self.test_list[0][0]
file = os.path.join(self.root, self.base_folder, f)
fo = open(file, 'rb')
if sys.version_info[0] == 2:
entry = pickle.load(fo)
else:
entry = pickle.load(fo, encoding='latin1')
self.data = entry['data']
if 'labels' in entry:
self.labels = entry['labels']
else:
self.labels = entry['fine_labels']
fo.close()
self.data = self.data.reshape((10000, 3, 32, 32))
self.data = self.data.transpose((0, 2, 3, 1)) # convert to HWC
def __getitem__(self, index):
"""
Args:
index (int): Index
Returns:
tuple: (image, target) where target is index of the target class.
"""
img, target = self.data[index], self.labels[index]
# doing this so that it is consistent with all other datasets
# to return a PIL Image
img = Image.fromarray(img)
if self.transform is not None:
img = self.transform(img)
if self.target_transform is not None:
target = self.target_transform(target)
return img, target
def __len__(self):
return len(self.data)
class CIFAR100(CIFAR10):
base_folder = 'cifar-100-python'
url = "http://www.cs.toronto.edu/~kriz/cifar-100-python.tar.gz"
filename = "cifar-100-python.tar.gz"
tgz_md5 = 'eb9058c3a382ffc7106e4002c42a8d85'
train_list = [
['train', '16019d7e3df5f24257cddd939b257f8d'],
]
test_list = [
['test', 'f0ef6b0ae62326f3e7ffdfab6717acfc'],
]
|
[
"frl1996@mail.ustc.edu.cn"
] |
frl1996@mail.ustc.edu.cn
|
ad49c19251bc2bb94ae01e4a11fb4763e8c85d3d
|
f914ac0a4ab3942d1b3e1a8b2e2da78b5da959f1
|
/fproject/fApp/admin.py
|
aa3d16c86e700dc4c71e74c8684cb45e0be3d91f
|
[] |
no_license
|
rohitc1818/blogproject
|
9c2ac4b6975fbcf960c027697ceb86060aa43694
|
3bbbdcb626fe75dd6645685762548869929e66d3
|
refs/heads/main
| 2022-12-27T17:59:40.310445
| 2020-10-19T11:22:06
| 2020-10-19T11:22:06
| 305,358,776
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 676
|
py
|
from django.contrib import admin
from fApp.models import Post,Comment
# Register your models here.
class PostAdmin(admin.ModelAdmin):
list_display = ['title', 'slug', 'author', 'publish', 'created', 'updated', 'status']
list_filter = ('status', 'created', 'publish', 'author')
search_fields = ('title', 'body')
prepopulated_fields = {'slug': ('title',)}
raw_id_fields = ('author',)
ordering = ('status', 'publish')
admin.site.register(Post,PostAdmin)
class CommentAdmin(admin.ModelAdmin):
list_display = ['post','name','body','date_added']
search_fields = ('body',)
admin.site.register(Comment,CommentAdmin)
#uername:-raghav
#pa:-raghav1234
|
[
"rc5741545@gmail.com"
] |
rc5741545@gmail.com
|
b8c2211e440572450fc40542853a71bd431d4466
|
5c94254ef9897a1aca24b87249c6dfd3bee4e70d
|
/code/devf/batch13_red/tools/dia1/hola.py
|
619de2afc6e08049bdc820323f38779577fff6f8
|
[] |
no_license
|
IanCarloz/Red-Batch13
|
ca917d220e7718e27e4faf5fb7e4750b43573818
|
7dee13774d982ceb00ed3b9ff5f59c64f23316fc
|
refs/heads/master
| 2021-01-09T06:19:17.741408
| 2017-02-05T01:37:48
| 2017-02-05T01:37:48
| 80,956,018
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 42
|
py
|
# -*- coding: utf-8 -*-
print("hola Ian")
|
[
"noreply@github.com"
] |
IanCarloz.noreply@github.com
|
bb7985c2f88d28bcee6d7049d2224da4c7746af0
|
ac98c6184c5e7c633d02bc66a4da357cd2771b4d
|
/model.py
|
e619b0d95b77d5347169de73c3b31dd55d892ba2
|
[] |
no_license
|
jadhavmayur/Heroku-Demo
|
ef7b272f611305e176540e5dd9d5021480334ef2
|
a396c1025d0260baf9c9aa632caa740794d27aba
|
refs/heads/main
| 2023-08-15T01:54:59.386080
| 2021-09-30T05:51:53
| 2021-09-30T05:51:53
| 410,007,612
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,279
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Wed Sep 22 14:42:54 2021
@author: Mayur
"""
# Importing the libraries
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import pickle
dataset = pd.read_csv('F:\mayur\Module-3\model deploy\hiring.csv')
dataset['experience'].fillna(0, inplace=True)
dataset['test_score(out of 10)'].fillna(dataset['test_score(out of 10)'].mean(), inplace=True)
X = dataset.iloc[:, :3]
#Converting words to integer values
def convert_to_int(word):
word_dict = {'one':1, 'two':2, 'three':3, 'four':4, 'five':5, 'six':6, 'seven':7, 'eight':8,
'nine':9, 'ten':10, 'eleven':11, 'twelve':12, 'zero':0, 0: 0}
return word_dict[word]
X['experience'] = X['experience'].apply(lambda x : convert_to_int(x))
y = dataset.iloc[:, -1]
#Splitting Training and Test Set
#Since we have a very small dataset, we will train our model with all availabe data.
from sklearn.linear_model import LinearRegression
regressor = LinearRegression()
#Fitting model with trainig data
regressor.fit(X, y)
# Saving model to disk
pickle.dump(regressor, open('model.pkl','wb'))
# Loading model to compare the results
model = pickle.load(open('model.pkl','rb'))
print(model.predict([[2, 9, 6]]))
|
[
"noreply@github.com"
] |
jadhavmayur.noreply@github.com
|
59e9bfcb6ae8c2c353d30f13d889945a173a23a9
|
fa4c9b51af2ebab47dba888577c02894aa9efbf6
|
/DualLogger.py
|
4de5c44ea5d42fdbbc81f092dc7c5e13f6f676c7
|
[] |
no_license
|
Jake-Wright/DualScaleLogger
|
17d39ce444d760ebe7dc372ce9c143ecc673f926
|
60019718e337c7513105298effb468d8e4666a6e
|
refs/heads/master
| 2020-06-05T00:31:03.615573
| 2013-04-12T11:16:31
| 2013-04-12T11:16:31
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 19,936
|
py
|
# -*- coding: utf-8 -*-
import serial
from time import sleep
import time
import re
import logging
import datetime
import pygtk
pygtk.require('2.0')
import gtk
import threading
import gobject
import time
gobject.threads_init()
#Todo: Setup monitor thread, connect checkbuttons
RATE=9600
LOCATIONS=['/dev/ttyUSB0','/dev/ttyUSB1','/dev/ttyUSB2','/dev/ttyUSB3',
'/dev/ttyS0','/dev/ttyS2','/dev/ttyS3', '/dev/ttyS4']
class scale(object):
#Object to make the number of scales modular. Includes the serial port, an id, and connection status
def __init__(self, scaleId):
self.logging=False
self.connected=False
self.scaleId=scaleId
self.address=''
self.weight='No Data'
def connect(self,locations, rate):
for address in locations:
try:
self.serial=serial.Serial(address, rate, timeout=1)
self.serial.flushInput()
self.serial.write('p' + '\r\n')
buf=self.serial.readline() #blank
#print 'wrote to' + address
#print str(buf)
if buf: #if you didn't time out, attempt to read the whole string
#print "didn't time out passed first if"
buf=self.serial.readline() #blank
buf=self.serial.readline() #date
buf=self.serial.readline() #time
buf=self.serial.readline() #blank
buf=self.serial.readline() #id
for text in buf.split():
try:
#print 'tried split'
scaleid=float(text)
if(scaleid==float(self.scaleId)):
self.address=address
self.connected=True
buf=self.serial.readline() #user id
buf=self.serial.readline() #blank
buf=self.serial.readline() #weight
buf=self.serial.readline() #blank
buf=self.serial.readline() #blank
buf=self.serial.readline() #blank
buf=self.serial.readline() #blank
break
except:
_=''
except:
_=''
if self.address:
break
class guiFramework(object):
def __init__(self, locations, rate):
self.window=gtk.Window(gtk.WINDOW_TOPLEVEL)
self.window.set_title("Scale Measuring Program")
self.window.connect("delete_event", self.delete_event)
self.window.set_resizable(False)
self.window.set_border_width(20)
self.locations=locations
self.rate=rate
#self.initializeSerial(None, None)
self.details='No details were entered for this run.'
self.speed='No speed was entered for this run.'
self.guiInitialization()
self.window.show_all()
date=datetime.date.today()
self.filename=str(date.month) +'-'+ str(date.day)
self.filenumber=0
self.initializeLogger()
self.logging=False
self.scale1=scale(1)
self.scale2=scale(2)
self.width=12
self.monitorThread=gtkThread(self)
def guiInitialization(self):
#Create the various boxes necessary for gtk windows
self.bigVBox=gtk.VBox(False, 2)
self.window.add(self.bigVBox)
self.scalesbox=gtk.HBox(False, 2)
self.runbox=gtk.VBox(False, 2)
self.mainlogbox=gtk.HBox(False, 2)
self.statusbox=gtk.HBox(False, 2)
self.bigVBox.pack_start(self.scalesbox, False, False, 0)
self.bigVBox.pack_start(self.runbox, False, False, 0)
self.bigVBox.pack_start(self.mainlogbox, False, False, 0)
self.bigVBox.pack_start(self.statusbox, False, False, 0)
#Now set up the individual boxes, from the top down
#First, create the sub boxes to house scales 1 and 2
self.scale1box=gtk.VBox(False, 2)
self.scale2box=gtk.VBox(False, 2)
self.scalesbox.pack_start(self.scale1box, False, False, 0)
self.scalesbox.pack_start(self.scale2box, False, False, 0)
#Create scale box 1 elements
#Name and include
self.scale1labelbox=gtk.HBox(False, 2)
self.scale1Label=gtk.Label('Scale 1')
self.scale1Include=gtk.CheckButton('Include', False)
self.scale1Include.connect('toggled',self.toggle_event, '1')
self.scale1labelbox.pack_start(self.scale1Label, False, False, 0)
self.scale1labelbox.pack_start(self.scale1Include, False, False, 0)
#Disconnect/connect
self.scale1buttonbox=gtk.HBox(False, 2)
self.scale1ConnectButton=gtk.Button('Connect')
self.scale1ConnectButton.connect('clicked', self.scaleConnect, '1')
self.scale1DisconnectButton=gtk.Button('Disconnect')
self.scale1DisconnectButton.connect('clicked',self.scaleDisconnect, '1')
self.scale1buttonbox.pack_start(self.scale1ConnectButton, False, False, 0)
self.scale1buttonbox.pack_start(self.scale1DisconnectButton, False, False, 0)
#And status/weight
self.scale1StatusBox=gtk.HBox(False, 2)
self.scale1ConnectedFrame=gtk.Frame('Connected')
self.scale1ConnectedLabel=gtk.Label('False')
self.scale1WeightFrame=gtk.Frame('Weight')
self.scale1WeightLabel=gtk.Label('No Data')
self.scale1WeightFrame.add(self.scale1WeightLabel)
self.scale1ConnectedFrame.add(self.scale1ConnectedLabel)
self.scale1StatusBox.pack_start(self.scale1ConnectedFrame, False, False, 0)
self.scale1StatusBox.pack_start(self.scale1WeightFrame, False, False, 0)
#pack the box
self.scale1box.pack_start(self.scale1labelbox, False, False, 0)
self.scale1box.pack_start(self.scale1buttonbox, False, False, 0)
self.scale1box.pack_start(self.scale1StatusBox, False, False, 0)
#Create scale box 2 elements
#Name and include
self.scale2labelbox=gtk.HBox(False, 2)
self.scale2Label=gtk.Label('Scale 2')
self.scale2Include=gtk.CheckButton('Include', False)
self.scale2Include.connect('toggled',self.toggle_event, '2')
self.scale2labelbox.pack_start(self.scale2Label, False, False, 0)
self.scale2labelbox.pack_start(self.scale2Include, False, False, 0)
#Disconnect/connect
self.scale2buttonbox=gtk.HBox(False, 2)
self.scale2ConnectButton=gtk.Button('Connect')
self.scale2ConnectButton.connect('clicked', self.scaleConnect, '2')
self.scale2DisconnectButton=gtk.Button('Disconnect')
self.scale2DisconnectButton.connect('clicked', self.scaleDisconnect, '2')
self.scale2buttonbox.pack_start(self.scale2ConnectButton, False, False, 0)
self.scale2buttonbox.pack_start(self.scale2DisconnectButton, False, False, 0)
#And status/weight
self.scale2StatusBox=gtk.HBox(False, 2)
self.scale2ConnectedFrame=gtk.Frame('Connected')
self.scale2ConnectedLabel=gtk.Label('False')
self.scale2WeightFrame=gtk.Frame('Weight')
self.scale2WeightLabel=gtk.Label('No Data')
self.scale2WeightFrame.add(self.scale2WeightLabel)
self.scale2ConnectedFrame.add(self.scale2ConnectedLabel)
self.scale2StatusBox.pack_start(self.scale2ConnectedFrame, False, False, 0)
self.scale2StatusBox.pack_start(self.scale2WeightFrame, False, False, 0)
#pack the box
self.scale2box.pack_start(self.scale2labelbox, False, False, 0)
self.scale2box.pack_start(self.scale2buttonbox, False, False, 0)
self.scale2box.pack_start(self.scale2StatusBox, False, False, 0)
#Now move on to the run box (run data, speed, width)
self.detailFrame=gtk.Frame('Run Details')
self.detailEntry=gtk.Entry(200)
self.detailEntry.connect('activate',self.getDetailText, '')
self.detailFrame.add(self.detailEntry)
self.widthAndSpeedBox=gtk.HBox(False, 0)
self.speedFrame=gtk.Frame('Speed (FPM)')
self.speedEntry=gtk.Entry(3)
self.speedEntry.connect('activate', self.getSpeedText, '')
self.speedFrame.add(self.speedEntry)
self.widthFrame=gtk.Frame('Width (In.)')
self.widthEntry=gtk.Entry(2)
self.widthEntry.connect('activate',self.getWidthText, '')
self.widthFrame.add(self.widthEntry)
self.widthAndSpeedBox.pack_start(self.speedFrame, False, False, 0)
self.widthAndSpeedBox.pack_start(self.widthFrame, False, False, 0)
self.runbox.pack_start(self.detailFrame, False, False, 0)
self.runbox.pack_start(self.widthAndSpeedBox, False, False, 0)
#Finally (for now) the logging buttons
self.loggingButton=gtk.Button('Start Logging')
self.loggingButton.connect('clicked', self.startLogging, '')
self.stopLoggingButton=gtk.Button('Stop Logging')
self.stopLoggingButton.connect('clicked', self.stopLogging, '')
self.newFileButton=gtk.Button('Use New File')
self.newFileButton.connect('clicked', self.newFile, '')
self.mainlogbox.pack_start(self.loggingButton, True, True, 0)
self.mainlogbox.pack_start(self.stopLoggingButton, True, True, 0)
self.mainlogbox.pack_start(self.newFileButton, True, True, 0)
#OK, really finally, I promise. Logging status stuff
self.loggingFrame=gtk.Frame('Logging')
self.loggingLabel=gtk.Label('False')
self.loggingFrame.add(self.loggingLabel)
self.statusbox.pack_start(self.loggingFrame, True, True, 0)
self.timeLabel=gtk.Label('0')
self.timeFrame=gtk.Frame('Run Duration')
self.timeFrame.add(self.timeLabel)
self.statusbox.pack_start(self.timeFrame, True, True, 0)
def initializeLogger(self):
"""Sets up logging"""
self.logger=logging.getLogger('data_logger' + str(self.filenumber))
self.hdlr = logging.FileHandler(self.filename + '-' +str(self.filenumber) + '.txt')
self.formatter = logging.Formatter('%(message)s')
self.hdlr.setFormatter(self.formatter)
self.logger.addHandler(self.hdlr)
self.logger.setLevel(logging.WARNING)
self.basetime=float(time.time())
def scaleConnect(self, widget, data):
if data=='1':
self.scale1.connect(self.locations, self.rate)
#print 'finished connection'
#print self.scale1.connected
if self.scale1.connected:
#print 'succeeded connected if'
self.locations.remove(self.scale1.address)
self.scale1ConnectedLabel.set_text('True')
if not self.monitorThread.started and not self.monitorThread.quit:
self.monitorThread.start()
self.monitorThread.started=True
elif not self.monitorThread.started and self.monitorThread.quit:
self.monitorThread=gtkThread(self)
self.monitorThread.start()
elif data=='2':
self.scale2.connect(self.locations, self.rate)
if self.scale2.connected:
#print 'succeeded connected if'
self.locations.remove(self.scale2.address)
self.scale2ConnectedLabel.set_text('True')
if not self.monitorThread.started and not self.monitorThread.quit:
self.monitorThread.start()
self.monitorThread.started=True
elif not self.monitorThread.started and self.monitorThread.quit:
self.monitorThread=gtkThread(self)
self.monitorThread.start()
def scaleDisconnect(self, widget, data):
    """Disconnect scale 1 or 2 (chosen by the string `data`).

    The freed serial address is returned to the front of self.locations
    and the scale's GUI state is reset.  When neither scale remains
    connected, the monitor thread is asked to quit.
    """
    def release(scale, label):
        # Close the port, return its address to the head of the free list
        # (insert(0, ...) replaces the old reverse/append/reverse dance),
        # and reset the scale's state.
        if scale.connected:
            scale.serial.close()
            self.locations.insert(0, scale.address)
            scale.address = ''
            label.set_text('False')
            scale.connected = False
            scale.weight = 'No Data'
    if data == '1':
        release(self.scale1, self.scale1ConnectedLabel)
    elif data == '2':
        release(self.scale2, self.scale2ConnectedLabel)
    if (not self.scale1.connected) and (not self.scale2.connected):
        # Nothing left to poll -- signal the monitor thread to stop.
        self.monitorThread.quit = True
        self.monitorThread.started = False
def toggle_event(self, widget, data):
    # Mirror the checkbox state onto the matching scale's logging flag.
    target = {'1': self.scale1, '2': self.scale2}.get(data)
    if target is not None:
        target.logging = widget.get_active()
def delete_event(self, widget, data=None):
    # Release both scales before leaving the GTK main loop.
    for scale_id in ('1', '2'):
        self.scaleDisconnect('', scale_id)
    gtk.main_quit()
def getDetailText(self, widget, data):
    # Pull the free-form details string, substituting a placeholder
    # when the entry is empty.
    text = self.detailEntry.get_text()
    self.details = text if text else 'No details were entered for this run.'
def getSpeedText(self, widget, data):
    # Capture the speed entry, defaulting to a placeholder when empty.
    text = self.speedEntry.get_text()
    self.speed = text if text else 'No speed was entered for this run.'
def getWidthText(self, widget, data):
    """Parse the width entry as a float, falling back to 12.0.

    Only conversion failures are caught now; the previous bare
    ``except`` also masked unrelated errors such as a missing widget
    attribute.
    """
    try:
        self.width = float(self.widthEntry.get_text())
    except (ValueError, TypeError):
        # Blank or non-numeric input -> default width.
        self.width = 12.0
def startLogging(self, widget, data):
    # Begin a logged segment: capture the run metadata, write the segment
    # header lines, reset the elapsed-time origin and flip the status label.
    self.logger.setLevel(logging.INFO)
    self.getDetailText('', '')
    self.getSpeedText('', '')
    for line in ('Begin data segment',
                 str(datetime.datetime.now()),
                 self.details + ' ' + self.speed):
        self.logger.info(line)
    self.basetime = float(time.time())
    self.logging = True
    self.loggingLabel.set_text("Logging")
def stopLogging(self, widget, data):
    """Finish the active log segment and mute further INFO records."""
    self.loggingLabel.set_text("Not Logging")
    self.logging = False
    # The end marker must be written before the level is raised,
    # otherwise it would be suppressed.
    self.logger.info('End segment')
    self.logger.setLevel(logging.WARNING)
def newFile(self, widget, data):
    """Advance the file counter and open a fresh numbered log file."""
    self.filenumber = self.filenumber + 1
    self.initializeLogger()
    # Carry an in-progress recording over to the new file.
    if self.logging == True:
        self.logger.setLevel(logging.INFO)
class gtkThread(threading.Thread):
    """Background poller that reads weights from the connected scales.

    The thread repeatedly asks each connected scale to print its weight,
    averages batches of 5+ readings, pushes the averages back to the GUI
    (via gobject.idle_add so label updates run on the GTK main loop) and
    appends a line to the data log when logging is active.

    Fixes vs. the original:
    * run() referenced the module-level global ``gui`` in two logging
      checks instead of ``self.gui``.
    * bare ``except:`` clauses narrowed.
    * py2 ``print`` statement converted to the function form (valid in
      both Python 2 and 3 for a single argument).
    """
    def __init__(self, gui):
        super(gtkThread, self).__init__()
        self.gui = gui          # owning GUI instance
        self.quit = False       # set by the GUI to request shutdown
        self.started = False    # tracked by the GUI to avoid double start()

    def update_labels(self):
        # Runs on the GTK main loop (scheduled with idle_add).  Returning
        # False tells GTK not to reschedule the callback.
        if self.gui.logging:
            self.gui.timeLabel.set_text(str(self.elapsed)[:4])
        self.gui.scale1WeightLabel.set_text(str(self.gui.scale1.weight))
        self.gui.scale2WeightLabel.set_text(str(self.gui.scale2.weight))
        return False

    def sendCommand(self, port):
        # Ask the scale to print its current reading.
        port.flushInput()
        port.write('p' + '\r\n')

    def weightExtraction(self, port):
        """Scan up to 9 response lines for the first parseable float,
        then drain 4 trailing lines.  Returns '' when nothing parsed."""
        weight = ''
        buf = ''
        for i in range(9):
            try:
                buf = port.readline()
            except Exception:
                # Read timeouts are expected; keep scanning the last buffer.
                pass
            for text in buf.split():
                try:
                    weight = float(text)
                    break
                except ValueError:
                    pass
        for i in range(4):
            buf = port.readline()
        return weight

    def run(self):
        # Poll loop: readings are batched and averaged so the displayed /
        # logged weight is smoother than single samples.
        scale1Weights = []
        scale2Weights = []
        while not self.quit:
            # Write print commands to the currently connected scales.
            if self.gui.scale1.connected:
                self.sendCommand(self.gui.scale1.serial)
            if self.gui.scale2.connected:
                self.sendCommand(self.gui.scale2.serial)
            time.sleep(.05)
            if self.gui.scale1.connected:
                weight = self.weightExtraction(self.gui.scale1.serial)
                if not weight == '':
                    scale1Weights.append(weight)
            if self.gui.scale2.connected:
                weight = self.weightExtraction(self.gui.scale2.serial)
                if not weight == '':
                    scale2Weights.append(weight)
            if self.gui.scale1.connected and self.gui.scale2.connected:
                if (len(scale1Weights) > 4) and (len(scale2Weights) > 4):
                    self.elapsed = float(time.time()) - self.gui.basetime
                    average1 = 0
                    average2 = 0
                    for num in scale1Weights:
                        average1 = average1 + float(num)
                    for num in scale2Weights:
                        average2 = average2 + float(num)
                    average1 = average1 / len(scale1Weights)
                    average2 = average2 / len(scale2Weights)
                    self.gui.scale1.weight = str(average1)
                    self.gui.scale2.weight = str(average2)
                    scale1Weights = []
                    scale2Weights = []
                    if self.gui.logging:
                        # BUG FIX: these checks used the module-level `gui`
                        # instead of self.gui.
                        if self.gui.scale1.logging and self.gui.scale2.logging:
                            self.gui.logger.info(str(self.elapsed)[:4] + ' ' + str(average1) + ' ' + str(average2))
                        elif self.gui.scale1.logging:
                            self.gui.logger.info(str(self.elapsed)[:4] + ' ' + str(average1) + ' Not_Logged')
                        elif self.gui.scale2.logging:
                            self.gui.logger.info(str(self.elapsed)[:4] + ' NotLogged ' + str(average2))
                    gobject.idle_add(self.update_labels)
            elif self.gui.scale1.connected:
                if len(scale1Weights) > 4:
                    self.elapsed = float(time.time()) - self.gui.basetime
                    average1 = 0
                    for num in scale1Weights:
                        average1 = average1 + float(num)
                    average1 = average1 / len(scale1Weights)
                    self.gui.scale1.weight = str(average1)
                    scale1Weights = []
                    scale2Weights = []
                    if self.gui.logging:
                        if self.gui.scale1.logging:
                            self.gui.logger.info(str(self.elapsed)[:4] + ' ' + str(average1) + ' Not_Logged')
                    gobject.idle_add(self.update_labels)
            elif self.gui.scale2.connected:
                if len(scale2Weights) > 4:
                    self.elapsed = float(time.time()) - self.gui.basetime
                    average2 = 0
                    for num in scale2Weights:
                        average2 = average2 + float(num)
                    average2 = average2 / len(scale2Weights)
                    self.gui.scale2.weight = str(average2)
                    scale1Weights = []
                    scale2Weights = []
                    if self.gui.logging and self.gui.scale2.logging:
                        self.gui.logger.info(str(self.elapsed)[:4] + ' NotLogged ' + str(average2))
                    gobject.idle_add(self.update_labels)
                    # Debug trace retained from the original code.
                    print('called update labels')
        #self.average=average
# Script entry point: build the GUI against the configured serial port
# locations and polling rate, then block in the GTK main loop.
if __name__=='__main__':
    gui=guiFramework(LOCATIONS, RATE)
    gtk.main()
|
[
"jake.wright@hp.com"
] |
jake.wright@hp.com
|
b3c148944e5ca13caec1a7e1d8b400d8d64c4adf
|
8569c7da7b9f0c620b4b8d5589965066651b420c
|
/Coursera Course/4 - Manejo de bases de datos con Python/Ejercicios/point.py
|
962da7a941f85b1567f26e16255343583e8922c4
|
[] |
no_license
|
mazzalaialan/Python
|
e467163331bdf0c590337d03f87c9318fc062651
|
9932459d1e68b53c8d28a596f7b7ded0a181c59a
|
refs/heads/master
| 2023-08-21T16:35:24.171067
| 2023-08-10T02:34:02
| 2023-08-10T02:34:02
| 284,333,086
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 384
|
py
|
# -*- coding: utf-8 -*-
import sqlite3
class Point(object):
    """A 2-D point that sqlite3 can bind directly as a query parameter.

    ``__conform__`` is sqlite3's adaptation hook: when a Point appears in
    a parameter tuple, the driver receives the "x;y" string instead.
    """

    def __init__(self, x, y):
        self.x = x
        self.y = y

    def __conform__(self, protocol):
        # Only the sqlite3 prepare protocol is supported; any other
        # protocol implicitly yields None (no adaptation).
        if protocol is sqlite3.PrepareProtocol:
            return "%f;%f" % (self.x, self.y)
con = sqlite3.connect(":memory:")
cur = con.cursor()
p = Point(5.2, -3.5)
cur.execute("select ?", (p,))
print(cur.fetchone()[0])
|
[
"mazzalaialan@gmail.com"
] |
mazzalaialan@gmail.com
|
b0e4af458f93a6384e0c01890b33a2472bae2c1d
|
1476961981ceeec03e92d02e2c5d7993151f7273
|
/aula 5/exemplo_condicional1.py
|
a17c7fcb0b2a66d28eda6294528a1383084c87a0
|
[] |
no_license
|
jbsribas/Programacao_de_Computadores
|
7a241afb34539e6200920b53910c7520eb54d72b
|
63f645046e504df321b3161dca2e7ce4a0052a3c
|
refs/heads/main
| 2023-07-25T22:53:49.761583
| 2021-08-26T12:46:20
| 2021-08-26T12:46:20
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 435
|
py
|
# Conditional example 1
# Exercise: read any number from the user and report whether it is
# positive, zero, or negative (user-facing strings kept in Portuguese).
numero = float(input("Digite um numero qualquer: "))
# Positive branch -- note round() affects only the printed message.
if numero>0:
    print("o numero ",round(numero), " é positivo")
elif numero ==0:
    print("o numero é zero e consideramos neutro")
else:
    print("o numero digitado é negativo")
# The elif above could equivalently be written as an else: block
# containing a nested "if numero == 0:" check.
|
[
"noreply@github.com"
] |
jbsribas.noreply@github.com
|
a5ffc305aa320033a659530ec8735f13a10c4cce
|
547d20b1aed50d867928a2f52abb97a8d0069f8f
|
/ctmentor/w2/[이영선-20190083]190083_실습02-0.py
|
d4e4cdb2133a5c994f35f0128646ecd2eff1cc36
|
[] |
no_license
|
ohgree/cs2019-2
|
0ab4fe27173a2f3f6d3e32f09b8a0d50dae12bb0
|
98a15bf1a05167d7315d73748ba151ff31eafd8d
|
refs/heads/master
| 2022-03-24T15:23:15.244214
| 2019-12-03T16:20:19
| 2019-12-03T16:20:19
| 218,444,857
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 995
|
py
|
print("*20190083 이영선 실습2*")
print()
print("*****문제 1*****")
myMoney=5000
candyPrice=120
howMany=myMoney//candyPrice
print("myMoney:",myMoney,"원")
print("candyPrice:",candyPrice,"원")
print("howMany:",howMany,"개")
print()
print("*****문제 2-1*****")
wic="돈 안정 친구 가족 인정 즐거움 새로움"
print("wic", wic)
print("@@@내게 소중한 것 7가지?")
print(wic)
print()
print("*****문제 2-2*****")
wic1="돈"
wic2="안정"
wic3="친구"
wic4="가족"
wic5="인정"
wic6="즐거움"
wic7="새로움"
print("wic1",wic1)
print("wic2",wic2)
print("wic3",wic3)
print("wic4",wic4)
print("wic5",wic5)
print("wic6",wic6)
print("wic7",wic7)
print("@@@내게 소중한 것 7가지?")
print(wic1,wic2,wic3,wic4,wic5,wic6,wic7)
print()
print("*****문제3*****")
사람=25
치킨=사람*3
생수=사람*1
피자=사람*4
print("치킨:",str(치킨)+"쪽")
print("생수:",str(생수)+"병")
print("피자:",str(피자)+"쪽")
|
[
"ohgree@u.sogang.ac.kr"
] |
ohgree@u.sogang.ac.kr
|
aa564ce694628ed2cd611c0e98f38b8a1eb6b69a
|
0117f2e85e2bbed8713560b0caf3bc57079c4cb8
|
/Python/Guess-the-Number/guess.py
|
2f391ffced79b3513d91d33b788d97f697227060
|
[] |
no_license
|
priyaastha/Hacking-Scripts
|
27b00ea38b498f21ef16a057b762b5f16438d740
|
98e8acb6f9b10695e8bfb77f6d201460ada01810
|
refs/heads/main
| 2023-06-07T21:03:19.016148
| 2021-06-30T04:01:36
| 2021-06-30T04:01:36
| 379,502,215
| 4
| 1
| null | 2021-06-23T06:29:30
| 2021-06-23T06:29:30
| null |
UTF-8
|
Python
| false
| false
| 1,280
|
py
|
# Library for getting a random number
import random
import math  # kept from the original file (currently unused here)

# Secret number and the allotted number of chances (3-6 keeps the game
# user friendly).
num = random.randint(0, 20)
num1 = random.randint(3, 6)

print("--Welcome to Guess The Number Game--")
print("\n you have " + str(num1) + " chances to guess the number")

count = 0
# Iterate using while loop; invalid input still consumes a chance,
# matching the original behaviour.
while count < num1:
    count += 1
    res = input("Please input a number between 0 and 20:")
    # BUG FIX: the original converted with int() *before* this try block,
    # so non-numeric input crashed instead of being caught here.
    try:
        val = int(res)
    except ValueError:
        print("This is not a valid integer. Please try again!")
        continue
    # Check whether the user input is high or low than the random number generated.
    if val < num:
        print("This is lower than actual number. Please try again!")
    elif val > num:
        print("This is higher than actual number. Please try again!")
    else:
        # Final result will be printed with this message as the user input
        # matches the random number generated.
        print("Hurray! you won, this is the correct number🥳")
        exit(0)

if count >= num1:
    # Wording fixed: "loose" -> "lose".
    print("the number is " + str(num) + " and you lose the game ☹️")
print("Thank you for playing Guess The Number. See you again!")
|
[
"dhruvvarshney2612@gmail.com"
] |
dhruvvarshney2612@gmail.com
|
7951cea3e5fdeb6339e5c56769a030ea74adfe8b
|
45011a61908bf9ce92e948bd186d4330b8af36fc
|
/h/migrations/versions/ddb5f0baa429_add_nipsa_column_to_user_table.py
|
8754ad4aba2101e0252c9790ba0327f7c3a9f688
|
[
"MIT",
"BSD-2-Clause",
"BSD-3-Clause",
"BSD-2-Clause-Views"
] |
permissive
|
project-star/h
|
4e6e0ab244ae9df4d473c3c3bc0d21edce47c30f
|
fd1decafdce981b681ef3bd59e001b1284498dae
|
refs/heads/master
| 2021-07-16T06:56:34.957971
| 2016-11-21T21:18:33
| 2016-11-21T21:18:33
| 73,963,922
| 1
| 1
|
NOASSERTION
| 2020-09-03T23:19:11
| 2016-11-16T21:44:01
|
Python
|
UTF-8
|
Python
| false
| false
| 421
|
py
|
"""
Add NIPSA column to user table
Revision ID: ddb5f0baa429
Revises: 6d9257ad610d
Create Date: 2016-09-16 16:58:03.585538
"""
from __future__ import unicode_literals
import sqlalchemy as sa
from alembic import op
revision = 'ddb5f0baa429'
down_revision = '6d9257ad610d'
def upgrade():
    # Add the nullable boolean 'nipsa' flag to the user table.
    op.add_column('user', sa.Column('nipsa', sa.Boolean, nullable=True))
def downgrade():
    # Revert this migration by dropping the 'nipsa' column.
    op.drop_column('user', 'nipsa')
|
[
"nick@whiteink.com"
] |
nick@whiteink.com
|
15d51d606e3fb1d1bef1050fd498897acddc03e2
|
95f950bfeba765d308eb55036fa523ba6b147e8e
|
/PS4-Word_Game/ps4b.py
|
c897f654f87d2daa0db7b789084b2908c0fcd801
|
[
"Giftware"
] |
permissive
|
rayaniruddho/Python-programs
|
18e1ddf032d3796fe4c91f07a00d04d567cfe0e0
|
69379e4d3698712222946a3b286be2c555c8be67
|
refs/heads/master
| 2021-01-01T20:03:42.497513
| 2017-07-30T19:59:38
| 2017-07-30T19:59:38
| 98,756,881
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,153
|
py
|
from ps4a import *
import time
#
#
# Computer chooses a word
#
#
def compChooseWord(hand, wordList, n):
"""
Given a hand and a wordList, find the word that gives
the maximum value score, and return it.
This word should be calculated by considering all the words
in the wordList.
If no words in the wordList can be made from the hand, return None.
hand: dictionary (string -> int)
wordList: list (string)
n: integer (HAND_SIZE; i.e., hand size required for additional points)
returns: string or None
"""
# Create a new variable to store the maximum score seen so far (initially 0)
bestScore = 0
# Create a new variable to store the best word seen so far (initially None)
bestWord = None
# For each word in the wordList
for word in wordList:
# If you can construct the word from your hand
if isValidWord(word, hand, wordList):
# find out how much making that word is worth
score = getWordScore(word, n)
# If the score for that word is higher than your best score
if (score > bestScore):
# update your best score, and best word accordingly
bestScore = score
bestWord = word
# return the best word you found.
return bestWord
#
# Computer plays a hand
#
def compPlayHand(hand, wordList, n):
"""
Allows the computer to play the given hand, following the same procedure
as playHand, except instead of the user choosing a word, the computer
chooses it.
1) The hand is displayed.
2) The computer chooses a word.
3) After every valid word: the word and the score for that word is
displayed, the remaining letters in the hand are displayed, and the
computer chooses another word.
4) The sum of the word scores is displayed when the hand finishes.
5) The hand finishes when the computer has exhausted its possible
choices (i.e. compChooseWord returns None).
hand: dictionary (string -> int)
wordList: list (string)
n: integer (HAND_SIZE; i.e., hand size required for additional points)
"""
# Keep track of the total score
totalScore = 0
# As long as there are still letters left in the hand:
while (calculateHandlen(hand) > 0) :
# Display the hand
print("Current Hand: ", end=' ')
displayHand(hand)
# computer's word
word = compChooseWord(hand, wordList, n)
# If the input is a single period:
if word == None:
# End the game (break out of the loop)
break
# Otherwise (the input is not a single period):
else :
# If the word is not valid:
if (not isValidWord(word, hand, wordList)) :
print('This is a terrible error! I need to check my own code!')
break
# Otherwise (the word is valid):
else :
# Tell the user how many points the word earned, and the updated total score
score = getWordScore(word, n)
totalScore += score
print('"' + word + '" earned ' + str(score) + ' points. Total: ' + str(totalScore) + ' points')
# Update hand and show the updated hand to the user
hand = updateHand(hand, word)
print()
# Game is over (user entered a '.' or ran out of letters), so tell user the total score
print('Total score: ' + str(totalScore) + ' points.')
#
# Problem #6: Playing a game
#
#
def playGame(wordList):
"""
Allow the user to play an arbitrary number of hands.
1) Asks the user to input 'n' or 'r' or 'e'.
* If the user inputs 'e', immediately exit the game.
* If the user inputs anything that's not 'n', 'r', or 'e', keep asking them again.
2) Asks the user to input a 'u' or a 'c'.
* If the user inputs anything that's not 'c' or 'u', keep asking them again.
3) Switch functionality based on the above choices:
* If the user inputted 'n', play a new (random) hand.
* Else, if the user inputted 'r', play the last hand again.
* If the user inputted 'u', let the user play the game
with the selected hand, using playHand.
* If the user inputted 'c', let the computer play the
game with the selected hand, using compPlayHand.
4) After the computer or user has played the hand, repeat from step 1
wordList: list (string)
"""
# TO DO... <-- Remove this comment when you code this function
#print("playGame not yet implemented.") # <-- Remove this when you code this function
def displayHand2(hand):
displayed = ''
for letter in hand.keys():
for j in range(hand[letter]):
displayed += letter + ' '
return displayed
hand = False
while True:
ans = input("Enter n to deal a new hand, r to replay the last hand, or e to end game: ")
if ans == "n":
hand = dealHand(HAND_SIZE)
tester1 = True
while tester1:
ans2 = input("Enter u to have yourself play, c to have the computer play: ")
if ans2 == "u":
playHand(hand, wordList, HAND_SIZE)
tester1 = False
elif ans2 == "c":
compPlayHand(hand, wordList, HAND_SIZE)
tester1 = False
else:
print ("Invalid command.")
elif ans == "r":
if hand:
ans2 = input("Enter u to have yourself play, c to have the computer play: ")
if ans2 == "u":
playHand(hand, wordList, HAND_SIZE)
elif ans2 == "c":
compPlayHand(hand, wordList, HAND_SIZE)
else:
print ("Invalid command.")
else:
print ("You have not played a hand yet. Please play a new hand first!")
elif ans == "e":
break
else:
print ("Invalid command.")
#
# Build data structures used for entire session and play game
#
if __name__ == '__main__':
wordList = loadWords()
playGame(wordList)
|
[
"rayaniruddho92@gmail.com"
] |
rayaniruddho92@gmail.com
|
a7b416f01fb7a0717937fcee95b9bc510385eac9
|
40d97ac72bee46d3a1c25f24952972ae5dd77c45
|
/dev/postman/testIt.py
|
fecfb8c2f3b9cf880eab5778168b94c1d2e8d748
|
[
"MIT"
] |
permissive
|
ReidBix/cs3240-f15-team19
|
7d3013ce5b614230a8613dafa648e5e521e28ac9
|
b0bd9bde4056711dbe4a8208a917682dcc5c113c
|
refs/heads/master
| 2021-01-15T09:19:10.057793
| 2015-12-08T04:52:19
| 2015-12-08T04:52:19
| 44,134,446
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,158
|
py
|
__author__ = 'Mike'
from Crypto.PublicKey import RSA
from Crypto import Random
def encode(str1, public_key):
    # RSA-encrypt the encoded bytes of str1.  The legacy PyCrypto API
    # returns a tuple whose first element is the ciphertext; the second
    # encrypt() argument is required but ignored for RSA.
    raw = str1.encode()
    ciphertext = public_key.encrypt(raw, 32)
    return ciphertext[0]
def decrypt_string(enc_str1, key):
    # Decrypt to bytes with the private key, then decode back to str.
    return key.decrypt(enc_str1).decode()
def gen_keys():
    """Generate a fresh 1024-bit RSA key pair (legacy PyCrypto API)."""
    rng = Random.new().read
    return RSA.generate(1024, rng)
def enc_file(fname, public_key):
    """Encrypt the text file `fname` into `fname + '.enc'`; return True.

    Cleanups vs. the original: the file handles' `.closed` attributes were
    evaluated as bare statements (no-ops, likely meant as close(); the
    `with` blocks already close both files), the output handle shadowed
    this function's name, and the source is now opened read-only instead
    of 'r+' since it is never written.
    """
    with open(fname, 'r') as orig_file:
        with open(fname + '.enc', 'wb') as out_file:
            out_file.write(encode(orig_file.read(), public_key))
    return True
def dec_file(fname, priv_key):
    """Decrypt `fname` (produced by enc_file) into 'dec_' + original name;
    return True.

    BUG FIX: the original sliced off only 3 characters, leaving the '.'
    of the '.enc' suffix in the output name ('dec_x.' instead of 'dec_x').
    Also removes the no-op `.closed` statements and the handle that
    shadowed sibling function names.
    """
    if fname.endswith('.enc'):
        out_name = 'dec_' + fname[:-len('.enc')]
    else:
        out_name = 'dec_' + fname
    with open(fname, 'rb') as enc_handle:
        plaintext = decrypt_string(enc_handle.read(), priv_key)
    with open(out_name, 'w') as new_file:
        new_file.write(plaintext)
    return True
# Smoke test: generate a key pair, round-trip a sample string and print
# both the ciphertext and the recovered plaintext bytes.
random_generator = Random.new().read
key = RSA.generate(1024, random_generator)
public_key = key.publickey()
encoded = encode("this is a test", public_key)
print(encoded)
decoded = key.decrypt(encoded)
print(decoded)
|
[
"root@d-172-25-99-47.bootp.virginia.edu"
] |
root@d-172-25-99-47.bootp.virginia.edu
|
3e69fd4e9e5abc37a755cfeaa1b81dcbbaba6418
|
6555eff49ec184c52ec41a60552648be3593fc18
|
/tools/db copy 2.py
|
7f649ee449d699cf780970d4768496d245a3dd8f
|
[] |
no_license
|
ashmit-khadka/pnsuk-react
|
174244ed5f26f0af870afaded6bc08d781d3f8ff
|
83e37e385c3fef71719101c153b0d87c504493f6
|
refs/heads/master
| 2022-11-30T13:24:24.037414
| 2020-08-10T18:53:08
| 2020-08-10T18:53:08
| 286,495,076
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 720
|
py
|
import sqlite3
from pymongo import MongoClient
import datetime
# One-off import script: insert three meeting-minutes records into the
# 'minutes' collection of the 'pns' database on MongoDB Atlas.
# SECURITY NOTE(review): the connection string embeds credentials in
# source control -- move them to an environment variable or secret store.
cluster = MongoClient("mongodb+srv://admin:54321@clusterpv-stcgg.gcp.mongodb.net/test?retryWrites=true&w=majority")
database = cluster['pns']
collection = database['minutes']
# Each record: free-text description, meeting date, and the path of the
# PDF document served by the web app.
collection.insert_many([
    {
        'description': '',
        'date': datetime.datetime(2019, 9, 20),
        'document': '/minute_docs/Meeting_20_October_2019.pdf',
    },
    {
        'description': '',
        'date': datetime.datetime(2020, 2, 24),
        'document': '/minute_docs/Meeting_24_Febuary_2020.pdf',
    },
    {
        'description': '',
        'date': datetime.datetime(2020, 7, 5),
        'document': '/minute_docs/Meeting_05_July_2020.pdf',
    }
])
|
[
"a0khadka@gmail.com"
] |
a0khadka@gmail.com
|
428f27ec7bc13b9e7987cc9f543d5146f95011c2
|
d60657b373073bbdc17c752a580d8bd728b87852
|
/src/nfv_test_api/v2/services/namespace.py
|
811d4dff38a2daf527d278cf57e9528eebf48d30
|
[] |
no_license
|
inmanta/nfv-test-api
|
a54ff7517ea1846f85a3a8d948aac9b9ccb92f7b
|
2b4cb1c7e1217f422dcd72784f0920b535518d84
|
refs/heads/master
| 2023-08-31T08:14:41.580019
| 2023-08-28T13:41:49
| 2023-08-28T13:41:49
| 247,904,341
| 2
| 1
| null | 2023-09-08T15:49:43
| 2020-03-17T07:20:12
|
Python
|
UTF-8
|
Python
| false
| false
| 5,020
|
py
|
"""
Copyright 2021 Inmanta
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import json
import logging
from typing import Any, Dict, List, Optional, Union
import pydantic
from pydantic import ValidationError
from werkzeug.exceptions import Conflict, NotFound # type: ignore
from nfv_test_api.host import Host
from nfv_test_api.v2.data.common import CommandStatus
from nfv_test_api.v2.data.namespace import Namespace, NamespaceCreate, NamespaceUpdate
from nfv_test_api.v2.services.base_service import BaseService, K
LOGGER = logging.getLogger(__name__)
class NamespaceService(BaseService[Namespace, NamespaceCreate, NamespaceUpdate]):
    """Manage Linux network namespaces on a host through the `ip netns` CLI."""

    def __init__(self, host: Host) -> None:
        super().__init__(host)

    def get_all_raw(self) -> List[Dict[str, Any]]:
        """Return the parsed JSON output of `ip netns list-id` as raw dicts."""
        stdout, stderr = self.host.exec(["ip", "-j", "-details", "netns", "list-id"])
        if stderr:
            raise RuntimeError(f"Failed to run netns list-id command on host: {stderr}")
        # `ip -j` can emit an empty string (not "[]") when nothing exists.
        raw_namespaces = json.loads(stdout or "[]")
        return pydantic.parse_obj_as(List[Dict[str, Any]], raw_namespaces)

    def get_all(self) -> List[Namespace]:
        """Return all namespaces that validate against the Namespace model.

        Entries failing validation are logged and skipped rather than
        aborting the whole listing.
        """
        namespaces = []
        for raw_namespace in self.get_all_raw():
            try:
                namespace = Namespace(**raw_namespace)
                namespace.attach_host(self.host)
                namespaces.append(namespace)
            except ValidationError as e:
                LOGGER.error(f"Failed to parse a namespace: {raw_namespace}\n" f"{str(e)}")
        return namespaces

    def get_one_raw(self, identifier: str) -> Optional[Dict[str, Any]]:
        """Return the raw dict for the namespace named `identifier`, or None."""
        raw_namespaces_list = [
            raw_namespace for raw_namespace in self.get_all_raw() if raw_namespace.get("name", "") == identifier
        ]
        if not raw_namespaces_list:
            return None
        if len(raw_namespaces_list) > 1:
            # Names should be unique; log the anomaly and use the first hit.
            LOGGER.error(f"Expected to get one namespace here but got multiple ones: {raw_namespaces_list}")
        return raw_namespaces_list[0]

    def get_one_or_default(self, identifier: str, default: Optional[K] = None) -> Union[Namespace, None, K]:
        """Return the Namespace named `identifier`, or `default` when absent."""
        raw_namespace = self.get_one_raw(identifier)
        if raw_namespace is None:
            return default
        namespace = Namespace(**raw_namespace)
        namespace.attach_host(self.host)
        return namespace

    def get_one(self, identifier: str) -> Namespace:
        """Return the Namespace named `identifier`.

        Raises:
            NotFound: if no namespace with that name exists.
        """
        namespace = self.get_one_or_default(identifier)
        if not namespace:
            raise NotFound(f"Could not find any namespace with name {identifier}")
        return namespace

    def create(self, o: NamespaceCreate) -> Namespace:
        """Create a namespace, optionally with an explicit namespace id.

        Raises:
            Conflict: if the name or the requested ns id is already taken.
            RuntimeError: if the underlying `ip` commands fail.
        """
        existing_namespace = self.get_one_or_default(o.name)
        if existing_namespace:
            raise Conflict("A namespace with this name already exists")
        if o.ns_id is not None:
            if o.ns_id in [ns.ns_id for ns in self.get_all()]:
                raise Conflict("A namespace with this id already exists")
        _, stderr = self.host.exec(["ip", "netns", "add", o.name])
        if stderr:
            raise RuntimeError(f"Failed to create namespace: {stderr}")
        # "auto" lets the kernel pick an id when none was requested.
        _, stderr = self.host.exec(["ip", "netns", "set", o.name, str(o.ns_id or "auto")])
        if stderr:
            raise RuntimeError(f"Failed to set namespace id: {stderr}")
        # Re-read to confirm the namespace is actually visible now.
        existing_namespace = self.get_one_or_default(o.name)
        if existing_namespace:
            return existing_namespace
        raise RuntimeError("The namespace should have been created but can not be found")

    def update(self, identifier: str, o: NamespaceUpdate) -> Namespace:
        """Namespaces cannot be modified through this API."""
        raise NotImplementedError("Updating namespaces is not supported")

    def delete(self, identifier: str) -> None:
        """Delete the namespace named `identifier` (no-op when absent)."""
        existing_namespace = self.get_one_or_default(identifier)
        if not existing_namespace:
            return
        _, stderr = self.host.exec(["ip", "netns", "del", identifier])
        if stderr:
            raise RuntimeError(f"Failed to delete namespace: {stderr}")
        # Re-read to confirm the deletion actually took effect.
        existing_namespace = self.get_one_or_default(identifier)
        if not existing_namespace:
            return
        raise RuntimeError("The namespace should have been deleted but can still be found")

    def status(self) -> CommandStatus:
        """Run the listing command and return its raw stdout/stderr."""
        command = ["ip", "-details", "netns", "list-id"]
        stdout, stderr = self.host.exec(command)
        return CommandStatus(
            command=command,
            stdout=stdout,
            stderr=stderr,
        )
|
[
"noreply@github.com"
] |
inmanta.noreply@github.com
|
8eea98aae837d92aa3359eb2d80d6cf25c8fe0fb
|
fee347a9d4cef1ca3a60b72d14c107aef113d2fb
|
/DOS/cfdiengine/docmaker/builders/ncrxml.py
|
04519a1b9158ac36ea13c623ca447f892d0099d0
|
[] |
no_license
|
Telematica/erp
|
7517e3fd3c53b359bcb41d3616928407cd343b08
|
3a0d9ed195e09adea01e944016a7e1cc82666e02
|
refs/heads/main
| 2023-02-19T21:51:14.011725
| 2021-01-23T17:41:33
| 2021-01-23T17:41:33
| 323,704,516
| 1
| 0
| null | 2020-12-22T18:32:00
| 2020-12-22T18:32:00
| null |
UTF-8
|
Python
| false
| false
| 20,169
|
py
|
import unidecode
import math
import os
import base64
import datetime
import tempfile
import pyxb
from decimal import Decimal
from misc.helperstr import HelperStr
from docmaker.error import DocBuilderStepError
from misc.tricks import truncate
from docmaker.gen import BuilderGen
from sat.v33 import Comprobante
from sat.requirement import writedom_cfdi, sign_cfdi
from sat.artifacts import CfdiType
impt_class='NcrXml'
class NcrXml(BuilderGen):
    # Builds the CFDI 3.3 XML document for a credit note ("nota de credito").
    __NDECIMALS = 2                          # monetary amounts truncated to 2 decimals
    __MAKEUP_PROPOS = CfdiType.NCR           # document type handled by this builder
    __XSLT_NCR = 'cadenaoriginal_3_3.xslt'   # stylesheet producing the "cadena original"
def __init__(self, logger):
    # Delegate logger wiring to the BuilderGen base class.
    super().__init__(logger)
def __narf(self, v):
    # Truncate v to 2 decimals (no rounding) and return it as a Decimal.
    # NOTE(review): relies on misc.tricks.truncate(..., True) returning a
    # value the Decimal constructor accepts -- confirm against that helper.
    return Decimal(truncate(float(v), self.__NDECIMALS, True))
def __calc_imp_tax(self, imp, tasa):
    # Tax amount: base amount times rate, truncated to 2 decimals.
    return self.__narf(Decimal(imp) * Decimal(tasa))
def __calc_base(self, imp, tasa):
    # Taxable base: amount plus its (truncated) tax, truncated again.
    return self.__narf(
        Decimal(imp) + Decimal( self.__calc_imp_tax(imp, tasa) )
    )
def __abs_importe(self, a):
    # Net line amount: IMPORTE minus DESCTO (discount), truncated.
    return self.__narf(
        Decimal(str(a['IMPORTE'])) - Decimal(str(a['DESCTO']))
    )
def __place_tasa(self, x):
    """Normalize a tax rate: values written as percentages (>= 1) are
    scaled down by 100; fractional or non-positive values pass through
    unchanged."""
    try:
        is_percentage = math.log10(x) >= 0
    except ValueError:
        # log10 rejects zero/negative input; leave the value untouched.
        return x
    if is_percentage:
        return x * 10 ** -2
    return x
def __q_conceptos(self, conn, nc_id):
    """
    Fetch the line items ("conceptos") of credit note `nc_id`.

    Every credit note yields one synthetic 'Servicios de facturacion'
    item whose amounts and tax rates come from fac_nota_credito.
    """
    # NOTE(review): nc_id is concatenated into the SQL text.  Fine while
    # it is an internal integer id; use a parameterized query if it can
    # ever carry external input.
    q = """SELECT '84111506'::character varying AS clave_prod,
        'ACT'::character varying AS clave_unidad,
        'ACT'::character varying AS unidad,
        '1'::double precision AS cantidad,
        '0'::character varying AS no_identificacion,
        'Servicios de facturacion'::character varying AS descripcion,
        NC.subtotal as valor_unitario,
        NC.subtotal as importe,
        '0'::double precision AS descto,
        '0'::double precision AS tasa_ieps,
        '0'::integer as ieps_id,
        NC.valor_impuesto AS tasa_impuesto,
        GI.id AS impto_id
        FROM fac_nota_credito AS NC
        JOIN gral_suc AS SUC on NC.gral_suc_id = SUC.id
        JOIN gral_imptos AS GI ON GI.id = SUC.gral_impto_id
        WHERE NC.id = """
    rowset = []
    for row in self.pg_query(conn, "{0}{1}".format(q, nc_id)):
        rowset.append({
            'PRODSERV': row['clave_prod'],
            'UNIDAD': row['clave_unidad'],
            'CANTIDAD': row['cantidad'],
            'SKU': row['no_identificacion'],
            'DESCRIPCION': row['descripcion'],
            'PRECIO_UNITARIO': self.__narf(row['valor_unitario']),
            'IMPORTE': self.__narf(row['importe']),
            'DESCTO': self.__narf(row['descto']),
            # From this point onwards tax related elements
            'TASA_IEPS': row['tasa_ieps'],
            'TASA_IMPUESTO': row['tasa_impuesto'],
            'IEPS_ID': row['ieps_id'],
            'IMPUESTO_ID': row['impto_id']
        })
    return rowset
def __q_no_certificado(self, conn, usr_id):
    """
    Return the CSD certificate number configured for the user's branch.
    """
    # NOTE(review): usr_id concatenated into SQL; consider parameterizing.
    q = """select CFDI_CONF.numero_certificado
        FROM gral_suc AS SUC
        LEFT JOIN gral_usr_suc AS USR_SUC ON USR_SUC.gral_suc_id = SUC.id
        LEFT JOIN fac_cfds_conf AS CFDI_CONF ON CFDI_CONF.gral_suc_id = SUC.id
        WHERE USR_SUC.gral_usr_id = """
    for row in self.pg_query(conn, "{0}{1}".format(q, usr_id)):
        # Just taking first row of query result
        return row['numero_certificado']
def __q_serie_folio(self, conn, usr_id):
    """
    Return the series and current folio reserved for credit notes (NCR).
    """
    q = """select fac_cfds_conf_folios.serie as serie,
        fac_cfds_conf_folios.folio_actual::character varying as folio
        FROM gral_suc AS SUC
        LEFT JOIN fac_cfds_conf ON fac_cfds_conf.gral_suc_id = SUC.id
        LEFT JOIN fac_cfds_conf_folios ON fac_cfds_conf_folios.fac_cfds_conf_id = fac_cfds_conf.id
        LEFT JOIN gral_usr_suc AS USR_SUC ON USR_SUC.gral_suc_id = SUC.id
        WHERE fac_cfds_conf_folios.proposito = 'NCR'
        AND USR_SUC.gral_usr_id = """
    for row in self.pg_query(conn, "{0}{1}".format(q, usr_id)):
        # Just taking first row of query result
        return { 'SERIE': row['serie'], 'FOLIO': row['folio'] }
def __q_forma_pago(self, conn, prefact_id):
    """
    Payment form for the credit note: fixed SAT code '99' ("por definir")
    and no bank account.  `conn` and `prefact_id` are currently unused.
    """
    return { 'CLAVE': '99', 'CUENTA': None }
def __q_moneda(self, conn, nc_id):
    """
    Return the currency (ISO 4217 code, symbol) and exchange rate of
    credit note `nc_id`.
    """
    q = """SELECT
        upper(gral_mon.iso_4217) AS moneda_iso_4217,
        upper(gral_mon.simbolo) AS moneda_simbolo,
        fac_nota_credito.tipo_cambio
        FROM fac_nota_credito
        JOIN gral_mon ON gral_mon.id = fac_nota_credito.moneda_id
        WHERE fac_nota_credito.id = """
    for row in self.pg_query(conn, "{0}{1}".format(q, nc_id)):
        # Just taking first row of query result
        return {
            'ISO_4217': row['moneda_iso_4217'],
            'SIMBOLO': row['moneda_simbolo'],
            'TIPO_DE_CAMBIO': row['tipo_cambio']
        }
def __q_receptor(self, conn, nc_id):
    """
    Return the receiver (customer) of credit note `nc_id`: RFC, company
    name (ASCII-folded) and a fixed 'P01' CFDI usage code.
    """
    q = """select cxc_clie.rfc as rfc,
        cxc_clie.razon_social as razon_social
        FROM fac_nota_credito
        LEFT JOIN cxc_clie ON cxc_clie.id = fac_nota_credito.cxc_clie_id
        WHERE fac_nota_credito.id = """
    for row in self.pg_query(conn, "{0}{1}".format(q, nc_id)):
        # Just taking first row of query result
        return {
            'RFC': row['rfc'],
            'RAZON_SOCIAL': unidecode.unidecode(row['razon_social']),
            'USO_CFDI': 'P01'
        }
def __q_emisor(self, conn, usr_id):
    """
    Return the issuer company of the user's branch: RFC, company name
    (ASCII-folded) and fiscal regime control number.
    """
    q = """select upper(EMP.rfc) as rfc, upper(EMP.titulo) as titulo,
        upper(REG.numero_control) as numero_control
        FROM gral_suc AS SUC
        LEFT JOIN gral_usr_suc AS USR_SUC ON USR_SUC.gral_suc_id = SUC.id
        LEFT JOIN gral_emp AS EMP ON EMP.id = SUC.empresa_id
        LEFT JOIN cfdi_regimenes AS REG ON REG.numero_control = EMP.regimen_fiscal
        WHERE USR_SUC.gral_usr_id = """
    for row in self.pg_query(conn, "{0}{1}".format(q, usr_id)):
        # Just taking first row of query result
        return {
            'RFC': row['rfc'],
            'RAZON_SOCIAL': unidecode.unidecode(row['titulo']),
            'REGIMEN_FISCAL': row['numero_control']
        }
def __q_lugar_expedicion(self, conn, usr_id):
    """
    Return the postal code of the user's branch (CFDI 'LugarExpedicion').
    """
    q = """select SUC.cp
        FROM gral_suc AS SUC
        LEFT JOIN gral_usr_suc as USR_SUC ON USR_SUC.gral_suc_id=SUC.id
        WHERE USR_SUC.gral_usr_id="""
    for row in self.pg_query(conn, "{0}{1}".format(q, usr_id)):
        # Just taking first row of query result
        return row['cp']
def __q_sign_params(self, conn, usr_id):
    """
    Return the parameters needed to sign the CFDI: currently only the
    private-key file name configured for the user's branch.
    """
    q = """SELECT fac_cfds_conf.archivo_llave as pk
        FROM gral_suc AS SUC
        LEFT JOIN gral_usr_suc AS USR_SUC ON USR_SUC.gral_suc_id = SUC.id
        LEFT JOIN fac_cfds_conf ON fac_cfds_conf.gral_suc_id = SUC.id
        WHERE USR_SUC.gral_usr_id="""
    for row in self.pg_query(conn, "{0}{1}".format(q, usr_id)):
        # Just taking first row of query result
        return {
            'PKNAME': row['pk']
        }
def __q_cert_file(self, conn, usr_id):
    """
    Return the certificate file name configured for the user's branch.
    """
    q = """select fac_cfds_conf.archivo_certificado as cert_file
        FROM gral_suc AS SUC
        LEFT JOIN gral_usr_suc ON gral_usr_suc.gral_suc_id = SUC.id
        LEFT JOIN fac_cfds_conf ON fac_cfds_conf.gral_suc_id = SUC.id
        WHERE gral_usr_suc.gral_usr_id="""
    for row in self.pg_query(conn, "{0}{1}".format(q, usr_id)):
        # Just taking first row of query result
        return row['cert_file']
def __q_ieps(self, conn, usr_id):
    """List every active (not logically deleted) IEPS tax of the branch.

    :returns: list of dicts with keys ID, CLAVE (SAT tax code), DESC
        and TASA; empty list when none are configured.
    """
    sql = """SELECT gral_ieps.id as id, cci.clave as clave,
    gral_ieps.titulo as desc, gral_ieps.tasa as tasa
    FROM gral_suc AS SUC
    LEFT JOIN gral_usr_suc AS USR_SUC ON USR_SUC.gral_suc_id = SUC.id
    LEFT JOIN gral_emp AS EMP ON EMP.id = SUC.empresa_id
    LEFT JOIN gral_ieps ON gral_ieps.gral_emp_id = EMP.id
    LEFT JOIN cfdi_c_impuesto AS cci ON cci.id = gral_ieps.cfdi_c_impuesto
    WHERE gral_ieps.borrado_logico=false AND
    USR_SUC.gral_usr_id = """
    return [
        {
            'ID': record['id'],
            'CLAVE': record['clave'],
            'DESC': record['desc'],
            'TASA': record['tasa']
        }
        for record in self.pg_query(conn, "{0}{1}".format(sql, usr_id))
    ]
def __q_ivas(self, conn, usr_id):
    """List every active IVA rate attached to the user's branch.

    :returns: list of dicts with keys ID, DESC and TASA; empty list
        when none are configured.
    """
    sql = """SELECT gral_imptos.id as id,
    gral_imptos.descripcion AS titulo,
    gral_imptos.iva_1 as tasa
    FROM gral_suc AS SUC
    JOIN gral_imptos ON gral_imptos.id = SUC.gral_impto_id
    LEFT JOIN gral_usr_suc AS USR_SUC ON USR_SUC.gral_suc_id = SUC.id
    WHERE gral_imptos.borrado_logico = false AND
    USR_SUC.gral_usr_id = """
    return [
        {
            'ID': record['id'],
            'DESC': record['titulo'],
            'TASA': record['tasa']
        }
        for record in self.pg_query(conn, "{0}{1}".format(sql, usr_id))
    ]
def __calc_traslados(self, l_items, l_ieps, l_iva):
    """Compute the aggregated transferred taxes (traslados).

    For each configured IVA and IEPS rate, sums the per-item tax amount
    over every invoice item that carries that rate and emits one summary
    entry per rate actually used.

    :param l_items: concept rows (output of __q_conceptos)
    :param l_ieps: IEPS rates (output of __q_ieps)
    :param l_iva: IVA rates (output of __q_ivas)
    :returns: list of dicts {impuesto, clave, importe, tasa}
    """
    traslados = []
    for tax in l_iva:
        # impto_id / tasa deliberately keep the values of the LAST
        # matching item; impto_id also doubles as a "rate was used" flag.
        impto_id = 0
        tasa = 0
        importe_sum = Decimal(0)
        for item in l_items:
            if tax['ID'] == item['IMPUESTO_ID']:
                impto_id = item['IMPUESTO_ID']
                tasa = item['TASA_IMPUESTO']
                # IVA amount per item: __calc_base presumably folds the
                # item's IEPS into the taxable base (tax-on-tax) -- helper
                # defined elsewhere; confirm.
                importe_sum += self.__narf(self.__calc_imp_tax(
                    self.__calc_base(self.__abs_importe(item), self.__place_tasa(item['TASA_IEPS'])),
                    self.__place_tasa(item['TASA_IMPUESTO'])
                ))
        if impto_id > 0:
            traslados.append({
                'impuesto': 'IVA',
                'clave': '002',
                # Truncated (not rounded) to __NDECIMALS places.
                'importe': truncate(float(importe_sum), self.__NDECIMALS),
                'tasa': tasa
            })
    for tax in l_ieps:
        # Same last-value-wins pattern as the IVA loop above.
        impto_id = 0
        tasa = 0
        importe_sum = Decimal(0)
        for item in l_items:
            if tax['ID'] == item['IEPS_ID']:
                impto_id = item['IEPS_ID']
                tasa = item['TASA_IEPS']
                # IEPS is computed straight on the item amount.
                importe_sum += self.__narf(self.__calc_imp_tax(
                    self.__abs_importe(item), self.__place_tasa(item['TASA_IEPS'])
                ))
        if impto_id > 0:
            traslados.append({
                'impuesto': 'IEPS',
                'clave': '003',
                'importe': truncate(float(importe_sum), self.__NDECIMALS),
                'tasa': tasa
            })
    return traslados
def __calc_totales(self, l_items):
    """Aggregate invoice totals from the concept rows.

    Accumulates subtotal, discounts, IEPS and IVA tax sums and derives
    the grand total.  All arithmetic goes through __narf (Decimal
    normalization helper defined elsewhere) to avoid float drift.

    :param l_items: concept rows (output of __q_conceptos)
    :returns: dict of totals, each value normalized via __narf
    """
    totales = {
        'MONTO_TOTAL': Decimal('0'),
        'IMPORTE_SUM': Decimal('0'),
        'IMPORTE_SUM_IMPUESTO': Decimal('0'),
        'IMPORTE_SUM_IEPS': Decimal('0'),
        'DESCTO_SUM': Decimal('0'),
    }
    for item in l_items:
        totales['IMPORTE_SUM'] += self.__narf(item['IMPORTE'])
        totales['DESCTO_SUM'] += self.__narf(item['DESCTO'])
        # IEPS computed on the item amount itself.
        totales['IMPORTE_SUM_IEPS'] += self.__narf(
            self.__calc_imp_tax(
                self.__abs_importe(item),
                self.__place_tasa(item['TASA_IEPS'])
            )
        )
        # IVA computed on the IEPS-adjusted base (mirrors __calc_traslados).
        totales['IMPORTE_SUM_IMPUESTO'] += self.__narf(
            self.__calc_imp_tax(
                self.__calc_base(self.__abs_importe(item), self.__place_tasa(item['TASA_IEPS'])),
                self.__place_tasa(item['TASA_IMPUESTO'])
            )
        )
    # total = subtotal - discounts + IEPS + IVA
    totales['MONTO_TOTAL'] = self.__narf(totales['IMPORTE_SUM']) - self.__narf(totales['DESCTO_SUM']) + self.__narf(totales['IMPORTE_SUM_IEPS']) + self.__narf(totales['IMPORTE_SUM_IMPUESTO'])
    return {k: self.__narf(v) for k, v in totales.items()}
def data_acq(self, conn, d_rdirs, **kwargs):
    """Gather everything needed to render the credit-note CFDI.

    :param conn: open database connection handed to the query helpers
    :param d_rdirs: resource directories ('ssl', 'cfdi_xslt', ...)
    :param kwargs: must supply usr_id (requesting user) and nc_id
        (credit-note id)
    :raises DocBuilderStepError: when usr_id or nc_id is missing
    :returns: dict later consumed by format_wrt
    """
    usr_id = kwargs.get('usr_id', None)
    if usr_id is None:
        raise DocBuilderStepError("user id not fed")
    emisor = self.__q_emisor(conn, usr_id)
    sign_params = self.__q_sign_params(conn, usr_id)
    # Key/cert material lives under a per-RFC folder inside the ssl dir.
    sslrfc_dir = os.path.join(d_rdirs['ssl'], emisor['RFC'])
    cert_path = os.path.join(
        sslrfc_dir, self.__q_cert_file(conn, usr_id))
    with open(cert_path, 'rb') as cert_fd:
        certb64 = base64.b64encode(cert_fd.read()).decode('ascii')
    nc_id = kwargs.get('nc_id', None)
    if nc_id is None:
        raise DocBuilderStepError("nc id not fed")
    conceptos = self.__q_conceptos(conn, nc_id)
    traslados = self.__calc_traslados(
        conceptos, self.__q_ieps(conn, usr_id), self.__q_ivas(conn, usr_id))
    return {
        'TIME_STAMP': '{0:%Y-%m-%dT%H:%M:%S}'.format(datetime.datetime.now()),
        'CONTROL': self.__q_serie_folio(conn, usr_id),
        'CERT_B64': certb64,
        'KEY_PRIVATE': os.path.join(sslrfc_dir, sign_params['PKNAME']),
        'XSLT_SCRIPT': os.path.join(d_rdirs['cfdi_xslt'], self.__XSLT_NCR),
        'EMISOR': emisor,
        'RECEPTOR': self.__q_receptor(conn, nc_id),
        'MONEDA': self.__q_moneda(conn, nc_id),
        'FORMA_PAGO': self.__q_forma_pago(conn, nc_id),
        'NUMERO_CERTIFICADO': self.__q_no_certificado(conn, usr_id),
        'LUGAR_EXPEDICION': self.__q_lugar_expedicion(conn, usr_id),
        'CONCEPTOS': conceptos,
        'TRASLADOS': traslados,
        'TOTALES': self.__calc_totales(conceptos)
    }
def format_wrt(self, output_file, dat):
    """Build, patch and sign the credit-note CFDI XML.

    Serializes a CFDI 3.3 ``Comprobante`` from the dict produced by
    data_acq, applies textual workarounds to the raw XML, signs it and
    writes the final document to *output_file*.

    :param output_file: path of the final signed XML
    :param dat: dict returned by data_acq
    """
    self.logger.debug('dumping contents of dat: {}'.format(repr(dat)))

    def save(xo):
        # Serialize the pyxb object to a temp file with a random name.
        tmp_dir = tempfile.gettempdir()
        f = os.path.join(tmp_dir, HelperStr.random_str())
        writedom_cfdi(xo.toDOM(), self.__MAKEUP_PROPOS, f)
        return f

    def wa(tf):
        """
        Textual workarounds applied to the serialized XML in place.
        """
        # SAT rejects TipoCambio="1.0"; normalize to "1".
        HelperStr.edit_pattern('TipoCambio="1.0"', 'TipoCambio="1"', tf)
        # Pad Importe values that have fewer than two decimals to 2.
        # NOTE(review): this pattern matches EVERY Importe attribute in
        # the document, not just totals -- confirm that is intended.
        HelperStr.edit_pattern(
            '(Importe=)"([0-9]*(\.[0-9]{0,1})?)"',
            lambda x: 'Importe="%.2f"' % (float(x.group(2)),), tf
        )

    def wrap_up(tf, of):
        # Sign the patched temp file and write the result to `of`,
        # then discard the temp file.
        with open(of, 'w', encoding="utf-8") as a:
            a.write(
                sign_cfdi(
                    dat['KEY_PRIVATE'],
                    dat['XSLT_SCRIPT'],
                    tf
                )
            )
        os.remove(tf)

    c = Comprobante()
    c.Version = '3.3'
    c.MetodoPago = "PPD"  # optional; hard-coded until the UI can supply it
    c.LugarExpedicion = dat['LUGAR_EXPEDICION']
    c.Serie = dat['CONTROL']['SERIE']  # optional
    c.Folio = dat['CONTROL']['FOLIO']  # optional
    c.Fecha = dat['TIME_STAMP']
    c.Sello = '__DIGITAL_SIGN_HERE__'  # placeholder, replaced by sign_cfdi
    c.FormaPago = dat["FORMA_PAGO"]['CLAVE']  # optional
    c.NoCertificado = dat['NUMERO_CERTIFICADO']
    c.Certificado = dat['CERT_B64']
    c.TipoDeComprobante = 'E'  # 'E' = egreso (credit note)
    c.SubTotal = dat['TOTALES']['IMPORTE_SUM']
    c.Total = dat['TOTALES']['MONTO_TOTAL']
    if dat['MONEDA']['ISO_4217'] == 'MXN':
        c.TipoCambio = 1
    else:
        # optional (required in certain cases per the CFDI spec)
        c.TipoCambio = truncate(dat['MONEDA']['TIPO_DE_CAMBIO'], self.__NDECIMALS)
    c.Moneda = dat['MONEDA']['ISO_4217']
    c.Emisor = pyxb.BIND()
    c.Emisor.Nombre = dat['EMISOR']['RAZON_SOCIAL']  # optional
    c.Emisor.Rfc = dat['EMISOR']['RFC']
    c.Emisor.RegimenFiscal = dat['EMISOR']['REGIMEN_FISCAL']
    c.Receptor = pyxb.BIND()
    c.Receptor.Nombre = dat['RECEPTOR']['RAZON_SOCIAL']  # optional
    c.Receptor.Rfc = dat['RECEPTOR']['RFC']
    c.Receptor.UsoCFDI = dat['RECEPTOR']['USO_CFDI']
    c.Conceptos = pyxb.BIND()
    for i in dat['CONCEPTOS']:
        c.Conceptos.append(pyxb.BIND(
            Cantidad=i['CANTIDAD'],
            ClaveUnidad=i['UNIDAD'],
            ClaveProdServ=i['PRODSERV'],
            Descripcion=i['DESCRIPCION'],
            ValorUnitario=i['PRECIO_UNITARIO'],
            NoIdentificacion=i['SKU'],  # optional
            Importe=i['IMPORTE'],
            # NOTE(review): items with only IEPS (TASA_IMPUESTO == 0)
            # get no Impuestos node here -- confirm that is intended.
            Impuestos=self.__tag_impuestos(i) if i['TASA_IMPUESTO'] > 0 else None
        ))

    def traslado(c, tc, imp):
        # One document-level traslado entry (no Base at this level).
        return pyxb.BIND(TipoFactor='Tasa',
            Impuesto=c, TasaOCuota=tc, Importe=imp)

    def zigma(v):
        # Decimal sum of the 'importe' fields of the traslado dicts.
        z = Decimal(0)
        for w in v:
            z += self.__narf(w['importe'])
        return z

    c.Impuestos = pyxb.BIND(
        TotalImpuestosRetenidos=0,
        TotalImpuestosTrasladados=zigma(dat['TRASLADOS']),
        Traslados=pyxb.BIND(
            *tuple([traslado(t['clave'], self.__place_tasa(t['tasa']), t['importe']) for t in dat['TRASLADOS']])
        )
    )
    # serialize -> patch text -> sign and persist
    tmp_file = save(c)
    wa(tmp_file)
    wrap_up(tmp_file, output_file)
def data_rel(self, dat):
    """Release hook for resources acquired by data_acq; nothing to free."""
    pass
def __tag_traslados(self, i):
    """Build the per-concept Traslados node for one item.

    Emits an IVA entry (clave 002) computed on the IEPS-adjusted base
    when TASA_IMPUESTO > 0, and an IEPS entry (clave 003) computed on
    the raw item amount when TASA_IEPS > 0.

    :param i: one concept row (output of __q_conceptos)
    :returns: pyxb.BIND wrapping zero or more traslado entries
    """
    def traslado(b, c, tc, imp):
        # One concept-level traslado entry (Base is required here).
        return pyxb.BIND(
            Base=b, TipoFactor='Tasa',
            Impuesto=c, TasaOCuota=tc, Importe=imp)
    taxes = []
    if i['TASA_IMPUESTO'] > 0:
        # IVA base presumably includes IEPS via __calc_base (helper
        # defined elsewhere) -- confirm.
        base = self.__calc_base(self.__abs_importe(i), self.__place_tasa(i['TASA_IEPS']))
        taxes.append(
            traslado(
                base, "002", self.__place_tasa(i['TASA_IMPUESTO']), self.__calc_imp_tax(
                    base, self.__place_tasa(i['TASA_IMPUESTO'])
                )
            )
        )
    if i['TASA_IEPS'] > 0:
        # NOTE(review): uses the raw i['IMPORTE'] while __calc_traslados
        # uses __abs_importe(item) for the same computation -- confirm
        # the discrepancy is intentional (e.g. sign handling).
        taxes.append(
            traslado(
                i['IMPORTE'], "003", self.__place_tasa(i['TASA_IEPS']), self.__calc_imp_tax(
                    i['IMPORTE'], self.__place_tasa(i['TASA_IEPS'])
                )
            )
        )
    return pyxb.BIND(*tuple(taxes))
def __tag_impuestos(self, i):
    """Build the per-concept Impuestos node for one item.

    Attaches a Traslados child when the item carries any IVA or IEPS
    rate; otherwise returns an empty BIND.
    """
    if i['TASA_IMPUESTO'] > 0 or i['TASA_IEPS'] > 0:
        return pyxb.BIND(Traslados=self.__tag_traslados(i))
    return pyxb.BIND()
|
[
"j4nusx@yahoo.com"
] |
j4nusx@yahoo.com
|
fd990697940eb618ad2404eb51ee4a9aa17ba5c3
|
f7d9f6fd48f4310094e023d05b3aa03e8ab86faa
|
/exampleOutput3Part.py
|
2d91ca3abc0e740948ec026a5e7044ad1abe97a5
|
[] |
no_license
|
chriscanal/BOMpricer
|
9be72e3bb055af2afcfe300d56d791f4fdcd4578
|
67049f3972124fe593d55258a8e570f29ede7780
|
refs/heads/master
| 2021-05-01T00:52:08.997911
| 2015-06-26T18:50:48
| 2015-06-26T18:50:48
| 32,906,049
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 119,089
|
py
|
{'CurrentDate': '2015-04-22T07:43:16.7375974-05:00',
'ErrorMessage': None,
'Messages': None,
'OriginalRequest': {'APIKey': '8e2b4c56-05aa-4ce9-82dc-be9a660bb1ea',
'CompanyID': 'iRobot',
'CountryCode': '',
'CurrencyCode': '',
'Distributors': [],
'ExactMatch': False,
'InStockOnly': False,
'Queries': [{'Manufacturers': [None],
'SearchToken': 'ERJ-3EKF4420V'},
{'Manufacturers': [None],
'SearchToken': 'CRCW060356K2FKEA'},
{'Manufacturers': [None],
'SearchToken': 'RC0402FR-0747K5L'}]},
'PartResults': [{'Distributors': [{'Count': 3,
'DistributorResults': [],
'Name': 'Score Electronics',
'TotalCount': 0},
{'Count': 3,
'DistributorResults': [],
'Name': 'WPG Americas',
'TotalCount': 0},
{'Count': 3,
'DistributorResults': [],
'Name': 'SMD, Inc.',
'TotalCount': 0},
{'Count': 3,
'DistributorResults': [],
'Name': 'Dependable Component Supply',
'TotalCount': 0},
{'Count': 3,
'DistributorResults': [{'Compliance': {},
'Description': u'Res Thick Film 0603 442 Ohm 1% 0.1W(1/10W) \xb1100ppm/\xb0C Molded SMD Punched Carrier T/R',
'DistributorPartNumber': 'ERJ-3EKF4420V',
'Links': [{'Type': 'Datasheet',
'Url': 'http://www.eciaauthorized.com/productredirect?id=4a44e070-b4e8-46e5-b4d2-45ef64e58a5d'},
{'Type': 'BUY',
'Url': 'http://www.eciaauthorized.com/productredirect?id=267cca48-7379-4b6b-9bcf-22ed3fa0f33a'}],
'Manufacturer': 'Panasonic',
'PartNumber': 'ERJ-3EKF4420V',
'Pricing': {'CurrencyCode': 'USD',
'MinimumQuantity': 1.0,
'Prices': [{'Amount': 0.0083,
'FormattedAmount': '$0.0083',
'Quantity': 1.0,
'Text': '0.0083'},
{'Amount': 0.0061,
'FormattedAmount': '$0.0061',
'Quantity': 25.0,
'Text': '0.0061'},
{'Amount': 0.0042,
'FormattedAmount': '$0.0042',
'Quantity': 100.0,
'Text': '0.0042'},
{'Amount': 0.004,
'FormattedAmount': '$0.004',
'Quantity': 1000.0,
'Text': '0.004'},
{'Amount': 0.0016,
'FormattedAmount': '$0.0016',
'Quantity': 5000.0,
'Text': '0.0016'},
{'Amount': 0.0015,
'FormattedAmount': '$0.0015',
'Quantity': 10000.0,
'Text': '0.0015'}],
'QuantityMultiple': None},
'Stock': {'Availability': 'In Stock',
'QuantityOnHand': 40000.0}}],
'Name': 'Master Electronics',
'TotalCount': 1},
{'Count': 3,
'DistributorResults': [],
'Name': 'Omni Pro Electronics',
'TotalCount': 0},
{'Count': 3,
'DistributorResults': [],
'Name': 'Peerless Electronic Supplies',
'TotalCount': 0},
{'Count': 3,
'DistributorResults': [],
'Name': 'PEI-Genesis',
'TotalCount': 0},
{'Count': 3,
'DistributorResults': [],
'Name': 'Powell Electronics',
'TotalCount': 0},
{'Count': 3,
'DistributorResults': [],
'Name': 'PUI',
'TotalCount': 0},
{'Count': 3,
'DistributorResults': [],
'Name': 'Rochester Electronics',
'TotalCount': 0},
{'Count': 3,
'DistributorResults': [],
'Name': 'Schuster Electronics',
'TotalCount': 0},
{'Count': 3,
'DistributorResults': [],
'Name': 'Straight Road Electronics',
'TotalCount': 0},
{'Count': 3,
'DistributorResults': [{'Compliance': {},
'Description': '56.2K ohm SMD (Surface Mount) Thick Film Resistor 0603 1% 1/10W 100ppm - 500 or more parts, free T/R',
'DistributorPartNumber': '',
'Links': [{'Type': 'Buy',
'Url': 'http://www.eciaauthorized.com/productredirect?id=933ac257-1809-42ed-88ad-5586fe4c2607'}],
'Manufacturer': 'Vishay Dale',
'PartNumber': 'CRCW060356K2FKEA',
'Pricing': {'CurrencyCode': 'USD',
'MinimumQuantity': 0.0,
'Prices': [{'Amount': 0.04,
'FormattedAmount': '$0.04',
'Quantity': 0.0,
'Text': '0.040000'},
{'Amount': 0.03,
'FormattedAmount': '$0.03',
'Quantity': 0.0,
'Text': '0.030000'},
{'Amount': 0.019,
'FormattedAmount': '$0.019',
'Quantity': 0.0,
'Text': '0.019000'},
{'Amount': 0.016,
'FormattedAmount': '$0.016',
'Quantity': 0.0,
'Text': '0.016000'},
{'Amount': 0.013,
'FormattedAmount': '$0.013',
'Quantity': 0.0,
'Text': '0.013000'},
{'Amount': 0.0075,
'FormattedAmount': '$0.0075',
'Quantity': 0.0,
'Text': '0.007500'},
{'Amount': 0.005,
'FormattedAmount': '$0.005',
'Quantity': 0.0,
'Text': '0.005000'}],
'QuantityMultiple': None},
'Stock': {'Availability': '',
'QuantityOnHand': 18.0}},
{'Compliance': {},
'Description': '47.5K ohm 100ppm SMD (Surface Mount) Thick Film Resistor 0402 1% 1/16W - 500 or more parts, free T/R',
'DistributorPartNumber': '',
'Links': [{'Type': 'Buy',
'Url': 'http://www.eciaauthorized.com/productredirect?id=0eb40936-a437-4ab1-9654-85d458385cb0'}],
'Manufacturer': 'Yageo',
'PartNumber': 'RC0402FR-0747K5L',
'Pricing': {'CurrencyCode': 'USD',
'MinimumQuantity': 0.0,
'Prices': [{'Amount': 0.04,
'FormattedAmount': '$0.04',
'Quantity': 0.0,
'Text': '0.040000'},
{'Amount': 0.03,
'FormattedAmount': '$0.03',
'Quantity': 0.0,
'Text': '0.030000'},
{'Amount': 0.018,
'FormattedAmount': '$0.018',
'Quantity': 0.0,
'Text': '0.018000'},
{'Amount': 0.013,
'FormattedAmount': '$0.013',
'Quantity': 0.0,
'Text': '0.013000'},
{'Amount': 0.01,
'FormattedAmount': '$0.01',
'Quantity': 0.0,
'Text': '0.010000'},
{'Amount': 0.006,
'FormattedAmount': '$0.006',
'Quantity': 0.0,
'Text': '0.006000'},
{'Amount': 0.0044,
'FormattedAmount': '$0.0044',
'Quantity': 0.0,
'Text': '0.004400'},
{'Amount': 0.0032,
'FormattedAmount': '$0.0032',
'Quantity': 0.0,
'Text': '0.003200'}],
'QuantityMultiple': None},
'Stock': {'Availability': '',
'QuantityOnHand': 1450.0}}],
'Name': 'Garrett Electronics',
'TotalCount': 2},
{'Count': 3,
'DistributorResults': [],
'Name': 'Gopher Electronics',
'TotalCount': 2},
{'Count': 3,
'DistributorResults': [],
'Name': 'Hammond Electronics',
'TotalCount': 2},
{'Count': 3,
'DistributorResults': [],
'Name': 'Hawk Electronics',
'TotalCount': 2},
{'Count': 3,
'DistributorResults': [],
'Name': 'Heilind Electronics',
'TotalCount': 0},
{'Count': 3,
'DistributorResults': [],
'Name': 'Interstate Connecting',
'TotalCount': 0},
{'Count': 3,
'DistributorResults': [],
'Name': 'Kreger Components',
'TotalCount': 0},
{'Count': 3,
'DistributorResults': [],
'Name': 'Marsh Electronics',
'TotalCount': 0},
{'Count': 3,
'DistributorResults': [],
'Name': 'East Coast Microwave Distributors, Inc.',
'TotalCount': 0},
{'Count': 3,
'DistributorResults': [],
'Name': 'DB LECTRO, Inc.',
'TotalCount': 0},
{'Count': 3,
'DistributorResults': [],
'Name': 'CyPower, LLC',
'TotalCount': 0},
{'Count': 3,
'DistributorResults': [],
'Name': 'New Yorker Electronics',
'TotalCount': 0},
{'Count': 3,
'DistributorResults': [],
'Name': 'Components Direct',
'TotalCount': 0},
{'Count': 3,
'DistributorResults': [],
'Name': 'Dove Electronic',
'TotalCount': 0},
{'Count': 3,
'DistributorResults': [],
'Name': 'Electro Enterprises',
'TotalCount': 0},
{'Count': 3,
'DistributorResults': [],
'Name': 'Electro Sonic',
'TotalCount': 0},
{'Count': 3,
'DistributorResults': [{'Compliance': {},
'Description': u'Res Thick Film 0603 442 Ohm 1% 0.1W(1/10W) \xb1100ppm/\xb0C Molded SMD Punched Carrier T/R',
'DistributorPartNumber': 'ERJ-3EKF4420V',
'Links': [{'Type': 'Datasheet',
'Url': 'http://www.eciaauthorized.com/productredirect?id=6dcb2de3-22f8-42a7-ab0c-ce082f435f17'},
{'Type': 'BUY',
'Url': 'http://www.eciaauthorized.com/productredirect?id=e04d5c7f-8827-4507-a639-082e5cd5568d'}],
'Manufacturer': 'Panasonic',
'PartNumber': 'ERJ-3EKF4420V',
'Pricing': {'CurrencyCode': 'USD',
'MinimumQuantity': 1.0,
'Prices': [{'Amount': 0.0083,
'FormattedAmount': '$0.0083',
'Quantity': 1.0,
'Text': '0.0083'},
{'Amount': 0.0061,
'FormattedAmount': '$0.0061',
'Quantity': 25.0,
'Text': '0.0061'},
{'Amount': 0.0042,
'FormattedAmount': '$0.0042',
'Quantity': 100.0,
'Text': '0.0042'},
{'Amount': 0.004,
'FormattedAmount': '$0.004',
'Quantity': 1000.0,
'Text': '0.004'},
{'Amount': 0.0016,
'FormattedAmount': '$0.0016',
'Quantity': 5000.0,
'Text': '0.0016'},
{'Amount': 0.0015,
'FormattedAmount': '$0.0015',
'Quantity': 10000.0,
'Text': '0.0015'}],
'QuantityMultiple': None},
'Stock': {'Availability': 'In Stock',
'QuantityOnHand': 60000.0}}],
'Name': 'Onlinecomponents.com',
'TotalCount': 1},
{'Count': 3,
'DistributorResults': [{'Compliance': {},
'Description': u'Res Thick Film 0603 442 Ohm 1%35 0.1W(1/10W) \xc2\xb1100ppm/\xc2\xb0C Molded SMD Automotive Punched T/R',
'DistributorPartNumber': 'ERJ-3EKF4420V',
'Links': [{'Type': 'View',
'Url': 'http://www.eciaauthorized.com/productredirect?id=dbbf6eda-0b70-42be-ab99-6d68a105d21b'}],
'Manufacturer': 'Panasonic',
'PartNumber': 'ERJ-3EKF4420V',
'Pricing': {'CurrencyCode': 'USD',
'MinimumQuantity': 1.0,
'Prices': [{'Amount': 0.0025,
'FormattedAmount': '$0.0025',
'Quantity': 1.0,
'Text': '0.0025'},
{'Amount': 0.0025,
'FormattedAmount': '$0.0025',
'Quantity': 1.0,
'Text': '0.0025'},
{'Amount': 0.0024,
'FormattedAmount': '$0.0024',
'Quantity': 70000.0,
'Text': '0.0024'},
{'Amount': 0.0024,
'FormattedAmount': '$0.0024',
'Quantity': 70000.0,
'Text': '0.0024'},
{'Amount': 0.0022,
'FormattedAmount': '$0.0022',
'Quantity': 180000.0,
'Text': '0.0022'},
{'Amount': 0.0022,
'FormattedAmount': '$0.0022',
'Quantity': 180000.0,
'Text': '0.0022'},
{'Amount': 0.0021,
'FormattedAmount': '$0.0021',
'Quantity': 355000.0,
'Text': '0.0021'},
{'Amount': 0.0021,
'FormattedAmount': '$0.0021',
'Quantity': 355000.0,
'Text': '0.0021'},
{'Amount': 0.002,
'FormattedAmount': '$0.002',
'Quantity': 715000.0,
'Text': '0.002'},
{'Amount': 0.002,
'FormattedAmount': '$0.002',
'Quantity': 715000.0,
'Text': '0.002'},
{'Amount': 0.002,
'FormattedAmount': '$0.002',
'Quantity': 1430000.0,
'Text': '0.002'},
{'Amount': 0.002,
'FormattedAmount': '$0.002',
'Quantity': 1430000.0,
'Text': '0.002'}],
'QuantityMultiple': None},
'Stock': {'Availability': '',
'QuantityOnHand': 0.0}}],
'Name': 'Sager Electronics',
'TotalCount': 1},
{'Count': 3,
'DistributorResults': [],
'Name': 'Richardson RFPD',
'TotalCount': 0},
{'Count': 9,
'DistributorResults': [{'Compliance': {},
'Description': 'RES SMD 442 OHM 1% 1/10W 0603',
'DistributorPartNumber': 'P442HTR-ND',
'Links': [{'Type': 'BUY',
'Url': 'http://www.eciaauthorized.com/productredirect?id=94596b82-cc25-47bf-bcc4-d595efc4b6c7'},
{'Type': 'Datasheet',
'Url': 'http://www.eciaauthorized.com/productredirect?id=cfcd4ece-4f93-4f87-8daa-75b623fe8536'}],
'Manufacturer': 'Panasonic',
'PartNumber': 'ERJ-3EKF4420V',
'Pricing': {'CurrencyCode': 'USD',
'MinimumQuantity': 5000.0,
'Prices': [{'Amount': 0.0025,
'FormattedAmount': '$0.0025',
'Quantity': 5000.0,
'Text': '0.0025'},
{'Amount': 0.0022,
'FormattedAmount': '$0.0022',
'Quantity': 10000.0,
'Text': '0.0022'},
{'Amount': 0.0019,
'FormattedAmount': '$0.0019',
'Quantity': 25000.0,
'Text': '0.0019'},
{'Amount': 0.0017,
'FormattedAmount': '$0.0017',
'Quantity': 50000.0,
'Text': '0.0017'},
{'Amount': 0.0017,
'FormattedAmount': '$0.0017',
'Quantity': 125000.0,
'Text': '0.0017'}],
'QuantityMultiple': None},
'Stock': {'Availability': 'In Stock',
'QuantityOnHand': 45000.0}},
{'Compliance': {},
'Description': 'RES SMD 442 OHM 1% 1/10W 0603',
'DistributorPartNumber': 'P442HCT-ND',
'Links': [{'Type': 'BUY',
'Url': 'http://www.eciaauthorized.com/productredirect?id=51b523a4-abbc-49af-b5ae-73c054789dcc'},
{'Type': 'Datasheet',
'Url': 'http://www.eciaauthorized.com/productredirect?id=1b5b2820-42ad-4b8f-8183-0ab54d08868b'}],
'Manufacturer': 'Panasonic',
'PartNumber': 'ERJ-3EKF4420V',
'Pricing': {'CurrencyCode': 'USD',
'MinimumQuantity': 1.0,
'Prices': [{'Amount': 0.1,
'FormattedAmount': '$0.10',
'Quantity': 1.0,
'Text': '0.1000'},
{'Amount': 0.015,
'FormattedAmount': '$0.015',
'Quantity': 50.0,
'Text': '0.0150'},
{'Amount': 0.0114,
'FormattedAmount': '$0.0114',
'Quantity': 100.0,
'Text': '0.0114'},
{'Amount': 0.0084,
'FormattedAmount': '$0.0084',
'Quantity': 250.0,
'Text': '0.0084'},
{'Amount': 0.0062,
'FormattedAmount': '$0.0062',
'Quantity': 500.0,
'Text': '0.0062'},
{'Amount': 0.0042,
'FormattedAmount': '$0.0042',
'Quantity': 1000.0,
'Text': '0.0042'},
{'Amount': 0.0035,
'FormattedAmount': '$0.0035',
'Quantity': 2500.0,
'Text': '0.0035'}],
'QuantityMultiple': None},
'Stock': {'Availability': 'In Stock',
'QuantityOnHand': 46049.0}},
{'Compliance': {},
'Description': 'RES SMD 442 OHM 1% 1/10W 0603',
'DistributorPartNumber': 'P442HDKR-ND',
'Links': [{'Type': 'BUY',
'Url': 'http://www.eciaauthorized.com/productredirect?id=196f05e0-a8da-48b7-9763-2ed257cf3509'},
{'Type': 'Datasheet',
'Url': 'http://www.eciaauthorized.com/productredirect?id=8e3526ce-8e6f-4ed1-a044-a769e0fc9f19'}],
'Manufacturer': 'Panasonic',
'PartNumber': 'ERJ-3EKF4420V',
'Pricing': {'CurrencyCode': 'USD',
'MinimumQuantity': 1.0,
'Prices': [{'Amount': 0.1,
'FormattedAmount': '$0.10',
'Quantity': 1.0,
'Text': '0.1000'},
{'Amount': 0.015,
'FormattedAmount': '$0.015',
'Quantity': 50.0,
'Text': '0.0150'},
{'Amount': 0.0114,
'FormattedAmount': '$0.0114',
'Quantity': 100.0,
'Text': '0.0114'},
{'Amount': 0.0084,
'FormattedAmount': '$0.0084',
'Quantity': 250.0,
'Text': '0.0084'},
{'Amount': 0.0062,
'FormattedAmount': '$0.0062',
'Quantity': 500.0,
'Text': '0.0062'},
{'Amount': 0.0042,
'FormattedAmount': '$0.0042',
'Quantity': 1000.0,
'Text': '0.0042'},
{'Amount': 0.0035,
'FormattedAmount': '$0.0035',
'Quantity': 2500.0,
'Text': '0.0035'}],
'QuantityMultiple': None},
'Stock': {'Availability': 'In Stock',
'QuantityOnHand': 46049.0}},
{'Compliance': {},
'Description': 'RES SMD 56.2K OHM 1% 1/10W 0603',
'DistributorPartNumber': '541-56.2KHTR-ND',
'Links': [{'Type': 'BUY',
'Url': 'http://www.eciaauthorized.com/productredirect?id=0e7ccf84-fb23-4bca-8c73-4be1861d03b8'},
{'Type': 'Datasheet',
'Url': 'http://www.eciaauthorized.com/productredirect?id=72423090-e0dc-46e3-bf5d-9f75da12acee'}],
'Manufacturer': 'Vishay Dale',
'PartNumber': 'CRCW060356K2FKEA',
'Pricing': {'CurrencyCode': 'USD',
'MinimumQuantity': 5000.0,
'Prices': [{'Amount': 0.0057,
'FormattedAmount': '$0.0057',
'Quantity': 5000.0,
'Text': '0.0057'},
{'Amount': 0.0051,
'FormattedAmount': '$0.0051',
'Quantity': 10000.0,
'Text': '0.0051'},
{'Amount': 0.0046,
'FormattedAmount': '$0.0046',
'Quantity': 25000.0,
'Text': '0.0046'},
{'Amount': 0.0038,
'FormattedAmount': '$0.0038',
'Quantity': 50000.0,
'Text': '0.0038'},
{'Amount': 0.0032,
'FormattedAmount': '$0.0032',
'Quantity': 100000.0,
'Text': '0.0032'},
{'Amount': 0.0029,
'FormattedAmount': '$0.0029',
'Quantity': 250000.0,
'Text': '0.0029'},
{'Amount': 0.0026,
'FormattedAmount': '$0.0026',
'Quantity': 500000.0,
'Text': '0.0026'}],
'QuantityMultiple': None},
'Stock': {'Availability': 'In Stock',
'QuantityOnHand': 10000.0}},
{'Compliance': {},
'Description': 'RES SMD 56.2K OHM 1% 1/10W 0603',
'DistributorPartNumber': '541-56.2KHCT-ND',
'Links': [{'Type': 'BUY',
'Url': 'http://www.eciaauthorized.com/productredirect?id=fe7c5f20-99d0-4580-8e3e-cd016f977252'},
{'Type': 'Datasheet',
'Url': 'http://www.eciaauthorized.com/productredirect?id=87d16767-55be-488a-86df-82d31ac29414'}],
'Manufacturer': 'Vishay Dale',
'PartNumber': 'CRCW060356K2FKEA',
'Pricing': {'CurrencyCode': 'USD',
'MinimumQuantity': 10.0,
'Prices': [{'Amount': 0.081,
'FormattedAmount': '$0.081',
'Quantity': 10.0,
'Text': '0.0810'},
{'Amount': 0.0436,
'FormattedAmount': '$0.0436',
'Quantity': 50.0,
'Text': '0.0436'},
{'Amount': 0.0251,
'FormattedAmount': '$0.0251',
'Quantity': 200.0,
'Text': '0.0251'},
{'Amount': 0.0171,
'FormattedAmount': '$0.0171',
'Quantity': 1000.0,
'Text': '0.0171'}],
'QuantityMultiple': None},
'Stock': {'Availability': 'In Stock',
'QuantityOnHand': 14571.0}},
{'Compliance': {},
'Description': 'RES SMD 56.2K OHM 1% 1/10W 0603',
'DistributorPartNumber': '541-56.2KHDKR-ND',
'Links': [{'Type': 'BUY',
'Url': 'http://www.eciaauthorized.com/productredirect?id=f540c14e-9e75-4892-98c6-c2b99ae46b73'},
{'Type': 'Datasheet',
'Url': 'http://www.eciaauthorized.com/productredirect?id=fdd4fb2d-2454-4020-97a5-6ee000686861'}],
'Manufacturer': 'Vishay Dale',
'PartNumber': 'CRCW060356K2FKEA',
'Pricing': {'CurrencyCode': 'USD',
'MinimumQuantity': 10.0,
'Prices': [{'Amount': 0.081,
'FormattedAmount': '$0.081',
'Quantity': 10.0,
'Text': '0.0810'},
{'Amount': 0.0436,
'FormattedAmount': '$0.0436',
'Quantity': 50.0,
'Text': '0.0436'},
{'Amount': 0.0251,
'FormattedAmount': '$0.0251',
'Quantity': 200.0,
'Text': '0.0251'},
{'Amount': 0.0171,
'FormattedAmount': '$0.0171',
'Quantity': 1000.0,
'Text': '0.0171'}],
'QuantityMultiple': None},
'Stock': {'Availability': 'In Stock',
'QuantityOnHand': 14571.0}},
{'Compliance': {},
'Description': 'RES SMD 47.5K OHM 1% 1/16W 0402',
'DistributorPartNumber': '311-47.5KLRTR-ND',
'Links': [{'Type': 'BUY',
'Url': 'http://www.eciaauthorized.com/productredirect?id=6e312769-e1ed-4fcd-9d78-33904f31cc40'},
{'Type': 'Datasheet',
'Url': 'http://www.eciaauthorized.com/productredirect?id=626ae150-5663-40aa-beb1-287db2ece93e'}],
'Manufacturer': 'Yageo',
'PartNumber': 'RC0402FR-0747K5L',
'Pricing': {'CurrencyCode': 'USD',
'MinimumQuantity': 10000.0,
'Prices': [{'Amount': 0.0015,
'FormattedAmount': '$0.0015',
'Quantity': 10000.0,
'Text': '0.0015'},
{'Amount': 0.0013,
'FormattedAmount': '$0.0013',
'Quantity': 30000.0,
'Text': '0.0013'},
{'Amount': 0.0012,
'FormattedAmount': '$0.0012',
'Quantity': 50000.0,
'Text': '0.0012'},
{'Amount': 0.0012,
'FormattedAmount': '$0.0012',
'Quantity': 100000.0,
'Text': '0.0012'}],
'QuantityMultiple': None},
'Stock': {'Availability': 'In Stock',
'QuantityOnHand': 270000.0}},
{'Compliance': {},
'Description': 'RES SMD 47.5K OHM 1% 1/16W 0402',
'DistributorPartNumber': '311-47.5KLRCT-ND',
'Links': [{'Type': 'BUY',
'Url': 'http://www.eciaauthorized.com/productredirect?id=9bca0f57-5396-4733-934b-cfc80947a414'},
{'Type': 'Datasheet',
'Url': 'http://www.eciaauthorized.com/productredirect?id=caf88b24-37e1-410f-97f7-b521ff19746d'}],
'Manufacturer': 'Yageo',
'PartNumber': 'RC0402FR-0747K5L',
'Pricing': {'CurrencyCode': 'USD',
'MinimumQuantity': 1.0,
'Prices': [{'Amount': 0.1,
'FormattedAmount': '$0.10',
'Quantity': 1.0,
'Text': '0.1000'},
{'Amount': 0.015,
'FormattedAmount': '$0.015',
'Quantity': 10.0,
'Text': '0.0150'},
{'Amount': 0.0108,
'FormattedAmount': '$0.0108',
'Quantity': 25.0,
'Text': '0.0108'},
{'Amount': 0.006,
'FormattedAmount': '$0.006',
'Quantity': 100.0,
'Text': '0.0060'},
{'Amount': 0.0046,
'FormattedAmount': '$0.0046',
'Quantity': 250.0,
'Text': '0.0046'},
{'Amount': 0.0036,
'FormattedAmount': '$0.0036',
'Quantity': 500.0,
'Text': '0.0036'},
{'Amount': 0.0027,
'FormattedAmount': '$0.0027',
'Quantity': 1000.0,
'Text': '0.0027'},
{'Amount': 0.0023,
'FormattedAmount': '$0.0023',
'Quantity': 2500.0,
'Text': '0.0023'},
{'Amount': 0.0019,
'FormattedAmount': '$0.0019',
'Quantity': 5000.0,
'Text': '0.0019'}],
'QuantityMultiple': None},
'Stock': {'Availability': 'In Stock',
'QuantityOnHand': 277648.0}},
{'Compliance': {},
'Description': 'RES SMD 47.5K OHM 1% 1/16W 0402',
'DistributorPartNumber': '311-47.5KLRDKR-ND',
'Links': [{'Type': 'BUY',
'Url': 'http://www.eciaauthorized.com/productredirect?id=ec77d574-1349-40e2-9a2f-c40ee5c0a971'},
{'Type': 'Datasheet',
'Url': 'http://www.eciaauthorized.com/productredirect?id=8c328dca-7da6-4dc4-bd8b-77c5ad8de6e1'}],
'Manufacturer': 'Yageo',
'PartNumber': 'RC0402FR-0747K5L',
'Pricing': {'CurrencyCode': 'USD',
'MinimumQuantity': 1.0,
'Prices': [{'Amount': 0.1,
'FormattedAmount': '$0.10',
'Quantity': 1.0,
'Text': '0.1000'},
{'Amount': 0.015,
'FormattedAmount': '$0.015',
'Quantity': 10.0,
'Text': '0.0150'},
{'Amount': 0.0108,
'FormattedAmount': '$0.0108',
'Quantity': 25.0,
'Text': '0.0108'},
{'Amount': 0.006,
'FormattedAmount': '$0.006',
'Quantity': 100.0,
'Text': '0.0060'},
{'Amount': 0.0046,
'FormattedAmount': '$0.0046',
'Quantity': 250.0,
'Text': '0.0046'},
{'Amount': 0.0036,
'FormattedAmount': '$0.0036',
'Quantity': 500.0,
'Text': '0.0036'},
{'Amount': 0.0027,
'FormattedAmount': '$0.0027',
'Quantity': 1000.0,
'Text': '0.0027'},
{'Amount': 0.0023,
'FormattedAmount': '$0.0023',
'Quantity': 2500.0,
'Text': '0.0023'},
{'Amount': 0.0019,
'FormattedAmount': '$0.0019',
'Quantity': 5000.0,
'Text': '0.0019'}],
'QuantityMultiple': None},
'Stock': {'Availability': 'In Stock',
'QuantityOnHand': 277648.0}}],
'Name': 'Digi-Key',
'TotalCount': 9},
{'Count': 3,
'DistributorResults': [{'Compliance': {},
'Description': u'Res Thick Film 0603 442 Ohm 1% 0.1W(1/10W) \xb1100ppm/C Molded SMD Automotive Punched T/R',
'DistributorPartNumber': 'ERJ-3EKF4420V',
'Links': [{'Type': 'View',
'Url': 'http://www.eciaauthorized.com/productredirect?id=89583b59-aad8-4aed-8850-0a85b7cf9d68'}],
'Manufacturer': 'Panasonic',
'PartNumber': 'ERJ-3EKF4420V',
'Pricing': {'CurrencyCode': 'USD',
'MinimumQuantity': 0.0,
'Prices': [{'Amount': None,
'FormattedAmount': None,
'Quantity': None,
'Text': 'See Website'}],
'QuantityMultiple': None},
'Stock': {'Availability': 'Cut Strips',
'QuantityOnHand': 4750.0}},
{'Compliance': {},
'Description': u'Res Thick Film 0603 56.2K Ohm 1% 0.1W(1/10W) \xb1100ppm/C Molded SMD Automotive Paper T/R',
'DistributorPartNumber': 'CRCW060356K2FKEA',
'Links': [{'Type': 'Buy',
'Url': 'http://www.eciaauthorized.com/productredirect?id=59308afd-f559-43be-8aaa-0800d2eef91e'}],
'Manufacturer': 'Vishay',
'PartNumber': 'CRCW060356K2FKEA',
'Pricing': {'CurrencyCode': 'USD',
'MinimumQuantity': 5000.0,
'Prices': [{'Amount': 0.0022,
'FormattedAmount': '$0.0022',
'Quantity': 5000.0,
'Text': '0.0022'}],
'QuantityMultiple': None},
'Stock': {'Availability': 'In Stock',
'QuantityOnHand': 70000.0}}],
'Name': 'Arrow Electronics',
'TotalCount': 3},
{'Count': 5,
'DistributorResults': [{'Compliance': {},
'Description': 'Resistor; Thick Film; Res 56.2 Kilohms; Pwr-Rtg 0.1 W; Tol 1%; SMT; 0603; Tape & Reel',
'DistributorPartNumber': '70203478',
'Links': [{'Type': 'View',
'Url': 'http://www.eciaauthorized.com/productredirect?id=460bac5e-ab9a-4113-95a6-503db9300743'},
{'Type': 'Datasheet',
'Url': 'http://www.eciaauthorized.com/productredirect?id=3e1c3b18-42c5-4f07-914f-9069a49e5b01'}],
'Manufacturer': 'Vishay Dale',
'PartNumber': 'CRCW060356K2FKEA',
'Pricing': {'CurrencyCode': None,
'MinimumQuantity': 5000.0,
'Prices': [{'Amount': 0.008,
'FormattedAmount': '$0.008',
'Quantity': 5000.0,
'Text': '$0.0080'},
{'Amount': 0.007,
'FormattedAmount': '$0.007',
'Quantity': 10000.0,
'Text': '$0.0070'},
{'Amount': 0.006,
'FormattedAmount': '$0.006',
'Quantity': 30000.0,
'Text': '$0.0060'}],
'QuantityMultiple': None},
'Stock': {'Availability': '0 Available Immediately',
'QuantityOnHand': 0.0}},
{'Compliance': {},
'Description': 'Resistor; Thick Film; Res 442 Ohms; Pwr-Rtg 0.1 W; Tol 1%; SMT; 0603; Tape & Reel',
'DistributorPartNumber': '70268539',
'Links': [{'Type': 'View',
'Url': 'http://www.eciaauthorized.com/productredirect?id=c6da4043-e655-484a-b4c4-0d6634b9a59c'},
{'Type': 'Datasheet',
'Url': 'http://www.eciaauthorized.com/productredirect?id=eb7b85b3-9d45-4979-b2ca-4e6a18a85f40'}],
'Manufacturer': 'Panasonic',
'PartNumber': 'ERJ-3EKF4420V',
'Pricing': {'CurrencyCode': None,
'MinimumQuantity': 5000.0,
'Prices': [{'Amount': 0.0022,
'FormattedAmount': '$0.0022',
'Quantity': 5000.0,
'Text': '$0.0022'},
{'Amount': 0.0015,
'FormattedAmount': '$0.0015',
'Quantity': 10000.0,
'Text': '$0.0015'}],
'QuantityMultiple': None},
'Stock': {'Availability': '0 Available Immediately',
'QuantityOnHand': 0.0}},
{'Compliance': {},
'Description': 'Resistor; Thick Film; Res 56.2 Kilohms; Pwr-Rtg 0.1 W; Tol 1%; SMT; 0603; Cut Tape',
'DistributorPartNumber': '70203479',
'Links': [{'Type': 'View',
'Url': 'http://www.eciaauthorized.com/productredirect?id=c051f6f7-a0e3-4e6b-9794-ac3e7fab5a94'},
{'Type': 'Datasheet',
'Url': 'http://www.eciaauthorized.com/productredirect?id=7328c577-5312-440d-8c77-d8168adc1329'}],
'Manufacturer': 'Vishay Dale',
'PartNumber': 'CRCW060356K2FKEA',
'Pricing': {'CurrencyCode': None,
'MinimumQuantity': 10.0,
'Prices': [{'Amount': 0.06,
'FormattedAmount': '$0.06',
'Quantity': 10.0,
'Text': '$0.0600'},
{'Amount': 0.03,
'FormattedAmount': '$0.03',
'Quantity': 100.0,
'Text': '$0.0300'},
{'Amount': 0.025,
'FormattedAmount': '$0.025',
'Quantity': 500.0,
'Text': '$0.0250'},
{'Amount': 0.017,
'FormattedAmount': '$0.017',
'Quantity': 1000.0,
'Text': '$0.0170'}],
'QuantityMultiple': None},
'Stock': {'Availability': '0 Available Immediately',
'QuantityOnHand': 0.0}},
{'Compliance': {},
'Description': 'Resistor; Thick Film; Res 442 Ohms; Pwr-Rtg 0.1 W; Tol 1%; SMT; 0603',
'DistributorPartNumber': '70269475',
'Links': [{'Type': 'View',
'Url': 'http://www.eciaauthorized.com/productredirect?id=718d92bb-1096-4fdc-a79d-487565793679'},
{'Type': 'Datasheet',
'Url': 'http://www.eciaauthorized.com/productredirect?id=6a100191-211c-4850-9a2b-e115d156edd5'}],
'Manufacturer': 'Panasonic',
'PartNumber': 'ERJ-3EKF4420V',
'Pricing': {'CurrencyCode': None,
'MinimumQuantity': 1.0,
'Prices': [{'Amount': 0.0761,
'FormattedAmount': '$0.0761',
'Quantity': 1.0,
'Text': '$0.0761'},
{'Amount': 0.0148,
'FormattedAmount': '$0.0148',
'Quantity': 25.0,
'Text': '$0.0148'},
{'Amount': 0.0114,
'FormattedAmount': '$0.0114',
'Quantity': 100.0,
'Text': '$0.0114'},
{'Amount': 0.0084,
'FormattedAmount': '$0.0084',
'Quantity': 250.0,
'Text': '$0.0084'},
{'Amount': 0.0062,
'FormattedAmount': '$0.0062',
'Quantity': 500.0,
'Text': '$0.0062'},
{'Amount': 0.0039,
'FormattedAmount': '$0.0039',
'Quantity': 1000.0,
'Text': '$0.0039'},
{'Amount': 0.0024,
'FormattedAmount': '$0.0024',
'Quantity': 5000.0,
'Text': '$0.0024'}],
'QuantityMultiple': None},
'Stock': {'Availability': '0 Available Immediately',
'QuantityOnHand': 0.0}}],
'Name': 'Allied Electronics',
'TotalCount': 4},
{'Count': 3,
'DistributorResults': [],
'Name': 'Fuses Unlimited',
'TotalCount': 0},
{'Count': 3,
'DistributorResults': [],
'Name': 'Electroshield, Inc.',
'TotalCount': 0},
{'Count': 3,
'DistributorResults': [],
'Name': 'Chris Electronics',
'TotalCount': 0},
{'Count': 3,
'DistributorResults': [],
'Name': 'ECCO',
'TotalCount': 0},
{'Count': 3,
'DistributorResults': [],
'Name': 'Avnet USI',
'TotalCount': 0},
{'Count': 3,
'DistributorResults': [{'Compliance': {},
'Description': 'Thick Film Resistors - SMD 1/10watt 56.2Kohms 1%',
'DistributorPartNumber': 'CRCW060356K2FKEA',
'Links': [{'Type': 'Buy',
'Url': 'http://www.eciaauthorized.com/productredirect?id=af6d0886-1405-403c-9ad6-9ae0a8e5a211'},
{'Type': 'Datasheet',
'Url': 'http://www.eciaauthorized.com/productredirect?id=20b34be6-4ac3-43e5-8bf4-f1a59c92bf2b'}],
'Manufacturer': 'Vishay',
'PartNumber': 'CRCW060356K2FKEA',
'Pricing': {'CurrencyCode': 'USD',
'MinimumQuantity': 5000.0,
'Prices': [{'Amount': 0.0022,
'FormattedAmount': '$0.0022',
'Quantity': 5000.0,
'Text': '0.0022'},
{'Amount': 0.0021,
'FormattedAmount': '$0.0021',
'Quantity': 15000.0,
'Text': '0.0021'},
{'Amount': 0.0019,
'FormattedAmount': '$0.0019',
'Quantity': 30000.0,
'Text': '0.0019'}],
'QuantityMultiple': None},
'Stock': {'Availability': 'In Stock',
'QuantityOnHand': 115000.0}},
{'Compliance': {},
'Description': 'Thick Film Resistors - SMD 0603 442ohms 1% Tol',
'DistributorPartNumber': 'ERJ-3EKF4420V',
'Links': [{'Type': 'View',
'Url': 'http://www.eciaauthorized.com/productredirect?id=9cb59cc6-d62e-442e-8336-ec78077fd1d5'},
{'Type': 'Datasheet',
'Url': 'http://www.eciaauthorized.com/productredirect?id=dfacb31b-1e27-4ec4-8561-eae098deab65'}],
'Manufacturer': 'Panasonic',
'PartNumber': 'ERJ-3EKF4420V',
'Pricing': {'CurrencyCode': 'USD',
'MinimumQuantity': 0.0,
'QuantityMultiple': None},
'Stock': {'Availability': '',
'QuantityOnHand': 0.0}}],
'Name': 'TTI',
'TotalCount': 2},
{'Count': 3,
'DistributorResults': [],
'Name': 'Brothers Electronics',
'TotalCount': 0},
{'Count': 3,
'DistributorResults': [],
'Name': 'Carlton-Bates Company',
'TotalCount': 0},
{'Count': 3,
'DistributorResults': [],
'Name': 'CDM Electronics',
'TotalCount': 0},
{'Count': 3,
'DistributorResults': [],
'Name': 'Components Center',
'TotalCount': 0},
{'Count': 3,
'DistributorResults': [{'Compliance': {},
'Description': 'SELECT: TAPE & REEL, CUT TAPE, MouseREEL | Thick Film Resistors - SMD 1/10watt 56.2Kohms 1%',
'DistributorPartNumber': '71-CRCW0603-56.2K-E3',
'Links': [{'Type': 'Buy',
'Url': 'http://www.eciaauthorized.com/productredirect?id=ddff6233-d7a3-4b30-884d-c19f5e5fd0cd'},
{'Type': 'Datasheet',
'Url': 'http://www.eciaauthorized.com/productredirect?id=859f9c3e-1ff4-4d25-a636-d34d3469506c'}],
'Manufacturer': 'Vishay',
'PartNumber': 'CRCW060356K2FKEA',
'Pricing': {'CurrencyCode': 'USD',
'MinimumQuantity': 1.0,
'Prices': [{'Amount': 0.08,
'FormattedAmount': '$0.08',
'Quantity': 1.0,
'Text': '$0.08'},
{'Amount': 0.013,
'FormattedAmount': '$0.013',
'Quantity': 10.0,
'Text': '$0.013'},
{'Amount': 0.01,
'FormattedAmount': '$0.01',
'Quantity': 100.0,
'Text': '$0.01'},
{'Amount': 0.008,
'FormattedAmount': '$0.008',
'Quantity': 1000.0,
'Text': '$0.008'},
{'Amount': 0.005,
'FormattedAmount': '$0.005',
'Quantity': 5000.0,
'Text': '$0.005'},
{'Amount': 0.004,
'FormattedAmount': '$0.004',
'Quantity': 25000.0,
'Text': '$0.004'},
{'Amount': 0.003,
'FormattedAmount': '$0.003',
'Quantity': 50000.0,
'Text': '$0.003'}],
'QuantityMultiple': None},
'Stock': {'Availability': 'In Stock',
'QuantityOnHand': 43890.0}},
{'Compliance': {},
'Description': 'SELECT: TAPE & REEL, CUT TAPE, MouseREEL | Thick Film Resistors - SMD 0603 442ohms 1% Tol',
'DistributorPartNumber': '667-ERJ-3EKF4420V',
'Links': [{'Type': 'Buy',
'Url': 'http://www.eciaauthorized.com/productredirect?id=94d9d970-0ece-4a8d-9a49-dea95a8b647c'},
{'Type': 'Datasheet',
'Url': 'http://www.eciaauthorized.com/productredirect?id=0db1e8b3-04ec-4c82-a919-01b50ece3c55'}],
'Manufacturer': 'Panasonic',
'PartNumber': 'ERJ-3EKF4420V',
'Pricing': {'CurrencyCode': 'USD',
'MinimumQuantity': 1.0,
'Prices': [{'Amount': 0.1,
'FormattedAmount': '$0.10',
'Quantity': 1.0,
'Text': '$0.10'},
{'Amount': 0.015,
'FormattedAmount': '$0.015',
'Quantity': 10.0,
'Text': '$0.015'},
{'Amount': 0.008,
'FormattedAmount': '$0.008',
'Quantity': 100.0,
'Text': '$0.008'},
{'Amount': 0.006,
'FormattedAmount': '$0.006',
'Quantity': 500.0,
'Text': '$0.006'},
{'Amount': 0.004,
'FormattedAmount': '$0.004',
'Quantity': 1000.0,
'Text': '$0.004'},
{'Amount': 0.002,
'FormattedAmount': '$0.002',
'Quantity': 5000.0,
'Text': '$0.002'}],
'QuantityMultiple': None},
'Stock': {'Availability': 'In Stock',
'QuantityOnHand': 71929.0}},
{'Compliance': {},
'Description': 'TAPE & REEL | Thick Film Resistors - SMD 47.5K OHM 1%',
'DistributorPartNumber': '603-RC0402FR-0747K5L',
'Links': [{'Type': 'View',
'Url': 'http://www.eciaauthorized.com/productredirect?id=e1731faa-962a-448e-a42d-3443807f2421'},
{'Type': 'Datasheet',
'Url': 'http://www.eciaauthorized.com/productredirect?id=bce59d9f-4456-4e46-89d7-6e834a1c7171'}],
'Manufacturer': 'Yageo',
'PartNumber': 'RC0402FR-0747K5L',
'Pricing': {'CurrencyCode': 'USD',
'MinimumQuantity': 10000.0,
'Prices': [{'Amount': 0.001,
'FormattedAmount': '$0.001',
'Quantity': 10000.0,
'Text': '$0.001'}],
'QuantityMultiple': None},
'Stock': {'Availability': 'On Order',
'QuantityOnHand': 120000.0}}],
'Name': 'Mouser Electronics',
'TotalCount': 3},
{'Count': 3,
'DistributorResults': [{'Compliance': {},
'Description': u'Res Thick Film 0603 442 Ohm 1% 0.1W(1/10W) \xb1100ppm/C Molded SMD Automotive Punched T/R',
'DistributorPartNumber': 'ERJ-3EKF4420V',
'Links': [{'Type': 'Buy',
'Url': 'http://www.eciaauthorized.com/productredirect?id=50356e95-bd8c-4b41-8cda-5be1abf35e38'}],
'Manufacturer': 'Panasonic',
'PartNumber': 'ERJ-3EKF4420V',
'Pricing': {'CurrencyCode': 'USD',
'MinimumQuantity': 1.0,
'Prices': [{'Amount': 0.0039,
'FormattedAmount': '$0.0039',
'Quantity': 1.0,
'Text': '0.0039'}],
'QuantityMultiple': None},
'Stock': {'Availability': 'In Stock',
'QuantityOnHand': 4750.0}},
{'Compliance': {},
'Description': u'Res Thick Film 0603 56.2K Ohm 1% 0.1W(1/10W) \xb1100ppm/C Molded SMD Automotive Paper T/R',
'DistributorPartNumber': 'CRCW060356K2FKEA',
'Links': [{'Type': 'Buy',
'Url': 'http://www.eciaauthorized.com/productredirect?id=e94fa733-8cf6-44b1-a9c7-c7849f15a848'}],
'Manufacturer': 'Vishay',
'PartNumber': 'CRCW060356K2FKEA',
'Pricing': {'CurrencyCode': 'USD',
'MinimumQuantity': 5000.0,
'Prices': [{'Amount': 0.0017,
'FormattedAmount': '$0.0017',
'Quantity': 5000.0,
'Text': '0.0017'}],
'QuantityMultiple': None},
'Stock': {'Availability': 'In Stock',
'QuantityOnHand': 70000.0}}],
'Name': 'Verical',
'TotalCount': 3},
{'Count': 3,
'DistributorResults': [{'Compliance': {},
'Description': u'CRCW Series 0603 0.1 W 56.2 kOhm \ufffd1 % \ufffd100 ppm/K SMT Thick Film Chip Resistor',
'DistributorPartNumber': '',
'Links': [{'Type': 'View',
'Url': 'http://www.eciaauthorized.com/productredirect?id=2a652c6c-fd93-4034-b8b0-fea55a2e3407'}],
'Manufacturer': 'Vishay',
'PartNumber': 'CRCW060356K2FKEA',
'Pricing': {'CurrencyCode': 'USD',
'MinimumQuantity': 0.0,
'Prices': [{'Amount': 0.0556,
'FormattedAmount': '$0.0556',
'Quantity': 0.0,
'Text': '0.055600'}],
'QuantityMultiple': None},
'Stock': {'Availability': '',
'QuantityOnHand': 0.0}},
{'Compliance': {},
'Description': u'RC Series 0402 62.5 mW 47.5 kOhms 1% \ufffd100 ppm/\ufffdC SMT Thick Film Chip Resistor',
'DistributorPartNumber': '',
'Links': [{'Type': 'View',
'Url': 'http://www.eciaauthorized.com/productredirect?id=c8249a9b-5ca1-47c9-84ab-76b39c8e8ebf'}],
'Manufacturer': 'Yageo',
'PartNumber': 'RC0402FR-0747K5L',
'Pricing': {'CurrencyCode': 'USD',
'MinimumQuantity': 0.0,
'Prices': [{'Amount': 0.0444,
'FormattedAmount': '$0.0444',
'Quantity': 0.0,
'Text': '0.044400'}],
'QuantityMultiple': None},
'Stock': {'Availability': '',
'QuantityOnHand': 0.0}}],
'Name': 'Future Electronics',
'TotalCount': 5},
{'Count': 3,
'DistributorResults': [{'Compliance': {},
'Description': 'THICK FILM RESISTOR 56.2KOHM 100mW 1% FULL REEL; Product Range:AEC-Q200 CRCW Series; Resistance:56.2kohm; Power Rating:100mW; Resistance Tolerance: 1%; Voltage Rating:75V; Resistor Case Style:0603 [1608 Metric]; No. of Pins:2 ;RoHS Compliant: Yes',
'DistributorPartNumber': '',
'Links': [{'Type': 'Buy',
'Url': 'http://www.eciaauthorized.com/productredirect?id=02a6a241-70a3-44e0-b538-9237d8d20e76'}],
'Manufacturer': 'Vishay Dale',
'PartNumber': 'CRCW060356K2FKEA',
'Pricing': {'CurrencyCode': 'USD',
'MinimumQuantity': 0.0,
'Prices': [{'Amount': 0.005,
'FormattedAmount': '$0.005',
'Quantity': 0.0,
'Text': '0.005000'},
{'Amount': 0.005,
'FormattedAmount': '$0.005',
'Quantity': 0.0,
'Text': '0.005000'},
{'Amount': 0.004,
'FormattedAmount': '$0.004',
'Quantity': 0.0,
'Text': '0.004000'},
{'Amount': 0.003,
'FormattedAmount': '$0.003',
'Quantity': 0.0,
'Text': '0.003000'}],
'QuantityMultiple': None},
'Stock': {'Availability': '',
'QuantityOnHand': 10000.0}},
{'Compliance': {},
'Description': 'THICK FILM RESISTOR 442OHM 100mW 1% FULL REEL; Product Range:AEC-Q200 ERJ Series; Resistance:442ohm; Power Rating:100mW; Resistance Tolerance: 1%; Voltage Rating:75V; Resistor Case Style:0603 [1608 Metric]; Packaging:Tape & Reel ;RoHS Compliant: Yes',
'DistributorPartNumber': '',
'Links': [{'Type': 'Buy',
'Url': 'http://www.eciaauthorized.com/productredirect?id=0a0b2972-b948-404a-ad6d-8abe34ddb287'}],
'Manufacturer': 'PANASONIC ELECTRONIC COMPONENTS',
'PartNumber': 'ERJ-3EKF4420V',
'Pricing': {'CurrencyCode': 'USD',
'MinimumQuantity': 0.0,
'Prices': [{'Amount': 0.002,
'FormattedAmount': '$0.002',
'Quantity': 0.0,
'Text': '0.002000'},
{'Amount': 0.002,
'FormattedAmount': '$0.002',
'Quantity': 0.0,
'Text': '0.002000'}],
'QuantityMultiple': None},
'Stock': {'Availability': '',
'QuantityOnHand': 80000.0}},
{'Compliance': {},
'Description': 'THICK FILM RESISTOR 47.5KOHM 62.5mW 1%; Product Range:RC Series; Resistance:47.5kohm; Power Rating:62.5mW; Resistance Tolerance: 1%; Voltage Rating:50V; Resistor Case Style:0402 [1005 Metric]; Packaging:Cut Tape; MSL:- ;RoHS Compliant: Yes',
'DistributorPartNumber': '',
'Links': [{'Type': 'Buy',
'Url': 'http://www.eciaauthorized.com/productredirect?id=effa4779-26a8-46f4-b209-90a598962cdc'}],
'Manufacturer': 'Yageo',
'PartNumber': 'RC0402FR-0747K5L',
'Pricing': {'CurrencyCode': 'USD',
'MinimumQuantity': 0.0,
'Prices': [{'Amount': 0.001,
'FormattedAmount': '$0.001',
'Quantity': 0.0,
'Text': '0.001000'},
{'Amount': 0.001,
'FormattedAmount': '$0.001',
'Quantity': 0.0,
'Text': '0.001000'},
{'Amount': 0.001,
'FormattedAmount': '$0.001',
'Quantity': 0.0,
'Text': '0.001000'},
{'Amount': 0.001,
'FormattedAmount': '$0.001',
'Quantity': 0.0,
'Text': '0.001000'},
{'Amount': 0.001,
'FormattedAmount': '$0.001',
'Quantity': 0.0,
'Text': '0.001000'},
{'Amount': 0.001,
'FormattedAmount': '$0.001',
'Quantity': 0.0,
'Text': '0.001000'}],
'QuantityMultiple': None},
'Stock': {'Availability': '',
'QuantityOnHand': 766.0}}],
'Name': 'Newark element14',
'TotalCount': 5},
{'Count': 3,
'DistributorResults': [],
'Name': 'AESCO Electronics',
'TotalCount': 5},
{'Count': 3,
'DistributorResults': [],
'Name': 'Area51-ESG',
'TotalCount': 5}],
'Messages': [],
'PartNumbers': 'ERJ-3EKF4420V, CRCW060356K2FKEA, RC0402FR-0747K5L'}],
'ResponseTime': '00:00:05.67'}
|
[
"christophercanal4@gmail.com"
] |
christophercanal4@gmail.com
|
ead059169811ef21f083a05a3ba167569743c66b
|
c4852ee94908d3254c7fda50dbe0c3f08a942f0e
|
/PythonPC/day01/06-贴吧案例.py
|
57fe1d95cd1067c4da409e7da9ac73b1a6d3e589
|
[] |
no_license
|
Nigihayamiim/PythonPC
|
05d2ca937405a036d8948a92043bb3694c1b56e2
|
4fc4a52a5315157fbafccdf2c854d70301ce2e9f
|
refs/heads/master
| 2022-06-10T14:16:03.227478
| 2020-05-07T11:18:13
| 2020-05-07T11:18:13
| 259,821,635
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 966
|
py
|
from urllib.request import Request, urlopen
from urllib.parse import urlencode
from fake_useragent import UserAgent
def get_html(url):
    """Fetch *url* and return the raw response body as bytes.

    A fake desktop-Chrome User-Agent header (via fake_useragent) is
    attached so the request looks like an ordinary browser visit.
    """
    headers = {
        "User-Agent": UserAgent().chrome
    }
    request = Request(url, headers=headers)
    # Close the connection deterministically instead of leaking the
    # socket until garbage collection.
    with urlopen(request) as response:
        return response.read()
def save_html(filename, html_bytes):
    """Write the raw page bytes to *filename*, overwriting any old file."""
    with open(filename, "wb") as out_file:
        out_file.write(html_bytes)
def main():
    """Interactively download the requested number of Tieba forum pages.

    Asks for a forum keyword and a page count, then fetches each result
    page and stores it as a local .html file.
    """
    base_url = "https://tieba.baidu.com/f?ie=utf-8&{}"
    content = input("请输入要下载的内容:")
    num = input("请输入要下载的页数:")
    for page_index in range(int(num)):
        # Tieba paginates results in steps of 50 posts per page.
        query = urlencode({"kw": content, "pn": page_index * 50})
        url = base_url.format(query)
        filename = content + "吧的第" + str(page_index + 1) + "页.html"
        print(filename + "正在下载")
        save_html(filename, get_html(url))
# Run the interactive downloader only when executed as a script.
if __name__ == '__main__':
    main()
|
[
"xzyim@outlook.com"
] |
xzyim@outlook.com
|
a1591a7c8cdcde44c51460db376da7e890923db7
|
e76fafcc4f8cd93aeea97cc90434a04216ed3d6b
|
/utils.py
|
a34990de52e424c0888607f936775bc5dcf9d944
|
[] |
no_license
|
bipinkc19/sentiment-analysis
|
5e4716b9e253c38330762c19dbe5c17c389813ba
|
7bcf89c01c63bb6a05a1fc97eea282843ac7f818
|
refs/heads/master
| 2020-07-13T03:02:57.745335
| 2019-09-05T05:35:31
| 2019-09-05T05:35:31
| 204,974,192
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 904
|
py
|
from sklearn.metrics import confusion_matrix
import seaborn as sns
import numpy as np
import matplotlib.pyplot as plt
def classification_metrics(actual, pred, msg):
    """Plot a confusion-matrix heatmap and print summary metrics.

    `actual` and `pred` are the true and predicted binary labels;
    `msg` is a caption printed above the metrics.
    """
    conf = confusion_matrix(actual, pred)
    plt.figure(figsize=(4, 4))
    ax = plt.subplot()
    sns.heatmap(conf, annot=True, fmt='g')
    # Axis labels, title and class tick labels.
    ax.set_xlabel('Predicted labels')
    ax.set_ylabel('True labels')
    ax.set_title('Confusion Matrix')
    ax.xaxis.set_ticklabels(['Negative', 'Positive'])
    ax.yaxis.set_ticklabels(['Negative', 'Positive'])
    plt.show()
    # Layout of a binary confusion matrix: [[tn, fp], [fn, tp]].
    tn, fp = conf[0][0], conf[0][1]
    fn, tp = conf[1][0], conf[1][1]
    sensitivity = tp / (fn + tp)
    specificity = tn / (tn + fp)
    accuracy = (tn + tp) / np.sum(conf)
    print(msg, '\n')
    print('accuracy: ', round(accuracy, 2),
          '\nsensitivity: ', round(sensitivity, 2),
          '\nspecifity: ', round(specificity, 2))
    print(conf)
|
[
"bipinkc19@gmail.com"
] |
bipinkc19@gmail.com
|
f9df30809d363ee66f80551c56053418489bdc2e
|
57c0e19529c3283be273b2a0339470ddcf470138
|
/githubstalker.py
|
92eee06a76d923efe2d9e462893dd927fe228acb
|
[
"Apache-2.0"
] |
permissive
|
harishsg99/My-Week-end-products
|
a6b76b1573382d6229c1d9de6d24fe982b3eaa56
|
bfdca740540270f90507c23adad0a6cd184950b8
|
refs/heads/master
| 2022-12-21T19:55:17.671587
| 2020-08-20T02:47:54
| 2020-08-20T02:47:54
| 286,051,692
| 2
| 0
|
MIT
| 2020-08-13T09:05:47
| 2020-08-08T13:48:17
|
Python
|
UTF-8
|
Python
| false
| false
| 4,784
|
py
|
# Import third-party dependencies; failures are reported with an
# actionable message instead of a raw traceback.
try:
    import argparse
    from bs4 import BeautifulSoup
    from selenium import webdriver
    from selenium.webdriver.chrome.options import Options as ChromeOptions
    from selenium.webdriver.firefox.options import Options as FirefoxOptions
    from fake_headers import Headers
    from selenium.webdriver.common.by import By
    from selenium.webdriver.support.ui import WebDriverWait
    from selenium.webdriver.support import expected_conditions as EC
    from settings import DRIVER_SETTINGS
except ModuleNotFoundError:
    # NOTE(review): execution continues after this message, so later code
    # will still fail with NameError if dependencies are missing —
    # consider re-raising or exiting here.
    print("Please download dependencies from requirement.txt")
except Exception as ex:
    print(ex)
class Github:
    """Scrapes public profile information from a GitHub user page.

    Uses a headless Selenium browser (Chrome or Firefox, selected via
    ``DRIVER_SETTINGS``) and BeautifulSoup to parse the rendered HTML.
    """

    @staticmethod
    def init_driver(driver_path, browser_name):
        """Create and return a headless webdriver.

        driver_path:  filesystem path to the chromedriver/geckodriver binary.
        browser_name: "chrome" or "firefox" (case-insensitive).
        Returns the webdriver, or the string "Browser Not Supported!" for
        any other browser name (callers must check for this).
        """
        def set_properties(browser_option):
            # Common headless/stealth flags shared by both browsers.
            ua = Headers().generate()  # fake user agent
            browser_option.add_argument('--headless')
            browser_option.add_argument('--disable-extensions')
            browser_option.add_argument('--incognito')
            browser_option.add_argument('--disable-gpu')
            browser_option.add_argument('--log-level=3')
            browser_option.add_argument(f'user-agent={ua}')
            browser_option.add_argument('--disable-notifications')
            browser_option.add_argument('--disable-popup-blocking')
            return browser_option
        try:
            browser_name = browser_name.strip().title()
            # automating and opening URL in headless browser
            if browser_name == "Chrome":
                browser_option = ChromeOptions()
                browser_option = set_properties(browser_option)
                # chromedriver's path in first argument
                driver = webdriver.Chrome(driver_path, options=browser_option)
            elif browser_name == "Firefox":
                browser_option = FirefoxOptions()
                browser_option = set_properties(browser_option)
                driver = webdriver.Firefox(executable_path=driver_path, options=browser_option)
            else:
                driver = "Browser Not Supported!"
            return driver
        except Exception as ex:
            print(ex)

    @staticmethod
    def scrap(username):
        """Return a dict of profile fields for *username*, or None on failure."""
        driver = None  # so the cleanup below is safe even if creation fails
        try:
            URL = 'https://github.com/{}'.format(username)
            if DRIVER_SETTINGS['PATH'] != "" and DRIVER_SETTINGS['BROWSER_NAME'] != "":
                driver_path = DRIVER_SETTINGS['PATH']
                browser = DRIVER_SETTINGS['BROWSER_NAME']
                driver = Github.init_driver(driver_path, browser)
            else:
                print("Driver is not set!. Please edit settings file for driver configurations.")
                exit()
            driver.get(URL)
            # wait until page loads
            wait = WebDriverWait(driver, 10)
            element = wait.until(EC.title_contains(f"{username}"))
            # get source code of the website
            response = driver.page_source.encode('utf-8').strip()
            soup = BeautifulSoup(response, 'html.parser')
            # finding all elements in source code
            # (the original also looked up an `email` element but never
            # used it; that dead lookup was removed)
            full_name = soup.find("span", {
                'class': 'p-name vcard-fullname d-block overflow-hidden'
            })
            bio = soup.find('div', {
                'class': 'p-note user-profile-bio mb-3 js-user-profile-bio f4'
            })
            location = soup.find('span', {
                'class': 'p-label'
            })
            status = soup.find('div', {
                'class': 'ws-normal user-status-message-wrapper f6 min-width-0'
            })
            contributions = soup.find_all("h2", {
                "class": 'f4 text-normal mb-2'
            })[0]
            return {
                'full_name': full_name.text if full_name is not None else "Not Found",
                'bio': bio.text if bio is not None else "Bio Not Found!",
                'location': location.text if location is not None else "Location Not found!",
                'status': status.text if status is not None else "No status given",
                "contributions": contributions.text.strip().replace("\n", "")
            }
        except Exception as ex:
            print(ex)
        finally:
            # The original called driver.close()/quit() inside `except`,
            # which raised NameError (driver unbound) when setup failed,
            # or AttributeError when init_driver returned the
            # "Browser Not Supported!" string. Guard the cleanup instead.
            if driver is not None and not isinstance(driver, str):
                driver.close()
                driver.quit()
if __name__ == '__main__':
    # CLI entry point: pass the GitHub username as the sole positional
    # argument and the scraped profile dict is printed.
    parser = argparse.ArgumentParser()
    parser.add_argument("username",help="username to search")
    args = parser.parse_args()
    print(Github.scrap(args.username))
#last updated on 31st July, 2020
|
[
"noreply@github.com"
] |
harishsg99.noreply@github.com
|
89fc8ea8504e008db23d7c2ff978902543775435
|
f30e9ca8c5777196bf8e56d82ecddb80418d21ba
|
/test_fena/v1_12/test_all.py
|
b0aa69333bef16ba1c6f4795f7a98ce6220ddc01
|
[
"MIT"
] |
permissive
|
Aquafina-water-bottle/Fena
|
029a1d6be64bdf1bd75d647d83e524821dfc7a3c
|
00a2b1dbc6f8abd968c46c637d6ad30d5fcde919
|
HEAD
| 2018-10-22T13:36:24.838025
| 2018-07-19T18:16:17
| 2018-07-19T18:16:17
| 96,073,393
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,031
|
py
|
from test_fena.v1_12.test_selectors import test_selectors
from test_fena.v1_12.test_jsons import test_jsons
from test_fena.v1_12.test_nbts import test_nbts
from test_fena.v1_12.test_scoreboards import test_scoreboards
from test_fena.v1_12.test_blocks import test_blocks
from test_fena.v1_12.test_coords import test_coords
from test_fena.v1_12.test_executes import test_executes
from test_fena.v1_12.test_simple_cmds import test_simple_cmds
from test_fena.v1_12.test_datas import test_datas
from test_fena.v1_12.test_effects import test_effects
from test_fena.v1_12.test_items import test_items
from test_fena.v1_12.test_objectives import test_objectives
from test_fena.v1_12.test_teams import test_teams
from test_fena.v1_12.test_xps import test_xps
def test_all():
    """Run every v1.12 cert sub-test suite in sequence."""
    # Same invocation order as the original hand-written call list.
    suites = (
        test_selectors,
        test_jsons,
        test_nbts,
        test_scoreboards,
        test_blocks,
        test_coords,
        test_executes,
        test_simple_cmds,
        test_effects,
        test_datas,
        test_items,
        test_objectives,
        test_teams,
        test_xps,
    )
    for suite in suites:
        suite()
|
[
"glowing.locker@gmail.com"
] |
glowing.locker@gmail.com
|
7a5b217b4fbaf818c24427ce0eb495dbb0c9c3ad
|
b25dbbf04bb70fd52afc68ab654e635dfae37ed9
|
/0x0B-python-input_output/5-save_to_json_file.py~
|
6267c374ed0085fb617bb5bbae4406ca4c0e85de
|
[] |
no_license
|
HLozano12/holbertonschool-higher_level_programming
|
eb6f1cd46b788b7d4e010664f91c4a44fb3276e8
|
26d3bfd6e2f95f39b39a2748ec0e542b65c4f7c5
|
refs/heads/main
| 2023-06-12T06:00:11.993257
| 2021-07-08T00:05:19
| 2021-07-08T00:05:19
| 361,659,105
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 254
|
#!/usr/bin/python3
"""module to write an obj to txtfile using json rep"""
import json
def save_to_json_file(my_obj, filename):
    """Serialize *my_obj* as JSON and write it to *filename*.

    Args:
        my_obj: any JSON-serializable object.
        filename: path of the text file to (over)write.
    Returns:
        None (``json.dump`` has no return value).
    """
    # Fixed: the original read `encodin'utf-8'`, a syntax error; the
    # keyword is `encoding='utf-8'`.
    with open(filename, mode='w', encoding='utf-8') as h:
        return json.dump(my_obj, h)
|
[
"HLR8503@gmail.com"
] |
HLR8503@gmail.com
|
|
4833633212dabfbd9d8976952b217ebfbe2e1cf3
|
f0778e8c5df5d4da020bee4e93c6a11e9a32471c
|
/pages/index.py
|
3122de6ec5b20a4a2e66b86f87bd7f54f7ddb453
|
[] |
no_license
|
nanzhijiao/nanyang
|
535a2cb6cf1af609cd95ce5e7cd9b8b564087a1a
|
7bc8ad22669db24b98a556c572400f283827b085
|
refs/heads/master
| 2020-06-17T07:27:57.935243
| 2019-10-31T06:02:57
| 2019-10-31T06:02:57
| 195,846,015
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 791
|
py
|
from selenium.webdriver.common.by import By
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as ec
from pages.base import BasePage
__author__ = '123456'
# coding=utf-8
class IndexPage(BasePage):
    """Page object for the site's home page."""
    # Locator for the "bid" button on the home page.
    bid_locator = (By.XPATH, '//a[contains(@class,"btn-special")]')
    def __init__(self, driver):
        # NOTE(review): BasePage.__init__ is not called here — confirm the
        # base class needs no initialisation of its own.
        self.driver = driver
    def get_user_info(self):
        """Return the element holding the logged-in user's info on the home page."""
        # NOTE(review): wait_present_element is called on the class, not on
        # self — if it is an instance method of BasePage, the locator tuple
        # is passed as `self` and this will misbehave; verify its signature.
        user_ele = BasePage.wait_present_element((By.XPATH, "//a[@href='/Member/index.html']"))
        return user_ele
    def choice_bid(self):
        """Select a bid (click the bid button)."""
        # Locate the bid button (same class-call caveat as above).
        e = BasePage.wait_clickable_element(self.bid_locator)
        # Click it.
        e.click()
|
[
"hewangtong@xiaomi.com"
] |
hewangtong@xiaomi.com
|
cf0fda7639f90bddcdbbdfa57d0431066628dd98
|
ae846e6c05476eb68f9ee364c8372ba96e7770fb
|
/0x00-python_variable_annotations/2-floor.py
|
3e642e00ad18f4d0ade0e4ca0f7ad4877add7cea
|
[] |
no_license
|
yassineaboub/holbertonschool-web_back_end
|
3f7ee12934e0f784c1fec74643cb3a09389b31c0
|
400434925e9e0fddb15df842478b02b15fb1f827
|
refs/heads/main
| 2023-05-31T23:57:14.151457
| 2021-06-17T20:34:14
| 2021-06-17T20:34:14
| 305,984,383
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 199
|
py
|
#!/usr/bin/env python3
"""floor function"""
import math
def floor(n: float) -> int:
    """Return the largest integer less than or equal to *n*."""
    result = math.floor(n)
    return result
|
[
"yassineaboub@live.fr"
] |
yassineaboub@live.fr
|
33d3438f46eb106a9ae38ad15a44e2a6f8dd2ecf
|
4136fc143d5f3121edf25b55ead9bb6e506beaef
|
/TEMpcPlot/Gui/learn_QT/callLineEdit.py
|
5111b085a13949cc3308b68e2d244240be94c574
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
Prestipino/TEMpcPlot
|
a557863b07225a82598b33eb373bd1e7df569280
|
386cea122cd6b04d08dbaf0ecae9ff04d62d05ea
|
refs/heads/master
| 2023-07-14T18:25:19.069018
| 2023-06-13T11:34:34
| 2023-06-13T11:34:34
| 252,416,658
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 524
|
py
|
import sys
from PyQt5.QtWidgets import QDialog, QApplication
from demoLineEdit import *
class MyForm(QDialog):
    """Dialog that greets the user by the name typed into the line edit."""

    def __init__(self):
        super().__init__()
        # Build the Designer-generated UI, wire the button, then show.
        self.ui = Ui_Dialog()
        self.ui.setupUi(self)
        self.ui.ButtonClickMe.clicked.connect(self.dispmessage)
        self.show()

    def dispmessage(self):
        """Display a greeting for the current line-edit text in the label."""
        name = self.ui.lineEditName.text()
        self.ui.labelResponse.setText("Hello " + name)
if __name__=="__main__":
app = QApplication(sys.argv)
w = MyForm()
w.show()
sys.exit(app.exec_())
|
[
"carmelo.prestipino@gmail.com"
] |
carmelo.prestipino@gmail.com
|
c9fdf7ffef9ab9e0a6fea09af25192f1aec1af80
|
563f7440fc026a16cbad9f69b933eb795d8c696e
|
/3-4/q1.py
|
1adf7e61e0998a5c72976a5527efe22605296a92
|
[] |
no_license
|
yoowonsuk/python
|
f026921b1b5976776af31690cf53e88436b7370e
|
7c9897c45e8a269e0ceb74ec92caa9e644bb0a65
|
refs/heads/master
| 2020-06-16T08:19:10.185653
| 2019-07-10T09:05:32
| 2019-07-10T09:05:32
| 195,479,779
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 49
|
py
|
for i in range(5):
print("Nice to meet you")
|
[
"sam2yuk@naver.com"
] |
sam2yuk@naver.com
|
25333c6693c054490e62be0ad26add45c338acaa
|
5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d
|
/alipay/aop/api/response/AlipayTradeContractQuicksettlementQueryResponse.py
|
b7c2b10a5a075070abe8db3b0d42a5590b07fb36
|
[
"Apache-2.0"
] |
permissive
|
alipay/alipay-sdk-python-all
|
8bd20882852ffeb70a6e929038bf88ff1d1eff1c
|
1fad300587c9e7e099747305ba9077d4cd7afde9
|
refs/heads/master
| 2023-08-27T21:35:01.778771
| 2023-08-23T07:12:26
| 2023-08-23T07:12:26
| 133,338,689
| 247
| 70
|
Apache-2.0
| 2023-04-25T04:54:02
| 2018-05-14T09:40:54
|
Python
|
UTF-8
|
Python
| false
| false
| 2,046
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.response.AlipayResponse import AlipayResponse
class AlipayTradeContractQuicksettlementQueryResponse(AlipayResponse):
    """Response model for the trade-contract quick-settlement query API.

    Exposes the quota/state fields returned by the gateway as properties.
    """

    # Simple fields copied verbatim from the parsed response payload.
    _FIELDS = ('credit_quota', 'left_advance_quota', 'left_credit_quota',
               'service_state', 'signed')

    def __init__(self):
        super(AlipayTradeContractQuicksettlementQueryResponse, self).__init__()
        self._credit_quota = None
        self._left_advance_quota = None
        self._left_credit_quota = None
        self._service_state = None
        self._signed = None

    @property
    def credit_quota(self):
        return self._credit_quota

    @credit_quota.setter
    def credit_quota(self, value):
        self._credit_quota = value

    @property
    def left_advance_quota(self):
        return self._left_advance_quota

    @left_advance_quota.setter
    def left_advance_quota(self, value):
        self._left_advance_quota = value

    @property
    def left_credit_quota(self):
        return self._left_credit_quota

    @left_credit_quota.setter
    def left_credit_quota(self, value):
        self._left_credit_quota = value

    @property
    def service_state(self):
        return self._service_state

    @service_state.setter
    def service_state(self, value):
        self._service_state = value

    @property
    def signed(self):
        return self._signed

    @signed.setter
    def signed(self, value):
        self._signed = value

    def parse_response_content(self, response_content):
        """Populate this object's fields from the raw gateway response."""
        response = super(AlipayTradeContractQuicksettlementQueryResponse, self).parse_response_content(response_content)
        # Copy each known field only when the gateway actually returned it;
        # setattr routes through the property setters above.
        for field in self._FIELDS:
            if field in response:
                setattr(self, field, response[field])
|
[
"jishupei.jsp@alibaba-inc.com"
] |
jishupei.jsp@alibaba-inc.com
|
5c616a727cedc00a5880ffb4476faf785a0bfcef
|
05b25889bddaf3ae34ddf43bdb9c8e19020f3733
|
/task/LuBin_Wan_week_5.py
|
55e44084dc82c56fa85c6531320f317841828d46
|
[] |
no_license
|
20016145/test1
|
03fea759fa37811dd5edc0c606203c6c9dc8a6ba
|
5bccdce1dbacc1d4228552c7bf077c5da552b13f
|
refs/heads/master
| 2023-03-07T20:02:44.515140
| 2021-02-04T13:55:16
| 2021-02-04T13:55:16
| 335,974,381
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,067
|
py
|
NATO={"A":"Alpha","B":"Bravo","C":"Charlie","D":"Delta",
"E":"Echo","F":"Foxtro","G":"Golf","H":"Hotel",
"I":"India","J":"Juliet","K":"Kilo","L":"Lima",
"M":"Mike","N":"November","O":"Oscar","P":"Papa",
"Q":"Quebec","R":"Romeo","S":"Sierra","T":"Tango",
"U":"Uniform","V":"Victor","W":"Whisky","X":"X-Ray",
"Y":"Yankee","Z":"Zulu",}
word = input("enter a word").upper()
for letter in word:
if letter in NATO.keys():
word=NATO.get(letter)
print(word)
w = input("enter a word:").lower()
shift = int (input("enter a number:"))
alphabet = "abcdefghijklmnopqrstuvwxyz"
def ceaser(plaintxt, key):
    """Return *plaintxt* shifted by *key* positions (Caesar cipher).

    Fixes in this revision:
    - iterates over the `plaintxt` parameter instead of the global `w`;
    - corrects the `albhabet` typo that raised NameError on every call;
    - preserves spaces (the original tested `letters == ""`, which can
      never be true for a character, so spaces were silently dropped).
    Only lowercase a-z are shifted; any other non-space character is
    dropped, matching the original inner-loop behaviour.
    """
    letters_az = "abcdefghijklmnopqrstuvwxyz"  # local copy; no global needed
    cipher = ""
    for ch in plaintxt:
        if ch == " ":
            cipher += ch
        else:
            index = letters_az.find(ch)
            if index != -1:
                cipher += letters_az[(index + key) % 26]
    return cipher
print(ceaser(w,shift))
|
[
"1071436792@qq.com"
] |
1071436792@qq.com
|
607fd244528f171b1c186f58ca57d250f5dd73a1
|
f743e7fd9ad7d9af200dcb077fe6f8d95a5025f9
|
/tests/scripts/thread-cert/Cert_7_1_07_BorderRouterAsLeader.py
|
be980148ef016038c334717d33b207dbff101adf
|
[
"BSD-3-Clause",
"LicenseRef-scancode-warranty-disclaimer"
] |
permissive
|
Abhayakara/openthread
|
6ea1cf8053290db20eb3a61732f30bea0083971c
|
fb83c90ffec5dee2194ee9bb5280fa48ce9a24fc
|
refs/heads/main
| 2023-08-28T00:32:37.427937
| 2021-10-07T20:16:38
| 2021-10-07T20:16:38
| 295,905,892
| 5
| 0
|
BSD-3-Clause
| 2021-10-05T18:42:28
| 2020-09-16T02:48:57
|
C++
|
UTF-8
|
Python
| false
| false
| 26,112
|
py
|
#!/usr/bin/env python3
#
# Copyright (c) 2020, The OpenThread Authors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import unittest
import copy
import config
import thread_cert
from pktverify.consts import WIRESHARK_OVERRIDE_PREFS, MLE_CHILD_UPDATE_REQUEST, MLE_DATA_RESPONSE, MLE_CHILD_ID_RESPONSE, SVR_DATA_URI, ACTIVE_TIMESTAMP_TLV, RESPONSE_TLV, LINK_LAYER_FRAME_COUNTER_TLV, VERSION_TLV, TLV_REQUEST_TLV, ADDRESS16_TLV, NETWORK_DATA_TLV, ROUTE64_TLV, CHALLENGE_TLV, SOURCE_ADDRESS_TLV, LEADER_DATA_TLV, ADDRESS_REGISTRATION_TLV, NWD_BORDER_ROUTER_TLV, NWD_6LOWPAN_ID_TLV
from pktverify.packet_verifier import PacketVerifier
from pktverify.bytes import Bytes
from pktverify.addrs import Ipv6Addr
from pktverify.null_field import nullField
from pktverify.utils import is_sublist
LEADER = 1
ROUTER_1 = 2
ROUTER_2 = 3
MED = 4
SED = 5
MTDS = [MED, SED]
PREFIX_1 = '2001:0db8:0001::/64'
PREFIX_2 = '2001:0db8:0002::/64'
# Test Purpose and Description:
# -----------------------------
# The purpose of this test case is to verify that network data is properly updated
# when a server from the network leaves and rejoins.
# Router_1 is configured as Border Router for prefix 2001:db8:1::/64.
# Router_2 is configured as Border Router for prefix 2001:db8:1::/64.
# MED is configured to require complete network data.
# SED is configured to request only stable network data.
#
# Test Topology:
# -------------
# SED
# |
# Router_1 - Leader(DUT) - MED
# |
# Router_2
#
# DUT Types:
# ----------
# Leader
class Cert_7_1_7_BorderRouterAsLeader(thread_cert.TestCase):
USE_MESSAGE_FACTORY = False
SUPPORT_NCP = False
TOPOLOGY = {
LEADER: {
'name': 'LEADER',
'mode': 'rdn',
'allowlist': [ROUTER_1, ROUTER_2, MED, SED]
},
ROUTER_1: {
'name': 'ROUTER_1',
'mode': 'rdn',
'allowlist': [LEADER]
},
ROUTER_2: {
'name': 'ROUTER_2',
'mode': 'rdn',
'allowlist': [LEADER]
},
MED: {
'name': 'MED',
'is_mtd': True,
'mode': 'rn',
'timeout': config.DEFAULT_CHILD_TIMEOUT,
'allowlist': [LEADER]
},
SED: {
'name': 'SED',
'is_mtd': True,
'mode': '-',
'timeout': config.DEFAULT_CHILD_TIMEOUT,
'allowlist': [LEADER]
},
}
# override wireshark preferences with case needed parameters
CASE_WIRESHARK_PREFS = copy.deepcopy(WIRESHARK_OVERRIDE_PREFS)
CASE_WIRESHARK_PREFS['6lowpan.context1'] = PREFIX_1
CASE_WIRESHARK_PREFS['6lowpan.context2'] = PREFIX_2
def test(self):
self.nodes[LEADER].start()
self.simulator.go(5)
self.assertEqual(self.nodes[LEADER].get_state(), 'leader')
for i in (2, 3):
self.nodes[i].start()
self.simulator.go(5)
self.assertEqual(self.nodes[i].get_state(), 'router')
self.nodes[MED].start()
self.simulator.go(5)
self.assertEqual(self.nodes[MED].get_state(), 'child')
self.nodes[SED].start()
self.simulator.go(5)
self.assertEqual(self.nodes[SED].get_state(), 'child')
self.collect_rlocs()
self.nodes[ROUTER_1].add_prefix(PREFIX_1, 'paosr')
self.nodes[ROUTER_1].register_netdata()
self.simulator.go(5)
self.nodes[ROUTER_2].add_prefix(PREFIX_1, 'paro')
self.nodes[ROUTER_2].register_netdata()
self.simulator.go(5)
self.nodes[ROUTER_2].set_preferred_partition_id(1)
self.nodes[ROUTER_2].set_network_id_timeout(50)
self.nodes[ROUTER_2].remove_allowlist(self.nodes[LEADER].get_addr64())
self.nodes[LEADER].remove_allowlist(self.nodes[ROUTER_2].get_addr64())
# Wait for NETWORK_ID_TIMEOUT taking effect
# Router_2 creates a new partition
self.simulator.go(60)
self.assertEqual(self.nodes[ROUTER_2].get_state(), 'leader')
self.nodes[ROUTER_2].remove_domain_prefix(PREFIX_1)
self.nodes[ROUTER_2].add_prefix(PREFIX_2, 'paros')
self.nodes[ROUTER_2].register_netdata()
# Router_2 reattaches to Leader
self.nodes[ROUTER_2].add_allowlist(self.nodes[LEADER].get_addr64())
self.nodes[LEADER].add_allowlist(self.nodes[ROUTER_2].get_addr64())
# Wait for Router_2 reattachment and network data propagation
# ADVERTISEMENT_I_MAX + DEFAULT_CHILD_TIMEOUT + ATTACH_DELAY + Extra
self.simulator.go(60)
self.assertEqual(self.nodes[ROUTER_2].get_state(), 'router')
self.collect_ipaddrs()
self.collect_rloc16s()
# ping Leader's PREFIX_1 and PREFIX_2 addrs
dut_addrs = []
dut_addrs.append(self.nodes[LEADER].get_addr(PREFIX_1))
dut_addrs.append(self.nodes[LEADER].get_addr(PREFIX_2))
for addr in dut_addrs:
self.assertTrue(self.nodes[ROUTER_1].ping(addr))
self.simulator.go(1)
self.assertTrue(self.nodes[SED].ping(addr))
self.simulator.go(1)
self.nodes[ROUTER_2].remove_domain_prefix(PREFIX_2)
self.nodes[ROUTER_2].register_netdata()
self.simulator.go(5)
def verify(self, pv):
pkts = pv.pkts
pv.summary.show()
LEADER = pv.vars['LEADER']
LEADER_RLOC = pv.vars['LEADER_RLOC']
LEADER_RLOC16 = pv.vars['LEADER_RLOC16']
ROUTER_1 = pv.vars['ROUTER_1']
ROUTER_1_RLOC16 = pv.vars['ROUTER_1_RLOC16']
ROUTER_1_RLOC = pv.vars['ROUTER_1_RLOC']
ROUTER_2 = pv.vars['ROUTER_2']
ROUTER_2_RLOC16 = pv.vars['ROUTER_2_RLOC16']
ROUTER_2_RLOC = pv.vars['ROUTER_2_RLOC']
SED = pv.vars['SED']
MED = pv.vars['MED']
GUA = [{}, {}]
PREFIXES = [Bytes(PREFIX_1[:-5]), Bytes(PREFIX_2[:-5])]
for i in (0, 1):
for node in ('LEADER', 'ROUTER_1', 'SED'):
for addr in pv.vars['%s_IPADDRS' % node]:
if addr.startswith(PREFIXES[i]):
GUA[i][node] = addr
# Step 1: Ensure topology is formed correctly
pv.verify_attached('ROUTER_1', 'LEADER')
pv.verify_attached('ROUTER_2', 'LEADER')
pv.verify_attached('MED', 'LEADER', 'MTD')
pv.verify_attached('SED', 'LEADER', 'MTD')
_pkt = pkts.last()
# Step 2,3: Router_1 and Router_2 MUST send a CoAP Server Data
# Notification frame to the Leader including the server’s
# information(Prefix, Border Router):
# CoAP Request URI
# coap://[<Leader address>]:MM/a/sd
# CoAP Payload
# Thread Network Data TLV
# Step 4: Leader sends a CoAP ACK frame to each of Router_1 and
# Router_2
with pkts.save_index():
for node in ('ROUTER_1', 'ROUTER_2'):
_dn_pkt = pkts.filter_wpan_src64(pv.vars['%s' %node]).\
filter_wpan_dst16(LEADER_RLOC16).\
filter_coap_request(SVR_DATA_URI).\
filter(lambda p:
[Ipv6Addr(PREFIX_1[:-3])] ==
p.thread_nwd.tlv.prefix and\
[pv.vars['%s_RLOC16' %node]] ==
p.thread_nwd.tlv.border_router_16
).\
must_next()
pkts.filter_wpan_src64(LEADER).\
filter_ipv6_dst(pv.vars['%s_RLOC' %node]).\
filter_coap_ack(SVR_DATA_URI).\
filter(lambda p: p.coap.mid == _dn_pkt.coap.mid).\
must_next()
# Step 5: Leader MUST multicast MLE Data Response with the new
# information collected from Router_1 and Router_2,
# including the following TLVs:,
# - Source Address TLV
# - Leader Data TLV
# - Data Version field <incremented>
# - Stable Data Version field <incremented>
# - Network Data TLV
# - Stable Flag set
# - At least one Prefix TLV (Prefix 1)
# - Two Border Router sub-TLVs
# Border Router1 TLV: Stable Flag set
# Border Router2 TLV: Stable Flag not set
# - 6LoWPAN ID sub-TLV
# Stable Flag set
_dr_pkt = pkts.filter_wpan_src64(LEADER).\
filter_LLANMA().\
filter_mle_cmd(MLE_DATA_RESPONSE).\
filter(lambda p: {
NETWORK_DATA_TLV,
SOURCE_ADDRESS_TLV,
LEADER_DATA_TLV
} <= set(p.mle.tlv.type) and\
[Ipv6Addr(PREFIX_1[:-3])] ==
p.thread_nwd.tlv.prefix and\
p.mle.tlv.leader_data.data_version ==
(_pkt.mle.tlv.leader_data.data_version + 1) % 256 and\
p.mle.tlv.leader_data.stable_data_version ==
(_pkt.mle.tlv.leader_data.stable_data_version + 1) % 256
).\
must_next()
with pkts.save_index():
_dr_pkt1 = pkts.filter_wpan_src64(LEADER).\
filter_LLANMA().\
filter_mle_cmd(MLE_DATA_RESPONSE).\
filter(lambda p: {
NETWORK_DATA_TLV,
SOURCE_ADDRESS_TLV,
LEADER_DATA_TLV
} <= set(p.mle.tlv.type) and\
{
NWD_BORDER_ROUTER_TLV,
NWD_6LOWPAN_ID_TLV
} <= set(p.thread_nwd.tlv.type) and\
p.mle.tlv.leader_data.data_version ==
(_dr_pkt.mle.tlv.leader_data.data_version + 1) % 256 and\
p.mle.tlv.leader_data.stable_data_version ==
_dr_pkt.mle.tlv.leader_data.stable_data_version and\
is_sublist([ROUTER_1_RLOC16, ROUTER_2_RLOC16],
p.thread_nwd.tlv.border_router_16) and\
is_sublist([0, 1, 1, 1, 0], p.thread_nwd.tlv.stable) and\
is_sublist([1], getattr(p.thread_nwd.tlv, '6co').flag.c) and\
is_sublist([Ipv6Addr(PREFIX_1[:-3])], p.thread_nwd.tlv.prefix)
).\
must_next()
# Step 6: Leader MUST send a MLE Child Update Request or MLE Data
# Response to SED, including the following TLVs:
# - Network Data TLV
# At least one Prefix TLV (Prefix 1) including:
# - Stable Flag set
# - Border Router sub-TLV(corresponding to Router_1)
# - P_border_router_16<0xFFFE>
# - Stable Flag set
# - 6LoWPAN ID sub-TLV
# - Stable Flag set
# - Source Address TLV
# - Leader Data TLV
# - Active Timestamp TLV
pkts.filter_wpan_src64(LEADER).\
filter_wpan_dst64(SED).\
filter_mle_cmd2(MLE_CHILD_UPDATE_REQUEST, MLE_DATA_RESPONSE).\
filter(lambda p: {
NETWORK_DATA_TLV,
SOURCE_ADDRESS_TLV,
LEADER_DATA_TLV,
ACTIVE_TIMESTAMP_TLV
} == set(p.mle.tlv.type) and\
is_sublist([Ipv6Addr(PREFIX_1[:-3])], p.thread_nwd.tlv.prefix) and\
is_sublist([1, 1, 1], p.thread_nwd.tlv.stable) and\
is_sublist([1], getattr(p.thread_nwd.tlv, '6co').flag.c) and\
is_sublist([0xFFFE], p.thread_nwd.tlv.border_router_16)
).\
must_next()
# Step 10: Router_2 automatically reattaches to the Leader and sends
# a CoAP Server Data Notification message with the server’s
# information (Prefix, Border Router) to the Leader:
# CoAP Request URI
# coap://[<leader address>]:MM/a/sd
# CoAP Payload
# Thread Network Data TLV
# Step 11: Leader sends a CoAP ACK frame to each of Routers
pv.verify_attached('ROUTER_2', 'LEADER')
with pkts.save_index():
_dn_pkt = pkts.filter_wpan_src64(ROUTER_2).\
filter_wpan_dst16(LEADER_RLOC16).\
filter_coap_request(SVR_DATA_URI).\
filter(lambda p:
[Ipv6Addr(PREFIX_2[:-3])] ==
p.thread_nwd.tlv.prefix and\
[ROUTER_2_RLOC16] ==
p.thread_nwd.tlv.border_router_16
).\
must_next()
pkts.filter_wpan_src64(LEADER).\
filter_ipv6_dst(ROUTER_2_RLOC).\
filter_coap_ack(SVR_DATA_URI).\
filter(lambda p: p.coap.mid == _dn_pkt.coap.mid).\
must_next()
# Step 12: Leader MUST multicast MLE Data Response with the new
# information collected from Router_2,
# including the following TLVs:,
# - Source Address TLV
# - Leader Data TLV
# - Data Version field <incremented>
# - Stable Data Version field <incremented>
# - Network Data TLV
# - Stable Flag set
# - At least two Prefix TLVs (Prefix 1 and Prefix 2)
# - Prefix 1 TLV
# - Stable Flag set
# - Only one Border Router sub-TLV
# corresponding to Router_1
# - 6LoWPAN ID sub-TLV
# - Stable Flag set
# - Prefix 2 TLV
# - Stable Flag set
# - Only one Border Router sub-TLV
# corresponding to Router_2
# - 6LoWPAN ID sub-TLV
_dr_pkt2 = pkts.filter_wpan_src64(LEADER).\
filter_LLANMA().\
filter_mle_cmd(MLE_DATA_RESPONSE).\
filter(lambda p: {
NETWORK_DATA_TLV,
SOURCE_ADDRESS_TLV,
LEADER_DATA_TLV
} <= set(p.mle.tlv.type) and\
{
NWD_BORDER_ROUTER_TLV,
NWD_6LOWPAN_ID_TLV
} <= set(p.thread_nwd.tlv.type) and\
is_sublist([ROUTER_1_RLOC16, ROUTER_2_RLOC16],
p.thread_nwd.tlv.border_router_16) and\
is_sublist([0, 1, 1, 1, 1, 1, 1],
p.thread_nwd.tlv.stable) and\
is_sublist([1, 1], getattr(p.thread_nwd.tlv, '6co').flag.c) and\
is_sublist([Ipv6Addr(PREFIX_1[:-3]), Ipv6Addr(PREFIX_2[:-3])],
p.thread_nwd.tlv.prefix) and\
p.mle.tlv.leader_data.data_version ==
(_dr_pkt1.mle.tlv.leader_data.data_version + 1) % 256 and\
p.mle.tlv.leader_data.stable_data_version ==
(_dr_pkt1.mle.tlv.leader_data.stable_data_version + 1) % 256
).\
must_next()
# Step 13: Leader MUST send a MLE Child Update Request or MLE Data
# Response to SED, containing the stable Network Data
# including the following TLVs:
# - Source Address TLV
# - Leader Data TLV
# - Network Data TLV
# - At least two Prefix TLVs (Prefix 1 and Prefix 2)
# - Prefix 1 TLV
# - Stable Flag set
# - Border Router sub-TLV
# corresponding to Router_1
# - P_border_router_16 <0xFFFE>
# - Stable flag set
# - 6LoWPAN ID sub-TLV
# - Stable flag set
# - Prefix 2 TLV
# - Stable Flag set
# - Border Router sub-TLV
# corresponding to Router_2
# - P_border_router_16 <0xFFFE>
# - Stable flag set
# - 6LoWPAN ID sub-TLV
# - Stable flag set
# - Active Timestamp TLV
with pkts.save_index():
pkts.filter_wpan_src64(LEADER).\
filter_wpan_dst64(SED).\
filter_mle_cmd2(MLE_CHILD_UPDATE_REQUEST, MLE_DATA_RESPONSE).\
filter(lambda p: {
NETWORK_DATA_TLV,
SOURCE_ADDRESS_TLV,
LEADER_DATA_TLV,
ACTIVE_TIMESTAMP_TLV
} == set(p.mle.tlv.type) and\
is_sublist([1, 1, 1, 1, 1, 1],
p.thread_nwd.tlv.stable) and\
is_sublist([1, 1], getattr(p.thread_nwd.tlv, '6co').flag.c) and\
is_sublist([Ipv6Addr(PREFIX_1[:-3]), Ipv6Addr(PREFIX_2[:-3])],
p.thread_nwd.tlv.prefix) and\
is_sublist([0xFFFE, 0xFFFE], p.thread_nwd.tlv.border_router_16)
).\
must_next()
# Step 14: Verifies connectivity by sending ICMPv6 Echo Requests from
# Router_1 and SED_1 to the Leader Prefix_1 and Prefix_2-based
# address.
# Leader must respond with ICMPv6 Echo Replies
for i in (0, 1):
for node in ('ROUTER_1', 'SED'):
_pkt = pkts.filter_ping_request().\
filter_ipv6_src_dst(GUA[i][node], GUA[i]['LEADER']).\
must_next()
pkts.filter_ping_reply(identifier=_pkt.icmpv6.echo.identifier).\
filter_ipv6_src_dst(GUA[i]['LEADER'], GUA[i][node]).\
must_next()
# Step 15: Router_2 sends a CoAP Server Data Notification message with
# the server’s information (Prefix, Border Router) to the Leader:
# CoAP Request URI
# coap://[<leader address>]:MM/a/sd
# CoAP Payload
# empty payload
# Step 16: Leader sends a CoAP ACK frame to each of Router_1 and
# Router_2
with pkts.save_index():
_dn_pkt = pkts.filter_wpan_src64(ROUTER_2).\
filter_wpan_dst16(LEADER_RLOC16).\
filter_coap_request(SVR_DATA_URI).\
filter(lambda p:
p.thread_nwd.tlv.border_router_16 is nullField
).\
must_next()
pkts.filter_wpan_src64(LEADER).\
filter_ipv6_dst(ROUTER_2_RLOC).\
filter_coap_ack(SVR_DATA_URI).\
filter(lambda p: p.coap.mid == _dn_pkt.coap.mid).\
must_next()
# Step 17: Leader MUST multicast MLE Data Response with the new
# information collected from Router_2,
# including the following TLVs:,
# - Source Address TLV
# - Leader Data TLV
# - Data Version field <incremented>
# - Stable Data Version field <incremented>
# - Network Data TLV
# - Stable Flag set
# - At least two Prefix TLVs (Prefix 1 and Prefix 2)
# - Prefix 1 TLV
# - Stable Flag set
# - Only one Border Router sub-TLV
# corresponding to Router_1
# - 6LoWPAN ID sub-TLV
# - Stable Flag set
# - Prefix 2 TLV
# - Stable Flag set
# - 6LoWPAN ID sub-TLV
# - Stable Flag set
# - compression flag set to 0
_pkt = pkts.filter_wpan_src64(LEADER).\
filter_LLANMA().\
filter_mle_cmd(MLE_DATA_RESPONSE).\
filter(lambda p: {
NETWORK_DATA_TLV,
SOURCE_ADDRESS_TLV,
LEADER_DATA_TLV
} <= set(p.mle.tlv.type) and\
{
NWD_BORDER_ROUTER_TLV,
NWD_6LOWPAN_ID_TLV
} <= set(p.thread_nwd.tlv.type) and\
p.mle.tlv.leader_data.data_version ==
(_dr_pkt2.mle.tlv.leader_data.data_version + 1) % 256 and\
p.mle.tlv.leader_data.stable_data_version ==
(_dr_pkt2.mle.tlv.leader_data.stable_data_version + 1) % 256 and\
is_sublist([Ipv6Addr(PREFIX_1[:-3]), Ipv6Addr(PREFIX_2[:-3])],
p.thread_nwd.tlv.prefix) and\
is_sublist([1,0], getattr(p.thread_nwd.tlv, '6co').flag.c)
).\
must_next()
# Step 18: Leader MUST send a MLE Child Update Request or MLE Data
# Response to SED, containing the stable Network Data
# including the following TLVs:
# - Source Address TLV
# - Leader Data TLV
# - Network Data TLV
# - At least two Prefix TLVs (Prefix 1 and Prefix 2)
# - Prefix 1 TLV
# - Stable Flag set
# - Border Router sub-TLV
# corresponding to Router_1
# - P_border_router_16 <0xFFFE>
# - Stable flag set
# - 6LoWPAN ID sub-TLV
# - Stable flag set
# - Prefix 2 TLV
# - Stable Flag set
# - 6LoWPAN ID sub-TLV
# - Stable flag set
# - compression flag set to 0
# - Active Timestamp TLV
pkts.filter_wpan_src64(LEADER).\
filter_wpan_dst64(SED).\
filter_mle_cmd2(MLE_CHILD_UPDATE_REQUEST, MLE_DATA_RESPONSE).\
filter(lambda p: {
NETWORK_DATA_TLV,
SOURCE_ADDRESS_TLV,
LEADER_DATA_TLV,
ACTIVE_TIMESTAMP_TLV
} == set(p.mle.tlv.type) and\
is_sublist([Ipv6Addr(PREFIX_1[:-3]), Ipv6Addr(PREFIX_2[:-3])],
p.thread_nwd.tlv.prefix) and\
is_sublist([1, 1, 1, 1, 1], p.thread_nwd.tlv.stable) and\
is_sublist([0xFFFE], p.thread_nwd.tlv.border_router_16) and\
is_sublist([1,0], getattr(p.thread_nwd.tlv, '6co').flag.c)
).\
must_next()
if __name__ == '__main__':
unittest.main()
|
[
"elemon@apple.com"
] |
elemon@apple.com
|
f6231ff6fc5a7141afd3541cd1de54d7e5bb03ea
|
d2852bdff722d446d3774754f5cfa4b243f17c74
|
/resumes/models.py
|
1a87e53eee75e66a449a3930cb6e85f22b998102
|
[
"MIT"
] |
permissive
|
USUDR2604/Django-ResumeBuilder
|
16e01d85993f0642748f4c1f79deef506bb882d6
|
0c6066d96fd20c029e5d5b0a447eaa5e8fc80fb6
|
refs/heads/master
| 2023-07-12T16:14:00.520052
| 2021-08-23T03:34:37
| 2021-08-23T03:34:37
| 397,090,820
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 11,090
|
py
|
from django.db import models
from django.contrib.auth.models import User
from .Choices import *
# Create your models here.
class ContactDetails(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE)
First_Name = models.CharField(max_length=100,help_text='Enter your First Name')
Last_Name = models.CharField(max_length=100,help_text='Enter your Last Name')
Email_Id = models.EmailField(max_length=100,help_text='Enter your Email Id')
Mobile_No = models.CharField(max_length=10, help_text='Enter Your Mobile No')
Alternate_Mobile_No=models.CharField(max_length=10, help_text='Enter Your Alternate Mobile No')
Address = models.CharField(max_length=800, help_text='Enter your Address')
Address_2 = models.CharField(max_length=800, help_text='Enter your Second Address')
City = models.CharField(max_length=80, help_text='Enter your City Name')
State = models.CharField(max_length=100, help_text='Enter State Name')
ZipCode = models.CharField(max_length=6, help_text='Enter your ZipCode')
Country = models.CharField(max_length=50, help_text='Enter Country Name')
def __str__(self):
return str(self.user)
class PersonalDetails(models.Model):
EXP_INTERN_CHOICES=(('YES',"YES"),("NO","NO"))
GENDER_CHOICES=(("MALE","MALE"),("FEMALE","FEMALE"),("OTHERS","OTHERS"))
user = models.OneToOneField(User, on_delete=models.CASCADE)
Photo = models.ImageField(default='default.jpg',upload_to='Photos/')
Experience = models.CharField(max_length=100, choices=EXP_INTERN_CHOICES)
Internship = models.CharField(max_length=100,choices=EXP_INTERN_CHOICES)
Gender = models.CharField(max_length=50, choices=GENDER_CHOICES)
DOB = models.DateField(help_text='Date Of Birth Details')
def __str__(self):
return str(self.user)
class LanguageDetails(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE)
Language_Name = models.CharField(max_length=100,help_text="Language Name")
Language_Confidence = models.PositiveIntegerField()
def __str__(self):
return str(self.user)
class SocialMediaLinks(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE)
SocialMedia_Website_Name = models.CharField(max_length=100,help_text='Enter Social Media Website Name')
SocialMedia_Link = models.URLField(max_length=700,help_text='Enter Social Media Web Link')
def __str__(self):
return str(self.user)
class Skills(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE)
Skill_Name=models.CharField(max_length=300, help_text='Enter your Skill',unique=True)
Skill_Percentage=models.PositiveIntegerField()
def __str__(self):
return str(self.user)
class Interests(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE)
Interest_Names = models.CharField(max_length=200,help_text='Enter your Interests')
def __str__(self):
return str(self.user)
class SoftSkills(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE)
SoftSkill_Name = models.CharField(max_length=200,help_text='Enter Soft Skill')
def __str__(self):
return str(self.user)
class AchievementsOrActivities(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE)
ActAchiev_Choice = models.CharField(max_length=200,choices=ACTIVITY_ACHIEVEMENTS,help_text='Choose Either Activity or Achievement')
Achievement_Name = models.CharField(max_length=200,help_text='Enter Achievement Name')
Achievement_Description = models.CharField(max_length=800,help_text='Enter Achievement Description')
def __str__(self):
return str(self.user)
class EducationDetails(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE)
Organization_Type = models.CharField(max_length=100,choices=EDUCATION_CHOICES,help_text='Organization Type')
Country_Name=models.CharField(max_length=300,help_text='Country Name')
State_Name=models.CharField(max_length=300, help_text='Enter State Name')
City_Name=models.CharField(max_length=150,help_text='City Name')
ZipCode=models.CharField(max_length=6,help_text='ZipCode')
Organization_Name=models.CharField(max_length=400, help_text='Organization Name')
Board_Of_Study=models.CharField(max_length=400,blank=True,help_text='Board Of Study')
Field_Of_Study=models.CharField(max_length=400, help_text='Enter your Field Study')
Standard=models.CharField(max_length=120,help_text='Your Standard')
Year_Passing=models.DateField(help_text='Year Passing')
Score=models.DecimalField(max_digits = 5,decimal_places = 2)
def __str__(self):
return str(self.user)
class ProjectDetails(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE)
Project_Name=models.CharField(max_length=300,help_text='Enter Project Name')
Project_Url=models.URLField(max_length=400,help_text='Enter project Url')
Project_Description=models.TextField(max_length=800,help_text='Enter Project Description')
Project_Position=models.CharField(max_length=500,help_text='Enter your Position')
def __str__(self):
return str(self.user)
class TrainingDetails(models.Model):
TRAINING_CHOICES=(('ONLINE','ONLINE'),('OFFLINE',"OFFLINE"))
user = models.ForeignKey(User, on_delete=models.CASCADE)
Training_Type=models.CharField(max_length=100,choices=TRAINING_CHOICES,help_text='Enter Training Type')
Training_Country=models.CharField(max_length=300,help_text='Enter your Training Country',blank=True)
Training_State=models.CharField(max_length=200,help_text='Enter Training State',blank=True)
Training_City=models.CharField(max_length=100,help_text='Enter your Training City',blank=True)
Training_ZipCode=models.CharField(max_length=6,help_text='Enter Training Place ZipCode')
Training_Org_Name=models.CharField(max_length=500,help_text='Enter Training organization Name')
Training_Course_Name=models.CharField(max_length=200,help_text='Enter Training Course')
Training_startDate=models.DateField(help_text='Training Start Date')
Training_EndDate=models.DateField(help_text='Training End Date')
Training_Description=models.TextField(help_text='Enter Training Description')
def __str__(self):
return str(self.user)
class ExperienceDetails(models.Model):
EXPERIENCE_CHOICES=(('WORK FROM HOME','WORK FROM HOME'),('PART TIME','PART TIME'),('FULL TIME','FULL TIME'))
user = models.ForeignKey(User, on_delete=models.CASCADE)
Experience_choice=models.CharField(max_length=200,choices=EXPERIENCE_CHOICES,blank=True)
Country=models.CharField(max_length=150,help_text='Country Name',blank=True)
State=models.CharField(max_length=300,help_text='State Name',blank=True)
City=models.CharField(max_length=300,help_text='Enter City Name',blank=True)
ZipCode=models.CharField(max_length=6,help_text='Enter ZipCode',blank=True)
Experience_Type=models.CharField(max_length=25,help_text='Enter Experience Type',default='Experience',editable=False)
Experience_Job_Name = models.CharField(max_length=300,help_text='Enter Company Name')
Experience_Job_Type=models.CharField(max_length=300,help_text='Experience Job Type',blank=True)
Experience_Start_Date=models.DateField(help_text='Enter Start Date',blank=True)
Experience_End_Date=models.DateField(help_text='Enter End Date',blank=True)
Experience_Job_Description=models.TextField(max_length=800,help_text='Enter Experience Description',blank=True)
def __str__(self):
return str(self.user)
class InternshipDetails(models.Model):
INTERNSHIP_CHOICES=(("OFFLINE",'OFFLINE'),('PART TIME',"PART TIME"),("ONLINE","ONLINE"))
user = models.ForeignKey(User, on_delete=models.CASCADE)
Internship_choice=models.CharField(max_length=200,choices=INTERNSHIP_CHOICES,blank=True)
Country=models.CharField(max_length=150,help_text='Country Name',blank=True)
State=models.CharField(max_length=300,help_text='State Name',blank=True)
City=models.CharField(max_length=300,help_text='Enter City Name',blank=True)
ZipCode=models.CharField(max_length=6,help_text='Enter ZipCode',blank=True)
Internship_Type=models.CharField(max_length=25,help_text='Enter Internship Type',default='Internship',editable=False)
Internship_Company_Name=models.CharField(max_length=300,help_text="Enter Company Name")
Internship_Job_Type=models.CharField(max_length=300,help_text='Experience Job Type',blank=True)
Internship_Start_Date=models.DateField(help_text='Enter Start Date',blank=True)
Internship_End_Date=models.DateField(help_text='Enter End Date',blank=True)
Internship_Job_Description=models.TextField(max_length=800,help_text='Enter Experience Description',blank=True)
def __str__(self):
return str(self.user)
class CertificationDetails(models.Model):
TRAINING_CHOICES=(('ONLINE','ONLINE'),('OFFLINE',"OFFLINE"))
user = models.ForeignKey(User, on_delete=models.CASCADE)
Certification_Type=models.CharField(max_length=100,choices=TRAINING_CHOICES,help_text='Enter Training Type')
Certify_Country=models.CharField(max_length=300,help_text='Enter your Training Country',blank=True)
Certify_State=models.CharField(max_length=200,help_text='Enter Training State',blank=True)
Certify_City=models.CharField(max_length=100,help_text='Enter your Training City',blank=True)
Certify_ZipCode=models.CharField(max_length=100,help_text='Enter ZipCode',blank=True)
Certify_Org_Name=models.CharField(max_length=500,help_text='Enter Training organization Name')
Certify_Course_Name=models.CharField(max_length=200,help_text='Enter Training Course')
Certify_startDate=models.DateField(help_text='Training Start Date')
Certify_EndDate=models.DateField(help_text='Training End Date')
Certify_Description=models.TextField(help_text='Enter Training Description')
def __str__(self):
return str(self.user)
class StrengthWeakness(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE)
Strengths=models.TextField(max_length=600,help_text='Enter your Strengths')
Weakness=models.TextField(max_length=600,help_text='Enter your Weakness')
def __str__(self):
return str(self.user)
class HobbieDetails(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE)
Hobbie=models.CharField(max_length=120,help_text='Enter your Hobbie')
def __str__(self):
return str(self.user)
class SummaryDetails(models.Model):
aboutme = '''Enthusiastic Creater and Developer with innovative ideas and Well Trained and Certified Student.
Always Interested in Implementing Creative Ideas.'''
user = models.ForeignKey(User, on_delete=models.CASCADE)
Your_Position = models.CharField(max_length=200,help_text='enter your position')
About = models.CharField(max_length=350,help_text='About')
Summary = models.TextField(max_length=800,help_text='Your Information')
def __str__(self):
return str(self.user)
|
[
"66463073+USUDR2001@users.noreply.github.com"
] |
66463073+USUDR2001@users.noreply.github.com
|
77f62af9ab54245e6d7b24fb5c39089a2200afe5
|
07081ad771227a3cd8b8fe61e9578ea48a3137ff
|
/venv/bin/pip3.6
|
edd980f78cd2eb2d22701e461885f210a582b4bb
|
[] |
no_license
|
fernnf/benchtraffic
|
28a4de285d4011af5ef0ba4f014cbaf20ebccb74
|
656365959fdaeed7ad76c0c10347c532f879fbe7
|
refs/heads/master
| 2022-12-09T19:10:40.336333
| 2019-10-07T16:01:57
| 2019-10-07T16:01:57
| 206,405,292
| 0
| 0
| null | 2021-06-02T00:23:40
| 2019-09-04T20:14:08
|
Python
|
UTF-8
|
Python
| false
| false
| 399
|
6
|
#!/root/Workspace/benchtraffic/venv/bin/python
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==19.0.3','console_scripts','pip3.6'
__requires__ = 'pip==19.0.3'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pip==19.0.3', 'console_scripts', 'pip3.6')()
)
|
[
"fernnf@gmail.com"
] |
fernnf@gmail.com
|
0d2354cd3bc5cfe9b5a9a26ca7aa8f92a9696ef9
|
5ca031047050dce52862fd27042153e4ea538951
|
/fixture/group.py
|
08f49779dc9cb502e8a06c0905412af59dfa0ae1
|
[
"Apache-2.0"
] |
permissive
|
holi87/PythonSzkolenie
|
b7c18ab0acae27faa79357d49711e16fe768abb9
|
edb805b62a98e2be5635d01fa0611062509391b9
|
refs/heads/master
| 2021-01-21T06:30:58.090693
| 2017-04-15T23:39:41
| 2017-04-15T23:39:41
| 83,247,994
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,330
|
py
|
import fixture.basic
from model.group import Group
__author__ = "Grzegorz Holak"
class GroupHelper(fixture.basic.BasicHelper):
def __init__(self, app):
super(GroupHelper, self).__init__(app)
def open_groups_page(self):
wd = self.app.wd
if not (wd.current_url.endswith("/group") and len(wd.find_elements_by_name("new")) > 0):
wd.find_element_by_link_text("grupy").click()
def create(self, group):
wd = self.app.wd
self.open_groups_page()
wd.find_element_by_name("new").click()
self.fill_group_form(group)
wd.find_element_by_name("submit").click()
self.open_groups_page() # as return to groups page
self.group_cache = None
def delete_group_by_index(self, index):
wd = self.app.wd
self.open_groups_page()
self.select_group_by_index(index)
wd.find_element_by_name("delete").click()
self.open_groups_page()
self.group_cache = None
def delete_group_by_id(self, g_id):
wd = self.app.wd
self.open_groups_page()
self.select_group_by_id(g_id)
wd.find_element_by_name("delete").click()
self.open_groups_page()
self.group_cache = None
def select_group_by_index(self, index):
wd = self.app.wd
wd.find_elements_by_name("selected[]")[index].click()
def select_group_by_id(self, g_id):
wd = self.app.wd
wd.find_element_by_css_selector("input[value='%s']" % g_id).click()
def delete_first_group(self):
self.delete_group_by_index(0)
def select_first_group(self):
wd = self.app.wd
wd.find_element_by_name("selected[]").click()
def modify_first_group(self, new_group_data):
self.modify_group_by_index(0, new_group_data)
def modify_group_by_index(self, index, new_group_data):
wd = self.app.wd
# open group page
self.open_groups_page()
# init group modify
self.select_group_by_index(index)
wd.find_element_by_name("edit").click()
self.fill_group_form(new_group_data)
# submit group creation
wd.find_element_by_name("update").click()
self.open_groups_page() # as return to groups page
self.group_cache = None
def modify_group_by_id(self, g_id, new_group_data):
wd = self.app.wd
# open group page
self.open_groups_page()
# init group modify
self.select_group_by_id(g_id)
wd.find_element_by_name("edit").click()
self.fill_group_form(new_group_data)
# submit group creation
wd.find_element_by_name("update").click()
self.open_groups_page() # as return to groups page
self.group_cache = None
def fill_group_form(self, group):
wd = self.app.wd
self.change_field_value("group_name", group.name)
self.change_field_value("group_header", group.header)
self.change_field_value("group_footer", group.footer)
def count(self):
wd = self.app.wd
self.open_groups_page()
return len(wd.find_elements_by_name("selected[]"))
group_cache = None
def get_group_list(self):
if self.group_cache is None:
wd = self.app.wd
self.open_groups_page()
self.group_cache = []
for element in wd.find_elements_by_css_selector("span.group"):
text = element.text
group_id = element.find_element_by_name("selected[]").get_attribute("value")
self.group_cache.append(Group(name=text, group_id=group_id))
return list(self.group_cache)
def clean(self, group):
return Group(group_id=group.group_id, name=group.name.strip())
def add_selected_contact_to_group_by_group_id_(self, g_id):
wd = self.app.wd
wd.find_element_by_xpath('//select[@name="to_group"]/option[@value="%s"]' % g_id).click()
wd.find_element_by_xpath('//input[@name="add"]').click()
def select_group_by_id_to_display_contacts(self, g_id):
wd = self.app.wd
wd.find_element_by_xpath('//select[@name="group"]/option[@value="%s"]' % g_id).click()
def del_selected_contact_from_group(self):
wd = self.app.wd
wd.find_element_by_xpath('//input[@name="remove"]').click()
|
[
"holi87@gmail.com"
] |
holi87@gmail.com
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.