blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 777 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 149 values | src_encoding stringclasses 26 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 3 10.2M | extension stringclasses 188 values | content stringlengths 3 10.2M | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
cf3aef8d67017da08e3ac0f4469803969c4c23ce | 71c0121fb47df8ce11f33e7617dd262525ffea81 | /commandline/manager.py | 34727999ac0f2d6dedbde1132e0ac273ca1b4085 | [] | no_license | igor35hh/PythonTraining | 33d09b045b0f8676f23a5b43410aaa6a7c6a5631 | 020bc274bba0ffb70f1cdc45e18ea8b6467110fb | refs/heads/master | 2021-05-01T23:46:54.919344 | 2018-03-11T21:30:30 | 2018-03-11T21:30:30 | 77,922,552 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 821 | py |
from Person import Person
class Manager(Person):
    """A Person whose raises automatically include a manager bonus."""
    def __init__(self, name, age, pay):
        # BUG FIX: the job title literal was misspelled 'mahager'.
        Person.__init__(self, name, age, pay, 'manager')
    def giveRaise(self, percent, bonus=0.1):
        """Apply a raise of *percent* plus a default 10% bonus."""
        Person.giveRaise(self, percent + bonus)
if __name__ == '__main__':
tom = Manager(name='Tom Doe', age=50, pay=5000)
print(tom.lastName())
tom.giveRaise(.20)
print(tom.pay)
bob = Person(name='Bob Smith', age=42, pay=3000)
sue = Person(name='Sue Jones', age=45, pay=4000)
db = {bob, sue, tom}
for obj in db:
obj.giveRaise(.30)
for obj in db:
print(obj.lastName(), '=>', obj.pay)
print(obj)
for d in obj.__dict__:
print(obj.__dict__[d]);
for v in obj.__dict__.values():
print(v); | [
"igor35hh@gmail.com"
] | igor35hh@gmail.com |
37415d93e5786fda13e764919a9b90057f5fdbdc | db1dcf7cf7218e0de8eb7fa0da709effa507c3bf | /Mastering matplotlib/SF_MM/02-architecture/lib/modfind.py | dfc21e7e0c2b219c2e391cba3fd0d5ae28ad14d4 | [] | no_license | AaronCHH/B_PYTHON_matplotlib | a65da48771ce8248d51ee054eab91eeb3ce50e74 | 84c809800b3b797a09a5abfc76860ca7f7df8b80 | refs/heads/master | 2021-01-19T10:17:05.132372 | 2017-04-10T19:29:54 | 2017-04-10T19:29:54 | 87,849,667 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,416 | py | from collections import Counter, OrderedDict
from modulefinder import Module, ModuleFinder
from typecheck import typecheck
import typecheck as tc
class CustomFinder(ModuleFinder):
    """ModuleFinder that only follows matplotlib-related imports.

    cf_weights counts how often each matching module is imported;
    cf_imports records (caller, imported) edges in first-seen order.
    """
    def __init__(self, include: list=None, exclude: list=None,
                 *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.debug = False
        # Module-name prefixes to follow / ignore; exclude wins over include.
        self.cf_include = include or ["matpl", "mpl"]
        self.cf_exclude = exclude or ["matplotlib._", "ft2font", "ttconv"]
        self.cf_imports = OrderedDict()
        self.cf_weights = Counter()
    @typecheck
    def matches(self, name: str) -> bool:
        # True when *name* starts with an include prefix and no exclude prefix.
        include = True in [name.startswith(x) for x in self.cf_include]
        exclude = True in [name.startswith(x) for x in self.cf_exclude]
        if include and not exclude:
            return True
        return False
    @typecheck
    def import_hook(self, name: str, caller: tc.optional(Module)=None,
                    fromlist: tc.optional(list)=None,
                    level: int=-1) -> tc.optional(Module):
        # NOTE(review): nesting reconstructed from a flattened dump -- the
        # base-class hook appears to run only for matching modules; confirm.
        if self.matches(name):
            if caller:
                if self.debug:
                    print(caller.__name__, " -> ", name)
                # Tally the import edge per calling module.
                self.cf_weights[name] += 1
                self.cf_imports[(caller.__name__, name)] = 1
            super().import_hook(name, caller, fromlist, level)
# Export only the finder; the del scrubs helper imports from the namespace.
__all__ = ["CustomFinder"]
del Module, ModuleFinder, typecheck, tc
| [
"aaronhsu219@gmail.com"
] | aaronhsu219@gmail.com |
41f0e0207d7368fe936b6b7dd33c3d5918255fc7 | 744594f30c5e283f6252909fc68102dd7bc61091 | /2017/13/13b_solution.py | de65bf6b0591cf4c57cbfdbf892acb60bc33e3ad | [
"MIT"
] | permissive | vScourge/Advent_of_Code | 84f40c76e5dc13977876eea6dbea7d05637de686 | 36e4f428129502ddc93c3f8ba7950aed0a7314bb | refs/heads/master | 2022-12-20T22:12:28.646102 | 2022-12-15T22:16:28 | 2022-12-15T22:16:28 | 160,765,438 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,671 | py | """
Advent of Code 2017
input is: input.txt
"""
# Scanner movement directions for a firewall layer.
DIR_DOWN = 0
DIR_UP = 1

class Layer( ):
    """One firewall layer whose scanner bounces between 0 and range-1."""

    def __init__( self, depth, range ):
        self.depth = depth      # layer index within the firewall
        self.range = range      # number of scanner positions
        self.scan_pos = 0       # scanner starts at the top
        self.scan_dir = DIR_DOWN

    def move( self ):
        """Advance the scanner one step, reversing direction at either end."""
        if self.scan_dir == DIR_DOWN:
            self.scan_pos += 1
            hit_bottom = self.scan_pos == self.range - 1
            if hit_bottom:
                self.scan_dir = DIR_UP
        else:
            self.scan_pos -= 1
            hit_top = self.scan_pos == 0
            if hit_top:
                self.scan_dir = DIR_DOWN

    def __repr__( self ):
        return '<Layer {0}>'.format( self.depth )
def delay_then_run( layers, delay ):
    """Simulate crossing the firewall after waiting *delay* picoseconds.

    Returns True when the packet crosses uncaught, False otherwise.
    NOTE(review): depends on the module-level globals ``max_depth`` and
    ``DIR_DOWN`` being set before the call -- confirm callers do this.
    """
    pos = -1    # packet position; -1 = not yet inside the firewall
    count = 0   # elapsed picoseconds
    # Reset all scanners
    for layer_id in layers:
        layer = layers[ layer_id ]
        layer.scan_pos = 0
        layer.scan_dir = DIR_DOWN
    while pos <= max_depth:
        if count >= delay:
            pos += 1
            if pos in layers:
                layer = layers[ pos ]  # NOTE(review): unused; next line re-indexes
                # Caught when the scanner sits at position 0 as we enter.
                if layers[ pos ].scan_pos == 0:
                    print( 'delay {0}, caught on layer {1}'.format( delay, pos ) )
                    return False
        # move all scanners
        for layer in layers.values( ):
            layer.move( )
        count += 1
    return True
if __name__ == '__main__':
layers = { }
max_depth = 0
# Build dict of layer objects
for line in open( 'input.txt', 'r' ):
split = line.split( ':' )
layer_depth = int( split[ 0 ] )
layer_range = int( split[ 1 ].strip( ) )
layers[ layer_depth ] = Layer( layer_depth, layer_range )
max_depth = max( max_depth, layer_depth )
# Delay N picoseconds before run, and keep repeating with longer
# delays until we make it across
delay = 0
while True:
success = delay_then_run( layers, delay )
if success:
break
delay += 1
print( 'delay =', delay )
print( 'done' ) | [
"adam.pletcher@gmail.com"
] | adam.pletcher@gmail.com |
e9cab789778a120d62afba5b2378b2ea14e862b2 | 0af18096e2d024cc1e069b16e7c17d944c6f96e8 | /backend/naiosh_4776/wsgi.py | 70f7738558b87649acf9f02b276b15f0b7d28243 | [] | no_license | crowdbotics-apps/naiosh-4776 | 7ed8ee5c42efa74da842a36d55fb5acb68e68291 | 27386cc9d311d965e87b086ff10b44f469dd2a96 | refs/heads/master | 2022-12-12T03:43:11.995902 | 2019-06-17T10:09:09 | 2019-06-17T10:09:09 | 192,322,848 | 0 | 0 | null | 2022-12-03T14:01:00 | 2019-06-17T10:09:03 | JavaScript | UTF-8 | Python | false | false | 400 | py | """
WSGI config for naiosh_4776 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at this project's settings module before building the app.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "naiosh_4776.settings")
# Module-level WSGI callable imported by servers (gunicorn, uWSGI, ...).
application = get_wsgi_application()
| [
"team@crowdbotics.com"
] | team@crowdbotics.com |
7b0cb2d2f3dee1ab0cc39a99d8091729a0a93698 | 98c6ea9c884152e8340605a706efefbea6170be5 | /examples/data/Assignment_3/ndxkee009/question4.py | 5473e314c72caf0b2087d06f5bbfde1e8a2e5a5d | [] | no_license | MrHamdulay/csc3-capstone | 479d659e1dcd28040e83ebd9e3374d0ccc0c6817 | 6f0fa0fa1555ceb1b0fb33f25e9694e68b6a53d2 | refs/heads/master | 2021-03-12T21:55:57.781339 | 2014-09-22T02:22:22 | 2014-09-22T02:22:22 | 22,372,174 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 605 | py | #Keegan Naidoo
#NDXKEE009
import math
s=eval(input("Enter the starting point N: \n"))
e=eval(input("Enter the ending point M: \n"))
s1=str(s)
e1=str(e)
c=1
print("The palindromic primes are:")
for i in range(s+1,e):
a=s+c
#print(a)
a1=str(a)
#print(a1)
#print(a1[::-1])
if all(i%x!=0 for x in range(2,int(math.sqrt(i))+1)):
if(a1==a1[::-1]):
print(a1)
c=c+1
| [
"jarr2000@gmail.com"
] | jarr2000@gmail.com |
8a3ae0b145000c2ff07323b20a3e821a08a00604 | b500996a0b29829fde6afe8b23178ca9df4a239d | /rydinfap/src/procdata/procxml.py | 85d8990856b92ff7752484b6827873e33fa16a25 | [] | no_license | eocampo2000/test-code | 48c4d444e323eef5e6fe7e61b018952ef3cd4134 | 49328664243e1a9daf9c567d1aaaa19fd4654c02 | refs/heads/master | 2016-08-11T07:35:31.346464 | 2016-02-13T12:33:55 | 2016-02-13T12:33:55 | 51,642,188 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,011 | py | '''
Created on May 15, 2012
@author: eocampo
XML files parser. Please specify the main LAYOUT.
TODO encapsulate it into a class
EO 20150313 : Added parseCanCPI, _parseCanCPITable methods.
'''
__version__ = '20150313'
import utils.fileutils as fu
import xml.etree.ElementTree as ET
import sys
import string as ST
from bs4 import BeautifulSoup
# Use for informatica schedules
#<IS_SCHED>
# <FOLDER>
# <WF sched="1">wkf_name</WF>
# <WF sched="1">wkf_name</WF>
# </FOLDER>
#</IS_SCHED>
sched = []
unsched = []
# Method parses <WF status="1">wkf_name</WF>
def _parseFolder(fld,node):
    """Append 'folder.workflow' names from <wkf> children of *node* to the
    module-level sched/unsched lists, split on the 'sched' attribute."""
    for elem in node:
        if elem.tag == 'wkf':
            sc = elem.get('sched')
            wf = ST.strip(elem.text)
            if sc == '1' : sched.append('%s.%s' % (fld,wf))
            else : unsched.append('%s.%s' % (fld,wf))
# Method returns rc 0 if no error and the sched and unsched lists.
def parseSched(fn):
    """Parse an <IS_SCHED> schedule XML file.

    Returns (rc, sched, unsched) where rc is 0 on success, 1 = file could
    not be opened, 2 = empty tree, 3 = wrong root tag, 4 = XML parse error.
    The module-level sched/unsched lists are filled via _parseFolder().
    """
    f = fu.openFile(fn, 'r')
    if f is None : return (1, [], [])
    try:
        tree = ET.parse(f)
        # Empty tree (NOTE(review): ET.parse never returns '' -- this
        # guard looks ineffective; kept for compatibility).
        if tree == '' : return (2, [], [])
        rt = tree.getroot()
        # 1- Check TREE root
        if ST.upper(rt.tag) != 'IS_SCHED' : return (3, [], [])
        for elem in tree.getiterator():
            for e in elem:
                if ST.lower(e.tag) == 'folder' :
                    _parseFolder(e.get('name'), e.getchildren())
    except ET.ParseError:
        print ("Error %s \t %s " % (sys.exc_type, sys.exc_value))
        return (4, [], [])
    # BUG FIX: the success return previously lived in a ``finally`` clause,
    # which unconditionally overrode every error return above (a return in
    # finally swallows returns and exceptions raised in try/except).
    return 0, sched, unsched
# Method to parse CPI Table
def _parseCanCPITable(rows,sep='\t'):
d =[]
for row in rows:
cells = row.findChildren('td')
if len(cells) < 3 : continue
i = 0
wline = "CAN"
for cell in cells:
i = i + 1
value = cell.string
value = value.replace("-", sep)
if i < 3:
wline = wline + sep + value
else:
d.append(wline + '\n')
break
return d
def parseCanCPI(fn):
    """Read an HTML file and flatten the Canadian CPI table rows.

    Returns [] when the file is missing or empty; otherwise delegates
    row flattening to _parseCanCPITable().
    """
    data = fu.readFile(fn)
    # NOTE(review): ``data is ''`` tests identity, not equality; it happens
    # to work for interned empty strings in CPython, but ``==`` is meant.
    if data is None or data is '' : return []
    soup = BeautifulSoup(data)
    # Locate the CPI table by its exact Bootstrap class string.
    table = soup.find("table", { "class" : "table table-bordered table-striped table-hover cpi" })
    rows = table.findChildren(['tr'])
    return _parseCanCPITable(rows)
def test_schd():
    # Ad-hoc manual smoke test against a local Windows path (Python 2 print).
    fn = r'C:\infa_support\schedules\sched.xml'
    rc,s,u = parseSched(fn)
    #parseSched(fn)
    print "rc = ", rc, "\tsched = ", s , "\tunsched ", u
def test_cpi():
    # Ad-hoc manual smoke test for the CPI HTML parser (Python 2 print).
    fn = 'C:\\apps\\cpi_data_us.html'
    d = parseCanCPI(fn)
    #parseSched(fn)
    print "len(d ) = ", len(d), "data= ", d
# Manual test harness entry point; switch the comment to test scheduling.
if __name__ == "__main__":
    #test_schd()
    test_cpi()
"eocampo1000@hotmail.com"
] | eocampo1000@hotmail.com |
bb2a5c272cb0c24dd423ee814a9a892c61bfe4e2 | 1c40857067b4d92e4efe161d154927c928548a7d | /workery/tenant_api/views/staff_comment.py | 168d92c62878ef8fe2ef89a2cc6ead80bac6836b | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | rahulyhg/workery-django | 9331eb1b87b02407a721e9d512e3d60cb22af314 | 73fd500fefcf4212cf07071e16cf676e173ddea6 | refs/heads/master | 2020-06-29T08:34:03.529503 | 2019-08-03T21:18:09 | 2019-08-03T21:18:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,849 | py | # -*- coding: utf-8 -*-
from ipware import get_client_ip
from django_filters.rest_framework import DjangoFilterBackend
from django.conf.urls import url, include
from django.shortcuts import get_list_or_404, get_object_or_404
from rest_framework import filters
from rest_framework import generics
from rest_framework import authentication, viewsets, permissions, status
from rest_framework.response import Response
from shared_foundation.custom.drf.permissions import IsAuthenticatedAndIsActivePermission
from tenant_api.pagination import TinyResultsSetPagination
from tenant_api.permissions.staff import (
CanListCreateStaffPermission,
CanRetrieveUpdateDestroyStaffPermission
)
from tenant_api.serializers.staff_comment import (
StaffCommentListCreateSerializer,
)
from tenant_foundation.models import Staff
class StaffCommentListCreateAPIView(generics.ListCreateAPIView):
    """REST endpoint that lists staff records and attaches new comments."""
    serializer_class = StaffCommentListCreateSerializer
    pagination_class = TinyResultsSetPagination
    permission_classes = (
        permissions.IsAuthenticated,
        IsAuthenticatedAndIsActivePermission,
        CanListCreateStaffPermission
    )
    def get_queryset(self):
        """
        List all Staff records, newest first.
        """
        queryset = Staff.objects.all().order_by('-created')
        return queryset
    def post(self, request, format=None):
        """
        Create a staff comment, recording the authenticated user, the
        client IP (and whether it is publicly routable) and the tenant.
        """
        client_ip, is_routable = get_client_ip(self.request)
        serializer = StaffCommentListCreateSerializer(data=request.data, context={
            'created_by': request.user,
            'created_from': client_ip,
            'created_from_is_public': is_routable,
            'franchise': request.tenant
        })
        # raise_exception=True returns a 400 automatically on bad input.
        serializer.is_valid(raise_exception=True)
        serializer.save()
        return Response(serializer.data, status=status.HTTP_201_CREATED)
| [
"bart@mikasoftware.com"
] | bart@mikasoftware.com |
144328a1ef2d6268d21d76ba311a8ab52315f9e7 | f07a42f652f46106dee4749277d41c302e2b7406 | /Data Set/bug-fixing-5/fa45c44026ed471714d0383fd2731911d16a1271-<main>-fix.py | c47982491f786ea4edc3a9e39524947a6f75ebe6 | [] | no_license | wsgan001/PyFPattern | e0fe06341cc5d51b3ad0fe29b84098d140ed54d1 | cc347e32745f99c0cd95e79a18ddacc4574d7faa | refs/heads/main | 2023-08-25T23:48:26.112133 | 2021-10-23T14:11:22 | 2021-10-23T14:11:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,054 | py | def main():
args = parse_args()
try:
sys.stdout = StringIO()
config_files = (cloud_config.CONFIG_FILES + CONFIG_FILES)
sdk.enable_logging(debug=args.debug)
inventory_args = dict(refresh=args.refresh, config_files=config_files, private=args.private, cloud=args.cloud)
if hasattr(sdk_inventory.OpenStackInventory, 'extra_config'):
inventory_args.update(dict(config_key='ansible', config_defaults={
'use_hostnames': False,
'expand_hostvars': True,
'fail_on_errors': True,
}))
inventory = sdk_inventory.OpenStackInventory(**inventory_args)
sys.stdout = sys.__stdout__
if args.list:
output = get_host_groups(inventory, refresh=args.refresh, cloud=args.cloud)
elif args.host:
output = to_json(inventory.get_host(args.host))
print(output)
except sdk.exceptions.OpenStackCloudException as e:
sys.stderr.write(('%s\n' % e.message))
sys.exit(1)
sys.exit(0) | [
"dg1732004@smail.nju.edu.cn"
] | dg1732004@smail.nju.edu.cn |
20e2fa8b1ace352e52ea0894f8e32dc5c436ddab | 9cf6b31876b6fe3652e9d2613afff41793cc7d49 | /apps/cpa/forms.py | aab6be6e553718cf8eca57f5b85acd26ba63747a | [] | no_license | amyard/findinshopGit | 2a3b56c845691573b33ccc66b036cab0effa0e8e | b642bc81cf633c95ccd978d5e9fb4177eee38be4 | refs/heads/master | 2022-12-13T19:17:45.031095 | 2019-10-31T13:08:51 | 2019-10-31T13:08:51 | 213,323,148 | 0 | 0 | null | 2022-12-10T04:48:01 | 2019-10-07T07:44:13 | Roff | UTF-8 | Python | false | false | 3,269 | py | # -*- coding: utf-8 -*-
#Python imports
from datetime import timedelta
#Django imports
from django import forms
from django.shortcuts import get_object_or_404
from django.utils import timezone
from django.core.cache import cache
#Findinshop
from apps.cpa.models import CostSetting, OwnAndUserCategory
from apps.cpa.validators import MIN_COST_RATE
from apps.cpa.utils import float_to_python
from apps.section.models import Section
from apps.catalog.models import Category
from apps.website.models import Website
class CategoryCostForm(forms.ModelForm):
    """Form for setting a CPA cost rate on a top-level section.

    NOTE(review): clean() reads ``self.user``, which is never set here --
    the consuming view must assign it before validation; confirm.
    """
    class Meta:
        model = CostSetting
        fields = ('section', 'current_rate')
    def __init__(self, *args, **kwargs):
        super(CategoryCostForm, self).__init__(*args, **kwargs)
        # Only top-level (parent) sections can carry a rate.
        self.fields['section'].queryset = Section.parents.all()
        self.fields['current_rate'].help_text = 'Минимальная стоимость %s грн.' % MIN_COST_RATE
        self.fields['current_rate'].to_python = float_to_python
    def clean(self):
        cleaned_data = super(CategoryCostForm, self).clean()
        section = cleaned_data.get('section')
        if section and self.user:
            #setting = get_object_or_404(CostSetting, user=self.user, section=section)
            setting, created = CostSetting.objects.get_or_create(user=self.user, section=section)
            # Reject a no-op change to the same rate.
            if setting.current_rate == cleaned_data.get('current_rate'):
                raise forms.ValidationError(u'Такая ставка уже установлена.')
            # Enforce a 3-hour cool-down between rate changes per section.
            if setting.changed is True:
                time_tree_hour_ago = timezone.now() - timedelta(hours=3)
                if time_tree_hour_ago < setting.date_change:
                    raise forms.ValidationError(u'Повторное изменение ставки на эту категорию возможно через 3 часа')
        return cleaned_data
class OwnAndUserCategoryForm(forms.ModelForm):
    """Admin form mapping site categories onto the project's own sections."""
    class Meta:
        model = OwnAndUserCategory
        fields = '__all__'
    def __init__(self, *args, **kwargs):
        super(OwnAndUserCategoryForm, self).__init__(*args, **kwargs)
        # Disabled experiment: cache the child-section queryset for 2 hours.
        #if not cache.get('key_queryset_section_children_admin', False):
        #    queryset_section = Section.children.all().order_by('parent__name')
        #    cache.set('key_queryset_section_children_admin', queryset_section, 2*3600)#2 hours
        #else:
        #    queryset_section = cache.get('key_queryset_section_children_admin')
        #self.fields['our_section'].queryset = queryset_section
        self.fields['our_section'].queryset = Section.parents.all()
        # Limit selectable categories to the instance's site catalog when
        # editing; offer none for a brand-new record.
        if self.instance.pk:
            self.fields['categories'].queryset = Category.objects.filter(catalog=self.instance.site.catalog)
        else:
            # NOTE(review): sets .choices here but .queryset above -- confirm
            # the asymmetry is intentional.
            self.fields['categories'].choices = Category.objects.none()
        self.fields['site'].queryset = Website.objects.order_by('subdomain')
class ReportClickForm(forms.Form):
    """Date-range filter used by the click report view."""
    date_from = forms.DateField(
        label=u'Начиная с даты',
        #input_formats='%d.%m.%Y'
    )
    date_to = forms.DateField(
        label=u'Заканчивая датой',
        #input_formats='%d.%m.%Y'
    )
| [
"maksymturenko@gmail.com"
] | maksymturenko@gmail.com |
ae516920a922dc54aabf018db891dd22f8e371f0 | 34cb685d3340cb59c2f3639b3b5ca42ff3812338 | /pptx/shapes/__init__.py | 99f249ecc8e51d468e8c571d0f1aa09118457441 | [
"MIT"
] | permissive | handwriter/python-pptx | 6b435b6c9c95fcc00cd2aa0923ca15e211228a8b | 22351c6f9fe637cadddca3461c4899af7d439711 | refs/heads/master | 2021-04-05T00:27:20.870352 | 2020-03-19T13:20:28 | 2020-03-19T13:20:28 | 248,506,405 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 614 | py | # encoding: utf-8
"""
Objects used across sub-package
"""
class Subshape(object):
    """
    Base class for drawing elements that live below a shape.

    Keeps the parent element in ``self._parent`` so subclasses can reach
    ancestor services such as adding or dropping a relationship.
    """
    def __init__(self, parent):
        super(Subshape, self).__init__()
        self._parent = parent

    @property
    def part(self):
        """
        The package part containing this object, delegated to the parent.
        """
        return self._parent.part
"62296664+handwriter@users.noreply.github.com"
] | 62296664+handwriter@users.noreply.github.com |
ceaa8c09f078a8c7e8371072a72da1225db7cfa5 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/otherforms/_gigolos.py | ef7c72c0482e98e0450b0486f04f9aea27e380da | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 222 | py |
#class header
class _GIGOLOS():
    """Generated word-form record for 'gigolos' (plural of 'gigolo')."""
    def __init__(self,):
        self.name = "GIGOLOS"
        # NOTE(review): ``gigolo`` is an undefined name -- instantiating this
        # class raises NameError; the generator probably meant the string
        # 'gigolo' or a definitions lookup.  Confirm before fixing.
        self.definitions = gigolo
        self.parents = []
        self.childen = []  # NOTE(review): likely a typo for 'children'
        self.properties = []
        self.jsondata = {}
        self.basic = ['gigolo']
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
5748810f922be1440a1298e49a103ac06d55a06e | a5747577f1f4b38823f138ec0fbb34a0380cd673 | /17/mc/ExoDiBosonResonances/EDBRTreeMaker/test/crab3_analysisM4000_R_0-6.py | 910fd4a43e6bc2485a23bcac0657f0fb8a30f1a0 | [] | no_license | xdlyu/fullRunII_ntuple | 346fc1da4cec9da4c404aa1ec0bfdaece6df1526 | aa00ca4ce15ae050c3096d7af779de44fc59141e | refs/heads/master | 2020-08-03T07:52:29.544528 | 2020-01-22T14:18:12 | 2020-01-22T14:18:12 | 211,673,739 | 0 | 3 | null | null | null | null | UTF-8 | Python | false | false | 2,282 | py | from WMCore.Configuration import Configuration
name = 'WWW'
steam_dir = 'xulyu'
config = Configuration()
config.section_("General")
config.General.requestName = 'M4000_R0-6_off'
config.General.transferLogs = True
config.section_("JobType")
config.JobType.pluginName = 'Analysis'
config.JobType.inputFiles = ['Fall17_17Nov2017_V8_MC_L1FastJet_AK4PFchs.txt','Fall17_17Nov2017_V8_MC_L2Relative_AK4PFchs.txt','Fall17_17Nov2017_V8_MC_L3Absolute_AK4PFchs.txt','Fall17_17Nov2017_V8_MC_L1FastJet_AK8PFchs.txt','Fall17_17Nov2017_V8_MC_L2Relative_AK8PFchs.txt','Fall17_17Nov2017_V8_MC_L3Absolute_AK8PFchs.txt','Fall17_17Nov2017_V8_MC_L1FastJet_AK8PFPuppi.txt','Fall17_17Nov2017_V8_MC_L2Relative_AK8PFPuppi.txt','Fall17_17Nov2017_V8_MC_L3Absolute_AK8PFPuppi.txt','Fall17_17Nov2017_V8_MC_L1FastJet_AK4PFPuppi.txt','Fall17_17Nov2017_V8_MC_L2Relative_AK4PFPuppi.txt','Fall17_17Nov2017_V8_MC_L3Absolute_AK4PFPuppi.txt','L1PrefiringMaps_new.root']
#config.JobType.inputFiles = ['PHYS14_25_V2_All_L1FastJet_AK4PFchs.txt','PHYS14_25_V2_All_L2Relative_AK4PFchs.txt','PHYS14_25_V2_All_L3Absolute_AK4PFchs.txt','PHYS14_25_V2_All_L1FastJet_AK8PFchs.txt','PHYS14_25_V2_All_L2Relative_AK8PFchs.txt','PHYS14_25_V2_All_L3Absolute_AK8PFchs.txt']
# Name of the CMSSW configuration file
#config.JobType.psetName = 'bkg_ana.py'
config.JobType.psetName = 'analysis_sig.py'
#config.JobType.allowUndistributedCMSSW = True
config.JobType.allowUndistributedCMSSW = True
config.section_("Data")
#config.Data.inputDataset = '/WJetsToLNu_13TeV-madgraph-pythia8-tauola/Phys14DR-PU20bx25_PHYS14_25_V1-v1/MINIAODSIM'
config.Data.inputDataset = '/WkkToWRadionToWWW_M4000-R0-6_TuneCP5_13TeV-madgraph/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v4/MINIAODSIM'
#config.Data.inputDBS = 'global'
config.Data.inputDBS = 'global'
config.Data.splitting = 'FileBased'
config.Data.unitsPerJob =5
config.Data.totalUnits = -1
config.Data.publication = False
#config.Data.outLFNDirBase = '/store/group/dpg_trigger/comm_trigger/TriggerStudiesGroup/STEAM/' + steam_dir + '/' + name + '/'
# This string is used to construct the output dataset name
config.Data.outputDatasetTag = 'M4000_R0-6_off'
config.section_("Site")
# Where the output files will be transmitted to
config.Site.storageSite = 'T2_CH_CERN'
| [
"XXX@cern.ch"
] | XXX@cern.ch |
08c5e949442ecbfec52341e878dee28f49e30e0f | 6fb8892e0455043b4776e331f8176ab1139f1fd9 | /backend/home/migrations/0001_load_initial_data.py | c1927313a664726ec8d191be309891da27b452b7 | [] | no_license | crowdbotics-apps/gozle-browser-28537 | c5bc1dc800c0b9836f3cd067d1d421fdb447def2 | fd0ee4d32ca575df2422506894f9380d11b76533 | refs/heads/master | 2023-06-07T04:50:52.183276 | 2021-07-07T15:26:51 | 2021-07-07T15:26:51 | 383,841,811 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 544 | py | from django.db import migrations
def create_site(apps, schema_editor):
    """Seed (or refresh) the default django.contrib.sites row, pk=1."""
    site_model = apps.get_model("sites", "Site")
    custom_domain = "gozle-browser-28537.botics.co"
    # Always set the display name; attach the domain only when configured.
    params = {"name": "Gozle browser"}
    if custom_domain:
        params["domain"] = custom_domain
    site_model.objects.update_or_create(defaults=params, id=1)
class Migration(migrations.Migration):
    """Data migration: seed the default Site record via create_site()."""
    dependencies = [
        # Run after the sites app gains its unique-domain constraint.
        ("sites", "0002_alter_domain_unique"),
    ]
    operations = [
        migrations.RunPython(create_site),
    ]
| [
"team@crowdbotics.com"
] | team@crowdbotics.com |
60e0a38ac1246955d613e358f11c89163d652506 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_54/686.py | 4511b8d1827a6633638de3b13f1f24900abf7196 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 559 | py | #!/usr/bin/env python3
#-*- coding: utf-8 -*-
def gcd(a,b):
    """Greatest common divisor via the iterative Euclidean algorithm."""
    while b != 0:
        a, b = b, a % b
    return a
# Code Jam driver: for each case, find the gcd of consecutive differences
# and report how much must be added to align the sequence with it.
f = open("input.txt","r")
T = int(f.readline())
for i in range(T):
    nums = f.readline().split()
    nums = [int(x) for x in nums]
    # Consecutive differences, skipping the leading count field nums[0].
    diff = [abs(nums[k+1] - nums[k]) for k in range(1,len(nums)-1)]
    diff.sort()
    #print(diff)
    res = 0
    # Fold gcd over all differences (gcd(0, x) == x seeds the fold).
    for j in range(0,len(diff)):
        res = gcd(res,diff[j])
    if res == 1:
        print("Case #",i+1,": ",0, sep="")
    else:
        # NOTE(review): res == 0 (fewer than two values) would divide by
        # zero here -- presumably the input guarantees >= 3 numbers.
        print("Case #",i+1,": ",(res-(nums[1]%res))%res, sep="")
| [
"miliar1732@gmail.com"
] | miliar1732@gmail.com |
bc9c3ebea8a6a5642bd542c11bcc86389e7463ea | f3d38d0e1d50234ce5f17948361a50090ea8cddf | /백준/Bronze/Bronze 4/2530번 ; 인공지능 시계 아직 안품.py | 01ea7bdd693da5cd63b5c982f15fcf886091b6d1 | [] | no_license | bright-night-sky/algorithm_study | 967c512040c183d56c5cd923912a5e8f1c584546 | 8fd46644129e92137a62db657187b9b707d06985 | refs/heads/main | 2023-08-01T10:27:33.857897 | 2021-10-04T14:36:21 | 2021-10-04T14:36:21 | 323,322,211 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,383 | py | # https://www.acmicpc.net/problem/2530
# Read the current time: hour A, minute B, second C.
# 0 <= A <= 23
# 0 <= B <= 59
# 0 <= C <= 59
A, B, C = map(int, input().split(' '))
# Cooking time D, given in seconds.
# 0 <= D <= 500,000
D = int(input())
# Split the cooking time into whole hours, minutes and seconds.
plus_hour = D // 3600
D = D - plus_hour * 3600
plus_minute = D // 60
plus_second = D % 60
# Add the cooking time component-wise to the current time.
result_hour = A + plus_hour
result_minute = B + plus_minute
result_second = C + plus_second
# Carry seconds into minutes (both sums stay below 120, one carry suffices).
if result_second >= 60:
    result_minute += 1
    result_second -= 60
# Carry minutes into hours (same bound: a single carry is enough).
if result_minute >= 60:
    result_hour += 1
    result_minute -= 60
# Wrap hours onto the 24-hour clock.  BUG FIX: D can be up to 500,000 s
# (~138 hours), so the hour total may exceed 47 and subtracting 24 once
# is not enough -- use modulo instead of a single "-= 24".
result_hour %= 24
# Print the resulting time.
print(result_hour, result_minute, result_second)
| [
"bright_night_sky@naver.com"
] | bright_night_sky@naver.com |
4f507e84cd85006b08c0181440aecf1ec59707f3 | a8de9d989519cca6a94cc9c77b4de0efd6f2e8e1 | /ui/gtk/bases/widgets.py | 641cb1c6fce269d4dd6a55f34539b7380ab49948 | [] | no_license | nocarryr/nomadic_recording_lib | bde34a30674474473e8402f8309e59fb47b6fabd | 9070c75d8a18327565f206d3e5d6b054a207a259 | refs/heads/master | 2021-01-01T20:10:14.451132 | 2015-08-10T21:16:42 | 2015-08-10T21:16:42 | 23,086,499 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 44,251 | py | import threading
from ui_modules import gtk, gdk, pango
from nomadic_recording_lib.Bases import BaseObject
from nomadic_recording_lib.Bases.Properties import PropertyConnector
from nomadic_recording_lib.ui.bases import widgets as basewidgets
import gtksimple
get_gtk2_enum = gtksimple.get_gtk2_enum
get_gtk3_enum = gtksimple.get_gtk3_enum
get_gui_thread = gtksimple.get_gui_thread
import tree
import listmodel
#class Box(gtk.Box):
# pass
GTK_VERSION = BaseObject().GLOBAL_CONFIG['gtk_version']
class BoxMixin(object):
    """Adapt keyword-style pack calls to gtk.Box's positional
    (expand, fill, padding) signature, discarding table-only options."""

    def _pack_args(self, kwargs):
        # Normalize kwargs into the (expand, fill, padding) triple.
        opts = dict(kwargs)
        opts.setdefault('expand', False)
        opts.setdefault('fill', True)
        opts.setdefault('padding', 1)
        return opts['expand'], opts['fill'], opts['padding']

    def pack_start(self, widget, **kwargs):
        expand, fill, padding = self._pack_args(kwargs)
        super(BoxMixin, self).pack_start(widget, expand, fill, padding)

    def pack_end(self, widget, **kwargs):
        expand, fill, padding = self._pack_args(kwargs)
        super(BoxMixin, self).pack_end(widget, expand, fill, padding)
# Version switch: PyGTK 2 ships dedicated VBox/HBox classes, while GTK3
# uses a single gtk.Box parameterized by an Orientation enum.
if GTK_VERSION < 3:
    class VBox(BoxMixin, gtk.VBox):
        pass
    class HBox(BoxMixin, gtk.HBox):
        pass
else:
    class Box(BoxMixin, gtk.Box):
        def __init__(self, *args, **kwargs):
            # Resolve the subclass's _orientation string to the GTK3 enum.
            orientation = getattr(gtk.Orientation, self._orientation.upper())
            #super(Box, self).__init__(orientation)
            gtk.Box.__init__(self, orientation=orientation)
    class VBox(Box):
        _orientation = 'vertical'
    class HBox(Box):
        _orientation = 'horizontal'
# Version-independent {name: flag} map for gtk.Table attach options:
# gtk2 exposes the flags at module level, gtk3 under gtk.AttachOptions.
attach_keys = ['EXPAND', 'FILL']
if GTK_VERSION < 3:
    attach_vals = [gtk.EXPAND, gtk.FILL]
else:
    attach_vals = [gtk.AttachOptions.EXPAND, gtk.AttachOptions.FILL]
AttachOptions = dict(zip(attach_keys, attach_vals))
class Table(gtk.Table, basewidgets.Table):
    """gtk.Table bound to the toolkit-neutral basewidgets.Table layout
    manager; the base class drives widget placement via do_add_widget()
    and do_child_loc_update()."""
    def __init__(self, **kwargs):
        kwargs.setdefault('columns', 2)
        basewidgets.Table.__init__(self, **kwargs)
        # obj_sort_attr is consumed by the base class only; strip it before
        # forwarding the remaining kwargs to gtk.
        if self.obj_sort_attr:
            del kwargs['obj_sort_attr']
        gtk.Table.__init__(self, **kwargs)
    def attach(self, *args, **kwargs):
        """Translate a boolean 'expand' kwarg into x/y AttachOptions flags."""
        expand = kwargs.get('expand')
        if expand is not None:
            if expand:
                kwargs.setdefault('xoptions', AttachOptions['EXPAND'] | AttachOptions['FILL'])
                kwargs.setdefault('yoptions', AttachOptions['EXPAND'] | AttachOptions['FILL'])
                # NOTE(review): 'expand' is only removed on this branch; the
                # False path below appears to leak it into the gtk call.
                del kwargs['expand']
            else:
                kwargs.setdefault('xoptions', AttachOptions['FILL'])
                kwargs.setdefault('yoptions', AttachOptions['FILL'])
        super(Table, self).attach(*args, **kwargs)
    def do_add_widget(self, widget, loc, **kwargs):
        """Place *widget* at grid location loc == (row, column), growing the
        table's row/column counts as needed."""
        for x, prop in enumerate(['n-columns', 'n-rows']):
            if loc[x] > self.get_property(prop):
                self.set_property(prop, loc[x])
        self.attach(widget, loc[1], loc[1]+1, loc[0], loc[0]+1, **kwargs)
        widget.show()
    def remove(self, widget):
        # Detach from the gtk container and the base layout bookkeeping.
        gtk.Table.remove(self, widget)
        basewidgets.Table.remove(self, widget)
    def do_child_loc_update(self, widget, loc):
        """Move an existing child to a new (row, column) via child properties
        left-attach / right-attach / top-attach / bottom-attach."""
        args = [loc[1], loc[1]+1, loc[0], loc[0]+1]
        for x, prop in enumerate(['-'.join([key, 'attach']) for key in ['left', 'right', 'top', 'bottom']]):
            self.child_set_property(widget, prop, args[x])
def _set_font_scale(widget, scale):
    """Scale a label widget's font via pango attributes.

    Currently effectively a no-op: the pango calls are commented out
    pending a port to the current pango API (see TODO below).
    """
    attrs = widget.get_attributes()
    if attrs is None:
        attrs = pango.AttrList()
    ## TODO: figure out how pango works now
    #attrs.change(pango.SCALE(scale, 0, 9999))
    #widget.set_attributes(attrs)
# Map of justification names -> gtk enum values, keyed by gtk major version.
if GTK_VERSION < 3:
    JustifyOptions = get_gtk2_enum('JUSTIFY')
else:
    JustifyOptions = get_gtk3_enum('Justification')
class Label(gtk.Label, gtksimple.LabelMixIn):
    """gtk.Label whose text can be driven by a bound Property and updated
    safely from non-GUI threads (via the gtksimple.ThreadToGtk decorator)."""
    def __init__(self, label=None, **kwargs):
        gtk.Label.__init__(self, label)
        justify = kwargs.get('justification', 'center')
        self.set_justify(justify)
        fscale = kwargs.get('font_scale')
        if fscale:
            self.set_font_scale(fscale)
        # When True (default), set_text() hops to the GUI thread first.
        self._use_thread_control = kwargs.get('threaded', True)
        # Binding handled by LabelMixIn's Property descriptor.
        self.Property = kwargs.get('Property')
    @gtksimple.ThreadToGtk
    def update_text_from_Property(self, text):
        # Already marshalled to the GUI thread by the decorator.
        self._unthreaded_set_text(text)
    def set_text(self, text):
        if self._use_thread_control:
            self._threaded_set_text(text)
        else:
            self._unthreaded_set_text(text)
    @gtksimple.ThreadToGtk
    def _threaded_set_text(self, text):
        gtk.Label.set_text(self, text)
    def _unthreaded_set_text(self, text):
        gtk.Label.set_text(self, text)
    def set_font_scale(self, scale):
        # Deliberately disabled: _set_font_scale awaits a pango API port.
        return
        _set_font_scale(self, scale)
    def unlink(self):
        # Drop the Property binding (LabelMixIn disconnects callbacks).
        self.Property = None
    def set_justify(self, justify):
        """Accept either a gtk enum or a name string ('left', 'center', ...)."""
        if type(justify) == str:
            justify = JustifyOptions.get(justify.upper())
        if justify == JustifyOptions['LEFT']:
            # Left-justify the label within its allocation as well.
            self.set_alignment(0., .5)
        super(Label, self).set_justify(justify)
class Frame(gtk.Frame, gtksimple.LabelMixIn):
    """gtk.Frame with a Property-driven label and a packable 'topwidget'
    container that all child widgets are routed into."""
    def __init__(self, **kwargs):
        wid_kwargs = {'label':kwargs.get('label', '')}
        super(Frame, self).__init__(**wid_kwargs)
        fscale = kwargs.get('font_scale')
        if fscale:
            self.set_font_scale(fscale)
        # Subclasses may have created their own topwidget already.
        # NOTE(review): 'topwidget' is treated as a widget *class* here
        # (it is called), whereas Expander expects an instance -- confirm.
        if not hasattr(self, 'topwidget'):
            self.topwidget = kwargs.get('topwidget', VBox)()
            self.add(self.topwidget)
        self.Property = kwargs.get('Property')
    def unlink(self):
        # Drop the Property binding (LabelMixIn disconnects callbacks).
        self.Property = None
    @gtksimple.ThreadToGtk
    def update_text_from_Property(self, text):
        # Marshalled to the GUI thread by the decorator.
        self.set_label(text)
    def set_font_scale(self, scale):
        # Deliberately disabled: _set_font_scale awaits a pango API port.
        return
        _set_font_scale(self.get_label_widget(), scale)
    def add(self, *args, **kwargs):
        # Route children into the topwidget; only the topwidget itself is
        # added directly to the frame.
        if args[0] != self.topwidget:
            self.topwidget.add(*args, **kwargs)
        else:
            super(Frame, self).add(*args, **kwargs)
    def pack_start(self, *args, **kwargs):
        self.topwidget.pack_start(*args, **kwargs)
    def pack_end(self, *args, **kwargs):
        self.topwidget.pack_end(*args, **kwargs)
    def attach(self, *args, **kwargs):
        # Table-style alias; delegates to pack_start on the container.
        self.topwidget.pack_start(*args, **kwargs)
class Expander(gtk.Expander):
    """gtk.Expander wrapper that manages a packable 'topwidget' container.

    Keyword args: label, expanded (default True), font_scale, and
    topwidget -- an already-constructed container instance (default: a new
    VBox).  NOTE(review): Frame treats 'topwidget' as a widget *class*
    while this class expects an *instance*; confirm before unifying.
    """
    def __init__(self, **kwargs):
        expanded = kwargs.get('expanded', True)
        wid_kwargs = {'label':kwargs.get('label', '')}
        super(Expander, self).__init__(**wid_kwargs)
        fscale = kwargs.get('font_scale')
        if fscale:
            self.set_font_scale(fscale)
        # BUG FIX: the default used to be built eagerly as
        # kwargs.get('topwidget', VBox()), constructing a stray VBox even
        # when a topwidget was supplied.  Build the default lazily.
        topwidget = kwargs.get('topwidget')
        if topwidget is None:
            topwidget = VBox()
        self.topwidget = topwidget
        self.add(self.topwidget)
        self.set_expanded(expanded)
    def set_font_scale(self, scale):
        # Effectively a no-op while _set_font_scale awaits a pango port.
        _set_font_scale(self.get_label_widget(), scale)
    def pack_start(self, *args, **kwargs):
        """Delegate packing to the internal topwidget container."""
        self.topwidget.pack_start(*args, **kwargs)
    def attach(self, *args, **kwargs):
        """Table-style alias, also delegated to the topwidget."""
        self.topwidget.attach(*args, **kwargs)
class ScrolledWindow(gtk.ScrolledWindow):
    """ScrolledWindow that (optionally) inserts a gtk.Viewport and routes
    child add/remove calls through it."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): kwargs (including 'add_viewport') are forwarded to
        # gtk.ScrolledWindow unchanged -- confirm gtk tolerates the extra key.
        super(ScrolledWindow, self).__init__(*args, **kwargs)
        self.add_viewport = kwargs.get('add_viewport', True)
        if self.add_viewport:
            self.viewport = gtk.Viewport()
            self.add(self.viewport)
    def add(self, *args, **kwargs):
        # Children go inside the viewport; the viewport itself is the only
        # direct child of the scrolled window.
        if self.add_viewport and not isinstance(args[0], gtk.Viewport):
            self.viewport.add(*args, **kwargs)
        else:
            super(ScrolledWindow, self).add(*args, **kwargs)
    def pack_start(self, *args, **kwargs):
        # Box-style compatibility shim; only meaningful with a viewport.
        if not self.add_viewport:
            return
        self.viewport.add(args[0])
    def remove(self, *args, **kwargs):
        if self.add_viewport:
            self.viewport.remove(args[0])
        else:
            super(ScrolledWindow, self).remove(*args, **kwargs)
class DrawingArea(gtk.DrawingArea):
    """Thin alias for gtk.DrawingArea (placeholder for future extension)."""
    pass
class PaneMixin(object):
    """Mixin adding box-style packing and a 0..1 ``normalized_pos`` divider
    position to paned widgets (subclasses set ``size_attribute``)."""
    def pack_start(self, widget, **kwargs):
        # Fill the two pane slots in the order widgets are packed.
        if self.get_child1() is None:
            self.pack1(widget, True, False)
        elif self.get_child2() is None:
            self.pack2(widget, True, False)
    @property
    def normalized_pos(self):
        """Divider position as a fraction of the pane's allocated size."""
        # Use get_allocation() in both accessors (the getter previously read
        # the gtk2-only ``allocation`` attribute).
        size = getattr(self.get_allocation(), self.size_attribute)
        if size <= 0:
            # Not yet allocated; also avoids ZeroDivisionError at size == 0.
            return 0
        return self.get_position() / float(size)
    @normalized_pos.setter
    def normalized_pos(self, value):
        if value < 0 or value > 1:
            return
        size = getattr(self.get_allocation(), self.size_attribute)
        if size <= 0:
            return
        self.set_position(int(value * size))
# Paned widgets: pygtk ships distinct HPaned/VPaned classes, GTK3 uses a
# single Paned with an orientation property.
if GTK_VERSION < 3:
    class HPane(PaneMixin, gtk.HPaned):
        size_attribute = 'width'
    class VPane(PaneMixin, gtk.VPaned):
        size_attribute = 'height'
else:
    class Pane(PaneMixin, gtk.Paned):
        def __init__(self, **kwargs):
            # _orientation is supplied by the concrete subclass below.
            orientation = getattr(gtk.Orientation, self._orientation.upper())
            gtk.Paned.__init__(self, orientation=orientation)
    class HPane(Pane):
        size_attribute = 'width'
        _orientation = 'horizontal'
    class VPane(Pane):
        size_attribute = 'height'
        _orientation = 'vertical'
class Notebook(gtk.Notebook):
    """gtk.Notebook with a keyword-based page-adding helper."""
    def add_page(self, **kwargs):
        """Append ``widget`` as a page titled ``label``."""
        tab_label = gtk.Label(kwargs.get('label', ''))
        self.append_page(kwargs.get('widget'), tab_label)
class MenuBar(gtk.MenuBar):
    """Menu bar built from a declarative description.

    ``menu_order`` lists top-level menu ids in display order;
    ``menu_info`` maps each id to its item description (or a shorthand
    list/tuple of item names).
    """
    def __init__(self, **kwargs):
        super(MenuBar, self).__init__()
        self.menu_order = kwargs.get('menu_order')
        self.menu_info = kwargs.get('menu_info')
        # Normalize shorthand list/tuple values into {name:{'name':name}}.
        for key in self.menu_info.iterkeys():
            val = self.menu_info[key]
            if type(val) == list or type(val) == tuple:
                d = {}
                for s in val:
                    d.update({s:{'name':s}})
                self.menu_info[key] = d
        self.menus = {}
        for key in self.menu_order:
            val = self.menu_info[key]
            self.add_menu(key, **val)
        self.show()
    def add_menu(self, id, **kwargs):
        # Build a Menu wrapper and append its top-level item to the bar.
        menu = Menu(menubar=self, name=id, item_info=kwargs)
        self.menus.update({id:menu})
        self.append(menu.menuitem._item)
class Menu(object):
    """Wrapper around gtk.Menu built from an ``item_info`` description."""
    def __init__(self, **kwargs):
        #super(Menu, self).__init__()
        self._menu = gtk.Menu()
        # The menu item that owns this menu as its submenu.
        self.menuitem = MenuItem(name=kwargs.get('name'))
        self.menuitem._item.set_submenu(self._menu)
        self.menubar = kwargs.get('menubar')
        self.item_info = kwargs.get('item_info')
        self.items = {}
        for key, val in self.item_info.iteritems():
            self.add_item(key, **val)
    def add_item(self, id, **kwargs):
        item = MenuItem(**kwargs)
        self.items.update({id:item})
        self._menu.append(item._item)
class MenuItem(object):
    """Thin wrapper holding a gtk.MenuItem in ``_item``."""
    def __init__(self, **kwargs):
        self.item_name = kwargs.get('name')
        self._item = gtk.MenuItem(label=self.item_name)
        #super(MenuItem, self).__init__(label=self.item_name)
        self._item.show()
# Version-neutral lookup tables for dialog-related gtk enums.
if GTK_VERSION < 3:
    responses = get_gtk2_enum('RESPONSE')
    fc_actions = get_gtk2_enum('FILE_CHOOSER_ACTION')
    filter_flags = get_gtk2_enum('FILE_FILTER')
else:
    responses = get_gtk3_enum('ResponseType')
    fc_actions = get_gtk3_enum('FileChooserAction')
    filter_flags = get_gtk3_enum('FileFilterFlags')
# (stock_id, response) pairs used to assemble dialog button tuples.
btn_info = {'cancel':(gtk.STOCK_CANCEL, responses['CANCEL']),
            'open':(gtk.STOCK_OPEN, responses['OK']),
            'save':(gtk.STOCK_SAVE, responses['OK']),
            'ok':(gtk.STOCK_OK, responses['ACCEPT'])}
class FileDialog(BaseObject):
    """Wrapper around gtk.FileChooserDialog, run modally via show(), that
    emits a 'response' signal with the chosen filename/uri.

    ``filters`` maps FilterName -> [[filter_type, filter_data], ...] with
    filter_type one of 'pattern', 'mime' or 'custom'.
    """
    modes = {'open':fc_actions['OPEN'],
             'save':fc_actions['SAVE'],
             'select_folder':fc_actions['SELECT_FOLDER'],
             'create_folder':fc_actions['CREATE_FOLDER']}
    default_buttons = {'open':(btn_info['cancel'] + btn_info['open']),
                       'save':(btn_info['cancel'] + btn_info['save'])}
    filter_types = ['pattern', 'mime']
    def __init__(self, **kwargs):
        kwargs['ParentEmissionThread'] = get_gui_thread()
        super(FileDialog, self).__init__(**kwargs)
        self.register_signal('response')
        self.mode = kwargs.get('mode')
        self.overwrite_confirmation = kwargs.get('overwrite_confirmation', True)
        self.current_folder = kwargs.get('current_folder')
        filters = kwargs.get('filters', {})
        ##{FilterName:[[filter_type, filter_data],...]}
        self.filters = {}
        for key, val in filters.iteritems():
            self.add_filter(key, val)
        dlg_kwargs = {}
        if 'label' in kwargs:
            dlg_kwargs.update({'title':kwargs.get('label')})
        dlg_kwargs.update({'buttons':kwargs.get('buttons', self.default_buttons[self.mode])})
        dlg_kwargs.update({'action':self.modes[self.mode]})
        self.dialog = gtk.FileChooserDialog(**dlg_kwargs)
        for filter in self.filters.itervalues():
            self.dialog.add_filter(filter)
        default_filter = kwargs.get('default_filter')
        if default_filter is not None:
            f = self.filters.get(default_filter)
            self.dialog.set_filter(f)
        if self.current_folder is not None:
            self.dialog.set_current_folder_uri(self.current_folder)
        # 'filename' preselects an existing file; 'current_name' seeds the
        # name entry (save mode).
        if 'filename' in kwargs:
            self.dialog.set_filename(kwargs['filename'])
        elif 'current_name' in kwargs:
            self.dialog.set_current_name(kwargs['current_name'])
        self.dialog.set_do_overwrite_confirmation(self.overwrite_confirmation)
        #self.dialog.connect('response', self.on_dialog_response)
    def show(self):
        """Run the dialog modally; emit and return the response dict."""
        response = self.dialog.run()
        resp_dict = {'dialog':self, 'response':False}
        if response == responses['OK']:
            resp_dict.update({'response':True, 'filename':self.dialog.get_filename(), 'uri':self.dialog.get_uri()})
        #print resp_dict
        self.emit('response', **resp_dict)
        self.dialog.destroy()
        return resp_dict
    def add_filter(self, name, filter_data):
        """Build a gtk.FileFilter from ``filter_data`` and register it."""
        f = gtk.FileFilter()
        f.set_name(name)
        for filter in filter_data:
            if filter[0] == 'pattern':
                f.add_pattern(filter[1])
            elif filter[0] == 'mime':
                f.add_mime_type(filter[1])
            elif filter[0] == 'custom':
                f.add_custom(filter_flags['FILENAME'] | filter_flags['URI'],
                             *filter[1:])
        self.filters.update({name:f})
    def on_dialog_response(self, *args):
        # Unused; the dialog is run synchronously in show().
        pass
        #print args
# Dialog flag enum, resolved for whichever gtk major version is loaded.
if GTK_VERSION < 3:
    dlg_flags = get_gtk2_enum('DIALOG')
else:
    dlg_flags = get_gtk3_enum('DialogFlags')
class EntryDialog(BaseObject):
    """Modal dialog asking the user for a single line of text.

    run() emits 'response' with value=None when the dialog is cancelled.
    """
    def __init__(self, **kwargs):
        kwargs['ParentEmissionThread'] = get_gui_thread()
        super(EntryDialog, self).__init__(**kwargs)
        self.register_signal('response')
        self.title = kwargs.get('title')
        self.message = kwargs.get('message')
        self.entry_text = kwargs.get('entry_text', '')
        # Local lookup shadows the module-level dlg_flags (same values).
        if GTK_VERSION < 3:
            dlg_flags = get_gtk2_enum('DIALOG')
        else:
            dlg_flags = get_gtk3_enum('DialogFlags')
        #self.dialog = gtk.Dialog(self.title, None, gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
        #                         (btn_info['ok'] + btn_info['cancel']))
        self.dialog = gtk.Dialog(self.title, None, dlg_flags['MODAL'] | dlg_flags['DESTROY_WITH_PARENT'],
                                 (btn_info['ok'] + btn_info['cancel']))
        self.dialog.vbox.pack_start(Label(self.message))
        self.entry = Entry(no_frame=True)
        self.entry.set_widget_text(self.entry_text)
        self.dialog.action_area.pack_start(self.entry.topwidget)
        self.dialog.show_all()
    def run(self):
        """Run modally and emit 'response' with the entered value."""
        response = self.dialog.run()
        if response == btn_info['ok'][1]:
            value = self.entry.get_widget_text()
        else:
            value = None
        self.emit('response', dialog=self, value=value)
        self.dialog.destroy()
class ColorSelection(gtksimple.Color):
    """Full gtk.ColorSelection wrapped in a Frame, with extra RGB/HSV spin
    buttons bound to the color Property and unused sub-widgets hidden."""
    def __init__(self, **kwargs):
        super(ColorSelection, self).__init__(**kwargs)
        kwargs.setdefault('label', 'Color')
        self.topwidget = Frame(**kwargs)
        self.topwidget.pack_start(self.widget)
    def unlink(self):
        super(ColorSelection, self).unlink()
        for w in self.spinbtns.itervalues():
            w.unlink()
    def hide_extra_widgets(self, widget=None):
        # Recursively hide every descendant of the stock ColorSelection
        # except those in (or related by ancestry to) widgets_to_show.
        if widget is None:
            widget = self._widget
        flag = False
        if widget in self.widgets_to_show.values():
            flag = True
        #if widget in [w.get_parent() for w in self.widgets_to_show.values()]:
        #    flag = True
        for w in self.widgets_to_show.itervalues():
            if widget.is_ancestor(w) or w.is_ancestor(widget):
                flag = True
        if not flag:
            widget.hide()
            return
        if not hasattr(widget, 'get_children'):
            return
        children = widget.get_children()
        for child in children:
            self.hide_extra_widgets(child)
    def setup_widgets(self, **kwargs):
        widget = gtk.ColorSelection()
        # NOTE(review): digs into the stock widget's private child layout;
        # fragile across gtk releases -- verify the indices on upgrade.
        d = {}
        d['hsv'] = widget.get_children()[0].get_children()[0].get_children()[0]
        d['display'] = widget.get_children()[0].get_children()[0].get_children()[1].get_children()[0].get_children()[0]
        d['picker'] = widget.get_children()[0].get_children()[0].get_children()[1].get_children()[1]
        #for w in d.itervalues():
        #    w.get_parent().remove(w)
        #d['display'].remove(d['display'].get_children()[0])
        self.widgets_to_show = d
        vbox = VBox()
        vbox.pack_start(widget, expand=True)
        #vbox.pack_start(d['hsv'], expand=True)
        #hbox = HBox()
        #for key in ['display', 'picker']:
        #    hbox.pack_start(d[key], expand=True)
        #vbox.pack_start(hbox, expand=True)
        self.spinbtns = {}
        vbox2 = VBox()
        spinhbox = [HBox(), HBox()]
        # One row of RGB spin buttons, one row of HSV spin buttons, each
        # bound to the matching key of the color Property.
        for i, keys in enumerate([['red', 'green', 'blue'], ['hue', 'sat', 'val']]):
            for key in keys:
                w = SpinBtn(label=key, Property=(self.color, key))
                if key == 'hue':
                    w.widget.set_wrap(True)
                spinhbox[i].pack_start(w.topwidget)
                self.spinbtns[key] = w
            vbox2.pack_start(spinhbox[i])
        vbox.pack_start(vbox2, expand=False)
        self.widget = vbox
        self._widget = widget
        self.hsv_widget = d['hsv']
        self._widget.connect('color-changed', self.on_widget_update)
        self._widget.connect('show', self.on_widget_show)
    def get_widget_color(self):
        return self.hsv_widget.get_color()
        #return self.widget.get_current_color()
    @gtksimple.ThreadToGtk
    def set_widget_color(self, hsv):
        self.hsv_widget.set_color(*hsv)
    def on_widget_show(self, *args):
        self.hide_extra_widgets()
    @property
    def is_adjusting(self):
        # True while the user is actively dragging in the selector.
        if hasattr(self, '_widget'):
            return self._widget.is_adjusting()
        return False
class ColorBtn(gtksimple.Color):
    """Compact color-swatch button bound to the color Property."""
    def __init__(self, **kwargs):
        self.gcolor = gtk.gdk.Color(0, 0, 0)
        super(ColorBtn, self).__init__(**kwargs)
        self.topwidget = self.widget
    def setup_widgets(self, **kwargs):
        self.widget = ColorBtnButton()
    def get_widget_color(self):
        # Return [hue, saturation, value] from the button's current color.
        gcolor = self.widget.get_color()
        return list((getattr(gcolor, key) for key in ['hue', 'saturation', 'value']))
    @gtksimple.ThreadToGtk
    def set_widget_color(self, hsv):
        # Convert the Property's 0..1 RGB into a 16-bit gdk color.
        rgb = self.color.rgb_seq
        gcolor = self.gcolor
        for i, key in enumerate(['red', 'green', 'blue']):
            setattr(gcolor, key, rgb[i] * 65535)
        #gtksimple.thread_to_gtk(self._do_set_color, self.gcolor)
        self.widget.set_color(gcolor)
    def _do_set_color(self, *args, **kwargs):
        self.widget.set_color(*args, **kwargs)
    @property
    def is_adjusting(self):
        # TODO: make it find the colorselection.is_adjusting value
        return False
class ColorBtnButton(gtk.ColorButton):
    """gtk.ColorButton that ignores construction keyword arguments."""
    def __init__(self, **kwargs):
        super(ColorBtnButton, self).__init__()
class oldColorBtnButton(gtk.Button):
    """Legacy hand-drawn color swatch button (superseded by ColorBtnButton).

    Paints its color by overriding the drawing area's color for every
    widget state.
    """
    #bg_states = [getattr(gtk.StateType, key) for key in ['NORMAL', 'PRELIGHT']]
    #bg_states = gtk.StateFlags.NORMAL | gtk.StateFlags.PRELIGHT
    def __init__(self, **kwargs):
        # Fix: super() previously named the unrelated ColorBtnButton class,
        # which raises TypeError for oldColorBtnButton instances.
        super(oldColorBtnButton, self).__init__()
        # OR together every gtk.StateFlags value so override_color applies
        # in all widget states.
        state_keys = [key for key in dir(gtk.StateFlags) if key.isupper()]
        f = gtk.StateFlags(0)
        for key in state_keys:
            f |= getattr(gtk.StateFlags, key)
        self.bg_states = f
        #print self.bg_states
        self.set_property('height-request', 24)
        self.gc = None
        self.align = gtk.Alignment(xscale=1., yscale=1.)
        self.align.set_property('height-request', 24)
        self.drawing = gtk.DrawingArea()
        #self.drawing.connect('expose-event', self._on_expose)
        #gcolor = gtk.gdk.Color(0, 0, 0)
        self.set_color([0., 0., 0.])
        self.align.add(self.drawing)
        self.add(self.align)
    def _on_expose(self, *args):
        # Unused gtk2-era expose handler kept for reference.
        self.gc = self.drawing.window.new_gc()#function=gtk.gdk.CLEAR)
        gcolor = gtk.gdk.color_from_hsv(0., 1., 1.)
        self.gc.set_foreground(gcolor)
        #self.gc.set_background(gtk.gdk.Color())
        self.draw_rect()
    def draw_rect(self):
        #rect = self.drawing.allocation
        rect = self.drawing.window.get_size()
        #l = [getattr(rect, key) for key in ['x', 'y', 'width', 'height']]
        self.drawing.window.draw_rectangle(self.gc, True, 0, 0, *[int(v / 2.) for v in rect])#0, 0, rect.width, rect.height)
    def set_color(self, rgb):
        """Set the swatch color from a 0..1 [r, g, b] sequence."""
        rgb = rgb[:]
        rgb.append(1.)  # full alpha
        grgba = gdk.RGBA(*rgb)
        #print 'colorbtn set: ', rgb, grgba
        self.drawing.override_color(self.bg_states, grgba)
        #self.gc.set_background(gcolor)
        #self.gc.set_foreground(gcolor)
        #self.draw_rect()
class Entry(gtksimple.EntryBuffer):
    """Single-line text entry bound to a Property via gtksimple.EntryBuffer."""
    def setup_widgets(self, **kwargs):
        self.widget = gtk.Entry()
        self.widget.connect('activate', self.on_widget_value_changed)
        if kwargs.get('no_frame', False):
            self.topwidget = self.widget
        else:
            frame = Frame(label=self.name)
            frame.pack_start(self.widget)
            self.topwidget = frame
    def get_widget_text(self):
        return self.widget.get_text()
    def set_widget_text(self, text):
        # gtk.Entry cannot take None; coerce it to an empty string.
        self.widget.set_text('' if text is None else text)
class Text(gtksimple.TextBuffer):
    """Multi-line text view inside a labeled, scrolled Frame."""
    def __init__(self, **kwargs):
        self.name = kwargs.get('name', '')
        # Frame instantiates the topwidget class itself.
        self.topwidget = Frame(label=self.name, topwidget=gtk.ScrolledWindow)
        kwargs.setdefault('widget', gtk.TextView())
        super(Text, self).__init__(**kwargs)
        #expand = kwargs.get('expand', True)
        self.topwidget.add(self.widget)
class Tree(tree.TreeViewConnector):
    """TreeViewConnector placed inside a framed ScrolledWindow."""
    def __init__(self, **kwargs):
        super(Tree, self).__init__(**kwargs)
        self.name = kwargs.get('name', '')
        self.topwidget = Frame(label=self.name, topwidget=ScrolledWindow)
        #self.scrolled_win = ScrolledWindow()
        #self.scrolled_win.add(self.widget)
        self.topwidget.topwidget.add(self.widget)
class SpinBtn(gtksimple.Spin):
    """Spin button driven by a gtk.Adjustment, framed unless no_frame."""
    def __init__(self, **kwargs):
        super(SpinBtn, self).__init__(**kwargs)
        if kwargs.get('no_frame', False):
            self.topwidget = self.widget
        else:
            self.topwidget = Frame(label=kwargs.get('label', ''))
            self.topwidget.pack_start(self.widget)
    def setup_widgets(self, **kwargs):
        if not hasattr(self, 'widget'):
            self.widget = kwargs.get('widget', gtk.SpinButton())
        self.adj = gtk.Adjustment()
        self.adj.set_property('step-increment', 1)
        self.widget.set_adjustment(self.adj)
        self.adj.connect('value-changed', self.on_widget_value_changed)
    def set_widget_range(self):
        # Integers step by 1 with no decimals; floats show 3 decimals and
        # step by 1/100th of the range (capped at 1).
        if self.value_type == int:
            self.widget.set_digits(0)
            step = 1
        else:
            self.widget.set_digits(3)
            min, max = self.value_range
            step = (max - min) / 100.
            if step > 1:
                step = 1
        #self.adj.lower, self.adj.upper = self.value_range
        keys = ['lower', 'upper', 'step-increment']
        vals = self.value_range[:]
        vals.append(step)
        for key, val in zip(keys, vals):
            self.adj.set_property(key, val)
    @gtksimple.ThreadToGtk
    def set_widget_value(self, value):
        #gtksimple.thread_to_gtk(self._do_set_widget_value, value)
        if value is not None:
            # Guard flag keeps the adjustment callback from echoing back.
            self.widget_value_set_by_program = True
            self.adj.set_value(value)
            self.widget_value_set_by_program = False
    def get_widget_value(self):
        return self.adj.get_value()
class RadioBtn(gtksimple.Radio):
    """Radio button group inside a labeled Frame."""
    def __init__(self, **kwargs):
        self.name = kwargs.get('name', kwargs.get('label', ''))
        self.topwidget = Frame(label=self.name)
        super(RadioBtn, self).__init__(**kwargs)
    def build_widget(self, key):
        # Pack each option button into the frame as it is created.
        w = super(RadioBtn, self).build_widget(key)
        self.topwidget.pack_start(w)
        return w
    def attach_Property(self, prop):
        super(RadioBtn, self).attach_Property(prop)
        # Fall back to the Property's name when no label was given.
        if self.name == '':
            self.topwidget.set_label(prop.name)
class TreeList(listmodel.ListModelTree):
    """ListModelTree wrapped in a labeled Frame with a scrolled viewport."""
    def __init__(self, **kwargs):
        self.name = kwargs.get('name', '')
        self.topwidget = Frame(label=self.name)
        super(TreeList, self).__init__(**kwargs)
        scrolled = ScrolledWindow()
        scrolled.add(self.widget)
        self.scrolled_win = scrolled
        self.topwidget.pack_start(scrolled, expand=True)
class TreeView(BaseObject):
    """Tree view bound to a model object, mirroring the selected row id
    through the 'selected' Property in both directions."""
    _Properties = {'selected':dict(ignore_type=True)}
    def __init__(self, **kwargs):
        kwargs['ParentEmissionThread'] = get_gui_thread()
        super(TreeView, self).__init__(**kwargs)
        self._model = None
        # Re-entrancy guards so widget <-> Property updates don't echo.
        self.selection_set_by_property = False
        self.selection_set_by_widget = False
        self.name = kwargs.get('name', '')
        self.id = kwargs.get('id', id(self))
        self.model = kwargs.get('model')
        self.topwidget = Frame(label=self.name)
        self.widget = gtk.TreeView()
        self.widget.get_selection().connect('changed', self.on_widget_sel_changed)
        self.scrolled_win = ScrolledWindow()
        self.scrolled_win.add(self.widget)
        self.topwidget.pack_start(self.scrolled_win, expand=True)
        self.bind(selected=self._on_selected_set)
    def unlink(self):
        super(TreeView, self).unlink()
        if self.model is not None:
            self.model.unbind(self)
    @property
    def model(self):
        return self._model
    @model.setter
    def model(self, value):
        self._model = value
        if self.model is not None:
            self.model.bind(obj_added=self.on_model_obj_added,
                            obj_removed=self.on_model_obj_removed)
    def on_widget_sel_changed(self, treesel):
        """Widget selection -> 'selected' Property."""
        if not self.model:
            return
        if self.selection_set_by_property:
            return
        tree_iter = treesel.get_selected()[1]
        if tree_iter is not None:
            key = self.model.sorted_store[tree_iter][0]
        else:
            key = None
        self.selection_set_by_widget = True
        self.selected = key
        self.selection_set_by_widget = False
    def _on_selected_set(self, **kwargs):
        """'selected' Property -> widget selection."""
        if not self.model:
            return
        if self.selection_set_by_widget:
            return
        key = kwargs.get('value')
        treesel = self.widget.get_selection()
        self.selection_set_by_property = True
        if key is None:
            treesel.unselect_all()
        else:
            modelobj = self.model.child_obj.get(key)
            if modelobj is None:
                # Fix: reset the guard before bailing out; it was previously
                # left True, permanently ignoring widget selection changes.
                self.selection_set_by_property = False
                return
            path = self.model.store.get_path(modelobj.iter)
            s_path = self.model.sorted_store.convert_child_path_to_path(path)
            # Fix: look up the iter with the *sorted* path; s_path was
            # previously computed but the unsorted path was used instead.
            s_iter = self.model.sorted_store.get_iter(s_path)
            treesel.select_iter(s_iter)
        self.selection_set_by_property = False
    def on_model_obj_added(self, **kwargs):
        # Re-apply the selection when the selected object (re)appears.
        key = kwargs.get('id')
        if key == self.selected:
            self._on_selected_set(value=self.selected)
    def on_model_obj_removed(self, **kwargs):
        key = kwargs.get('id')
        if key == self.selected:
            self.selected = None
class Combo(listmodel.ListModelCombo):
    """Combo box backed by a ListModel, optionally wrapped in a Frame."""
    def __init__(self, **kwargs):
        self.name = kwargs.get('name', '')
        #kwargs.setdefault('list_types', [str])
        #kwargs.setdefault('widget', gtk.ComboBox())
        super(Combo, self).__init__(**kwargs)
        if kwargs.get('no_frame', False):
            self.topwidget = self.widget
        else:
            self.topwidget = Frame(label=self.name)
            self.topwidget.pack_start(self.widget)
class Button(gtk.Button):
    """Thin alias for gtk.Button (placeholder for future extension)."""
    pass
class ToggleBtn(gtksimple.Toggle):
    """Toggle button bound to a boolean Property via gtksimple.Toggle."""
    def __init__(self, **kwargs):
        super(ToggleBtn, self).__init__(**kwargs)
        #self.topwidget = VBox()
        #self.topwidget.pack_start(self.widget)
    def setup_widgets(self, **kwargs):
        self.widget = gtk.ToggleButton(label=kwargs.get('label', ''))
        self.widget_packing = {'expand':False}
        # Record the handler id so the base class can block/disconnect it.
        id = self.widget.connect('toggled', self.on_widget_toggled)
        self.widget_signals.append(id)
    def get_widget_state(self):
        return self.widget.get_active()
    def set_widget_state(self, state):
        self.widget.set_active(state)
class CheckBox(gtk.CheckButton):
    """gtk.CheckButton exposing the checked flag as a ``state`` property."""
    @property
    def state(self):
        return self.get_active()
    @state.setter
    def state(self, value):
        self.set_active(value)
class Slider(gtksimple.Fader):
    """Fader widget wrapped in a Frame whose label tracks the name."""
    def __init__(self, **kwargs):
        #self.name = kwargs.get('name', kwargs.get('label', ''))
        label = kwargs.get('label')
        if label is not None:
            kwargs.setdefault('name', label)
        self.topwidget = Frame(label='')
        super(Slider, self).__init__(**kwargs)
        self.topwidget.pack_start(self.widget, expand=True)
        self.widget.set_digits(2)
    def _on_name_set(self, **kwargs):
        # Keep the frame label in sync with the name Property.
        self.topwidget.set_label(kwargs.get('value'))
        super(Slider, self)._on_name_set(**kwargs)
class VSlider(Slider):
    """Vertical slider; inverted so larger values sit at the top."""
    def __init__(self, **kwargs):
        kwargs.setdefault('fader_type', 'VSlider')
        super(VSlider, self).__init__(**kwargs)
        self.widget_packing.update({'xoptions':AttachOptions['FILL'],
                                    'yoptions':AttachOptions['EXPAND'] | AttachOptions['FILL']})
        self.widget.set_property('inverted', True)
        #self.widget.set_property('width-request', 40)
        #self.widget.set_property('height-request', 128)
class HSlider(Slider):
    """Horizontal slider."""
    def __init__(self, **kwargs):
        kwargs.setdefault('fader_type', 'HSlider')
        super(HSlider, self).__init__(**kwargs)
        self.widget_packing.update({'xoptions':AttachOptions['EXPAND'] | AttachOptions['FILL'],
                                    'yoptions':AttachOptions['FILL']})
        #self.widget.set_property('width-request', 128)
        #self.widget.set_property('height-request', 40)
class ProgressBar(BaseObject, PropertyConnector):
    """Progress bar whose fraction mirrors an attached Property's
    normalized value; the raw value is shown as the bar text."""
    _Properties = {'value':dict(default=0., min=0., max=1., quiet=True)}
    def __init__(self, **kwargs):
        kwargs['ParentEmissionThread'] = get_gui_thread()
        super(ProgressBar, self).__init__(**kwargs)
        self.widget = gtk.ProgressBar()
        self.topwidget = self.widget
        self.bind(value=self._on_value_set)
    def attach_Property(self, prop):
        super(ProgressBar, self).attach_Property(prop)
        self.update_Property_value()
    def unlink_Property(self, prop):
        super(ProgressBar, self).unlink_Property(prop)
        self.value = 0.
    def on_Property_value_changed(self, **kwargs):
        self.update_Property_value()
    def update_Property_value(self):
        # Mirror the linked Property's normalized value into our own.
        prop = self.Property
        if prop is None or prop.value is None:
            value = 0.
        else:
            value = prop.normalized_and_offset
        self.Properties['value'].normalized_and_offset = value
    def _on_value_set(self, **kwargs):
        value = kwargs.get('value')
        self.widget.set_fraction(value)
        if self.Property is not None:
            self.widget.set_text('%s' % (self.Property.value))
        else:
            self.widget.set_text('')
class HProgressBar(ProgressBar):
    """Horizontally-oriented progress bar."""
    def __init__(self, **kwargs):
        super(HProgressBar, self).__init__(**kwargs)
        self.widget.set_orientation(gtk.Orientation.HORIZONTAL)
class VProgressBar(ProgressBar):
    """Vertically-oriented progress bar."""
    def __init__(self, **kwargs):
        super(VProgressBar, self).__init__(**kwargs)
        self.widget.set_orientation(gtk.Orientation.VERTICAL)
class XYSlider(BaseObject, PropertyConnector):
    """Pan/tilt control combining an HSlider, a VSlider and spin buttons in
    a Table, all driven through per-axis ValueObject adapters."""
    def __init__(self, **kwargs):
        kwargs['ParentEmissionThread'] = get_gui_thread()
        self._attribute = None
        super(XYSlider, self).__init__(**kwargs)
        self.value_obj = {}
        self.sliders = {}
        self.spins = {}
        for key, cls in zip(['pan', 'tilt'], [HSlider, VSlider]):
            obj = ValueObject(Property=self.Property, prop_key=key)
            w = cls()#Property=(obj, 'value'))
            self.sliders[key] = w
            self.value_obj[key] = obj
            # Pull the bare slider out of its auto-created frame; it is
            # re-attached directly into the table below.
            w.widget.get_parent().remove(w.widget)
            spin = SpinBtn(label=key)
            self.spins[key] = spin
        self.Property = kwargs.get('Property')
        self.topwidget = Frame(label=kwargs.get('label', 'XY'))
        self.table = Table(rows=3, columns=3, homogeneous=True)
        self.table.attach(self.spins['pan'].topwidget, 0, 1, 0, 1, expand=True)
        self.table.attach(self.sliders['pan'].widget, 0, 2, 2, 3, expand=True)
        self.table.attach(self.spins['tilt'].topwidget, 1, 2, 0, 1, expand=True)
        self.table.attach(self.sliders['tilt'].widget, 2, 3, 0, 2, expand=True)
        self.topwidget.pack_start(self.table, expand=True)
        self.topwidget.show_all()
    def unlink(self):
        super(XYSlider, self).unlink()
        self.Property = None
    def attach_Property(self, prop):
        super(XYSlider, self).attach_Property(prop)
        # Point the adapters at the new Property, then bind each slider and
        # spin button to its adapter's 'value' Property.
        for obj in self.value_obj.itervalues():
            obj.Property = prop
        for key, w in self.sliders.iteritems():
            prop = (self.value_obj[key], 'value')
            w.Property = prop
            s = self.spins[key]
            s.Property = prop
    def unlink_Property(self, prop):
        super(XYSlider, self).unlink_Property(prop)
        for w in self.sliders.itervalues():
            w.Property = None
        for w in self.spins.itervalues():
            w.Property = None
        for obj in self.value_obj.itervalues():
            obj.Property = None
class ValueObject(BaseObject, PropertyConnector):
    """Adapter exposing one key of a dict-valued Property as a scalar
    'value' Property (used by XYSlider for the pan/tilt axes)."""
    _Properties = {'value':dict(ignore_type=True, ignore_range=True)}
    def __init__(self, **kwargs):
        kwargs['ParentEmissionThread'] = get_gui_thread()
        # Guard against feedback while pushing our value to the Property.
        self._update = False
        super(ValueObject, self).__init__(**kwargs)
        self.prop_key = kwargs.get('prop_key')
        self.Property = kwargs.get('Property')
        self.bind(value=self.on_own_value_set)
    def attach_Property(self, prop):
        super(ValueObject, self).attach_Property(prop)
        # Copy type/range/current value for our key from the dict Property.
        myprop = self.Properties['value']
        myprop.type = type(self.get_Property_value()[self.prop_key])
        myprop.min = prop.min[self.prop_key]
        myprop.max = prop.max[self.prop_key]
        myprop.value = self.get_Property_value()[self.prop_key]
#    def get_Property_value(self):
#        return super(ValueObject, self).get_Property_value()[self.prop_key]
#    def set_Property_value(self, value):
#        value = {self.prop_key:value}
#        super(ValueObject, self).set_Property_value(value)
    def on_Property_value_changed(self, **kwargs):
        old = kwargs.get('old')[self.prop_key]
        value = kwargs.get('value')[self.prop_key]
        if self._update or old == value:
            return
        self.value = value
    def on_own_value_set(self, **kwargs):
        if self.value is None:
            return
        self._update = True
        # Mutate our key inside the Property's dict value in place.
        propval = self.get_Property_value()
        propval[self.prop_key] = self.value
        #self.set_Property_value(self.value)
        self._update = False
class CenteringSlider(gtksimple.Fader):
    """Slider that springs back to zero shortly after button release."""
    def __init__(self, **kwargs):
        self.release_timer = None
        kwargs.setdefault('adj_kwargs', {'lower':-100., 'upper':100.})
        super(CenteringSlider, self).__init__(**kwargs)
        # NOTE(review): self.attribute is presumably set by the Fader base
        # class before this point -- confirm.
        self.topwidget = Frame(label=self.attribute.name)
        self.topwidget.pack_start(self.widget, expand=True)
        self.widget.set_digits(0)
    def on_widget_button_press(self, *args):
        # Cancel a pending snap-back when the user grabs the slider again.
        if self.release_timer:
            self.release_timer.cancel()
            self.release_timer = None
        super(CenteringSlider, self).on_widget_button_press(*args)
    def on_widget_button_release(self, *args):
        self.release_timer = threading.Timer(.1, self.on_release_timer)
        self.release_timer.start()
        super(CenteringSlider, self).on_widget_button_release(*args)
    def on_release_timer(self):
        self.release_timer = None
        self.attribute.value = 0.
class CenteringVSlider(CenteringSlider):
    """Vertical centering slider; inverted so positive is up."""
    def __init__(self, **kwargs):
        kwargs.setdefault('fader_type', 'VSlider')
        super(CenteringVSlider, self).__init__(**kwargs)
        self.widget_packing.update({'xoptions':gtk.AttachOptions.FILL,
                                    'yoptions':gtk.AttachOptions.EXPAND | gtk.AttachOptions.FILL})
        self.widget.set_property('inverted', True)
class CenteringHSlider(CenteringSlider):
    """Horizontal centering slider."""
    def __init__(self, **kwargs):
        kwargs.setdefault('fader_type', 'HSlider')
        super(CenteringHSlider, self).__init__(**kwargs)
        self.widget_packing.update({'xoptions':gtk.AttachOptions.EXPAND | gtk.AttachOptions.FILL,
                                    'yoptions':gtk.AttachOptions.FILL})
def XYWidget(**kwargs):
    # Imported lazily so clutter is only required when an XY widget is used.
    import clutter_bases
    return clutter_bases.XYWidget(**kwargs)
class XYShuttle(BaseObject):
    """Drives an effects-system shuttle from a 2D (clutter) XY widget.

    The widget position (0..100 per axis, 50 = rest) is turned into an
    incremental expression fed to the shuttle's 'Functional' effect.
    """
    pos_keys = ['x', 'y']
    def __init__(self, **kwargs):
        kwargs['ParentEmissionThread'] = get_gui_thread()
        # Both axes start centered (no motion).
        self._widget_pos = dict(zip([key for key in self.pos_keys], [50., 50.]))
        super(XYShuttle, self).__init__(**kwargs)
        self.MainController = kwargs.get('MainController')
        self.src_object = kwargs.get('src_object')
        self.src_attr = kwargs.get('src_attr')
        self.src_signal = kwargs.get('src_signal')
        self.value_objects = {}
        # NOTE(review): 'group' is assigned but never used.
        group = self.src_object.name
        self.shuttle = self.MainController.EffectsSystem.add_shuttle(group_names=[self.src_object.name])
        self.shuttle.add_obj(self.src_object)
        self.xywidget = XYWidget(src_object=self, src_attr='widget_pos',
                                 label=kwargs.get('label', ''), value_range=[[-100, 100], [-100, 100]])
        self.topwidget = self.xywidget.topwidget
        self.xywidget.connect('position_changed', self.on_widget_pos_changed)
        self.xywidget.scene.connect('clicked', self.on_widget_clicked)
        self.xywidget.scene.connect('released', self.on_widget_released)
    @property
    def widget_pos(self):
        return self._widget_pos
    @widget_pos.setter
    def widget_pos(self, value):
        # Merge per-axis updates; only changed axes are written.
        for key, val in value.iteritems():
            if val != self._widget_pos[key]:
                self._widget_pos.update({key:val})
    def set_expression(self, **kwargs):
        """Build per-axis increment expressions from the widget offset."""
        d = {}
        for key in self.pos_keys:
            pos = self.widget_pos[key]
            # Distance from center (50) sets the increment per tick.
            inc = (pos - 50) / 10
            comp = ''
            zero = False
            if inc == 0:
                zero = True
            elif inc > 0:
                comp = '<'
            else:
                comp = '>'
            # NOTE(review): the comparison direction looks inverted (a
            # positive increment pairs with '<'); confirm against the
            # Functional effect's expression semantics.
            s = '%(getv)s + %(inc)s if %(getv)s + %(inc)s %(comp)s %(getv)s' % {'getv':'self.get_object_value()', 'inc':inc, 'comp':comp}
            if zero:
                s = 'self.get_object_value()'
            #print key, s
            d[key] = s
        objkey = self.src_object.id
        self.shuttle.effects['Functional'].set_expression(objkey=objkey, expression=d)
    def on_widget_clicked(self, **kwargs):
        # Run the sequencer while the pointer is held down.
        self.set_expression()
        s = self.shuttle.sequencer
        if not s.state:
            s.start()
    def on_widget_released(self, **kwargs):
        s = self.shuttle.sequencer
        if s.state:
            s.stop()
    def on_widget_pos_changed(self, **kwargs):
        self.set_expression()
class XYValueObject(BaseObject):
    """Adapter exposing one key of a dict-valued attribute on ``src_object``
    as a simple ``value`` property, kept in sync via a source signal."""
    def __init__(self, **kwargs):
        kwargs['ParentEmissionThread'] = get_gui_thread()
        self._value = None
        super(XYValueObject, self).__init__(**kwargs)
        self.id = id(self)
        self.value_min = kwargs.get('value_min')
        self.value_max = kwargs.get('value_max')
        self.src_object = kwargs.get('src_object')
        self.src_attr = kwargs.get('src_attr')
        self.src_attr_key = kwargs.get('src_attr_key')
        # Fix: src_signal is used by connect() below but was never taken
        # from kwargs, raising AttributeError (the sibling XYShuttle class
        # assigns it explicitly, so the base class does not).
        self.src_signal = kwargs.get('src_signal')
        self._value = self.get_object_value()
        self.src_object.connect(self.src_signal, self.on_object_update)
    def unlink(self):
        self.src_object.disconnect(callback=self.on_object_update)
        super(XYValueObject, self).unlink()
    @property
    def value(self):
        return self._value
    @value.setter
    def value(self, value):
        if value != self._value:
            #self._value = value
            # _value is refreshed via on_object_update once the source emits.
            self.set_object_value(value)
    def get_object_value(self):
        return getattr(self.src_object, self.src_attr)[self.src_attr_key]
    def set_object_value(self, value):
        setattr(self.src_object, self.src_attr, {self.src_attr_key:value})
    def on_object_update(self, **kwargs):
        self._value = self.get_object_value()
def get_widget_classes():
    """Return the mapping of widget-type names to widget classes."""
    mapping = {
        'ToggleBtn': ToggleBtn,
        'Radio': RadioBtn,
        'VSlider': VSlider,
        'HSlider': HSlider,
        'Dial': HSlider,
        'MenuBar': MenuBar,
    }
    return mapping
def get_container_classes():
    """Return the mapping of container-type names to container classes."""
    mapping = {
        'VBox': VBox,
        'HBox': HBox,
        'Table': Table,
        'Frame': Frame,
        'Expander': Expander,
        'ScrolledWindow': ScrolledWindow,
        'Notebook': Notebook,
    }
    return mapping
| [
"matt@nomadic-recording.com"
] | matt@nomadic-recording.com |
929aec9c56cc8ca4c570af9cd6043c4d9256e2da | bfe6c95fa8a2aae3c3998bd59555583fed72900a | /smallestRepunitDivByK.py | 3071d89089c335e92fa7eb56b3efbec7c1ac437c | [] | no_license | zzz136454872/leetcode | f9534016388a1ba010599f4771c08a55748694b2 | b5ea6c21bff317884bdb3d7e873aa159b8c30215 | refs/heads/master | 2023-09-01T17:26:57.624117 | 2023-08-29T03:18:56 | 2023-08-29T03:18:56 | 240,464,565 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 416 | py | class Solution:
def smallestRepunitDivByK(self, k: int) -> int:
if k == 1:
return 1
a = [1]
seen = set(a)
while True:
a.append((a[-1] * 10 + 1) % k)
if a[-1] == 0:
return len(a)
if a[-1] in seen:
return -1
seen.add(a[-1])
# Ad-hoc manual check; the successive assignments leave k == 3.
k = 1
k = 2
k = 3
print(Solution().smallestRepunitDivByK(k))
| [
"zzz136454872@163.com"
] | zzz136454872@163.com |
fe9981ffe57faaf1b5ac9e27c981810646f19b43 | 487ce91881032c1de16e35ed8bc187d6034205f7 | /codes/CodeJamCrawler/16_0_1_neat/16_0_1_odieatla_sheep.py | 8de3f71090aece5636ed89940641042f89fa0783 | [] | no_license | DaHuO/Supergraph | 9cd26d8c5a081803015d93cf5f2674009e92ef7e | c88059dc66297af577ad2b8afa4e0ac0ad622915 | refs/heads/master | 2021-06-14T16:07:52.405091 | 2016-08-21T13:39:13 | 2016-08-21T13:39:13 | 49,829,508 | 2 | 0 | null | 2021-03-19T21:55:46 | 2016-01-17T18:23:00 | Python | UTF-8 | Python | false | false | 743 | py | #!/usr/bin/env python
t = int(raw_input()) # read a line with a single integer
for i in xrange(1, t + 1):
#n = [int(s) for s in raw_input().split(" ")]
n = int(raw_input())
limit = 100
result = range(0, 10)
m = n
to_break = False
for l in xrange(1, limit+1):
num_str = str(n*l)
#print "current number is {}".format(num_str)
for c in num_str:
#print "digit is {}".format(int(c))
#print "leftover digits are {}".format(result)
if int(c) in result:
result.remove(int(c))
if len(result) is 0:
m = num_str
to_break = True
break
if to_break:
break
if not to_break:
m = "INSOMNIA"
print "Case #{}: {}".format(i, m)
| [
"[dhuo@tcd.ie]"
] | [dhuo@tcd.ie] |
f2f1dabe7a5b575b5074614f8cad0c656ff69e8b | 982194a86c5a11185bb55a653ba1730807b9f67f | /source-builder/sb/log.py | 8e4602311574c32458949df50ebd0837b23593a3 | [] | no_license | jeffmurphy/rtems-source-builder | f476ef158d3a4103168e70731bbad6d5ea521d04 | 539c48a40543193fc088fd8a67b3ab0ef5308c24 | refs/heads/master | 2020-12-25T13:23:57.017108 | 2015-02-09T22:37:43 | 2015-02-09T22:37:43 | 30,490,786 | 0 | 0 | null | 2015-02-08T12:48:18 | 2015-02-08T12:48:17 | null | UTF-8 | Python | false | false | 6,078 | py | #
# RTEMS Tools Project (http://www.rtems.org/)
# Copyright 2010-2012 Chris Johns (chrisj@rtems.org)
# All rights reserved.
#
# This file is part of the RTEMS Tools package in 'rtems-testing'.
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
#
# Log output to stdout and/or a file.
#
import os
import sys
import error
#
# A global log.
#
# 'default' is the module-wide log object used by the helper functions in
# this module whenever no explicit log argument is passed.
default = None

#
# Global parameters.
#
# 'tracing' enables trace() output; 'quiet' suppresses output().
tracing = False
quiet = False

def set_default_once(log):
    """Install *log* as the module default log, only if none is set yet.

    Fix: the assignment below made ``default`` a function local, so the
    preceding read raised UnboundLocalError; it must be declared global.
    """
    global default
    if default is None:
        default = log
def _output(text = os.linesep, log = None):
    """Output the text to a log if provided else send it to stdout.

    *text* may be a single string or a list of lines; a list is joined
    with the platform line separator first.  Destination preference:
    the explicit *log*, then the module-level default log, then stdout.
    """
    if text is None:
        text = os.linesep
    if type(text) is list:
        # Flatten a list of lines into one separator-terminated string.
        _text = ''
        for l in text:
            _text += l + os.linesep
        text = _text
    if log:
        log.output(text)
    elif default is not None:
        default.output(text)
    else:
        # No log object available: strip carriage returns and print each
        # line to stdout (Python 2 print statement).
        for l in text.replace(chr(13), '').splitlines():
            print l
def stdout_raw(text = os.linesep):
    """Print *text* to stdout without a trailing newline (Python 2
    trailing-comma print) and flush immediately."""
    print text,
    sys.stdout.flush()
def stderr(text = os.linesep, log = None):
    """Print each line of *text* to stderr.

    *log* is accepted for call symmetry with output() but is unused here.
    """
    for l in text.replace(chr(13), '').splitlines():
        print >> sys.stderr, l
def output(text = os.linesep, log = None):
    """Emit *text* via _output() unless quiet mode suppresses it."""
    if quiet:
        return
    _output(text, log)
def notice(text = os.linesep, log = None):
    """Output *text*, also echoing it to the console when the default log
    does not already write to stdout.

    Note: the _output() call is unconditional — `quiet` only gates the
    extra console echo, not the log output itself.
    """
    if not quiet and default is not None and not default.has_stdout():
        for l in text.replace(chr(13), '').splitlines():
            print l
    _output(text, log)
def trace(text = os.linesep, log = None):
    """Emit *text* via _output() only while tracing is enabled."""
    if not tracing:
        return
    _output(text, log)
def warning(text = os.linesep, log = None):
    """Send every line of *text* through notice() with a 'warning: ' prefix."""
    lines = text.replace(chr(13), '').splitlines()
    for line in lines:
        notice('warning: %s' % (line), log)
def flush(log = None):
    """Flush the given log, falling back to the module default log."""
    target = log or default
    if target is not None:
        target.flush()
def tail(log = None):
    """Return the tail buffer of *log* (or of the default log), or a
    fixed message when no log exists."""
    chosen = log
    if chosen is None and default is not None:
        chosen = default
    if chosen is not None:
        return chosen.tail
    return 'No log output'
class log:
    """Log output to stdout or a file.

    Holds a bounded tail buffer of recent lines and a list of output
    handles: index 0 is stdout (or None), index 1 is stderr (or None),
    and indexes 2+ are files opened by this instance.
    """

    def __init__(self, streams = None, tail_size = 200):
        # tail: ring-buffer-style list of the most recent lines.
        self.tail = []
        self.tail_size = tail_size
        # fhs: [stdout-or-None, stderr-or-None, opened files...]
        self.fhs = [None, None]
        if streams:
            for s in streams:
                if s == 'stdout':
                    self.fhs[0] = sys.stdout
                elif s == 'stderr':
                    self.fhs[1] = sys.stderr
                else:
                    try:
                        # Python 2 file() builtin; any other stream name is
                        # treated as a file path opened for writing.
                        self.fhs.append(file(s, 'w'))
                    except IOError, ioe:
                        raise error.general("creating log file '" + s + \
                                            "': " + str(ioe))

    def __del__(self):
        # Close only the file handles (index 2+); never close the
        # process-wide stdout/stderr slots.
        for f in range(2, len(self.fhs)):
            self.fhs[f].close()

    def __str__(self):
        # Join the tail lines, dropping the final trailing separator.
        t = ''
        for tl in self.tail:
            t += tl + os.linesep
        return t[:-len(os.linesep)]

    def _tail(self, text):
        # Append lines to the tail buffer, trimming to the newest
        # tail_size entries.
        if type(text) is not list:
            text = text.splitlines()
        self.tail += text
        if len(self.tail) > self.tail_size:
            self.tail = self.tail[-self.tail_size:]

    def has_stdout(self):
        # True when this log echoes to stdout.
        return self.fhs[0] is not None

    def has_stderr(self):
        # True when this log echoes to stderr.
        return self.fhs[1] is not None

    def output(self, text):
        """Output the text message to all the logs."""
        # Reformat the text to have local line types.
        text = text.replace(chr(13), '').splitlines()
        self._tail(text)
        out = ''
        for l in text:
            out += l + os.linesep
        for f in range(0, len(self.fhs)):
            if self.fhs[f] is not None:
                self.fhs[f].write(out)
        self.flush()

    def flush(self):
        """Flush the output."""
        for f in range(0, len(self.fhs)):
            if self.fhs[f] is not None:
                self.fhs[f].flush()
# Self-test: exercise the log class and the module-level quiet/tracing flags.
if __name__ == "__main__":
    l = log(['stdout', 'log.txt'], tail_size = 20)
    for i in range(0, 10):
        l.output('log: hello world: %d\n' % (i))
    l.output('log: hello world CRLF\r\n')
    l.output('log: hello world NONE')
    l.flush()
    print '=-' * 40
    print 'tail: %d' % (len(l.tail))
    print l
    print '=-' * 40
    for i in range(0, 10):
        l.output('log: hello world 2: %d\n' % (i))
    l.flush()
    print '=-' * 40
    print 'tail: %d' % (len(l.tail))
    print l
    print '=-' * 40
    # NOTE(review): the loop variable i is unused here, so the same
    # quiet/tracing demonstration runs twice verbatim.
    for i in [0, 1]:
        quiet = False
        tracing = False
        print '- quiet:%s - trace:%s %s' % (str(quiet), str(tracing), '-' * 30)
        trace('trace with quiet and trace off')
        notice('notice with quiet and trace off')
        quiet = True
        tracing = False
        print '- quiet:%s - trace:%s %s' % (str(quiet), str(tracing), '-' * 30)
        trace('trace with quiet on and trace off')
        notice('notice with quiet on and trace off')
        quiet = False
        tracing = True
        print '- quiet:%s - trace:%s %s' % (str(quiet), str(tracing), '-' * 30)
        trace('trace with quiet off and trace on')
        notice('notice with quiet off and trace on')
        quiet = True
        tracing = True
        print '- quiet:%s - trace:%s %s' % (str(quiet), str(tracing), '-' * 30)
        trace('trace with quiet on and trace on')
        notice('notice with quiet on and trace on')
    # Install the test log as the module default, then drop the local
    # reference; `default` still holds the object so __del__ is deferred.
    default = l
    print '=-' * 40
    print 'tail: %d' % (len(l.tail))
    print l
    print '=-' * 40
    del l
| [
"chrisj@rtems.org"
] | chrisj@rtems.org |
60b32cb26774747a827fb99ec828f7caeb007fd8 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_86/264.py | 47c4d9f5b59ad9d3d7dc6bc03f717392097e26ce | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 574 | py | #!/usr/bin/env python
def solves(n, freqs):
if all(map(lambda a: a%n==0 or n%a==0 or a==1, freqs)):
return True
else:
return False
def print_solution(freqs, n, l, h):
for i in xrange(l, h+1):
if solves(i, freqs):
return str(i)
return 'NO'
def main():
T = int(raw_input())
for i in xrange(T):
n, l, h = map(int, raw_input().split(' '))
freqs = map(int, raw_input().split(' '))
print 'Case #{0}: {1}'.format(i+1, print_solution(freqs, n, l, h))
if __name__ == '__main__':
main()
| [
"miliar1732@gmail.com"
] | miliar1732@gmail.com |
e67bf271c819e68a36805a80819c5935dc3a5ec8 | 946062524e1995a33cc9de01dc1766da27cba16b | /py_ad_3_3.py | f0ed5c35114fb0d58b178c1b4dcdf44ee11d0342 | [
"MIT"
] | permissive | seopbo/con-par-python | 87f684332d6a72969bb5d1759143d7374c2454b1 | e74cb9c30acfdd78c12c9f7aba039d16ed1f7e78 | refs/heads/main | 2023-05-29T03:27:33.564631 | 2021-06-16T12:51:44 | 2021-06-16T12:51:44 | 372,824,317 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,174 | py | """
Section 3
Concurrency, CPU Bound vs I/O Bound - Multiprocessing vs Threading vs AsyncIO
Keyword - CPU Bound, I/O Bound, AsyncIO
"""
"""
CPU Bound vs I/O Bound
CPU Bound
- 프로세스 진행 -> CPU 속도에 의해 제한(결정) -> 행렬 곱, 고속 연산, 압축 파일, 집합 연산 등
- CPU 연산 위주 작업
I/O Bound
- 파일쓰기, 디스크 작업, 네트워크 통신, 시리얼 포트 송수신 -> 작업에 의해서 병목(수행시간)이 결정
- CPU 성능 지표가 수행시간 단축으로 크게 영향을 끼치지 않음.
메모리 바인딩, 캐시 바운딩
작업 목적에 따라서 적절한 동시성 라이브러리 선택이 중요함.
최종 비교
- Multiprocessing: Multiple processes, 고가용성(CPU) Utilization -> CPU-Bound -> 10개 부엌, 10명 요리사, 10개 요리
- Threading: Single(Multi) process, Multiple threads, OS decides task switching -> Fast I/O Bound -> 1개 부엌, 10명 요리사, 10개 요리
- AsyncIO: Single process, single thread, cooperative multitasking, tasks cooperatively decide switching -> Slow I/O-Bound -> 1개 1부엌, 1명 요리사, 10개 요리
""" | [
"bsk0130@gmail.com"
] | bsk0130@gmail.com |
14433120dd972d49171446aa029b1e37702465bc | 46afa2d7d3c531c898d1ade4f9e8f896661b8db3 | /interpolate.py | 8ce445555f6323405a6d9072f48bf3612941f121 | [
"Apache-2.0"
] | permissive | Guymer/PyGuymer | b07062e9464ec134e1c83122ee1734d8bff6b4ad | 7970659645f363788d371d00e2128f0cc3a47362 | refs/heads/master | 2021-01-19T13:19:39.797501 | 2020-11-28T08:26:27 | 2020-11-28T08:26:27 | 82,384,393 | 6 | 0 | null | null | null | null | UTF-8 | Python | false | false | 687 | py | # -*- coding: utf-8 -*-
##############################################################################################
# This file is deprecated because Python 2.x is deprecated #
# A Python 3.x version of this file can be found at: #
# #
# https://github.com/Guymer/PyGuymer3/blob/master/interpolate.py #
##############################################################################################
def interpolate(x1, x2, y1, y2, x):
    """Linearly interpolate y at position x between (x1, y1) and (x2, y2)."""
    span = x2 - x1
    weight_low = x2 - x
    weight_high = x - x1
    return (y1 * weight_low + y2 * weight_high) / span
| [
"t.m.guymer@thomasguymer.co.uk"
] | t.m.guymer@thomasguymer.co.uk |
3c9895e85da7219013180c96d2bc112a18efb809 | 7bededcada9271d92f34da6dae7088f3faf61c02 | /pypureclient/flashblade/FB_2_1/models/active_directory_post.py | 405133c46235fa042776c926c040402d94c2c59c | [
"BSD-2-Clause"
] | permissive | PureStorage-OpenConnect/py-pure-client | a5348c6a153f8c809d6e3cf734d95d6946c5f659 | 7e3c3ec1d639fb004627e94d3d63a6fdc141ae1e | refs/heads/master | 2023-09-04T10:59:03.009972 | 2023-08-25T07:40:41 | 2023-08-25T07:40:41 | 160,391,444 | 18 | 29 | BSD-2-Clause | 2023-09-08T09:08:30 | 2018-12-04T17:02:51 | Python | UTF-8 | Python | false | false | 8,543 | py | # coding: utf-8
"""
FlashBlade REST API
A lightweight client for FlashBlade REST API 2.1, developed by Pure Storage, Inc. (http://www.purestorage.com/).
OpenAPI spec version: 2.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re
import six
import typing
from ....properties import Property
if typing.TYPE_CHECKING:
from pypureclient.flashblade.FB_2_1 import models
class ActiveDirectoryPost(object):
    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Generated swagger model: attribute name -> declared attribute type.
    swagger_types = {
        'computer_name': 'str',
        'directory_servers': 'list[str]',
        'domain': 'str',
        'encryption_types': 'list[str]',
        'fqdns': 'list[str]',
        'join_ou': 'str',
        'kerberos_servers': 'list[str]',
        'password': 'str',
        'service_principal_names': 'list[str]',
        'user': 'str'
    }

    # Attribute name -> JSON key used on the wire (identical here).
    attribute_map = {
        'computer_name': 'computer_name',
        'directory_servers': 'directory_servers',
        'domain': 'domain',
        'encryption_types': 'encryption_types',
        'fqdns': 'fqdns',
        'join_ou': 'join_ou',
        'kerberos_servers': 'kerberos_servers',
        'password': 'password',
        'service_principal_names': 'service_principal_names',
        'user': 'user'
    }

    # Attributes that must never be set to None (enforced in __setattr__).
    required_args = {
        'domain',
        'password',
        'user',
    }

    def __init__(
        self,
        domain,  # type: str
        password,  # type: str
        user,  # type: str
        computer_name=None,  # type: str
        directory_servers=None,  # type: List[str]
        encryption_types=None,  # type: List[str]
        fqdns=None,  # type: List[str]
        join_ou=None,  # type: str
        kerberos_servers=None,  # type: List[str]
        service_principal_names=None,  # type: List[str]
    ):
        """
        Keyword args:
            computer_name (str): The common name of the computer account to be created in the Active Directory domain. If not specified, defaults to the name of the Active Directory configuration.
            directory_servers (list[str]): A list of directory servers that will be used for lookups related to user authorization. Accepted server formats are IP address and DNS name. All specified servers must be registered to the domain appropriately in the array's configured DNS and will only be communicated with over the secure LDAP (LDAPS) protocol. If not specified, servers are resolved for the domain in DNS. The specified list can have a maximum length of 5.
            domain (str, required): The Active Directory domain to join.
            encryption_types (list[str]): The encryption types that will be supported for use by clients for Kerberos authentication. Defaults to `aes256-cts-hmac-sha1-96`. Valid values include `aes256-cts-hmac-sha1-96`, `aes128-cts-hmac-sha1-96`, and `arcfour-hmac`. Cannot be provided if using an existing machine account.
            fqdns (list[str]): A list of fully qualified domain names to use to register service principal names for the machine account. If specified, every service principal that is supported by the array will be registered for each fully qualified domain name specified. If neither `fqdns` nor `service_principal_names` is specified, the default `service_principal_names` are constructed using the `computer_name` and `domain` fields. Cannot be provided in combination with `service_principal_names`. Cannot be provided if using an existing machine account.
            join_ou (str): The relative distinguished name of the organizational unit in which the computer account should be created when joining the domain. Cannot be provided if using an existing machine account. If not specified, defaults to `CN=Computers`.
            kerberos_servers (list[str]): A list of key distribution servers to use for Kerberos protocol. Accepted server formats are IP address and DNS name. All specified servers must be registered to the domain appropriately in the array's configured DNS. If not specified, servers are resolved for the domain in DNS. The specified list can have a maximum length of 5.
            password (str, required): The login password of the user with privileges to create the computer account in the domain. If using an existing computer account, the user must have privileges to read attributes from the computer account and reset the password on that account. This is not persisted on the array.
            service_principal_names (list[str]): A list of service principal names to register for the machine account, which can be used for the creation of keys for Kerberos authentication. If neither `service_principal_names` nor `fqdns` is specified, the default `service_principal_names` are constructed using the `computer_name` and `domain` fields. Cannot be provided in combination with `fqdns`. Cannot be provided if using an existing machine account.
            user (str, required): The login name of the user with privileges to create the computer account in the domain. If using an existing computer account, the user must have privileges to read attributes from the computer account and reset the password on that account. This is not persisted on the array.
        """
        # Optional attributes are only assigned when supplied, so unset
        # ones read back as None via __getattribute__.
        if computer_name is not None:
            self.computer_name = computer_name
        if directory_servers is not None:
            self.directory_servers = directory_servers
        self.domain = domain
        if encryption_types is not None:
            self.encryption_types = encryption_types
        if fqdns is not None:
            self.fqdns = fqdns
        if join_ou is not None:
            self.join_ou = join_ou
        if kerberos_servers is not None:
            self.kerberos_servers = kerberos_servers
        self.password = password
        if service_principal_names is not None:
            self.service_principal_names = service_principal_names
        self.user = user

    def __setattr__(self, key, value):
        # Reject attributes outside the model and None for required fields.
        if key not in self.attribute_map:
            raise KeyError("Invalid key `{}` for `ActiveDirectoryPost`".format(key))
        if key == "domain" and value is None:
            raise ValueError("Invalid value for `domain`, must not be `None`")
        if key == "password" and value is None:
            raise ValueError("Invalid value for `password`, must not be `None`")
        if key == "user" and value is None:
            raise ValueError("Invalid value for `user`, must not be `None`")
        self.__dict__[key] = value

    def __getattribute__(self, item):
        # Class-level Property placeholders (unset attributes) read as None.
        value = object.__getattribute__(self, item)
        if isinstance(value, Property):
            return None
        else:
            return value

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        for attr, _ in six.iteritems(self.swagger_types):
            if hasattr(self, attr):
                value = getattr(self, attr)
                # Recursively serialize nested models, lists, and dicts.
                if isinstance(value, list):
                    result[attr] = list(map(
                        lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                        value
                    ))
                elif hasattr(value, "to_dict"):
                    result[attr] = value.to_dict()
                elif isinstance(value, dict):
                    result[attr] = dict(map(
                        lambda item: (item[0], item[1].to_dict())
                        if hasattr(item[1], "to_dict") else item,
                        value.items()
                    ))
                else:
                    result[attr] = value
        if issubclass(ActiveDirectoryPost, dict):
            for key, value in self.items():
                result[key] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, ActiveDirectoryPost):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| [
"bcai@purestorage.com"
] | bcai@purestorage.com |
6de16e882d54b189cd4432684fe408fb63810100 | 162e0e4791188bd44f6ce5225ff3b1f0b1aa0b0d | /trex/widgets/variableexplorer/tests/test_utils.py | f441a93f49aee22028a0bd2c3efe39bcbaa31376 | [] | no_license | testsleeekGithub/trex | 2af21fa95f9372f153dbe91941a93937480f4e2f | 9d27a9b44d814ede3996a37365d63814214260ae | refs/heads/master | 2020-08-01T11:47:43.926750 | 2019-11-06T06:47:19 | 2019-11-06T06:47:19 | 210,987,245 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 676 | py | # -*- coding: utf-8 -*-
#
# Copyright © TRex Project Contributors
# Licensed under the terms of the MIT License
"""
Tests for utils.py
"""
# Third party imports
import pytest
# Local imports
from trex.widgets.variableexplorer.utils import sort_against
# --- Tests
# -----------------------------------------------------------------------------
def test_sort_against():
    """sort_against orders the first list by the values of the second."""
    values = [5, 6, 7]
    keys = [2, 3, 1]
    assert sort_against(values, keys) == [7, 5, 6]
def test_sort_against_is_stable():
    """Equal keys must leave the first list's relative order unchanged."""
    values = [3, 0, 1]
    keys = [1, 1, 1]
    assert sort_against(values, keys) == values
# Allow running this test module directly, outside a pytest invocation.
if __name__ == "__main__":
    pytest.main()
| [
"shkolanovaya@gmail.com"
] | shkolanovaya@gmail.com |
3c41a57cd18183f8d1d971ce71b94d38243f5a85 | a6e4a6f0a73d24a6ba957277899adbd9b84bd594 | /sdk/python/pulumi_azure_native/cdn/v20161002/get_profile_supported_optimization_types.py | c7c51e915af6ec616d256f41b35028165f4f7fb8 | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | MisinformedDNA/pulumi-azure-native | 9cbd75306e9c8f92abc25be3f73c113cb93865e9 | de974fd984f7e98649951dbe80b4fc0603d03356 | refs/heads/master | 2023-03-24T22:02:03.842935 | 2021-03-08T21:16:19 | 2021-03-08T21:16:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,758 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
__all__ = [
'GetProfileSupportedOptimizationTypesResult',
'AwaitableGetProfileSupportedOptimizationTypesResult',
'get_profile_supported_optimization_types',
]
# Generated Pulumi output type for the GetSupportedOptimizationTypes API.
@pulumi.output_type
class GetProfileSupportedOptimizationTypesResult:
    """
    The result of the GetSupportedOptimizationTypes API
    """
    def __init__(__self__, supported_optimization_types=None):
        if supported_optimization_types and not isinstance(supported_optimization_types, list):
            raise TypeError("Expected argument 'supported_optimization_types' to be a list")
        pulumi.set(__self__, "supported_optimization_types", supported_optimization_types)

    @property
    @pulumi.getter(name="supportedOptimizationTypes")
    def supported_optimization_types(self) -> Optional[Sequence[str]]:
        """
        Supported optimization types for a profile.
        """
        return pulumi.get(self, "supported_optimization_types")
class AwaitableGetProfileSupportedOptimizationTypesResult(GetProfileSupportedOptimizationTypesResult):
    # pylint: disable=using-constant-test
    def __await__(self):
        # The unreachable `yield` makes __await__ a generator function, as
        # required by the await protocol; the result is returned directly.
        if False:
            yield self
        return GetProfileSupportedOptimizationTypesResult(
            supported_optimization_types=self.supported_optimization_types)
def get_profile_supported_optimization_types(profile_name: Optional[str] = None,
                                             resource_group_name: Optional[str] = None,
                                             opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetProfileSupportedOptimizationTypesResult:
    """
    The result of the GetSupportedOptimizationTypes API


    :param str profile_name: Name of the CDN profile which is unique within the resource group.
    :param str resource_group_name: Name of the Resource group within the Azure subscription.
    """
    # Marshal arguments into the provider invoke payload.
    __args__ = dict()
    __args__['profileName'] = profile_name
    __args__['resourceGroupName'] = resource_group_name
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    # Synchronously invoke the Azure Native provider function and rewrap
    # the typed result in its awaitable subclass.
    __ret__ = pulumi.runtime.invoke('azure-native:cdn/v20161002:getProfileSupportedOptimizationTypes', __args__, opts=opts, typ=GetProfileSupportedOptimizationTypesResult).value

    return AwaitableGetProfileSupportedOptimizationTypesResult(
        supported_optimization_types=__ret__.supported_optimization_types)
| [
"noreply@github.com"
] | MisinformedDNA.noreply@github.com |
1134064fddcca0a4808b546e92daff2f17d7533e | bbf025a5f8596e5513bd723dc78aa36c46e2c51b | /recursion + DP/416 partitionEqualSubsetSum.py | 2c06293fc187af819729dcf730c357c9850251c7 | [] | no_license | AlanFermat/leetcode | 6209bb5cf2d1b19e3fe7b619e1230f75bb0152ab | cacba4abaca9c4bad8e8d12526336115067dc6a0 | refs/heads/master | 2021-07-11T04:00:00.594820 | 2020-06-22T21:31:02 | 2020-06-22T21:31:02 | 142,341,558 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 22 | py | def partition():
pass | [
"zy19@rice.edu"
] | zy19@rice.edu |
fb337d2f93cb7bc1c230ac5ee85b0ae6153d1a2b | 673bf701a310f92f2de80b687600cfbe24612259 | /misoclib/mem/litesata/test/scrambler_tb.py | 20c4045c45769c09a380c0ad879bb473df375f0a | [
"BSD-2-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | mogorman/misoc | d78340a9bf67feaede20e8cac473bcfddbd186a3 | 4ec49e2aadcff0c3ca34ebd0d35013d88f4d3e1f | refs/heads/master | 2021-01-18T05:38:39.670977 | 2015-03-10T05:37:52 | 2015-03-10T05:37:52 | 30,672,191 | 1 | 0 | null | 2015-02-11T22:05:05 | 2015-02-11T22:05:05 | null | UTF-8 | Python | false | false | 1,291 | py | import subprocess
from misoclib.mem.litesata.common import *
from misoclib.mem.litesata.core.link.scrambler import *
from misoclib.mem.litesata.test.common import *
class TB(Module):
    """Migen testbench comparing the hardware scrambler against the
    './scrambler' C reference model for `length` output words."""
    def __init__(self, length):
        # Wrap the scrambler with a synchronous reset so every run starts
        # from a known LFSR state.
        self.submodules.scrambler = InsertReset(Scrambler())
        self.length = length

    def get_c_values(self, length):
        # Run the external C model, feeding it the length as a hex string
        # on stdin and parsing one hex value per output line (the final
        # empty split element is dropped).
        stdin = "0x%08x" %length
        with subprocess.Popen("./scrambler", stdin=subprocess.PIPE, stdout=subprocess.PIPE) as process:
            process.stdin.write(stdin.encode("ASCII"))
            out, err = process.communicate()
        return [int(e, 16) for e in out.decode("ASCII").split("\n")[:-1]]

    def gen_simulation(self, selfp):
        # init CRC: assert clock-enable and pulse reset for one cycle.
        selfp.scrambler.ce = 1
        selfp.scrambler.reset = 1
        yield
        selfp.scrambler.reset = 0

        # log results: capture one scrambler output word per cycle.
        yield
        sim_values = []
        for i in range(self.length):
            sim_values.append(selfp.scrambler.value)
            yield

        # stop: deassert clock-enable and let the design settle.
        selfp.scrambler.ce = 0
        for i in range(32):
            yield

        # get C code reference
        c_values = self.get_c_values(self.length)

        # check results: report alignment shift, compared length, errors.
        s, l, e = check(c_values, sim_values)
        print("shift "+ str(s) + " / length " + str(l) + " / errors " + str(e))
if __name__ == "__main__":
    from migen.sim.generic import run_simulation
    length = 8192
    # Extra cycles beyond `length` cover the reset and stop phases.
    run_simulation(TB(length), ncycles=length+100, vcd_name="my.vcd")
| [
"florent@enjoy-digital.fr"
] | florent@enjoy-digital.fr |
f0d1d5adff2ca0f3f278aaf8ad723ec45be24682 | d28a65d23c204a9736b597ae510d9dd54d2ffd0f | /tests/testRF2Namespace.py | 9c77dd12cc6eaf93924009005bcb02f414c7cd61 | [
"BSD-3-Clause"
] | permissive | cts2/rf2db | 99ba327611e620fc5533245064afcc1daff7c164 | 985cd7ad84c8907306a0d7d309d4a1c0fb422ba4 | refs/heads/master | 2020-05-17T22:37:25.476553 | 2015-08-24T22:18:19 | 2015-08-24T22:18:19 | 15,264,407 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,835 | py | # -*- coding: utf-8 -*-
# Copyright (c) 2014, Mayo Clinic
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# Neither the name of the <ORGANIZATION> nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
# OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest
from rf2db.db.RF2Namespaces import RF2Namespace, DecodedNamespace
from rf2db.utils.sctid_generator import sctid_generator, CIMI_Namespace
from SetConfig import setConfig
from ClearConfig import clearConfig
class RF2NamespaceTestCase(unittest.TestCase):
def setUp(self):
setConfig()
def tearDown(self):
clearConfig()
def nsTest(self, ns):
x = RF2Namespace(ns)
dns = DecodedNamespace(x.nextConceptId())
self.assertEqual(ns, dns.namespace)
self.assertEqual(sctid_generator.CONCEPT._lid, dns.partition)
base = dns.item
dns2 = DecodedNamespace(x.nextConceptId())
self.assertEqual(ns, dns2.namespace)
self.assertEqual(sctid_generator.CONCEPT._lid, dns2.partition)
self.assertEqual(dns2.item, base+1)
dns = DecodedNamespace(x.nextRelationshipId())
self.assertEqual(ns, dns.namespace)
self.assertEqual(sctid_generator.RELATIONSHIP._lid, dns.partition)
dns = DecodedNamespace(x.nextDescriptionId())
self.assertEqual(ns, dns.namespace)
self.assertEqual(sctid_generator.DESCRIPTION._lid, dns.partition)
def test_CIMI(self):
self.nsTest(CIMI_Namespace)
self.nsTest(100087)
def test_strNS(self):
ns = "1000087"
x = RF2Namespace(ns)
dns = DecodedNamespace(x.nextConceptId())
self.assertEqual(ns, str(dns.namespace))
self.assertEqual(sctid_generator.CONCEPT._lid, dns.partition)
def testDecodedNamespace(self):
dns1 = DecodedNamespace(101291013)
self.assertEqual(3, dns1.checkdigit)
self.assertEqual(1, dns1.partition)
self.assertEqual(101291, dns1.item)
self.assertEqual(0, dns1.namespace)
dns1 = DecodedNamespace(101291111)
self.assertEqual(1, dns1.checkdigit)
self.assertEqual(11, dns1.partition)
self.assertEqual(101291, dns1.namespace)
self.assertEqual(0, dns1.item)
dns1 = DecodedNamespace(999999990989121104)
self.assertEqual(4, dns1.checkdigit)
self.assertEqual(10, dns1.partition)
self.assertEqual(99999999, dns1.item)
self.assertEqual(989121, dns1.namespace)
if __name__ == '__main__':
unittest.main()
| [
"solbrig.harold@mayo.edu"
] | solbrig.harold@mayo.edu |
6a2ffbb56766a47705ad8233ad60ec4b258bb9d7 | dac7095e7b5ad4dae993871c1ae45cbb7a5ce5f7 | /Character/25.Milim/Milim_O.py | 8c2045b511085f6a3595ca72e7b3ac72960686d2 | [] | no_license | Lastation/RenewalAniChaos | d12a8423f4b83cb019495c59ed059451e67e0483 | c3edb29af58925de55c11110ccaf927d2b5d1b39 | refs/heads/master | 2023-08-24T11:28:35.614844 | 2023-08-22T21:23:14 | 2023-08-22T21:23:14 | 246,617,812 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,766 | py | import Function as f;
const s = StringBuffer();
function main(cp)
{
if (f.count[cp] == 1)
{
MoveLocation("25.Milim_Bozo", f.heroID[cp], cp, "Anywhere");
}
f.HoldPosition(cp);
f.BanReturn(cp);
if (f.delay[cp] == 0)
{
if (f.count[cp] == 0)
{
if (f.loop[cp] == 0)
{
CreateUnit(1, "Flame Blue", "[Skill]Unit_Wait_8", cp);
SetInvincibility(Enable, "Any unit", cp, "[Skill]Unit_Wait_ALL");
MoveLocation(f.location[cp], f.heroID[cp], cp, "Anywhere");
MoveUnit(All, "Flame Blue", cp, "Anywhere", f.location[cp]);
MoveLocation("25.Milim_Bozo", "Flame Blue", cp, "Anywhere");
f.SkillWait(cp, 80);
f.loop[cp] += 1;
}
else if (f.loop[cp] < 60)
{
var x = 50;
if (cp >= 3) x = -x;
f.DotShape(cp, 8, "40 + 1n Zealot", 0, 0);
KillUnitAt(All, "40 + 1n Zealot", "Anywhere", cp);
addloc("25.Milim_Bozo", x * 3, 0);
MoveUnit(All, "Flame Blue", cp, "Anywhere", "25.Milim_Bozo");
CreateUnit(3, "40 + 1n Wraith", "[Skill]Unit_Wait_8", cp);
SetInvincibility(Enable, "Any unit", cp, "[Skill]Unit_Wait_ALL");
MoveLocation(f.location[cp], "Flame Blue", cp, "Anywhere");
MoveUnit(1, "40 + 1n Wraith", cp, "[Skill]Unit_Wait_ALL", f.location[cp]);
addloc(f.location[cp], -x, 0);
MoveUnit(1, "40 + 1n Wraith", cp, "[Skill]Unit_Wait_ALL", f.location[cp]);
addloc(f.location[cp], -x, 0);
MoveUnit(1, "40 + 1n Wraith", cp, "[Skill]Unit_Wait_ALL", f.location[cp]);
KillUnitAt(All, "40 + 1n Wraith", "Anywhere", cp);
if ((cp >= 3 && (Bring(cp, AtLeast, 1, "Flame Blue", "[Potal]Shop7") || Bring(cp, AtLeast, 1, "Flame Blue", "[Potal]Potal7")))
|| (cp < 3 && (Bring(cp, AtLeast, 1, "Flame Blue", "[Potal]Shop8") || Bring(cp, AtLeast, 1, "Flame Blue", "[Potal]Potal8"))))
{
SetDeaths(cp, SetTo, 120, " `UniqueCoolTime");
f.SkillWait(cp, 80);
f.count[cp] = 2;
f.loop[cp] = 0;
}
else if (cp < 3)
{
if (Bring(P8, AtLeast, 1, "Buildings", "25.Milim_Bozo"))
{
SetSwitch("Unique - MilimWarning", Set);
SetSwitch("Recall - Milim", Set);
f.Voice_Routine(cp, 3);
f.SkillWait(cp, 80);
f.count[cp] += 1;
f.loop[cp] = 0;
}
else
{
f.SkillWait(cp, 80);
f.loop[cp] += 1;
}
}
else if (cp >= 3)
{
if (Bring(P7, AtLeast, 1, "Buildings", "25.Milim_Bozo"))
{
SetSwitch("Unique - MilimWarning", Set);
SetSwitch("Recall - Milim", Set);
f.Voice_Routine(cp, 3);
f.SkillWait(cp, 80);
f.count[cp] += 1;
f.loop[cp] = 0;
}
else
{
f.SkillWait(cp, 80);
f.loop[cp] += 1;
}
}
}
else if (f.loop[cp] == 60)
{
SetDeaths(cp, SetTo, 120, " `UniqueCoolTime");
f.SkillWait(cp, 80);
f.count[cp] = 2;
f.loop[cp] = 0;
}
}
else if (f.count[cp] == 1)
{
if (f.loop[cp] < 40)
{
f.EdgeShape(cp, 1, "50 + 1n Tank", 0, 7, 120);
if (f.loop[cp] % 2 == 0)
{
f.EdgeShape(cp, 1, "Protoss Dark Archon", 0, 3, 40);
}
else if (f.loop[cp] % 2 == 1)
{
f.EdgeShape(cp, 1, "Protoss Dark Archon", 0, 5, 80);
}
KillUnitAt(All, "Protoss Dark Archon", "Anywhere", cp);
KillUnitAt(All, "50 + 1n Tank", "Anywhere", cp);
KillUnitAt(All, "60 + 1n High Templar", "Anywhere", cp);
KillUnitAt(All, "Rhynadon (Badlands)", "Anywhere", cp);
f.SkillWait(cp, 80);
f.loop[cp] += 1;
}
else if (f.loop[cp] == 40)
{
MoveLocation("25.Milim_Bozo", "Flame Blue", cp, "Anywhere");
if (Deaths(CurrentPlayer, Exactly, 0, (210)))
{
MoveUnit(All, f.heroID[cp], cp, "Anywhere", "25.Milim_Bozo");
CenterView("25.Milim_Bozo");
}
f.NxNSquareShape(cp, 1, "130 + 1n Norad", 3, 75);
f.DotShape(cp, 16, "80 + 1n Goliath", 0, 0);
Order("130 + 1n Norad", cp, "Anywhere", Attack, "Anywhere");
MoveUnit(All, "80 + 1n Goliath", cp, "[Skill]Unit_Wait_ALL", f.location[cp]);
Order("80 + 1n Goliath", cp, "Anywhere", Attack, "Anywhere");
SetSwitch("Recall - Milim", Clear);
f.SkillWait(cp, 80);
f.loop[cp] += 1;
}
else if (f.loop[cp] < 45)
{
var i = f.loop[cp] - 41;
f.EdgeShape(cp, 1, "60 + 1n Siege", 0, 5 + 2 * i, 100 + 50 * i);
f.EdgeShape(cp, 1, "50 + 1n Battlecruiser", 0, 3 + 2 * i, 50 + 50 * i);
KillUnitAt(All, "60 + 1n Siege", "Anywhere", cp);
KillUnitAt(All, "50 + 1n Battlecruiser", "Anywhere", cp);
f.SkillWait(cp, 80);
f.loop[cp] += 1;
}
else if (f.loop[cp] == 45)
{
KillUnitAt(All, "130 + 1n Norad", "Anywhere", cp);
KillUnitAt(All, "80 + 1n Goliath", "Anywhere", cp);
f.EdgeShape(cp, 1, " Unit. Hoffnung 25000", 0, 3, 50);
f.EdgeShape(cp, 1, " Unit. Hoffnung 25000", 0, 5, 100);
f.EdgeShape(cp, 1, " Unit. Hoffnung 25000", 0, 7, 150);
f.EdgeShape(cp, 1, " Unit. Hoffnung 25000", 0, 9, 150);
KillUnitAt(All, " Unit. Hoffnung 25000", "Anywhere", cp);
f.SkillWait(cp, 80);
f.loop[cp] += 1;
}
else if (f.loop[cp] == 46)
{
f.Voice_Routine(cp, 4);
SetSwitch("Unique - Milim", Set);
SetDeaths(cp, SetTo, 2880, " `UniqueCoolTime");
SetDeaths(cp, SetTo, 720, " `UniqueSkill");
f.SkillWait(cp, 80);
f.count[cp] += 1;
f.loop[cp] = 0;
}
}
else if (f.count[cp] == 2)
{
RemoveUnitAt(All, "Flame Blue", "Anywhere", cp);
SetSwitch("Unique - MilimWarning", Clear);
f.SkillEnd(cp);
}
}
} | [
"ghtjd000129@naver.com"
] | ghtjd000129@naver.com |
80bb3d72b5be850cf02eca31d0d3c0b58fe08313 | 3d3f629105b0a350c011976cae02cb10b385d873 | /keras_180112/klab-10-2-mnist_nn.py | 36d1418056f2ff3caacf1143ea13e15413242fbf | [] | no_license | vt0311/acorn_tensor | fbe7a9507db15161f029f297df64bfe0c937764f | eb9d94c5f28d673b82becb31abe6640cbd18cf89 | refs/heads/master | 2021-09-07T00:32:33.819768 | 2018-02-14T07:45:06 | 2018-02-14T07:45:06 | 114,960,417 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,238 | py | from __future__ import print_function
import keras
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import Dense, Dropout
# Hyperparameters for the MNIST fully-connected classifier.
batch_size = 128
num_classes = 10
epochs = 12
# ==============================================================================
# prepare data
# load_data() returns 60k training and 10k test 28x28 grayscale digit images.
(X_train, y_train), (X_test, y_test) = mnist.load_data()
# Flatten each 28x28 image into a 784-dimensional vector.
X_train = X_train.reshape(60000, 784)
X_test = X_test.reshape(10000, 784)
X_train = X_train.astype('float32')
X_test = X_test.astype('float32')
# Scale pixel intensities from [0, 255] down to [0, 1].
X_train /= 255
X_test /= 255
print(X_train.shape[0], 'train samples')
print(X_test.shape[0], 'test samples')
# One-hot encode the digit labels (0-9) for categorical crossentropy.
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)
# ==============================================================================
# build model
# (model code from http://iostream.tistory.com/111)
# Four hidden Dense(256, relu) layers, each followed by 30% dropout,
# then a softmax output over the 10 digit classes.
model = Sequential()
# glorot_uniform : Xavier uniform initializer.
model.add(Dense(256, input_dim=784,
kernel_initializer='glorot_uniform', activation='relu'))
model.add(Dropout(0.3))
model.add(Dense(256, kernel_initializer='glorot_uniform', activation='relu'))
model.add(Dropout(0.3))
model.add(Dense(256, kernel_initializer='glorot_uniform', activation='relu'))
model.add(Dropout(0.3))
model.add(Dense(256, kernel_initializer='glorot_uniform', activation='relu'))
model.add(Dropout(0.3))
model.add(Dense(num_classes, activation='softmax'))
model.compile(loss='categorical_crossentropy',
optimizer='adam', metrics=['accuracy'])
# fit method
# callbacks parameter : functions to apply during/after fitting
# validation_split : fraction of the data withheld from training ( 0.0 <= fraction <= 1.0 )
history = model.fit(X_train, y_train,
batch_size=batch_size,
epochs=epochs,
verbose=1,
#validation_split=0.2)
validation_split=0.3)
# ==============================================================================
# predict
# evaluate() returns [loss, accuracy] per the compiled metrics.
score = model.evaluate(X_test, y_test, batch_size=batch_size)
print('\nTest loss:', score[0])
print('Test accuracy:', score[1])
'''
Test loss: 0.0742975851574
Test accuracy: 0.9811
'''
| [
"hsw0311@nate.com"
] | hsw0311@nate.com |
fb1758594df68342b967cc1f53b28f8b6252e80c | a2e638cd0c124254e67963bda62c21351881ee75 | /Extensions/StructuredProductsDealPackage/FPythonCode/StructuredProductBase.py | a8a4961df7b260465ed6c73acdd4678ac88b1d4c | [] | no_license | webclinic017/fa-absa-py3 | 1ffa98f2bd72d541166fdaac421d3c84147a4e01 | 5e7cc7de3495145501ca53deb9efee2233ab7e1c | refs/heads/main | 2023-04-19T10:41:21.273030 | 2021-05-10T08:50:05 | 2021-05-10T08:50:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,371 | py |
import acm
import FUxCore
from DealPackageDevKit import DealPackageDefinition, DealPackageException, DealPackageUserException, CalcVal, Object, Str, Action, List, Bool, Float, Int, Date, Text, DatePeriod, DealPackageChoiceListSource, Settings, UXDialogsWrapper, TradeActions, CorrectCommand, NovateCommand, CloseCommand, AcquirerChoices, CounterpartyChoices, PortfolioChoices, TradeStatusChoices, CompositeAttributeDefinition, InstrumentPart, DealPart, ParseSuffixedFloat, ReturnDomainDecorator
from SP_DealPackageHelper import SafeDivision, AddFArrayItemToFDictionary
from CompositeTradeComponents import StructuredTradeInput
# ####################################################################################### #
# # # #
# # NOTE: # #
# # # #
# # Any Deal Package that is using RegisterAlignmentAcrossComponents as well # #
# # as implementing its own IsValid method MUST call super(myDpClass, self).IsValid() # #
# # for the automatic validation of fields registered to be aligned to be activated. # #
# # # #
# ####################################################################################### #
class ComponentBasedDealPackage(DealPackageDefinition):
    """
    Base class for deal packages built from several components.

    Adds: mapping of solver "top values" to component parameters,
    registration of fields that must stay aligned across components,
    and the validation that keeps those fields equal.
    """

    def MapSolverTopValueToComponent(self, value, attrName, parameters):
        """
        Parse a suffixed solver top value (e.g. "1.5 C" for a coupon) and
        route it to the matching component parameter.

        The suffix selects a parameter kind (coupon / barrier / strike /
        second barrier); the first matching key present in *parameters*
        sets self.solverParameter to "<component>_<key>" and the parsed
        float is returned.  If no suffix matches, *value* is returned
        unchanged.
        """
        f = self.GetFormatter(attrName)
        # map([suffix], [keys])
        mappings = [
            (['C', 'COUPON', 'CO'],
             ['coupon']),
            (['B', 'BARRIER', 'BA', 'BAR'],
             ['barrierLevelPct', 'barrierLevel']),
            (['S', 'STR', 'STRIKE'],
             ['strikePricePct', 'strikePrice']),
            (['BSEC', 'SECOND', 'SEC'],
             ['doubleBarrierLevel'])
        ]
        for suffix, keys in mappings:
            goalValue = ParseSuffixedFloat(value, suffix=suffix, formatter=f)
            if goalValue is None:
                continue # Not candidate
            for key in keys:
                if key in parameters: # Match found
                    self.solverParameter = '_'.join([parameters[key], key])
                    return goalValue
        return value

    def TopValueFields(self):
        """Default: no top-value fields.  Subclasses may override."""
        return {}

    def IsValid(self, exceptionAccumulator, aspect):
        """
        Accumulate an error for every pair of registered aligned fields
        that is currently out of sync.  Subclasses overriding IsValid
        must call super() for this check to run (see file banner).
        """
        # Validate that all fields that have been registered to be kept in sync
        # are not made "out of sync".
        if hasattr(self, '_alignFields'):
            failedPairs = acm.FDictionary()
            for attributeKey in self._alignFields.Keys().Sort():
                for dependentField in self._alignFields[attributeKey]:
                    if getattr(self, dependentField) != getattr(self, attributeKey):
                        # Report each unequal pair only once (a,b == b,a).
                        if not self.HasAlreadyFoundFailedPair(failedPairs, attributeKey, dependentField):
                            exceptionAccumulator('Deal package fields %s and %s must be equal' %
                                (attributeKey, dependentField) )
                            self.AddValidationFailedPair(failedPairs, attributeKey, dependentField)

    # ##################################################
    # Mapping methods with a general purpose
    # ##################################################
    @ReturnDomainDecorator('double')
    def SumOfTradePrices(self, value = '*Reading*'):
        """Sum of Price() over all trades.  Getter only: calls with an
        actual value (setter mode) are ignored."""
        if value == '*Reading*':
            price = 0.0
            for t in self.Trades():
                price += t.Price()
            return price

    @ReturnDomainDecorator('double')
    def SumOfPremiums(self, value = '*Reading*'):
        """Sum of Premium() over all trades.  Raises if the trades are
        not all denominated in the same currency.  Getter only."""
        if value == '*Reading*':
            premium = 0.0
            premiumCurr = None
            for t in self.Trades():
                if premiumCurr is None:
                    premiumCurr = t.Currency()
                if premiumCurr != t.Currency():
                    raise DealPackageException("Cannot calculate a sum of premium for premiums defined in different currencies")
                premium += t.Premium()
            return premium

    # ##################################################
    # Functions for being able to link attribute updates
    # and to validate that they remian equal
    # ##################################################
    def AlignAcrossComponents(self, attrName, *rest):
        """Callback: push the new value of *attrName* to every field
        registered in the same alignment group."""
        for dependentField in self._alignFields[attrName]:
            if getattr(self, dependentField) != getattr(self, attrName):
                setattr(self, dependentField, getattr(self, attrName))

    def RegisterAlignmentAcrossComponents(self, attrNames):
        """Register *attrNames* as a group that must stay equal; changing
        any of them triggers AlignAcrossComponents."""
        self.RegisterCallbackOnAttributeChanged(self.AlignAcrossComponents, attrNames)
        if not hasattr(self, '_alignFields'):
            self._alignFields = acm.FDictionary()
        for attrName in attrNames:
            self._alignFields.AtPut(attrName, attrNames)

    def AddValidationFailedPair(self, failedPairs, attribute1, attribute2):
        """Record the failed pair symmetrically (under both attributes)."""
        failedPairs = AddFArrayItemToFDictionary(failedPairs, attribute1, attribute2)
        failedPairs = AddFArrayItemToFDictionary(failedPairs, attribute2, attribute1)
        return failedPairs

    def HasAlreadyFoundFailedPair(self, failedPairs, attribute1, attribute2):
        """True if the (attribute1, attribute2) pair was already recorded,
        in either order."""
        return (
            ( failedPairs.HasKey(attribute1)
              and failedPairs[attribute1].IndexOfFirstEqual(attribute2) >= 0
            )
            or
            ( failedPairs.HasKey(attribute2)
              and failedPairs[attribute2].IndexOfFirstEqual(attribute1) >= 0
            )
        )
class ProductBase(ComponentBasedDealPackage):
    """
    Common attribute layout for structured products: package name,
    notional, currency, spot days and the structured trade input
    component.  Subclasses must supply the mapping methods listed under
    "Methods that must be implemented by sub classes".
    """

    # Declarative attribute definitions resolved by the DealPackageDevKit
    # framework; '@Name' strings refer to methods on this class.
    ipName = Object ( objMapping = 'InstrumentPackage.Name',
                      label = 'Name' )
    notional = Object ( objMapping = InstrumentPart("Notional"),
                        label = '@NotionalLabel',
                        formatter = '@NotionalFormatter',
                        domain = 'double',
                        defaultValue = 1000.0 )
    currency = Object ( label = 'Currency',
                        domain = 'FCurrency',
                        objMapping = (InstrumentPart("InstrumentPartCurrencies.Currency").
                                      DealPart("DealPartCurrencies.Currency" ) ) )
    spotDays = Object ( objMapping = "Instruments.SpotBankingDaysOffset",
                        label = 'Spot Days',
                        visible = '@VisibleSpotDays',
                        validate = '@ValidateSpotDays')
    tradeInput = StructuredTradeInput ( quantityMappingName = "TradeQuantitySpecification",
                                        priceLayout = "PriceLayout" )

    def AttributeOverrides(self, overrideAccumulator):
        """Override defaults on the tradeInput composite: trades start as
        'Simulated' with quantity 1.0."""
        attrs = {}
        attrs['tradeInput'] = {
            'status' : dict(defaultValue = 'Simulated'),
            'quantity_value' : dict(defaultValue = 1.0 )
        }
        for composite in attrs:
            for field in attrs[composite]:
                overrideAccumulator({'%s_%s' % (composite, field) : attrs[composite][field] })

    # ################################################
    # Methods that must be implemented by sub classes
    # in order for object mappings to work
    # ################################################
    def InstrumentPartCurrencies(self, *rest):
        """Abstract: must be provided by the concrete product."""
        raise DealPackageException ( "Missing method InstrumentPartCurrencies." )

    def DealPartCurrencies(self, *rest):
        """Abstract: must be provided by the concrete product."""
        raise DealPackageException ( "Missing method DealPartCurrencies." )

    def Notional(self, *rest):
        """Abstract: must be provided by the concrete product."""
        raise DealPackageException ( "Missing method Notional for notional object mapping." )

    # ################################################
    # Methods used by product base attribute that
    # can be implemented per product but are not
    # required
    # ################################################
    def ValidateSpotDays(self, *rest):
        """Hook: no spot-days validation by default."""
        pass

    def NotionalLabel(self, *rest):
        """Label used for the notional attribute."""
        return 'Notional'

    def NotionalFormatter(self, *rest):
        """Formatter used for the notional attribute."""
        return acm.Get('formats/Volume')

    # ###############################################
    # Common methods that can be used as needed
    #
    # NOTE: Potentially methods that should be moved to
    # a help library
    # ###############################################
    def UpdatePremiums(self, *rest):
        """Recalculate the premium on every trade in the package."""
        for trade in self.Trades():
            trade.UpdatePremium(True)

    def AsPortfolio(self, *rest):
        """Return the deal package wrapped as a portfolio."""
        return self.DealPackage().AsPortfolio()

    # #############################################
    # ToDo: Descriptions
    # #############################################
    def TradeQuantitySpecification(self):
        return self._tradeQuantityMapping

    def PriceLayout(self):
        return ''

    def VisibleSpotDays(self, *rest):
        return True
| [
"nencho.georogiev@absa.africa"
] | nencho.georogiev@absa.africa |
e23067ccfea11aa4b0dc25497a2590928d06d6cf | f9d564f1aa83eca45872dab7fbaa26dd48210d08 | /huaweicloud-sdk-rocketmq/huaweicloudsdkrocketmq/v2/model/delete_instance_response.py | 6180c6e96bbaa0ca0622168580d5acbda4542657 | [
"Apache-2.0"
] | permissive | huaweicloud/huaweicloud-sdk-python-v3 | cde6d849ce5b1de05ac5ebfd6153f27803837d84 | f69344c1dadb79067746ddf9bfde4bddc18d5ecf | refs/heads/master | 2023-09-01T19:29:43.013318 | 2023-08-31T08:28:59 | 2023-08-31T08:28:59 | 262,207,814 | 103 | 44 | NOASSERTION | 2023-06-22T14:50:48 | 2020-05-08T02:28:43 | Python | UTF-8 | Python | false | false | 2,439 | py | # coding: utf-8
import six
from huaweicloudsdkcore.sdk_response import SdkResponse
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class DeleteInstanceResponse(SdkResponse):
    """
    Attributes:
        openapi_types (dict): The key is attribute name
                              and the value is attribute type.
        attribute_map (dict): The key is attribute name
                              and the value is json key in definition.
    """

    sensitive_list = []

    openapi_types = {
    }

    attribute_map = {
    }

    def __init__(self):
        """DeleteInstanceResponse

        The model defined in huaweicloud sdk
        """
        super(DeleteInstanceResponse, self).__init__()
        self.discriminator = None

    def to_dict(self):
        """Return the model's properties as a plain dict."""
        def _convert(item):
            # Nested models expose to_dict(); everything else passes through.
            return item.to_dict() if hasattr(item, "to_dict") else item

        result = {}
        for attr in self.openapi_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [_convert(element) for element in value]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {key: _convert(val) for key, val in value.items()}
            elif attr in self.sensitive_list:
                # Mask sensitive fields instead of exposing their values.
                result[attr] = "****"
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Return the JSON string representation of the model."""
        import simplejson as json
        if six.PY2:
            import sys
            reload(sys)
            sys.setdefaultencoding("utf-8")
        return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)

    def __repr__(self):
        """For `print`"""
        return self.to_str()

    def __eq__(self, other):
        """Two responses are equal when both are this type and their
        attribute dicts match."""
        return isinstance(other, DeleteInstanceResponse) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not self == other
| [
"hwcloudsdk@huawei.com"
] | hwcloudsdk@huawei.com |
f6383dcfc12bffdb2d4511f0276c48052a68612c | bdefc2c85e274114180e18838d7eade7d0fa2479 | /OrcLib/LibDatabase.py | df1fa80a13fe98b70c0560fcd38dcd12563dbf7c | [] | no_license | langqy/OrcTestToolsKit | 93e88b5baec33d7d7d20f6e4d55754b2aa4b995b | 7dae0a4d410b233e0316526b3ff6bc157cbe24e0 | refs/heads/master | 2021-01-12T07:48:54.544507 | 2016-12-20T10:13:26 | 2016-12-20T10:13:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 23,737 | py | # -*- coding: utf-8 -*-
from datetime import datetime
from OrcLib.LibCommon import OrcCover
from OrcApi import orc_db
def gen_id(p_name):
    """
    Generate the next id for the sequence named *p_name*.

    Increments the ``field_seq`` counter of the matching ``lib_sequence``
    row, commits the session and returns the new value.

    :param p_name: sequence/field name to look up in lib_sequence
    :return: the newly allocated id (int)
    :raises ValueError: if no sequence row exists for *p_name*
    """
    sequence = orc_db.session \
        .query(LibSequence) \
        .filter(LibSequence.field_name == p_name) \
        .first()

    # Fail with a clear message instead of an opaque AttributeError on
    # None when the sequence has never been seeded.
    if sequence is None:
        raise ValueError("No sequence defined for field name: %s" % p_name)

    # NOTE(review): read-increment-commit is not atomic across processes;
    # concurrent callers could race.  Confirm single-writer usage.
    sequence.field_seq += 1
    orc_db.session.commit()

    return sequence.field_seq
class TabRunTime(orc_db.Model):
    """
    ORM model for table tab_run_time (per-module runtime data).
    """
    __tablename__ = 'tab_run_time'

    id = orc_db.Column(orc_db.Integer, autoincrement=True, primary_key=True)
    module = orc_db.Column(orc_db.String(16))
    data_flag = orc_db.Column(orc_db.String(16))
    data_index = orc_db.Column(orc_db.Integer)
    data_value = orc_db.Column(orc_db.String(128))

    def __init__(self, p_def=None):
        """
        :param p_def: optional dict of field values keyed by column name
        """
        self.id = p_def["id"] if p_def else None
        self.module = p_def["module"] if p_def else None
        self.data_flag = p_def["data_flag"] if p_def else None
        # Bug fix: these two fields previously copied p_def["data_flag"],
        # so data_index and data_value were never populated correctly.
        self.data_index = p_def["data_index"] if p_def else None
        self.data_value = p_def["data_value"] if p_def else None

    def to_json(self):
        """Return the row as a JSON-serializable dict."""
        return dict(
            id=self.id,
            module=self.module,
            data_flag=self.data_flag,
            data_index=self.data_index,
            data_value=self.data_value
        )
class TabBatchDef(orc_db.Model):
    """
    ORM model for table tab_batch_def (batch definitions).
    """
    __tablename__ = 'tab_batch_def'

    id = orc_db.Column(orc_db.Integer, primary_key=True)
    pid = orc_db.Column(orc_db.Integer)
    batch_no = orc_db.Column(orc_db.String(16))
    batch_type = orc_db.Column(orc_db.String(8))
    batch_name = orc_db.Column(orc_db.String(32))
    batch_desc = orc_db.Column(orc_db.String(512))
    comment = orc_db.Column(orc_db.String(1024))
    # Pass the callable (not datetime.now()) so the timestamp is evaluated
    # per INSERT instead of once at import time.
    create_time = orc_db.Column(orc_db.DateTime, default=datetime.now)
    modify_time = orc_db.Column(orc_db.DateTime, default=datetime.now)

    def __init__(self, p_def=None):
        """
        :param p_def: optional dict of field values; time fields are
                      strings parsed via OrcCover.str2time
        """
        self.id = p_def["id"] if p_def else None
        self.pid = p_def["pid"] if p_def else None
        self.batch_no = p_def["batch_no"] if p_def else None
        self.batch_type = p_def["batch_type"] if p_def else None
        self.batch_name = p_def["batch_name"] if p_def else None
        self.batch_desc = p_def["batch_desc"] if p_def else None
        self.comment = p_def["comment"] if p_def else None
        self.create_time = OrcCover.str2time(p_def["create_time"]) if p_def else None
        self.modify_time = OrcCover.str2time(p_def["modify_time"]) if p_def else None

    def to_json(self):
        """Return the row as a JSON-serializable dict (ids as strings)."""
        return dict(
            id=str(self.id),
            pid=str(self.pid),
            batch_no=self.batch_no,
            batch_type=self.batch_type,
            batch_name=self.batch_name,
            batch_desc=self.batch_desc,
            comment=self.comment,
            create_time=OrcCover.time2str(self.create_time),
            modify_time=OrcCover.time2str(self.modify_time)
        )
class TabBatchDet(orc_db.Model):
    """
    ORM model for table tab_batch_det (batch -> case links).
    """
    __tablename__ = 'tab_batch_det'

    id = orc_db.Column(orc_db.Integer, primary_key=True)
    batch_id = orc_db.Column(orc_db.Integer)
    case_id = orc_db.Column(orc_db.Integer)
    # Pass the callable (not datetime.now()) so the timestamp is evaluated
    # per INSERT instead of once at import time.
    create_time = orc_db.Column(orc_db.DateTime, default=datetime.now)

    def __init__(self, p_def=None):
        """
        :param p_def: optional dict of field values keyed by column name
        """
        self.id = p_def["id"] if p_def else None
        self.batch_id = p_def["batch_id"] if p_def else None
        self.case_id = p_def["case_id"] if p_def else None
        self.create_time = OrcCover.str2time(p_def["create_time"]) if p_def else None

    def to_json(self):
        """Return the row as a JSON-serializable dict (ids as strings)."""
        return dict(
            id=str(self.id),
            batch_id=str(self.batch_id),
            case_id=str(self.case_id),
            create_time=OrcCover.time2str(self.create_time)
        )
class TabCaseDef(orc_db.Model):
    """
    ORM model for table tab_case_def (test case definitions).
    """
    __tablename__ = 'tab_case_def'

    id = orc_db.Column(orc_db.Integer, primary_key=True)
    pid = orc_db.Column(orc_db.Integer)
    case_no = orc_db.Column(orc_db.String(8))
    case_path = orc_db.Column(orc_db.String(32))
    case_type = orc_db.Column(orc_db.String(8))
    case_name = orc_db.Column(orc_db.String(64))
    case_desc = orc_db.Column(orc_db.String(512))
    comment = orc_db.Column(orc_db.String(1024))
    # Pass the callable (not datetime.now()) so the timestamp is evaluated
    # per INSERT instead of once at import time.
    create_time = orc_db.Column(orc_db.DateTime, default=datetime.now)
    modify_time = orc_db.Column(orc_db.DateTime, default=datetime.now)

    def __init__(self, p_def=None):
        """
        :param p_def: optional dict of field values; time fields are
                      strings parsed via OrcCover.str2time
        """
        self.id = p_def["id"] if p_def else None
        self.pid = p_def["pid"] if p_def else None
        self.case_no = p_def["case_no"] if p_def else None
        self.case_path = p_def["case_path"] if p_def else None
        self.case_type = p_def["case_type"] if p_def else None
        self.case_name = p_def["case_name"] if p_def else None
        self.case_desc = p_def["case_desc"] if p_def else None
        self.comment = p_def["comment"] if p_def else None
        self.create_time = OrcCover.str2time(p_def["create_time"]) if p_def else None
        self.modify_time = OrcCover.str2time(p_def["modify_time"]) if p_def else None

    def to_json(self):
        """Return the row as a JSON-serializable dict (ids as strings)."""
        return dict(
            id=str(self.id),
            pid=str(self.pid),
            case_no=str(self.case_no),
            case_path=self.case_path,
            case_type=self.case_type,
            case_name=self.case_name,
            case_desc=self.case_desc,
            comment=self.comment,
            create_time=OrcCover.time2str(self.create_time),
            modify_time=OrcCover.time2str(self.modify_time)
        )
class TabCaseDet(orc_db.Model):
    """
    ORM model for table tab_case_det (case -> step links).
    """
    __tablename__ = 'tab_case_det'

    id = orc_db.Column(orc_db.Integer, primary_key=True)
    case_id = orc_db.Column(orc_db.Integer)
    step_id = orc_db.Column(orc_db.Integer)
    step_no = orc_db.Column(orc_db.String(8))
    # Pass the callable (not datetime.now()) so the timestamp is evaluated
    # per INSERT instead of once at import time.
    create_time = orc_db.Column(orc_db.DateTime, default=datetime.now)

    def __init__(self, p_def=None):
        """
        :param p_def: optional dict of field values keyed by column name
        """
        self.id = p_def["id"] if p_def else None
        self.case_id = p_def["case_id"] if p_def else None
        self.step_id = p_def["step_id"] if p_def else None
        self.step_no = p_def["step_no"] if p_def else None
        self.create_time = OrcCover.str2time(p_def["create_time"]) if p_def else None

    def to_json(self):
        """Return the row as a JSON-serializable dict (ids as strings)."""
        return dict(
            id=str(self.id),
            case_id=str(self.case_id),
            step_id=str(self.step_id),
            step_no=self.step_no,
            create_time=OrcCover.time2str(self.create_time)
        )
class TabStepDef(orc_db.Model):
    """
    ORM model for table tab_step_def (test step definitions).
    """
    __tablename__ = 'tab_step_def'

    id = orc_db.Column(orc_db.Integer, primary_key=True)
    step_type = orc_db.Column(orc_db.String(8))
    step_desc = orc_db.Column(orc_db.String(512))
    comment = orc_db.Column(orc_db.String(1024))
    # Pass the callable (not datetime.now()) so the timestamp is evaluated
    # per INSERT instead of once at import time.
    create_time = orc_db.Column(orc_db.DateTime, default=datetime.now)
    modify_time = orc_db.Column(orc_db.DateTime, default=datetime.now)

    def __init__(self, p_def=None):
        """
        :param p_def: optional dict of field values; time fields are
                      strings parsed via OrcCover.str2time
        """
        self.id = p_def["id"] if p_def else None
        self.step_type = p_def["step_type"] if p_def else None
        self.step_desc = p_def["step_desc"] if p_def else None
        self.comment = p_def["comment"] if p_def else None
        self.create_time = OrcCover.str2time(p_def["create_time"]) if p_def else None
        self.modify_time = OrcCover.str2time(p_def["modify_time"]) if p_def else None

    def to_json(self):
        """Return the row as a JSON-serializable dict (id as string)."""
        return dict(
            id=str(self.id),
            step_type=self.step_type,
            step_desc=self.step_desc,
            comment=self.comment,
            create_time=OrcCover.time2str(self.create_time),
            modify_time=OrcCover.time2str(self.modify_time)
        )
class TabStepDet(orc_db.Model):
    """
    ORM model for table tab_step_det (step -> item links).
    """
    __tablename__ = 'tab_step_det'

    id = orc_db.Column(orc_db.Integer, primary_key=True)
    step_id = orc_db.Column(orc_db.Integer)
    item_id = orc_db.Column(orc_db.Integer)
    item_no = orc_db.Column(orc_db.String(32))
    # Pass the callable (not datetime.now()) so the timestamp is evaluated
    # per INSERT instead of once at import time.
    create_time = orc_db.Column(orc_db.DateTime, default=datetime.now)

    def __init__(self, p_def=None):
        """
        :param p_def: optional dict of field values keyed by column name
        """
        self.id = p_def["id"] if p_def else None
        self.step_id = p_def["step_id"] if p_def else None
        self.item_id = p_def["item_id"] if p_def else None
        self.item_no = p_def["item_no"] if p_def else None
        self.create_time = OrcCover.str2time(p_def["create_time"]) if p_def else None

    def to_json(self):
        """Return the row as a JSON-serializable dict (ids as strings)."""
        return dict(
            id=str(self.id),
            step_id=str(self.step_id),
            item_id=str(self.item_id),
            item_no=self.item_no,
            create_time=OrcCover.time2str(self.create_time)
        )
class TabItem(orc_db.Model):
    """
    ORM model for table tab_item (atomic test operations).
    """
    __tablename__ = 'tab_item'

    id = orc_db.Column(orc_db.Integer, primary_key=True)
    item_type = orc_db.Column(orc_db.String(8))
    item_mode = orc_db.Column(orc_db.String(8))
    item_operate = orc_db.Column(orc_db.String(256))
    item_desc = orc_db.Column(orc_db.String(256))
    comment = orc_db.Column(orc_db.String(512))
    # Pass the callable (not datetime.now()) so the timestamp is evaluated
    # per INSERT instead of once at import time.
    create_time = orc_db.Column(orc_db.DateTime, default=datetime.now)
    modify_time = orc_db.Column(orc_db.DateTime, default=datetime.now)

    def __init__(self, p_def=None):
        """
        :param p_def: optional dict of field values; time fields are
                      strings parsed via OrcCover.str2time
        """
        self.id = p_def["id"] if p_def else None
        self.item_type = p_def["item_type"] if p_def else None
        self.item_mode = p_def["item_mode"] if p_def else None
        self.item_operate = p_def["item_operate"] if p_def else None
        self.item_desc = p_def["item_desc"] if p_def else None
        self.comment = p_def["comment"] if p_def else None
        self.create_time = OrcCover.str2time(p_def["create_time"]) if p_def else None
        self.modify_time = OrcCover.str2time(p_def["modify_time"]) if p_def else None

    def to_json(self):
        """Return the row as a JSON-serializable dict (id as string)."""
        return dict(
            id=str(self.id),
            item_type=self.item_type,
            item_mode=self.item_mode,
            item_operate=self.item_operate,
            item_desc=self.item_desc,
            comment=self.comment,
            create_time=OrcCover.time2str(self.create_time),
            modify_time=OrcCover.time2str(self.modify_time)
        )
class TabData(orc_db.Model):
    """
    ORM model for table tab_data (test data attached to a source object).
    """
    __tablename__ = 'tab_data'

    id = orc_db.Column(orc_db.Integer, primary_key=True)
    test_env = orc_db.Column(orc_db.String(16))
    src_id = orc_db.Column(orc_db.Integer)
    src_type = orc_db.Column(orc_db.String(16))
    step_order = orc_db.Column(orc_db.Integer)
    data_flag = orc_db.Column(orc_db.String(32))
    data_order = orc_db.Column(orc_db.Integer)
    data_type = orc_db.Column(orc_db.String(16))
    data_mode = orc_db.Column(orc_db.String(16))
    data_value = orc_db.Column(orc_db.String(128))
    comment = orc_db.Column(orc_db.String(512))
    # Pass the callable (not datetime.now()) so the timestamp is evaluated
    # per INSERT instead of once at import time.
    create_time = orc_db.Column(orc_db.DateTime, default=datetime.now)
    modify_time = orc_db.Column(orc_db.DateTime, default=datetime.now)

    def __init__(self, p_def=None):
        """
        :param p_def: optional dict of field values; time fields are
                      strings parsed via OrcCover.str2time
        """
        self.id = p_def["id"] if p_def else None
        self.test_env = p_def["test_env"] if p_def else None
        self.src_id = p_def["src_id"] if p_def else None
        self.src_type = p_def["src_type"] if p_def else None
        self.step_order = p_def["step_order"] if p_def else None
        self.data_flag = p_def["data_flag"] if p_def else None
        self.data_order = p_def["data_order"] if p_def else None
        self.data_type = p_def["data_type"] if p_def else None
        self.data_mode = p_def["data_mode"] if p_def else None
        self.data_value = p_def["data_value"] if p_def else None
        self.comment = p_def["comment"] if p_def else None
        self.create_time = OrcCover.str2time(p_def["create_time"]) if p_def else None
        self.modify_time = OrcCover.str2time(p_def["modify_time"]) if p_def else None

    def to_json(self):
        """Return the row as a JSON-serializable dict.  Note: step_order
        is emitted as-is while id/src_id/data_order are stringified,
        matching existing consumers."""
        return dict(
            id=str(self.id),
            test_env=self.test_env,
            src_id=str(self.src_id),
            src_type=self.src_type,
            step_order=self.step_order,
            data_flag=self.data_flag,
            data_order=str(self.data_order),
            data_type=self.data_type,
            data_mode=self.data_mode,
            data_value=self.data_value,
            comment=self.comment,
            create_time=OrcCover.time2str(self.create_time),
            modify_time=OrcCover.time2str(self.modify_time)
        )
class WebPageDef(orc_db.Model):
    """
    ORM model for table web_page_def (page definitions).
    """
    __tablename__ = 'web_page_def'

    id = orc_db.Column(orc_db.Integer, primary_key=True)
    page_flag = orc_db.Column(orc_db.String(32))
    page_desc = orc_db.Column(orc_db.String(32))
    comment = orc_db.Column(orc_db.String(512))
    # Pass the callable (not datetime.now()) so the timestamp is evaluated
    # per INSERT instead of once at import time.
    create_time = orc_db.Column(orc_db.DateTime, default=datetime.now)
    modify_time = orc_db.Column(orc_db.DateTime, default=datetime.now)

    def __init__(self, p_def=None):
        """
        :param p_def: optional dict of field values; time fields are
                      strings parsed via OrcCover.str2time
        """
        self.id = p_def["id"] if p_def else None
        self.page_flag = p_def["page_flag"] if p_def else None
        self.page_desc = p_def["page_desc"] if p_def else None
        self.comment = p_def["comment"] if p_def else None
        self.create_time = OrcCover.str2time(p_def["create_time"]) if p_def else None
        self.modify_time = OrcCover.str2time(p_def["modify_time"]) if p_def else None

    def to_json(self):
        """Return the row as a JSON-serializable dict (id as string)."""
        return dict(
            id=str(self.id),
            page_flag=self.page_flag,
            page_desc=self.page_desc,
            comment=self.comment,
            create_time=OrcCover.time2str(self.create_time),
            modify_time=OrcCover.time2str(self.modify_time)
        )
class WebPageDet(orc_db.Model):
    """
    ORM model for table web_page_det (per-environment page URLs).
    """
    __tablename__ = 'web_page_det'

    # NOTE(review): both id and page_id are flagged primary_key, producing a
    # composite primary key — unlike the other *Det tables.  Confirm intended.
    id = orc_db.Column(orc_db.Integer, primary_key=True)
    page_id = orc_db.Column(orc_db.Integer, primary_key=True)
    page_env = orc_db.Column(orc_db.String(32))
    page_url = orc_db.Column(orc_db.String(32))
    comment = orc_db.Column(orc_db.String(512))
    # Pass the callable (not datetime.now()) so the timestamp is evaluated
    # per INSERT instead of once at import time.
    create_time = orc_db.Column(orc_db.DateTime, default=datetime.now)
    modify_time = orc_db.Column(orc_db.DateTime, default=datetime.now)

    def __init__(self, p_def=None):
        """
        :param p_def: optional dict of field values; time fields are
                      strings parsed via OrcCover.str2time
        """
        self.id = p_def["id"] if p_def else None
        self.page_id = p_def["page_id"] if p_def else None
        self.page_env = p_def["page_env"] if p_def else None
        self.page_url = p_def["page_url"] if p_def else None
        self.comment = p_def["comment"] if p_def else None
        self.create_time = OrcCover.str2time(p_def["create_time"]) if p_def else None
        self.modify_time = OrcCover.str2time(p_def["modify_time"]) if p_def else None

    def to_json(self):
        """Return the row as a JSON-serializable dict (ids as strings)."""
        return dict(
            id=str(self.id),
            page_id=str(self.page_id),
            page_env=self.page_env,
            page_url=self.page_url,
            comment=self.comment,
            create_time=OrcCover.time2str(self.create_time),
            modify_time=OrcCover.time2str(self.modify_time)
        )
class WebWidgetDef(orc_db.Model):
    """
    ORM model for table web_widget_def (widget definitions, tree via pid).
    """
    __tablename__ = 'web_widget_def'

    id = orc_db.Column(orc_db.Integer, primary_key=True)
    pid = orc_db.Column(orc_db.Integer)
    widget_flag = orc_db.Column(orc_db.String(8))
    widget_path = orc_db.Column(orc_db.String(32))
    widget_type = orc_db.Column(orc_db.String(16))
    widget_desc = orc_db.Column(orc_db.String(255))
    comment = orc_db.Column(orc_db.String(512))
    # Pass the callable (not datetime.now()) so the timestamp is evaluated
    # per INSERT instead of once at import time.
    create_time = orc_db.Column(orc_db.DateTime, default=datetime.now)
    modify_time = orc_db.Column(orc_db.DateTime, default=datetime.now)

    def __init__(self, p_def=None):
        """
        :param p_def: optional dict of field values; time fields are
                      strings parsed via OrcCover.str2time
        """
        self.id = p_def["id"] if p_def else None
        self.pid = p_def["pid"] if p_def else None
        self.widget_flag = p_def["widget_flag"] if p_def else None
        self.widget_path = p_def["widget_path"] if p_def else None
        self.widget_type = p_def["widget_type"] if p_def else None
        self.widget_desc = p_def["widget_desc"] if p_def else None
        self.comment = p_def["comment"] if p_def else None
        self.create_time = OrcCover.str2time(p_def["create_time"]) if p_def else None
        self.modify_time = OrcCover.str2time(p_def["modify_time"]) if p_def else None

    def to_json(self):
        """Return the row as a JSON-serializable dict (ids as strings)."""
        return dict(
            id=str(self.id),
            pid=str(self.pid),
            widget_flag=self.widget_flag,
            widget_path=self.widget_path,
            widget_type=self.widget_type,
            widget_desc=self.widget_desc,
            comment=self.comment,
            create_time=OrcCover.time2str(self.create_time),
            modify_time=OrcCover.time2str(self.modify_time)
        )
class WebWidgetDet(orc_db.Model):
    """
    ORM model for table web_widget_det (widget locator attributes).
    """
    __tablename__ = 'web_widget_det'

    id = orc_db.Column(orc_db.Integer, primary_key=True)
    widget_id = orc_db.Column(orc_db.Integer)
    widget_order = orc_db.Column(orc_db.String(16))
    widget_attr_type = orc_db.Column(orc_db.String(16))
    widget_attr_value = orc_db.Column(orc_db.String(64))
    widget_desc = orc_db.Column(orc_db.String(255))
    comment = orc_db.Column(orc_db.String(512))
    # Pass the callable (not datetime.now()) so the timestamp is evaluated
    # per INSERT instead of once at import time.
    create_time = orc_db.Column(orc_db.DateTime, default=datetime.now)
    modify_time = orc_db.Column(orc_db.DateTime, default=datetime.now)

    def __init__(self, p_def=None):
        """
        :param p_def: optional dict of field values; time fields are
                      strings parsed via OrcCover.str2time
        """
        self.id = p_def["id"] if p_def else None
        self.widget_id = p_def["widget_id"] if p_def else None
        self.widget_order = p_def["widget_order"] if p_def else None
        self.widget_attr_type = p_def["widget_attr_type"] if p_def else None
        self.widget_attr_value = p_def["widget_attr_value"] if p_def else None
        self.widget_desc = p_def["widget_desc"] if p_def else None
        self.comment = p_def["comment"] if p_def else None
        self.create_time = OrcCover.str2time(p_def["create_time"]) if p_def else None
        self.modify_time = OrcCover.str2time(p_def["modify_time"]) if p_def else None

    def to_json(self):
        """Return the row as a JSON-serializable dict.  Note: widget_id is
        emitted as-is (not stringified), matching existing consumers."""
        return dict(
            id=str(self.id),
            widget_id=self.widget_id,
            widget_order=self.widget_order,
            widget_attr_type=self.widget_attr_type,
            widget_attr_value=self.widget_attr_value,
            widget_desc=self.widget_desc,
            comment=self.comment,
            create_time=OrcCover.time2str(self.create_time),
            modify_time=OrcCover.time2str(self.modify_time)
        )
class WebWindowDef(orc_db.Model):
    """
    ORM model for table web_window_def (window definitions).
    """
    __tablename__ = 'web_window_def'

    id = orc_db.Column(orc_db.Integer, primary_key=True)
    window_mark = orc_db.Column(orc_db.String(16))
    window_desc = orc_db.Column(orc_db.String(255))
    comment = orc_db.Column(orc_db.String(512))
    # Pass the callable (not datetime.now()) so the timestamp is evaluated
    # per INSERT instead of once at import time.
    create_time = orc_db.Column(orc_db.DateTime, default=datetime.now)
    modify_time = orc_db.Column(orc_db.DateTime, default=datetime.now)

    def __init__(self, p_def=None):
        """
        :param p_def: optional dict of field values; time fields are
                      strings parsed via OrcCover.str2time
        """
        self.id = p_def["id"] if p_def else None
        self.window_mark = p_def["window_mark"] if p_def else None
        self.window_desc = p_def["window_desc"] if p_def else None
        self.comment = p_def["comment"] if p_def else None
        self.create_time = OrcCover.str2time(p_def["create_time"]) if p_def else None
        self.modify_time = OrcCover.str2time(p_def["modify_time"]) if p_def else None

    def to_json(self):
        """Return the row as a JSON-serializable dict (id as string)."""
        return dict(
            id=str(self.id),
            window_mark=self.window_mark,
            window_desc=self.window_desc,
            comment=self.comment,
            create_time=OrcCover.time2str(self.create_time),
            modify_time=OrcCover.time2str(self.modify_time)
        )
class LibDictionary(orc_db.Model):
    """
    ORM model for table lib_dictionary (generic code/value dictionary).
    """
    __tablename__ = 'lib_dictionary'

    id = orc_db.Column(orc_db.Integer, primary_key=True)
    dict_flag = orc_db.Column(orc_db.String(32))
    dict_order = orc_db.Column(orc_db.String(32))
    dict_value = orc_db.Column(orc_db.String(16))
    dict_text = orc_db.Column(orc_db.String(16))
    dict_desc = orc_db.Column(orc_db.String(255))

    def __init__(self, p_def=None):
        """
        :param p_def: dict of field values, or None for an empty row
        :return: None
        """
        if p_def:
            self.id = int(p_def["id"])
            self.dict_flag = p_def["dict_flag"]
            self.dict_order = p_def["dict_order"]
            self.dict_value = p_def["dict_value"]
            self.dict_text = p_def["dict_text"]
            self.dict_desc = p_def["dict_desc"]
        else:
            self.id = None
            self.dict_flag = None
            self.dict_order = None
            self.dict_value = None
            self.dict_text = None
            self.dict_desc = None

    def to_json(self):
        """Return the row as a JSON-serializable dict (id as string)."""
        return {
            "id": str(self.id),
            "dict_flag": self.dict_flag,
            "dict_order": self.dict_order,
            "dict_value": self.dict_value,
            "dict_text": self.dict_text,
            "dict_desc": self.dict_desc,
        }
class LibWidgetType(orc_db.Model):
    """
    ORM model for table lib_widget_type (widget type catalogue).
    """
    __tablename__ = 'lib_widget_type'

    id = orc_db.Column(orc_db.Integer, primary_key=True)
    type_order = orc_db.Column(orc_db.Integer)
    type_mode = orc_db.Column(orc_db.String(16))  # built-in or custom
    type_name = orc_db.Column(orc_db.String(16), unique=True)
    type_text = orc_db.Column(orc_db.String(16))
    type_desc = orc_db.Column(orc_db.String(255))

    def __init__(self, p_def=None):
        """
        :param p_def: optional dict of field values; made optional for
                      consistency with the other models (the body already
                      guarded against a falsy p_def)
        """
        self.id = int(p_def["id"]) if p_def else None
        self.type_order = p_def["type_order"] if p_def else None
        self.type_mode = p_def["type_mode"] if p_def else None
        self.type_name = p_def["type_name"] if p_def else None
        self.type_text = p_def["type_text"] if p_def else None
        self.type_desc = p_def["type_desc"] if p_def else None

    def to_json(self):
        """Return the row as a JSON-serializable dict (id as string)."""
        return dict(
            id=str(self.id),
            type_order=self.type_order,
            type_mode=self.type_mode,
            type_name=self.type_name,
            type_text=self.type_text,
            type_desc=self.type_desc
        )
class LibWidgetOperation(orc_db.Model):
    """
    Table of widget operations.
    """
    # NOTE(review): the original docstring said "Table dictionary" — a
    # copy-paste from LibDictionary; corrected to describe this table.
    __tablename__ = 'lib_widget_operation'

    id = orc_db.Column(orc_db.Integer, primary_key=True)
    type_name = orc_db.Column(orc_db.String(16))
    ope_order = orc_db.Column(orc_db.Integer)
    ope_name = orc_db.Column(orc_db.String(16))
    ope_text = orc_db.Column(orc_db.String(16))
    ope_desc = orc_db.Column(orc_db.String(255))

    def __init__(self, p_def=None):
        """Populate the row from a field dict.

        CONSISTENCY FIX: ``p_def`` now defaults to ``None`` like every sibling
        model in this module; the body already guarded against a falsy value,
        so existing callers are unaffected.

        :param p_def: dict with this table's fields, or None for an empty row
        """
        self.id = int(p_def["id"]) if p_def else None
        self.type_name = p_def["type_name"] if p_def else None
        self.ope_order = int(p_def["ope_order"]) if p_def else None
        self.ope_name = p_def["ope_name"] if p_def else None
        self.ope_text = p_def["ope_text"] if p_def else None
        self.ope_desc = p_def["ope_desc"] if p_def else None

    def to_json(self):
        """Serialize this row to a JSON-compatible dict (id stringified)."""
        return dict(
            id=str(self.id),
            type_name=self.type_name,
            ope_order=self.ope_order,
            ope_name=self.ope_name,
            ope_text=self.ope_text,
            ope_desc=self.ope_desc
        )
class LibSequence(orc_db.Model):
    """
    Table sequence
    """
    __tablename__ = 'lib_sequence'

    id = orc_db.Column(orc_db.Integer, primary_key=True)
    field_name = orc_db.Column(orc_db.String(32))
    field_seq = orc_db.Column(orc_db.Integer)

    def __init__(self, p_def=None):
        """Populate the row from a field dict, or leave every field None.

        :param p_def: dict with this table's fields, or None for an empty row
        """
        if p_def:
            self.id = int(p_def["id"])  # incoming id may be a string
            self.field_name = p_def["field_name"]
            self.field_seq = p_def["field_seq"]
        else:
            self.id = None
            self.field_name = None
            self.field_seq = None
| [
"orange21cn@126.com"
] | orange21cn@126.com |
71589f5d140046f2491dc757b051d502ae0432d8 | e4ec5b6cf3cfe2568ef0b5654c019e398b4ecc67 | /azure-cli/2.0.18/libexec/lib/python3.6/site-packages/azure/mgmt/network/v2016_09_01/operations/network_watchers_operations.py | e2b172b2409f0f44f218c4f4f638d39ceb50cde1 | [] | no_license | EnjoyLifeFund/macHighSierra-cellars | 59051e496ed0e68d14e0d5d91367a2c92c95e1fb | 49a477d42f081e52f4c5bdd39535156a2df52d09 | refs/heads/master | 2022-12-25T19:28:29.992466 | 2017-10-10T13:00:08 | 2017-10-10T13:00:08 | 96,081,471 | 3 | 1 | null | 2022-12-17T02:26:21 | 2017-07-03T07:17:34 | null | UTF-8 | Python | false | false | 53,720 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from msrestazure.azure_operation import AzureOperationPoller
from .. import models
class NetworkWatchersOperations(object):
    """NetworkWatchersOperations operations.

    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    :ivar api_version: Client API version. Constant value: "2016-09-01".
    """

    def __init__(self, client, config, serializer, deserializer):
        # All collaborators are created and owned by the parent service
        # client; this class only keeps references to them.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self.api_version = "2016-09-01"  # REST API version pinned for every call

        self.config = config
def create_or_update(
        self, resource_group_name, network_watcher_name, parameters, custom_headers=None, raw=False, **operation_config):
    """Creates or updates a network watcher in the specified resource group.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param network_watcher_name: The name of the network watcher.
    :type network_watcher_name: str
    :param parameters: Parameters that define the network watcher
     resource.
    :type parameters: :class:`NetworkWatcher
     <azure.mgmt.network.v2016_09_01.models.NetworkWatcher>`
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: :class:`NetworkWatcher
     <azure.mgmt.network.v2016_09_01.models.NetworkWatcher>` or
     :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if
     raw=true
    :rtype: :class:`NetworkWatcher
     <azure.mgmt.network.v2016_09_01.models.NetworkWatcher>` or
     :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Construct URL
    url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}'
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        # Unique per-request id so the call can be correlated server-side.
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct body
    body_content = self._serialize.body(parameters, 'NetworkWatcher')

    # Construct and send request
    request = self._client.put(url, query_parameters)
    response = self._client.send(
        request, header_parameters, body_content, **operation_config)

    # 200 = existing watcher updated, 201 = new watcher created.
    if response.status_code not in [200, 201]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    deserialized = None

    if response.status_code == 200:
        deserialized = self._deserialize('NetworkWatcher', response)
    if response.status_code == 201:
        deserialized = self._deserialize('NetworkWatcher', response)

    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response

    return deserialized
def get(
        self, resource_group_name, network_watcher_name, custom_headers=None, raw=False, **operation_config):
    """Gets the specified network watcher by resource group.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param network_watcher_name: The name of the network watcher.
    :type network_watcher_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: :class:`NetworkWatcher
     <azure.mgmt.network.v2016_09_01.models.NetworkWatcher>` or
     :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if
     raw=true
    :rtype: :class:`NetworkWatcher
     <azure.mgmt.network.v2016_09_01.models.NetworkWatcher>` or
     :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Construct URL
    url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}'
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        # Unique per-request id so the call can be correlated server-side.
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct and send request
    request = self._client.get(url, query_parameters)
    response = self._client.send(request, header_parameters, **operation_config)

    if response.status_code not in [200]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    deserialized = None

    if response.status_code == 200:
        deserialized = self._deserialize('NetworkWatcher', response)

    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response

    return deserialized
def delete(
        self, resource_group_name, network_watcher_name, custom_headers=None, raw=False, **operation_config):
    """Deletes the specified network watcher resource.

    This is a long-running operation: unless ``raw=True``, an
    ``AzureOperationPoller`` is returned that polls the service until the
    deletion completes.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param network_watcher_name: The name of the network watcher.
    :type network_watcher_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :return:
     :class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
     instance that returns None or
     :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if
     raw=true
    :rtype:
     :class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
     or :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Construct URL
    url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}'
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        # Unique per-request id so the call can be correlated server-side.
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct and send request
    def long_running_send():
        # Initial DELETE that starts the long-running operation.
        request = self._client.delete(url, query_parameters)
        return self._client.send(request, header_parameters, **operation_config)

    def get_long_running_status(status_link, headers=None):
        # Follow the polling URL the service handed back.
        request = self._client.get(status_link)
        if headers:
            request.headers.update(headers)
        return self._client.send(
            request, header_parameters, **operation_config)

    def get_long_running_output(response):
        # 202 = deletion accepted/in progress, 204 = already gone.
        if response.status_code not in [202, 204]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        if raw:
            client_raw_response = ClientRawResponse(None, response)
            return client_raw_response

    if raw:
        # raw mode: single request, no polling.
        response = long_running_send()
        return get_long_running_output(response)

    long_running_operation_timeout = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    return AzureOperationPoller(
        long_running_send, get_long_running_output,
        get_long_running_status, long_running_operation_timeout)
def list(
        self, resource_group_name, custom_headers=None, raw=False, **operation_config):
    """Gets all network watchers by resource group.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: An iterator like instance of :class:`NetworkWatcher
     <azure.mgmt.network.v2016_09_01.models.NetworkWatcher>`
    :rtype: :class:`NetworkWatcherPaged
     <azure.mgmt.network.v2016_09_01.models.NetworkWatcherPaged>`
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    def internal_paging(next_link=None, raw=False):
        # Called lazily by the paged iterator: first page builds the URL,
        # subsequent pages follow the server-provided next_link verbatim.
        if not next_link:
            # Construct URL
            url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers'
            path_format_arguments = {
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
            }
            url = self._client.format_url(url, **path_format_arguments)

            # Construct parameters
            query_parameters = {}
            query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

        else:
            url = next_link
            query_parameters = {}

        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        request = self._client.get(url, query_parameters)
        response = self._client.send(
            request, header_parameters, **operation_config)

        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        return response

    # Deserialize response
    deserialized = models.NetworkWatcherPaged(internal_paging, self._deserialize.dependencies)

    if raw:
        header_dict = {}
        client_raw_response = models.NetworkWatcherPaged(internal_paging, self._deserialize.dependencies, header_dict)
        return client_raw_response

    return deserialized
def list_all(
        self, custom_headers=None, raw=False, **operation_config):
    """Gets all network watchers by subscription.

    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: An iterator like instance of :class:`NetworkWatcher
     <azure.mgmt.network.v2016_09_01.models.NetworkWatcher>`
    :rtype: :class:`NetworkWatcherPaged
     <azure.mgmt.network.v2016_09_01.models.NetworkWatcherPaged>`
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    def internal_paging(next_link=None, raw=False):
        # Called lazily by the paged iterator: first page builds the URL,
        # subsequent pages follow the server-provided next_link verbatim.
        if not next_link:
            # Construct URL
            url = '/subscriptions/{subscriptionId}/providers/Microsoft.Network/networkWatchers'
            path_format_arguments = {
                'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
            }
            url = self._client.format_url(url, **path_format_arguments)

            # Construct parameters
            query_parameters = {}
            query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

        else:
            url = next_link
            query_parameters = {}

        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        request = self._client.get(url, query_parameters)
        response = self._client.send(
            request, header_parameters, **operation_config)

        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        return response

    # Deserialize response
    deserialized = models.NetworkWatcherPaged(internal_paging, self._deserialize.dependencies)

    if raw:
        header_dict = {}
        client_raw_response = models.NetworkWatcherPaged(internal_paging, self._deserialize.dependencies, header_dict)
        return client_raw_response

    return deserialized
def get_topology(
        self, resource_group_name, network_watcher_name, target_resource_group_name, custom_headers=None, raw=False, **operation_config):
    """Gets the current network topology by resource group.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param network_watcher_name: The name of the network watcher.
    :type network_watcher_name: str
    :param target_resource_group_name: The name of the target resource
     group to perform topology on.
    :type target_resource_group_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: :class:`Topology
     <azure.mgmt.network.v2016_09_01.models.Topology>` or
     :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if
     raw=true
    :rtype: :class:`Topology
     <azure.mgmt.network.v2016_09_01.models.Topology>` or
     :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Wrap the scalar argument in the request-body model the service expects.
    parameters = models.TopologyParameters(target_resource_group_name=target_resource_group_name)

    # Construct URL
    url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/topology'
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct body
    body_content = self._serialize.body(parameters, 'TopologyParameters')

    # Construct and send request
    request = self._client.post(url, query_parameters)
    response = self._client.send(
        request, header_parameters, body_content, **operation_config)

    if response.status_code not in [200]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    deserialized = None

    if response.status_code == 200:
        deserialized = self._deserialize('Topology', response)

    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response

    return deserialized
def verify_ip_flow(
        self, resource_group_name, network_watcher_name, parameters, custom_headers=None, raw=False, **operation_config):
    """Verify IP flow from the specified VM to a location given the currently
    configured NSG rules.

    This is a long-running operation: unless ``raw=True``, an
    ``AzureOperationPoller`` is returned that polls until the result is
    available.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param network_watcher_name: The name of the network watcher.
    :type network_watcher_name: str
    :param parameters: Parameters that define the IP flow to be verified.
    :type parameters: :class:`VerificationIPFlowParameters
     <azure.mgmt.network.v2016_09_01.models.VerificationIPFlowParameters>`
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :return:
     :class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
     instance that returns :class:`VerificationIPFlowResult
     <azure.mgmt.network.v2016_09_01.models.VerificationIPFlowResult>` or
     :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if
     raw=true
    :rtype:
     :class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
     or :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Construct URL
    url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/ipFlowVerify'
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct body
    body_content = self._serialize.body(parameters, 'VerificationIPFlowParameters')

    # Construct and send request
    def long_running_send():
        # Initial POST that starts the long-running verification.
        request = self._client.post(url, query_parameters)
        return self._client.send(
            request, header_parameters, body_content, **operation_config)

    def get_long_running_status(status_link, headers=None):
        # Follow the polling URL the service handed back.
        request = self._client.get(status_link)
        if headers:
            request.headers.update(headers)
        return self._client.send(
            request, header_parameters, **operation_config)

    def get_long_running_output(response):
        # 200 = completed synchronously, 202 = accepted for async processing.
        if response.status_code not in [200, 202]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        deserialized = None

        if response.status_code == 200:
            deserialized = self._deserialize('VerificationIPFlowResult', response)
        if response.status_code == 202:
            deserialized = self._deserialize('VerificationIPFlowResult', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized

    if raw:
        # raw mode: single request, no polling.
        response = long_running_send()
        return get_long_running_output(response)

    long_running_operation_timeout = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    return AzureOperationPoller(
        long_running_send, get_long_running_output,
        get_long_running_status, long_running_operation_timeout)
def get_next_hop(
        self, resource_group_name, network_watcher_name, parameters, custom_headers=None, raw=False, **operation_config):
    """Gets the next hop from the specified VM.

    This is a long-running operation: unless ``raw=True``, an
    ``AzureOperationPoller`` is returned that polls until the result is
    available.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param network_watcher_name: The name of the network watcher.
    :type network_watcher_name: str
    :param parameters: Parameters that define the source and destination
     endpoint.
    :type parameters: :class:`NextHopParameters
     <azure.mgmt.network.v2016_09_01.models.NextHopParameters>`
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :return:
     :class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
     instance that returns :class:`NextHopResult
     <azure.mgmt.network.v2016_09_01.models.NextHopResult>` or
     :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if
     raw=true
    :rtype:
     :class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
     or :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Construct URL
    url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/nextHop'
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct body
    body_content = self._serialize.body(parameters, 'NextHopParameters')

    # Construct and send request
    def long_running_send():
        # Initial POST that starts the long-running query.
        request = self._client.post(url, query_parameters)
        return self._client.send(
            request, header_parameters, body_content, **operation_config)

    def get_long_running_status(status_link, headers=None):
        # Follow the polling URL the service handed back.
        request = self._client.get(status_link)
        if headers:
            request.headers.update(headers)
        return self._client.send(
            request, header_parameters, **operation_config)

    def get_long_running_output(response):
        # 200 = completed synchronously, 202 = accepted for async processing.
        if response.status_code not in [200, 202]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        deserialized = None

        if response.status_code == 200:
            deserialized = self._deserialize('NextHopResult', response)
        if response.status_code == 202:
            deserialized = self._deserialize('NextHopResult', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized

    if raw:
        # raw mode: single request, no polling.
        response = long_running_send()
        return get_long_running_output(response)

    long_running_operation_timeout = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    return AzureOperationPoller(
        long_running_send, get_long_running_output,
        get_long_running_status, long_running_operation_timeout)
def get_vm_security_rules(
        self, resource_group_name, network_watcher_name, target_resource_id, custom_headers=None, raw=False, **operation_config):
    """Gets the configured and effective security group rules on the specified
    VM.

    This is a long-running operation: unless ``raw=True``, an
    ``AzureOperationPoller`` is returned that polls until the result is
    available.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param network_watcher_name: The name of the network watcher.
    :type network_watcher_name: str
    :param target_resource_id: ID of the target VM.
    :type target_resource_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :return:
     :class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
     instance that returns :class:`SecurityGroupViewResult
     <azure.mgmt.network.v2016_09_01.models.SecurityGroupViewResult>` or
     :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if
     raw=true
    :rtype:
     :class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
     or :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Wrap the scalar argument in the request-body model the service expects.
    parameters = models.SecurityGroupViewParameters(target_resource_id=target_resource_id)

    # Construct URL
    url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/securityGroupView'
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct body
    body_content = self._serialize.body(parameters, 'SecurityGroupViewParameters')

    # Construct and send request
    def long_running_send():
        # Initial POST that starts the long-running query.
        request = self._client.post(url, query_parameters)
        return self._client.send(
            request, header_parameters, body_content, **operation_config)

    def get_long_running_status(status_link, headers=None):
        # Follow the polling URL the service handed back.
        request = self._client.get(status_link)
        if headers:
            request.headers.update(headers)
        return self._client.send(
            request, header_parameters, **operation_config)

    def get_long_running_output(response):
        # 200 = completed synchronously, 202 = accepted for async processing.
        if response.status_code not in [200, 202]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        deserialized = None

        if response.status_code == 200:
            deserialized = self._deserialize('SecurityGroupViewResult', response)
        if response.status_code == 202:
            deserialized = self._deserialize('SecurityGroupViewResult', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized

    if raw:
        # raw mode: single request, no polling.
        response = long_running_send()
        return get_long_running_output(response)

    long_running_operation_timeout = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    return AzureOperationPoller(
        long_running_send, get_long_running_output,
        get_long_running_status, long_running_operation_timeout)
def get_troubleshooting(
        self, resource_group_name, network_watcher_name, parameters, custom_headers=None, raw=False, **operation_config):
    """Initiate troubleshooting on a specified resource.

    POSTs to the networkWatchers ``troubleshoot`` endpoint and wraps the
    resulting long-running operation in an AzureOperationPoller.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param network_watcher_name: The name of the network watcher resource.
    :type network_watcher_name: str
    :param parameters: Parameters that define the resource to
     troubleshoot.
    :type parameters: :class:`TroubleshootingParameters
     <azure.mgmt.network.v2016_09_01.models.TroubleshootingParameters>`
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :return:
     :class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
     instance that returns :class:`TroubleshootingResult
     <azure.mgmt.network.v2016_09_01.models.TroubleshootingResult>` or
     :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if
     raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Construct URL
    url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/troubleshoot'
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        # Unique per-request id so the call can be traced server-side.
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct body
    body_content = self._serialize.body(parameters, 'TroubleshootingParameters')

    # Construct and send request
    def long_running_send():
        # Initial POST that kicks off the long-running operation.
        request = self._client.post(url, query_parameters)
        return self._client.send(
            request, header_parameters, body_content, **operation_config)

    def get_long_running_status(status_link, headers=None):
        # Poll the status link returned by Azure until the operation settles.
        request = self._client.get(status_link)
        if headers:
            request.headers.update(headers)
        return self._client.send(
            request, header_parameters, **operation_config)

    def get_long_running_output(response):
        # 200 and 202 are the only success codes for this operation.
        if response.status_code not in [200, 202]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        deserialized = None

        if response.status_code == 200:
            deserialized = self._deserialize('TroubleshootingResult', response)
        if response.status_code == 202:
            deserialized = self._deserialize('TroubleshootingResult', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized

    if raw:
        # raw=True bypasses polling: send once and hand back the raw result.
        response = long_running_send()
        return get_long_running_output(response)

    long_running_operation_timeout = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    return AzureOperationPoller(
        long_running_send, get_long_running_output,
        get_long_running_status, long_running_operation_timeout)
def get_troubleshooting_result(
        self, resource_group_name, network_watcher_name, target_resource_id, custom_headers=None, raw=False, **operation_config):
    """Get the last completed troubleshooting result on a specified resource.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param network_watcher_name: The name of the network watcher resource.
    :type network_watcher_name: str
    :param target_resource_id: The target resource ID to query the
     troubleshooting result.
    :type target_resource_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :return:
     :class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
     instance that returns :class:`TroubleshootingResult
     <azure.mgmt.network.v2016_09_01.models.TroubleshootingResult>` or
     :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if
     raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # The REST body only carries the target resource id; wrap it here so
    # callers can pass a plain string.
    parameters = models.QueryTroubleshootingParameters(target_resource_id=target_resource_id)

    # Construct URL
    url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/queryTroubleshootResult'
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        # Unique per-request id so the call can be traced server-side.
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct body
    body_content = self._serialize.body(parameters, 'QueryTroubleshootingParameters')

    # Construct and send request
    def long_running_send():
        # Initial POST that kicks off the long-running operation.
        request = self._client.post(url, query_parameters)
        return self._client.send(
            request, header_parameters, body_content, **operation_config)

    def get_long_running_status(status_link, headers=None):
        # Poll the status link returned by Azure until the operation settles.
        request = self._client.get(status_link)
        if headers:
            request.headers.update(headers)
        return self._client.send(
            request, header_parameters, **operation_config)

    def get_long_running_output(response):
        # 200 and 202 are the only success codes for this operation.
        if response.status_code not in [200, 202]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        deserialized = None

        if response.status_code == 200:
            deserialized = self._deserialize('TroubleshootingResult', response)
        if response.status_code == 202:
            deserialized = self._deserialize('TroubleshootingResult', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized

    if raw:
        # raw=True bypasses polling: send once and hand back the raw result.
        response = long_running_send()
        return get_long_running_output(response)

    long_running_operation_timeout = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    return AzureOperationPoller(
        long_running_send, get_long_running_output,
        get_long_running_status, long_running_operation_timeout)
def set_flow_log_configuration(
        self, resource_group_name, network_watcher_name, parameters, custom_headers=None, raw=False, **operation_config):
    """Configures flow log on a specified resource.

    :param resource_group_name: The name of the network watcher resource
     group.
    :type resource_group_name: str
    :param network_watcher_name: The name of the network watcher resource.
    :type network_watcher_name: str
    :param parameters: Parameters that define the configuration of flow
     log.
    :type parameters: :class:`FlowLogInformation
     <azure.mgmt.network.v2016_09_01.models.FlowLogInformation>`
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :return:
     :class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
     instance that returns :class:`FlowLogInformation
     <azure.mgmt.network.v2016_09_01.models.FlowLogInformation>` or
     :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if
     raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Construct URL
    url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/configureFlowLog'
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        # Unique per-request id so the call can be traced server-side.
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct body
    body_content = self._serialize.body(parameters, 'FlowLogInformation')

    # Construct and send request
    def long_running_send():
        # Initial POST that kicks off the long-running operation.
        request = self._client.post(url, query_parameters)
        return self._client.send(
            request, header_parameters, body_content, **operation_config)

    def get_long_running_status(status_link, headers=None):
        # Poll the status link returned by Azure until the operation settles.
        request = self._client.get(status_link)
        if headers:
            request.headers.update(headers)
        return self._client.send(
            request, header_parameters, **operation_config)

    def get_long_running_output(response):
        # 200 and 202 are the only success codes for this operation.
        if response.status_code not in [200, 202]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        deserialized = None

        if response.status_code == 200:
            deserialized = self._deserialize('FlowLogInformation', response)
        if response.status_code == 202:
            deserialized = self._deserialize('FlowLogInformation', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized

    if raw:
        # raw=True bypasses polling: send once and hand back the raw result.
        response = long_running_send()
        return get_long_running_output(response)

    long_running_operation_timeout = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    return AzureOperationPoller(
        long_running_send, get_long_running_output,
        get_long_running_status, long_running_operation_timeout)
def get_flow_log_status(
        self, resource_group_name, network_watcher_name, target_resource_id, custom_headers=None, raw=False, **operation_config):
    """Queries status of flow log on a specified resource.

    :param resource_group_name: The name of the network watcher resource
     group.
    :type resource_group_name: str
    :param network_watcher_name: The name of the network watcher resource.
    :type network_watcher_name: str
    :param target_resource_id: The target resource where getting the flow
     logging status.
    :type target_resource_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :return:
     :class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
     instance that returns :class:`FlowLogInformation
     <azure.mgmt.network.v2016_09_01.models.FlowLogInformation>` or
     :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if
     raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # The REST body only carries the target resource id; wrap it here so
    # callers can pass a plain string.
    parameters = models.FlowLogStatusParameters(target_resource_id=target_resource_id)

    # Construct URL
    url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/queryFlowLogStatus'
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        # Unique per-request id so the call can be traced server-side.
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct body
    body_content = self._serialize.body(parameters, 'FlowLogStatusParameters')

    # Construct and send request
    def long_running_send():
        # Initial POST that kicks off the long-running operation.
        request = self._client.post(url, query_parameters)
        return self._client.send(
            request, header_parameters, body_content, **operation_config)

    def get_long_running_status(status_link, headers=None):
        # Poll the status link returned by Azure until the operation settles.
        request = self._client.get(status_link)
        if headers:
            request.headers.update(headers)
        return self._client.send(
            request, header_parameters, **operation_config)

    def get_long_running_output(response):
        # 200 and 202 are the only success codes for this operation.
        if response.status_code not in [200, 202]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        deserialized = None

        if response.status_code == 200:
            deserialized = self._deserialize('FlowLogInformation', response)
        if response.status_code == 202:
            deserialized = self._deserialize('FlowLogInformation', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized

    if raw:
        # raw=True bypasses polling: send once and hand back the raw result.
        response = long_running_send()
        return get_long_running_output(response)

    long_running_operation_timeout = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    return AzureOperationPoller(
        long_running_send, get_long_running_output,
        get_long_running_status, long_running_operation_timeout)
| [
"Raliclo@gmail.com"
] | Raliclo@gmail.com |
9e4713c9b200c4824571edd9a6992ce2fe3ae3a1 | f707303e4dfe383cf82c23a6bb42ccfdc4cfdb67 | /pandas-ml-quant-rl/pandas_ml_quant_rl/renderer/wrappers.py | 3fb96c84205231b00a6dece0e609e87c5eba87bc | [
"MIT"
] | permissive | jcoffi/pandas-ml-quant | 1830ec256f8c09c04f1aa77e2eecfba07d34fe68 | 650a8e8f77bc4d71136518d1c7ee65c194a99cf0 | refs/heads/master | 2023-08-31T06:45:38.060737 | 2021-09-09T04:44:35 | 2021-09-09T04:44:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,233 | py | import time
from queue import Empty
from multiprocessing import Process, Event, Queue
from .abstract_renderer import Renderer
def render_frame(data_q: Queue, finish_e: Event, renderer_provider):
    """Worker-process loop: consume transitions from *data_q* and render them.

    The renderer is constructed inside the child process via
    *renderer_provider* (plotting backends are generally not safe to share
    across process boundaries).  The loop alternates between draining the
    queue and re-rendering, and exits once *finish_e* is set.
    """
    renderer = renderer_provider()

    while True:
        try:
            old_state, action, new_state, reward, done = data_q.get(timeout=0.1)
            renderer.plot(old_state, action, new_state, reward, done)
            renderer.render()
        except Empty:
            # No new data arrived within the timeout: keep the display
            # alive, then check whether we were asked to shut down.
            renderer.render()
            if finish_e.wait(0.1):
                break

    print("shut down online rendering !!!")
class OnlineRenderer(Renderer):
    """Renders asynchronously in a separate process.

    Transitions are shipped over a multiprocessing Queue to a
    ``render_frame`` worker so plotting never blocks the caller's loop.
    """

    def __init__(self, renderer_provider):
        super().__init__()
        self.data_q = Queue()    # transitions waiting to be rendered
        self.finish_e = Event()  # set to ask the worker to shut down
        self.worker = Process(target=render_frame, args=(self.data_q, self.finish_e, renderer_provider))
        self.startup = True      # first plot() gives the worker time to warm up

    def plot(self, old_state, action, new_state, reward, done):
        """Queue one transition for rendering in the worker process."""
        self.data_q.put_nowait((old_state, action, new_state, reward, done))
        if self.startup:
            # Give the freshly started worker a moment to build its renderer.
            time.sleep(1)
            self.startup = False

    def stop(self):
        """Signal the worker loop to exit."""
        self.finish_e.set()

    def render(self, mode=None, min_time_step=1.0):
        # Lazily start the worker process on the first render call.
        if not self.worker.is_alive():
            self.worker.start()
class MovieRenderer(Renderer):
    """Placeholder for a renderer that records frames to a video file."""

    def __init__(self, renderer_provider):
        """Not implemented yet — *renderer_provider* is currently ignored.

        Intended implementation sketch (matplotlib FFMpegWriter)::

            import numpy as np
            import matplotlib
            matplotlib.use("Agg")
            import matplotlib.pyplot as plt
            import matplotlib.animation as manimation

            FFMpegWriter = manimation.writers['ffmpeg']
            metadata = dict(title='Movie Test', artist='Matplotlib',
                            comment='Movie support!')
            writer = FFMpegWriter(fps=15, metadata=metadata)

            fig = plt.figure()
            l, = plt.plot([], [], 'k-o')
            plt.xlim(-5, 5)
            plt.ylim(-5, 5)

            x0, y0 = 0, 0

            with writer.saving(fig, "writer_test.mp4", 100):
                for i in range(100):
                    x0 += 0.1 * np.random.randn()
                    y0 += 0.1 * np.random.randn()
                    l.set_data(x0, y0)
                    writer.grab_frame()
        """
        pass
| [
"kic@kic.kic"
] | kic@kic.kic |
1c69983d542daa4862fc550a96058d269e269317 | d3b77550a40b860970450e702b6bcd28d5f9b3e4 | /LeetCode/top_interview_questions/SingleNumber.py | 946e7f04b71da2dfe1ffc065803ebed6b32acc7d | [] | no_license | CateGitau/Python_programming | 47bc9277544814ad853b44a88f129713f1a40697 | 6ae42b3190134c4588ad785d62e08b0763cf6b3a | refs/heads/master | 2023-07-08T03:08:46.236063 | 2021-08-12T09:38:03 | 2021-08-12T09:38:03 | 228,712,021 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 485 | py | '''
Given a non-empty array of integers nums,
every element appears twice except for one.
Find that single one.
Follow up: Could you implement a solution with a
linear runtime complexity and without using extra memory?
'''
nums = [2, 2, 1]


def singleNumber(num):
    """Return the element of *num* that appears exactly once.

    Every other element appears exactly twice.  XOR-ing all elements
    cancels the pairs and leaves the singleton, which satisfies the
    problem's linear-time / constant-extra-memory requirement.

    Fixes a bug in the original implementation, which ignored its *num*
    parameter and always counted the module-level ``nums`` list (it also
    used a dict, i.e. O(n) extra memory).

    :param num: non-empty iterable of ints where one value is unpaired
    :return: the value that appears exactly once
    """
    result = 0
    for value in num:  # operate on the parameter, not the global
        result ^= value
    return result


print(singleNumber(nums))
"catherinegitau94@gmail.com"
] | catherinegitau94@gmail.com |
6fa21b8ade431f7c3d86510e7d9f2ccc1c8b4271 | efbe970cb374d4416c2c500a495994397ea18dd5 | /utils/db.py | 7a8018cd56adb2b0f4ac8c2920dbbc8cbbb04e43 | [
"MIT"
] | permissive | void-being/bepis-bot | f7d9fbc7663bb8a28c70e312fa4fb20c53c406c7 | 491b8de94b94384df6b26fa6a1325ee578020b7e | refs/heads/master | 2020-07-11T17:28:10.080879 | 2018-11-15T23:44:06 | 2018-11-15T23:44:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,845 | py | from datetime import datetime, timedelta
from logging import getLogger
from hashlib import md5
from os import environ
from uuid import uuid4
from threading import Thread
from queue import Queue
from pymongo import mongo_client
class EventHandler(Thread):
    """Serializes callables onto a single background worker thread.

    All database operations in this module are funnelled through one
    thread via :meth:`do`; each call blocks on a private reply queue
    until the worker has executed it, then returns the result (or
    re-raises the worker-side exception in the caller).
    """

    def __init__(self):
        self.q = Queue()  # pending (reply_queue, command, args, kwargs) tuples
        super().__init__()
        # Daemonize so the worker's infinite run() loop cannot keep the
        # interpreter alive forever at shutdown (the original thread was
        # non-daemonic and was never joined).
        self.daemon = True
        self.start()

    def do(self, command, *args, **kwargs):
        """Run ``command(*args, **kwargs)`` on the worker thread.

        Blocks until the worker finishes.  If the command raised, the
        exception is re-raised here instead of silently killing the
        worker and deadlocking every subsequent call (the original
        behavior).
        """
        cb = Queue()
        self.q.put((cb, command, args, kwargs))
        result = cb.get()
        if isinstance(result, BaseException):
            raise result
        return result

    def run(self):
        # Worker loop: execute queued commands and post results (or the
        # exception object) back on the caller's private reply queue.
        while True:
            cb, cmd, args, kwargs = self.q.get()
            try:
                cb.put(cmd(*args, **kwargs))
            except BaseException as exc:
                cb.put(exc)


# Single module-wide worker shared by all database wrappers below.
handler = EventHandler()
class BepisUser:
    """Wraps one user document from the ``profiles`` collection.

    Attribute writes are pushed straight back to MongoDB through the
    module-level ``handler`` thread, so in-memory state mirrors the DB.
    """

    def __init__(self, name: str, master, contents: dict):
        # *master* is the pymongo collection backing this profile;
        # *contents* is the raw document fetched from it.
        self.master = master
        self.logger = getLogger(name + "-" + str(contents['user_id']))
        self.user_id = contents['user_id']
        self.shibes = contents['shibes']
        self._bepis = contents['bepis']
        self._last_daily = contents['last_daily']
        if "powerups" not in contents:
            # Older documents predate the powerups field; backfill it.
            handler.do(self.master.update_one, {"user_id": self.user_id}, {"$set": {"powerups": []}})
            self.powerups = []
        else:
            self.powerups = contents['powerups']

    @property
    def bepis(self):
        """Current bepis balance (cached copy of the DB value)."""
        return self._bepis

    @bepis.setter
    def bepis(self, value):
        # Persist first, then update the cache.
        handler.do(self.master.update_one, {"user_id": self.user_id}, {"$set": {"bepis": value}})
        self._bepis = value
        self.logger.debug("Updated bepis to: " + str(value))

    @property
    def last_daily(self):
        """Timestamp of the last claimed daily reward."""
        return self._last_daily

    @last_daily.setter
    def last_daily(self, value):
        handler.do(self.master.update_one, {"user_id": self.user_id}, {"$set": {"last_daily": value}})
        self._last_daily = value
        self.logger.debug("Updated last_daily to now")

    def add_shibe(self, shibe_name: str, amount: int=None):
        """Add one *shibe_name* (or set its count to *amount*) and persist."""
        for i, shibe in enumerate(self.shibes):
            if shibe[0] == shibe_name:
                # Already owned: bump the count, or overwrite it when an
                # explicit amount was given.
                shibe_amount = (shibe[1] + 1) if amount is None else amount
                self.shibes[i] = shibe_name, shibe_amount
                break
        else:
            self.shibes.append((shibe_name, (1 if amount is None else amount)))
        handler.do(self.master.update_one, {"user_id": self.user_id}, {"$set": {"shibes": self.shibes}})
        self.logger.debug("Added shibe: " + shibe_name)

    def remove_shibe(self, shibe_index: int):
        """Decrement the count at *shibe_index*, dropping the entry at zero."""
        shibe = self.shibes[shibe_index]
        new_count = shibe[1] - 1
        if not new_count:
            self.shibes.pop(shibe_index)
        else:
            self.shibes[shibe_index] = (shibe[0], new_count)
        handler.do(self.master.update_one, {"user_id": self.user_id}, {"$set": {"shibes": self.shibes}})
        self.logger.debug("Removed shibe: " + shibe[0])

    def add_powerup(self, *data):
        """Append a powerup record (name first) and persist the new list.

        NOTE(review): the debug line concatenates a str with
        ``powerups[0]`` (a tuple/list element), which looks like it would
        raise TypeError — confirm before relying on this method.
        """
        powerups = self.powerups.copy()
        powerups.append(data)
        handler.do(self.master.update_one, {"user_id": self.user_id}, {"$set": {"powerups": powerups}})
        self.logger.debug("Added powerup: " + powerups[0])

    def remove_powerup(self, name: str):
        """Remove the first powerup matching *name*.

        NOTE(review): if no entry matches, the loop leaves ``powerup``
        bound to the LAST element (or unbound for an empty list), which
        then gets removed anyway; also the ``$set`` writes the single
        removed tuple — not the remaining list — into ``powerups``.
        Both look buggy; confirm intent before changing callers.
        """
        for powerup in self.powerups:
            if powerup[0] == name and powerup[1] is not None:
                break
        self.powerups.remove(powerup)
        handler.do(self.master.update_one, {"user_id": self.user_id}, {"$set": {"powerups": powerup}})
        self.logger.debug("Removed powerup: " + powerup[0])
class Database(EventHandler):
    """Profile store backed by the ``bepis_bot.profiles`` collection.

    NOTE(review): inherits EventHandler (so each instance owns a worker
    thread) yet routes its queries through the module-level ``handler``
    instead of itself — possibly unintentional.
    """

    def __init__(self, name: str):
        self.logger = getLogger(name + "-database")
        self.client = mongo_client.MongoClient(environ["MONGO_URI"])
        self.profiles = self.client['bepis_bot']['profiles']
        # user_id is the lookup key everywhere, so enforce uniqueness.
        self.profiles.create_index("user_id", unique=True)
        super().__init__()

    def create_user(self, user):
        """Insert a fresh profile for *user* and return it wrapped."""
        payload = {
            "user_id": user.id,
            "bepis": 0,
            "shibes": [],
            # A day in the past so the daily reward is immediately claimable.
            "last_daily": datetime.now() - timedelta(days=1),
            "invite_url": None
        }
        handler.do(self.profiles.insert_one, payload)
        self.logger.debug("Created User: " + str(user.id))
        return BepisUser(self.logger.name, self.profiles, payload)

    def find_user(self, user_id: int):
        """Return a BepisUser for *user_id*, or None when unknown."""
        prof = handler.do(self.profiles.find_one, {"user_id": user_id})
        if prof:
            self.logger.debug("Found user: " + str(user_id))
            return BepisUser(self.logger.name, self.profiles, prof)
        self.logger.debug("Could not find user: " + str(user_id))
class InviteDatabase(EventHandler):
    """Tracks invite codes and which user created each one."""

    def __init__(self):
        self.logger = getLogger("InviteDatabase")
        self.client = mongo_client.MongoClient(environ["MONGO_URI"])
        self.invites = self.client['bepis_bot']['invites']
        self.profiles = self.client['bepis_bot']['profiles']
        super().__init__()

    def already_joined(self, member):
        """Return True when *member* already has a profile document."""
        user = handler.do(self.profiles.find_one, {"user_id": member.user.id})
        if user is None:
            self.logger.debug("Checking join on {0} (hasn't joined)".format(member.user.id))
            return False
        else:
            self.logger.debug("Checking join on {0} (has joined)".format(member.user.id))
            return True

    def register_invite(self, invite_code: str, user_id: int):
        """Record that *user_id* created *invite_code*."""
        handler.do(self.invites.insert_one, {
            "invite_code": invite_code,
            "user_id": user_id
        })
        self.logger.debug("Created invite reg for {0}, invite: {1}".format(user_id, invite_code))

    def __iter__(self):
        # Yields every stored invite document.
        # NOTE(review): queries the collection directly, bypassing the
        # handler thread used everywhere else — confirm that is intended.
        for invite in self.invites.find({}):
            yield invite

    def remove_invite(self, invite_code: str):
        """Delete the record for *invite_code*."""
        handler.do(self.invites.delete_one, {"invite_code": invite_code})
        self.logger.debug("Removed invite: {0}".format(invite_code))
class CodeDatabase:
    """One-shot redemption codes, stored as MD5 hashes of the raw code."""

    def __init__(self):
        self.logger = getLogger("CodeDatabase")
        self.client = mongo_client.MongoClient(environ["MONGO_URI"])
        self.codes = self.client['bepis_bot']['codes']
        self.codes.create_index("hash", unique=True)

    def create_code(self, value: str):
        """Store *value* under a fresh random code and return that code.

        Only the hash is persisted, so the stored documents never expose
        the redeemable code itself.
        """
        code = str(uuid4()).upper()
        hashed = md5(code.encode()).hexdigest()
        handler.do(self.codes.insert_one, {"hash": hashed,
                                           "value": value})
        return code

    def activate_code(self, code: str):
        """Redeem *code*: return its value and delete it, or None if unknown.

        NOTE(review): these two calls hit the collection directly instead
        of going through the handler thread — confirm that is intended.
        """
        hashed = md5(code.encode()).hexdigest()
        result = self.codes.find_one({"hash": hashed})
        if result:
            self.codes.delete_one({"hash": hashed})
            return result['value']
class LotteryDatabase:
    """State for the currently running lottery and its ticket holders.

    Documents are distinguished by their ``type`` field: one "LOTTERY"
    document describes the draw, and one "USER" document per participant
    holds that user's ticket count.
    """

    def __init__(self):
        self.logger = getLogger("LotteryDatabase")
        self.client = mongo_client.MongoClient(environ["MONGO_URI"])
        self.lottery = self.client['bepis_bot']['lottery']

    def start_lottery(self, value, length=(12 * 60 * 60), price=10):
        """Wipe any previous lottery state and start a new draw.

        *length* is in seconds (default 12 hours), *price* is the ticket
        cost and *value* is the prize.
        """
        # Clearing everything also drops all old USER ticket documents.
        handler.do(self.lottery.delete_many, {})
        handler.do(self.lottery.insert_one, {
            "type": "LOTTERY",
            "start_time": datetime.now(),
            "length": length,
            "price": price,
            "value": value
        })

    def add_tickets(self, id: str, amount: int):
        """Add *amount* tickets for user *id*, creating the doc if needed."""
        current_amount = handler.do(self.lottery.find_one, {"user_id": id})
        if current_amount:
            total = current_amount["amount"] + amount
            handler.do(self.lottery.update_one, {"user_id": id}, {"$set": {"amount": total}})
        else:
            handler.do(self.lottery.insert_one, {
                "type": "USER",
                "user_id": id,
                "amount": amount
            })

    def get_event(self):
        """Return the active lottery document, or None."""
        return handler.do(self.lottery.find_one, {"type": "LOTTERY"})

    def get_users(self):
        """Return a cursor over all ticket-holder documents."""
        return handler.do(self.lottery.find, {"type": "USER"})
| [
"zwork101@gmail.com"
] | zwork101@gmail.com |
386fd37b61525c2096f4a65d5ec83bd686be6528 | d68ca034018d66f73024223d4b2266b3c3c901d7 | /prev/myworks/lennys/chainxy/spiders/lennys.py | 16da51425667364402f6e451a3adb30645e65cd6 | [] | no_license | GoodyIT/scrapy | caff30d26660f778008ad50532e364ab36aba4c2 | 5ae80cf83dc62c4e1bd2bfa11049ca39a3ca3488 | refs/heads/master | 2021-01-20T10:10:18.365194 | 2017-07-22T13:09:07 | 2017-07-22T13:09:07 | 90,330,210 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,527 | py | import scrapy
import json
import csv
from scrapy.spiders import Spider
from scrapy.http import FormRequest
from scrapy.http import Request
from scrapy.selector import HtmlXPathSelector
from chainxy.items import ChainItem
import pdb
class Lennys(scrapy.Spider):
    # Scraper for lennys.com store locations.
    #
    # NOTE(review): this spider looks unfinished — parse_store_contents
    # references names that are never defined (``location``, ``info_json``)
    # and response.meta keys that no request in this class sets, and both
    # parse callbacks start with a leftover pdb breakpoint.  Confirm the
    # intended flow before running it.

    name = "lennys"
    domain = "http://www.lennys.com/"
    store_id = []

    # calculate number of pages
    def start_requests(self):
        # Single entry point: the locations page, with browser-like headers.
        yield scrapy.Request(url='https://www.lennys.com/locations/', headers={'User-Agent':'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.81 Safari/537.36', 'Accept':'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8', 'Cookie':'CFID=59662906; CFTOKEN=31653469; _gat=1; _ga=GA1.2.1230111863.1493822037; _gid=GA1.2.1456468815.1493823274'}, callback=self.parse_state)

    def parse_state(self, response):
        # NOTE(review): leftover debugger breakpoint — remove before running.
        pdb.set_trace()
        # Follow every image-map <area> link to a store detail page.
        state_list=response.xpath('area');
        for state in state_list:
            if state.xpath('.//@href'):
                url = "http://www.lennys.com" + state.xpath('.//@href/text()').extract_first()
                request = scrapy.Request(url=url, callback=self.parse_store_contents)
                yield request

    # pare store detail page
    def parse_store_contents(self, response):
        # NOTE(review): leftover debugger breakpoint — remove before running.
        pdb.set_trace()
        store = response.xpath("//div[contains(@class, 'contact_information BasicInfo-BS')]")
        item = ChainItem()
        item['store_name'] = ''
        # NOTE(review): none of these meta keys are set by parse_state, so
        # every lookup below would raise KeyError.
        item['store_number'] = response.meta["store_number"]
        address = store.xpath("//p[contains(@class, 'Address')]/text()").extract()
        item['address'] = response.meta["address"]
        item['address2'] = ''
        item['phone_number'] = response.meta["phone_number"]
        item['city'] = response.meta["city"]
        item['state'] = response.meta["state"]
        item['country'] = 'United States'
        item['city'] = response.meta["city"]
        # NOTE(review): ``location`` is undefined in this scope — this line
        # will raise NameError as written.
        item['zip_code'] = location.raw["address"]["postcode"]
        item['store_hours'] = self.validate(store.xpath(".//dd/text()"))
        #item['store_type'] = info_json["@type"]
        item['other_fields'] = ""
        item['coming_soon'] = "0"
        yield item

    def validate(self, xpath_obj):
        """Return the stripped first extracted value, or '' on any failure."""
        try:
            return xpath_obj.extract_first().strip()
        except:
            return ""
| [
"johnsondavid489@yahoo.com"
] | johnsondavid489@yahoo.com |
52d9885475a54a1a940aad89d53e0ad69b4ede67 | 3dbc74df3a97ce8254935a08608e8a7966a1bba6 | /math/0x00-linear_algebra/12-bracin_the_elements.py | eb3480dcce06fe9737c77707f4fb45cfa3363dd2 | [] | no_license | s0m35h1t/holbertonschool-machine_learning | 9c0475da64eea87565dd90d70eeb23016ef17600 | 2eb7965900fd018f4092d2fb1e2055d35ba4899e | refs/heads/master | 2023-02-28T21:57:50.558030 | 2021-02-07T20:44:01 | 2021-02-07T20:44:01 | 279,866,501 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 348 | py | #!/usr/bin/env python3
"""Array operations"""
def np_elementwise(mat1, mat2):
    """Element-wise arithmetic between two matrices.

    Args:
        mat1 (numpy.ndarray): first operand.
        mat2 (numpy.ndarray): second operand.

    Returns:
        tuple: ``(mat1 + mat2, mat1 - mat2, mat1 * mat2, mat1 / mat2)``
    """
    total = mat1 + mat2
    difference = mat1 - mat2
    product = mat1 * mat2
    quotient = mat1 / mat2
    return total, difference, product, quotient
| [
"adib.grouz@gmail.com"
] | adib.grouz@gmail.com |
a107f778c6b4a2e7c1e2a0ecaaa9d282162faee0 | c0dab4c3f949b09044b8731d19c1c3cb82abc9ae | /airmozilla/new/views.py | 5bd534963b12c7f1fe79b85af3368f528c171db9 | [] | no_license | mudithkr/airmozilla | 274d64464ca24ed9c0f8d805fdf9233cdf2fed22 | 5e78ebd28c678dabf54d471aab2b31fd742d4205 | refs/heads/master | 2020-04-05T23:12:44.840465 | 2016-02-04T19:07:23 | 2016-02-04T19:07:23 | 45,274,575 | 1 | 0 | null | 2015-10-30T20:19:54 | 2015-10-30T20:19:54 | null | UTF-8 | Python | false | false | 31,381 | py | # -*- coding: utf-8 -*-
import json
import os
from cStringIO import StringIO
from xml.parsers.expat import ExpatError
import requests
import xmltodict
from PIL import Image
from slugify import slugify
from django import http
from django.shortcuts import render, get_object_or_404, redirect
from django.db import transaction
from django.conf import settings
from django.utils import timezone
from django.db.models import Count, Q
from django.contrib.auth.decorators import login_required
from django.utils.functional import wraps
from django.template.base import TemplateDoesNotExist
from django.views.decorators.cache import never_cache
from django.views.decorators.http import require_POST
from django.views.decorators.csrf import csrf_exempt
from django.core.cache import cache
from django.contrib.auth.models import User
from django.core.files.base import ContentFile
from django.core.urlresolvers import reverse
from django.core.files import File
from django.core.files.temp import NamedTemporaryFile
from jsonview.decorators import json_view
from sorl.thumbnail import get_thumbnail
from airmozilla.manage import vidly
from airmozilla.base.utils import get_base_url, prepare_vidly_video_url
from airmozilla.main.models import (
Event,
VidlySubmission,
Template,
Picture,
EventOldSlug,
Channel,
Approval,
get_profile_safely,
Tag,
)
from airmozilla.comments.models import Discussion
from airmozilla.uploads.models import Upload
from airmozilla.manage import videoinfo
from airmozilla.base.helpers import show_duration
from airmozilla.base.utils import simplify_form_errors
from airmozilla.manage import sending
from airmozilla.base import youtube
from . import forms
def xhr_login_required(view_func):
    """Like ``login_required`` but responds with a JSON 403 instead of a
    redirect, which is what XHR callers can actually act on."""
    @wraps(view_func)
    def inner(request, *args, **kwargs):
        if request.user.is_authenticated():
            return view_func(request, *args, **kwargs)
        payload = json.dumps({'error': "You must be logged in"})
        return http.HttpResponse(
            payload,
            content_type='application/json',
            status=403
        )
    return inner
def must_be_your_event(f):
    """Resolve the ``id`` argument to an Event and require ownership.

    The wrapped view receives the Event instance in place of the raw id.
    Expects an already-authenticated user (pair with a login decorator),
    and returns 403 when the event was created by someone else.
    """
    @wraps(f)
    def inner(request, id, **kwargs):
        assert request.user.is_authenticated()
        event = get_object_or_404(Event, pk=id)
        if event.creator == request.user:
            return f(request, event, **kwargs)
        return http.HttpResponseForbidden(
            "Not your event to meddle with"
        )
    return inner
@login_required
def home(request):
    """Landing page for the new-event upload flow (sidebar hidden)."""
    request.show_sidebar = False
    return render(request, 'new/home.html', {})
@xhr_login_required
def partial_template(request, template_name):
    """Serve a single partial template from ``new/partials/``.

    The details partial additionally gets an unbound DetailsForm in its
    context.  Unknown template names yield a 404 rather than a template
    error.
    """
    if template_name == 'details.html':
        context = {'form': forms.DetailsForm()}
    else:
        context = {}
    try:
        return render(request, os.path.join('new/partials', template_name), context)
    except TemplateDoesNotExist:
        raise http.Http404(template_name)
@json_view
@xhr_login_required
@require_POST
@transaction.atomic
def save_upload(request):
    """Record a finished client-side upload and create its initial Event.

    Expects a JSON body matching ``forms.SaveForm`` (url, file_name,
    mime_type, size, upload_time) plus an optional ``duration``.
    Returns ``{'id': <event id>}`` or a 400 with the form errors.
    """
    data = json.loads(request.body)
    form = forms.SaveForm(data)
    if not form.is_valid():
        return http.HttpResponseBadRequest(form.errors)
    url = form.cleaned_data['url']
    # Fall back to the URL's basename if the client didn't name the file.
    file_name = form.cleaned_data['file_name'] or os.path.basename(url)
    mime_type = form.cleaned_data['mime_type']
    size = form.cleaned_data['size']
    upload_time = form.cleaned_data['upload_time']
    # 'duration' is not part of SaveForm, so read it off the raw payload.
    duration = data.get('duration')
    new_upload = Upload.objects.create(
        user=request.user,
        url=url,
        size=size,
        file_name=file_name,
        mime_type=mime_type,
        upload_time=upload_time,
    )
    # now we can create the event to start with
    event = Event.objects.create(
        upload=new_upload,
        status=Event.STATUS_INITIATED,
        start_time=timezone.now(),
        privacy=Event.PRIVACY_PUBLIC,
        creator=request.user,
        duration=duration,
    )
    # Link back so Upload and Event point at each other.
    new_upload.event = event
    new_upload.save()
    return {'id': event.id}
@never_cache
@xhr_login_required
@transaction.atomic
@must_be_your_event
@json_view
def event_edit(request, event):
    """GET: return the serialized event.

    POST: apply a JSON body through ``forms.DetailsForm``; only events
    still in the "initiated" state may be edited. On validation failure,
    returns ``{'errors': ...}``; otherwise returns the updated event.
    """
    if request.method == 'POST':
        if event.status != Event.STATUS_INITIATED:
            return http.HttpResponseBadRequest(
                "You can't edit events that are NOT in the state of initiated."
            )
        title_before = event.title
        form = forms.DetailsForm(json.loads(request.body), instance=event)
        if form.is_valid():
            form.save()
            if event.title != title_before:
                # Un-setting it will automatically pick a good slug.
                # But first we need to forget any EventOldSlug
                EventOldSlug.objects.filter(slug=event.slug).delete()
                event.slug = None
                event.save()
        else:
            return {'errors': simplify_form_errors(form.errors)}
    context = {
        'event': serialize_event(event),
    }
    return context
def serialize_event(event, extended=False):
    """Turn an Event into a JSON-serializable dict.

    With ``extended=False``, 'channels' and 'topics' are ``{id: True}``
    maps (cheap membership lookups for the client). With
    ``extended=True`` they become lists of dicts with names/urls, and
    pending approvals, picture thumbnail and upload info are added too.
    """
    data = {
        'id': event.id,
        'title': event.title,
        'slug': event.slug,
        'description': event.description,
        'privacy': event.privacy,
        'privacy_display': event.get_privacy_display(),
        'status': event.status,
        'status_display': event.get_status_display(),
        'additional_links': event.additional_links,
        'duration': event.duration,
        'tags': [],
        'channels': {},
        'topics': {},
    }
    if extended:
        # When it's the extended version, we return a list of dicts
        # that contain the id, name, etc.
        data['channels'] = []
        data['topics'] = []
    if event.slug:
        data['url'] = reverse('main:event', args=(event.slug,))
    for tag in event.tags.all():
        data['tags'].append(tag.name)  # good enough?
    # lastly, make it a string
    data['tags'] = ', '.join(sorted(data['tags']))
    for channel in event.channels.all():
        if extended:
            data['channels'].append({
                'id': channel.id,
                'name': channel.name,
                'url': reverse('main:home_channels', args=(channel.slug,)),
            })
        else:
            data['channels'][channel.id] = True
    for topic in event.topics.all():
        if extended:
            data['topics'].append({
                'id': topic.id,
                'topic': topic.topic,
            })
        else:
            data['topics'][topic.id] = True
    if extended:
        # get a list of all the groups that need to approve it
        data['approvals'] = []
        for approval in Approval.objects.filter(event=event, approved=False):
            data['approvals'].append({
                'group_name': approval.group.name,
            })
        if event.placeholder_img or event.picture:
            geometry = '160x90'
            crop = 'center'
            # Prefer the chosen Picture over the placeholder image.
            if event.picture:
                thumb = get_thumbnail(
                    event.picture.file, geometry, crop=crop
                )
            else:
                thumb = get_thumbnail(
                    event.placeholder_img, geometry, crop=crop
                )
            data['picture'] = {
                'url': thumb.url,
                'width': thumb.width,
                'height': thumb.height,
            }
        if event.upload:
            data['upload'] = {
                'size': event.upload.size,
                'url': event.upload.url,
                'mime_type': event.upload.mime_type,
            }
        elif (
            'youtube' in event.template.name.lower() and
            event.template_environment.get('id')
        ):
            # YouTube-backed events have no upload; expose the video id.
            data['upload'] = None
            data['youtube_id'] = event.template_environment['id']
    return data
@require_POST
@login_required
@transaction.atomic
@must_be_your_event
@json_view
def event_archive(request, event):
    """Submit the event's upload to Vid.ly for transcoding (idempotent).

    If a VidlySubmission for this upload already exists, the most recent
    one is reported; otherwise a new submission is created and the event
    is switched to the default archive template.
    """
    if event.status != Event.STATUS_INITIATED:
        return http.HttpResponseBadRequest(
            "You can't archive events that are NOT in the state of initiated."
        )
    submissions = VidlySubmission.objects.filter(
        event=event,
        url__startswith=event.upload.url
    )
    # for/else idiom: the `break` grabs the newest submission if any exist;
    # the `else` only runs when the queryset is empty (nothing sent yet).
    for vidly_submission in submissions.order_by('-submission_time'):
        break
    else:
        # we haven't sent it in for archive yet
        upload = event.upload
        base_url = get_base_url(request)
        webhook_url = base_url + reverse('new:vidly_media_webhook')
        video_url = prepare_vidly_video_url(upload.url)
        tag, error = vidly.add_media(
            video_url,
            hd=True,
            notify_url=webhook_url,
            # Note that we deliberately don't bother yet to set
            # token_protection here because we don't yet know if the
            # event is going to be private or not.
            # Also, it's much quicker to make screencaptures of videos
            # that are not token protected on vid.ly.
        )
        # then we need to record that we did this
        vidly_submission = VidlySubmission.objects.create(
            event=event,
            url=video_url,
            tag=tag,
            hd=True,
            submission_error=error or None
        )
        default_template = Template.objects.get(default_archive_template=True)
        # Do an in place edit in case this started before the fetch_duration
        # has started.
        Event.objects.filter(id=event.id).update(
            template=default_template,
            template_environment={'tag': tag}
        )
    return {
        'tag': vidly_submission.tag,
        'error': vidly_submission.submission_error
    }
@require_POST
@login_required
@must_be_your_event
@json_view
def event_screencaptures(request, event):
    """Fetch the video duration and generate screen captures for the event.

    Returns ``{'seconds': ..., 'no_pictures': ...}``. Both videoinfo
    calls are guarded by a short-lived cache flag (see comment below) so
    concurrent requests don't start the same slow work twice.
    """
    if event.status != Event.STATUS_INITIATED:
        return http.HttpResponseBadRequest(
            "Events NOT in the state of initiated."
        )
    upload = event.upload
    video_url = upload.url
    context = {}
    cache_key = 'fetching-{0}'.format(event.id)
    # This function sets the cache `fetching-{id}` before and after calling
    # those functions in the videoinfo module.
    # The reason is that those calls might take many many seconds
    # and the webapp might send async calls to the event_picture view
    # which will inform the webapp that the slow videoinfo processes
    # are running and thus that the webapp shouldn't kick it off yet.
    seconds = event.duration
    if not event.duration:
        # it's a poor man's lock
        if not cache.get(cache_key):
            cache.set(cache_key, True, 60)
            seconds = videoinfo.fetch_duration(
                event,
                video_url=video_url,
                save=True,
                verbose=settings.DEBUG
            )
            cache.delete(cache_key)
            # Re-fetch: fetch_duration updates the row, not this instance.
            event = Event.objects.get(id=event.id)
    context['seconds'] = seconds
    # The reason we can't use `if event.duration:` is because the
    # fetch_duration() does an inline-update instead of modifying
    # the instance object.
    no_pictures = Picture.objects.filter(event=event).count()
    if event.duration and not no_pictures:
        if not cache.get(cache_key):
            cache.set(cache_key, True, 60)
            event = Event.objects.get(id=event.id)
            no_pictures = videoinfo.fetch_screencapture(
                event,
                video_url=video_url,
                save=True,
                verbose=settings.DEBUG,
                set_first_available=not event.picture,
                import_immediately=True,
            )
            cache.delete(cache_key)
            event = Event.objects.get(id=event.id)
    if no_pictures and not event.picture:
        # no picture has been chosen previously
        pictures = Picture.objects.filter(event=event).order_by('created')[:1]
        for picture in pictures:
            event.picture = picture
            event.save()
            break
    context['no_pictures'] = no_pictures
    return context
# Note that this view is publicly available.
# That means we can't trust the content but we can take it as a hint.
@csrf_exempt
@require_POST
def vidly_media_webhook(request):
    """Webhook Vid.ly POSTs to when transcoding finishes or fails.

    Parses the posted XML and, if it matches a known VidlySubmission,
    records the finished/errored timestamp, fixes up privacy protection,
    promotes the event to scheduled, and kicks off duration/screencapture
    processing. Unknown or malformed payloads are deliberately ignored.
    """
    if not request.POST.get('xml'):
        return http.HttpResponseBadRequest("no 'xml'")
    xml_string = request.POST['xml'].strip()
    try:
        struct = xmltodict.parse(xml_string)
    except ExpatError:
        return http.HttpResponseBadRequest("Bad 'xml'")
    try:
        task = struct['Response']['Result']['Task']
        try:
            # Match on BOTH url and tag so a forged payload can't easily
            # flip the state of an arbitrary submission.
            vidly_submission = VidlySubmission.objects.get(
                url=task['SourceFile'],
                tag=task['MediaShortLink']
            )
            if task['Status'] == 'Finished':
                if not vidly_submission.finished:
                    vidly_submission.finished = timezone.now()
                    vidly_submission.save()
                event = vidly_submission.event
                if (
                    task['Private'] == 'false' and
                    event.privacy != Event.PRIVACY_PUBLIC
                ):
                    # the event is private but the video is not
                    vidly.update_media_protection(
                        vidly_submission.tag,
                        True  # make it private
                    )
                    if not vidly_submission.token_protection:
                        vidly_submission.token_protection = True
                        vidly_submission.save()
                # Awesome!
                # This event now has a fully working transcoded piece of
                # media.
                if event.status == Event.STATUS_PENDING:
                    event.status = Event.STATUS_SCHEDULED
                    event.archive_time = timezone.now()
                    event.save()
                # More awesome! We can start processing the transcoded media.
                if not event.duration:
                    videoinfo.fetch_duration(
                        event,
                        save=True,
                        verbose=settings.DEBUG
                    )
                    # fetch_duration updates the row in place; re-fetch.
                    event = Event.objects.get(id=event.id)
                if event.duration:
                    if not Picture.objects.filter(event=event):
                        videoinfo.fetch_screencapture(
                            event,
                            save=True,
                            verbose=settings.DEBUG,
                            set_first_available=True,
                        )
            elif task['Status'] == 'Error':
                if not vidly_submission.errored:
                    vidly_submission.errored = timezone.now()
                    vidly_submission.save()
        except VidlySubmission.DoesNotExist:
            # remember, we can't trust the XML since it's publicly
            # available and exposed as a webhook
            pass
    except KeyError:
        # If it doesn't have a "Result" or "Task", it was just a notification
        # that the media was added.
        pass
    return http.HttpResponse('OK\n')
@never_cache
@login_required
@must_be_your_event
@json_view
def event_picture(request, event):
    """GET: list screen-capture thumbnails for the event.

    POST: pick a picture via ``forms.PictureForm`` first, then return the
    list. Also reports whether a slow videoinfo fetch is in flight (the
    ``fetching-{id}`` cache flag set by ``event_screencaptures``).
    """
    if request.method == 'POST':
        form = forms.PictureForm(json.loads(request.body), instance=event)
        if not form.is_valid():
            return http.HttpResponseBadRequest(form.errors)
        with transaction.atomic():
            form.save()
    # if it has screen captures start returning them
    pictures = Picture.objects.filter(event=event).order_by('created')
    thumbnails = []
    # geometry = request.GET.get('geometry', '160x90')
    # crop = request.GET.get('crop', 'center')
    geometry = '160x90'
    crop = 'center'
    for p in pictures:
        thumb = get_thumbnail(
            p.file, geometry, crop=crop
        )
        picked = event.picture and event.picture == p
        thumbnails.append({
            'id': p.id,
            'url': thumb.url,
            'width': thumb.width,
            'height': thumb.height,
            'picked': picked,
            # 'large_url': large_thumb.url,
        })
    context = {}
    if thumbnails:
        context['thumbnails'] = thumbnails
    cache_key = 'fetching-{0}'.format(event.id)
    context['fetching'] = bool(cache.get(cache_key))
    return context
@never_cache
@login_required
@must_be_your_event
@json_view
def event_summary(request, event):
    """Return the extended serialization of the event together with the
    number of screen-capture pictures it has."""
    serialized = serialize_event(event, extended=True)
    picture_count = Picture.objects.filter(event=event).count()
    return {'event': serialized, 'pictures': picture_count}
def _videos_by_tags(tags):
    """Return a list of dicts where each dict looks something like this:
    {'id': 123, 'tag': 'abc123', 'Status': 'Processing', 'finished': False}
    And if there's no VidlySubmission the dict will just look like this:
    {'id': 124}
    The advantage of this function is that you only need to do 1 query
    to Vid.ly for a long list of tags.

    :param tags: dict mapping Vid.ly tag -> Event instance.
    """
    # One batched Vid.ly query for all tags at once.
    all_results = vidly.query(tags.keys())
    video_contexts = []
    for tag, event in tags.items():
        video_context = {
            'id': event.id,
        }
        if event.duration:
            video_context['duration'] = event.duration
            video_context['duration_human'] = show_duration(event.duration)
        qs = VidlySubmission.objects.filter(event=event, tag=tag)
        # Only the most recent submission matters; loop + break avoids a
        # second query when the slice is empty.
        for vidly_submission in qs.order_by('-submission_time')[:1]:
            video_context['tag'] = tag
            results = all_results.get(tag, {})
            video_context['status'] = results.get('Status')
            video_context['finished'] = results.get('Status') == 'Finished'
            if video_context['finished']:
                # Opportunistically persist finished/archive timestamps.
                if not vidly_submission.finished:
                    vidly_submission.finished = timezone.now()
                    vidly_submission.save()
                if not event.archive_time:
                    event.archive_time = timezone.now()
                    event.save()
            elif results.get('Status') == 'Error':
                if not vidly_submission.errored:
                    vidly_submission.errored = timezone.now()
                    vidly_submission.save()
            else:
                # Still processing: expose an ETA to the client.
                video_context['estimated_time_left'] = (
                    vidly_submission.get_estimated_time_left()
                )
            break
        video_contexts.append(video_context)
    return video_contexts
@never_cache
@login_required
@must_be_your_event
@json_view
def event_video(request, event):
    """Return the transcoding status for this event's Vid.ly video, or an
    empty dict if the event has no Vid.ly tag yet."""
    environment = event.template_environment
    tag = environment and environment.get('tag')
    if not tag:
        return {}
    return _videos_by_tags({tag: event})[0]
@require_POST
@login_required
@json_view
def videos(request):
    """Batch version of ``event_video``.

    Expects a JSON request body like ``{"ids": [...]}`` and returns a dict
    keyed by event ID with each event's transcoding status (see
    ``_videos_by_tags``). Only the requester's own Vid.ly-templated events
    are considered.
    """
    try:
        ids = json.loads(request.body)['ids']
    except (ValueError, KeyError) as x:
        # ValueError: body isn't valid JSON. KeyError: no 'ids' key.
        # Either way it's a malformed client request, so answer 400
        # instead of letting the KeyError bubble up as a 500.
        return http.HttpResponseBadRequest(str(x))
    events = Event.objects.filter(
        id__in=ids,
        creator=request.user,
        template__name__icontains='vid.ly',
    )
    tags = {}
    for event in events:
        tag = (
            event.template_environment and
            event.template_environment.get('tag')
        )
        # NOTE(review): events without a tag all collapse onto the None
        # key here, so at most one of them survives -- confirm intended.
        tags[tag] = event
    return dict(
        (x['id'], x)
        for x in _videos_by_tags(tags)
    )
@require_POST
@login_required
@must_be_your_event
@json_view
def event_publish(request, event):
    """Publish an initiated event: set its status (scheduled or pending),
    fix Vid.ly token protection for private events, fill in default
    picture/channels/discussion, and create+announce topic approvals for
    public events. Returns True on success.
    """
    if event.status != Event.STATUS_INITIATED:
        return http.HttpResponseBadRequest("Not in an initiated state")
    groups = []
    with transaction.atomic():
        # YouTube events are ready immediately; otherwise there has to be
        # a Vid.ly video to check on.
        if 'youtube' in event.template.name.lower():
            event.status = Event.STATUS_SCHEDULED
        else:
            tag = event.template_environment['tag']
            submission = None
            qs = VidlySubmission.objects.filter(event=event, tag=tag)
            # Most recent submission only.
            for each in qs.order_by('-submission_time'):
                submission = each
                break
            assert submission, "Event has no vidly submission"
            results = vidly.query(tag).get(tag, {})
            # Let's check the privacy/tokenization of the video.
            # What matters (source of truth) is the event's privacy state.
            if event.privacy != Event.PRIVACY_PUBLIC and results:
                # make sure the submission for the video IS token protected
                if not submission.token_protection:
                    submission.token_protection = True
                    submission.save()
                if results['Private'] == 'false':
                    # We can only do this if the video has been successfully
                    # transcoded.
                    if results['Status'] == 'Finished':
                        vidly.update_media_protection(
                            tag,
                            True
                        )
            if results.get('Status') == 'Finished':
                event.status = Event.STATUS_SCHEDULED
                # If it's definitely finished, it means we managed to ask
                # Vid.ly this question before Vid.ly had a chance to ping
                # us on the webhook. Might as well set it now.
                if not event.archive_time:
                    event.archive_time = timezone.now()
            else:
                # vidly hasn't finished processing it yet
                event.status = Event.STATUS_PENDING
        event.save()
        if not event.picture and not event.placeholder_img:
            # assign the default placeholder picture if there is one
            try:
                event.picture = Picture.objects.get(default_placeholder=True)
                event.save()
            except Picture.DoesNotExist:  # pragma: no cover
                pass
        if not event.channels.all():
            # forcibly put it in the default channel(s)
            for channel in Channel.objects.filter(default=True):
                event.channels.add(channel)
        if not Discussion.objects.filter(event=event):
            discussion = Discussion.objects.create(
                event=event,
                enabled=True,
                notify_all=True
            )
            discussion.moderators.add(event.creator)
        if event.privacy == Event.PRIVACY_PUBLIC:
            # Deduplicate groups across all topics before creating approvals.
            for topic in event.topics.all():
                for group in topic.groups.all():
                    if group not in groups:
                        groups.append(group)
            for group in groups:
                Approval.objects.create(event=event, group=group)
    # Emails are sent AFTER the transaction commits so reviewers never get
    # notified about state that was rolled back.
    for group in groups:
        sending.email_about_approval_requested(
            event,
            group,
            request
        )
    return True
@never_cache
@login_required
@json_view
def your_events(request):
    """List the current user's initiated (not-yet-published) events,
    adopting any orphaned video uploads into fresh events first.
    Returns ``{'events': [...]}`` with upload/thumbnail/picture-count info
    per event.
    """
    # If you have some uploads that are lingering but not associated
    # with an event, we might want to create empty events for them
    # now.
    lingering_uploads = Upload.objects.filter(
        mime_type__startswith='video/',
        user=request.user,
        event__isnull=True,
        size__gt=0
    )
    with transaction.atomic():
        for upload in lingering_uploads:
            event = Event.objects.create(
                status=Event.STATUS_INITIATED,
                creator=upload.user,
                upload=upload,
                start_time=upload.created,
                privacy=Event.PRIVACY_PUBLIC,
                created=upload.created
            )
            # event.channels.add(default_channel)
            # We'll pretend the event was created at the time the
            # video was uploaded.
            # Doing this after the create() is necessary because the
            # model uses the auto_now_add=True
            event.created = upload.created
            event.save()
            upload.event = event
            upload.save()
    events = (
        Event.objects.filter(
            creator=request.user,
            status=Event.STATUS_INITIATED,
        )
        .filter(
            Q(upload__isnull=False) | Q(template__name__icontains='YouTube')
        )
        .select_related('upload', 'picture')
        .order_by('-created')
    )
    # Picture counts for all listed events in a single aggregate query.
    all_possible_pictures = (
        Picture.objects
        .filter(event__in=events)
        .values('event_id')
        .annotate(Count('event'))
    )
    pictures_count = {}
    for each in all_possible_pictures:
        pictures_count[each['event_id']] = each['event__count']
    serialized = []
    for event in events:
        upload = event.upload
        if upload:
            upload = {
                'size': upload.size,
                'mime_type': upload.mime_type
            }
        thumbnail = None
        if event.picture or event.placeholder_img:
            geometry = '160x90'
            crop = 'center'
            # Prefer the chosen Picture over the placeholder image.
            if event.picture:
                thumb = get_thumbnail(
                    event.picture.file, geometry, crop=crop
                )
            else:
                thumb = get_thumbnail(
                    event.placeholder_img, geometry, crop=crop
                )
            thumbnail = {
                'url': thumb.url,
                'width': thumb.width,
                'height': thumb.height,
            }
        serialized.append({
            'id': event.id,
            'title': event.title,
            'upload': upload,
            'picture': thumbnail,
            'pictures': pictures_count.get(event.id, 0),
            'modified': event.modified,
        })
    return {'events': serialized}
@require_POST
@login_required
@must_be_your_event
@json_view
def event_delete(request, event):
    """Soft-delete the event by flipping its status to REMOVED.
    The row itself is kept."""
    with transaction.atomic():
        event.status = Event.STATUS_REMOVED
        event.save()
    return True
@transaction.atomic
def unsubscribe(request, identifier):
    """Two-step opt-out from event emails.

    First GET: cache ``identifier -> request.user.id`` for 60s and render
    the confirmation page. The follow-up POST looks the user up from that
    cache entry and sets ``optout_event_emails`` on their profile.
    """
    context = {}
    cache_key = 'unsubscribe-%s' % identifier
    user_id = cache.get(cache_key)
    if user_id:
        user = get_object_or_404(User, id=user_id)
    else:
        user = None
        # Remember who is unsubscribing so the POST can find them.
        cache.set(cache_key, request.user.id, 60)
    context['user'] = user
    if request.method == 'POST':
        if not user:
            # Cache entry expired (or POST without the preceding GET).
            return http.HttpResponseBadRequest('No user')
        user_profile = get_profile_safely(user, create_if_necessary=True)
        user_profile.optout_event_emails = True
        user_profile.save()
        cache.delete(cache_key)
        return redirect('new:unsubscribed')
    return render(request, 'new/unsubscribe.html', context)
def unsubscribed(request):
    """Static confirmation page shown after a successful unsubscribe."""
    return render(request, 'new/unsubscribed.html', {})
@require_POST
@login_required
@must_be_your_event
@json_view
@transaction.atomic
def event_pictures_rotate(request, event):
    """Rotate ALL of the event's screen-capture pictures 90 degrees.

    Optional JSON body ``{"direction": "left"|"right"}``; anything other
    than "left" rotates right. Returns True.
    """
    try:
        post = request.body and json.loads(request.body) or {}
    except ValueError:
        return http.HttpResponseBadRequest('invalid JSON body')
    direction = post.get('direction', 'left')
    for picture in Picture.objects.filter(event=event):
        img = Image.open(picture.file.path)
        format = picture.file.name.lower().endswith('.png') and 'png' or 'jpeg'
        # PIL rotates counter-clockwise, so "left" is +90 and "right" 270.
        img = img.rotate(direction == 'left' and 90 or 270, expand=True)
        # NOTE(review): StringIO holding binary image data works on
        # Python 2 only; under Python 3 this would need io.BytesIO.
        f = StringIO()
        try:
            img.save(f, format=format)
            # Saving under the same name writes the rotated bytes through
            # the storage backend.
            picture.file.save(
                picture.file.name,
                ContentFile(f.getvalue())
            )
        finally:
            f.close()
    return True
@login_required
@json_view
def youtube_extract(request):
    """Look up YouTube metadata for the URL given in the ``url`` GET
    parameter; errors come back as ``{'error': ...}`` payloads."""
    url = request.GET.get('url')
    if url:
        try:
            return youtube.extract_metadata_by_url(url)
        except ValueError:
            return {'error': 'Video ID not found by that URL'}
        except youtube.VideoNotFound as ex:
            return {'error': 'No video by that ID could be found (%s)' % ex}
    return http.HttpResponseBadRequest('No url')
@require_POST
@login_required
@json_view
@transaction.atomic
def youtube_create(request):
    """Create an event from a YouTube video id sent as JSON ``{"id": ...}``.

    Fetches the video's metadata, creates the event with the (possibly
    newly created) YouTube template, downloads thumbnails, copies tags,
    and files it under a per-YouTube-channel Channel plus the defaults.
    Returns the serialized event.
    """
    try:
        body = json.loads(request.body)
    except ValueError:
        # it wasn't sent as a JSON request body
        return http.HttpResponseBadRequest('Missing JSON request body')
    if not body.get('id'):
        return http.HttpResponseBadRequest('Missing id')
    # extract all the details again
    data = youtube.extract_metadata_by_id(body['id'])
    # for/else: grab any existing YouTube template, else create one.
    for template in Template.objects.filter(name__icontains='YouTube'):
        break
    else:
        template = Template.objects.create(
            name='YouTube',
            content=(
                '<iframe width="896" height="504" src="https://www.youtube-noc'
                'ookie.com/embed/{{ id }}?rel=0&showinfo=0" '
                'frameborder="0" allowfullscreen></iframe>'
            )
        )
    youtube_url = 'https://www.youtube.com/watch?v=' + data['id']
    additional_links = u'On YouTube™ {}'.format(youtube_url)
    event = Event.objects.create(
        title=data['title'],
        description=data['description'],
        template=template,
        template_environment={'id': data['id']},
        creator=request.user,
        status=Event.STATUS_INITIATED,
        privacy=Event.PRIVACY_PUBLIC,
        start_time=timezone.now(),
        additional_links=additional_links,
        archive_time=timezone.now(),
    )
    # Download the video thumbnail into the placeholder image field.
    img_temp = NamedTemporaryFile(delete=True)
    img_temp.write(requests.get(data['thumbnail_url']).content)
    img_temp.flush()
    event.placeholder_img.save(
        os.path.basename(data['thumbnail_url']),
        File(img_temp)
    )
    # Reuse existing tags case-insensitively; create the rest.
    for tag in data['tags']:
        try:
            event.tags.add(Tag.objects.get(name__iexact=tag))
        except Tag.DoesNotExist:
            event.tags.add(Tag.objects.create(name=tag))
    # first get the parent of all YouTube channels
    youtube_parent, __ = Channel.objects.get_or_create(
        name=u'YouTube™',
        slug='youtube',
        never_show=True,
    )
    try:
        channel = Channel.objects.get(
            parent=youtube_parent,
            youtube_id=data['channel']['id'],
            name=data['channel']['title'],
        )
    except Channel.DoesNotExist:
        channel = Channel.objects.create(
            parent=youtube_parent,
            youtube_id=data['channel']['id'],
            name=data['channel']['title'],
            slug=slugify(data['channel']['title'])
        )
        # Only a brand-new channel needs its avatar downloaded.
        if data['channel']['thumbnail_url']:
            img_temp = NamedTemporaryFile(delete=True)
            img_temp.write(
                requests.get(data['channel']['thumbnail_url']).content
            )
            img_temp.flush()
            channel.image.save(
                os.path.basename(data['channel']['thumbnail_url']),
                File(img_temp)
            )
    event.channels.add(channel)
    # also put it in the other default channels
    for channel in Channel.objects.filter(default=True):
        event.channels.add(channel)
    return serialize_event(event)
| [
"mail@peterbe.com"
] | mail@peterbe.com |
4d3e7de00e12814dd0c68c0dde2ce72c7722b95b | 6aee7149a16a71389e0916de1854f4edea026c2b | /test/download.py | 85081f5885c05233a84be7bdecbb5551a69b5b5b | [
"BSD-2-Clause"
] | permissive | orionzhou/maize | d5e3c66af285d5d3a490fe09e85f840bd033240a | 605c895c397c9f614955a6df8eed0edc553f543d | refs/heads/main | 2022-12-27T02:08:26.747564 | 2022-11-24T07:57:30 | 2022-11-24T07:57:30 | 11,537,821 | 5 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,657 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import os.path as op
import wget
# Python 2/3 compatibility shims: expose `int_types`, `basestring` and
# `urlopen` with the same meaning on both major versions.
if sys.version_info[0] < 3:
    # `urllib` is not imported at the top of the module, so without this
    # local import the Python 2 branch raised NameError at import time.
    import urllib
    int_types = (int, long)
    urlopen = urllib.urlopen
else:
    int_types = (int,)
    basestring = str
    from urllib.request import urlopen
def download_file(src_ftp, dst_file, prt=sys.stdout, loading_bar=True):
    """Download specified file if necessary.

    No-op when *dst_file* already exists. If the source ends in ``.gz``
    but the destination does not, the download is fetched as
    ``dst_file.gz`` and gunzipped in place. Exits the process on IOError.
    """
    if os.path.isfile(dst_file):
        return
    do_gunzip = src_ftp[-3:] == '.gz' and dst_file[-3:] != '.gz'
    dst_wget = "{DST}.gz".format(DST=dst_file) if do_gunzip else dst_file
    # Write to stderr, not stdout so this message will be seen when running nosetests
    wget_msg = "wget.download({SRC} out={DST})\n".format(SRC=src_ftp, DST=dst_wget)
    sys.stderr.write("  {WGET}".format(WGET=wget_msg))
    if loading_bar:
        # True means "use wget's adaptive progress bar"; any falsy value
        # is passed straight through and disables the bar.
        loading_bar = wget.bar_adaptive
    try:
        wget.download(src_ftp, out=dst_wget, bar=loading_bar)
        if do_gunzip:
            if prt is not None:
                prt.write("  gunzip {FILE}\n".format(FILE=dst_wget))
            gzip_open_to(dst_wget, dst_file)
    except IOError as errmsg:
        import traceback
        traceback.print_exc()
        sys.stderr.write("**FATAL cmd: {WGET}".format(WGET=wget_msg))
        sys.stderr.write("**FATAL msg: {ERR}".format(ERR=str(errmsg)))
        sys.exit(1)
def gzip_open_to(fin_gz, fout):
    """Unzip a file.gz file.

    Decompresses *fin_gz* into *fout* and removes the original ``.gz``
    file once the decompressed copy is confirmed on disk.
    """
    # Local import: the module header never imports gzip, so the original
    # code raised NameError on the first call.
    import gzip
    with gzip.open(fin_gz, 'rb') as zstrm:
        with open(fout, 'wb') as ostrm:
            ostrm.write(zstrm.read())
    # NOTE: assert is stripped under `python -O`; kept for interface
    # compatibility with existing callers that expect AssertionError.
    assert os.path.isfile(fout), "COULD NOT GUNZIP({G}) TO FILE({F})".format(G=fin_gz, F=fout)
    os.remove(fin_gz)
| [
"zhoupenggeni@gmail.com"
] | zhoupenggeni@gmail.com |
3f22c1357f42e66a1fb44272dced80b7843cca37 | a16d190c16781bf4fde5960673d2897e469e0174 | /flink-ai-flow/lib/airflow/airflow/providers/cncf/kubernetes/hooks/kubernetes.py | cf27713a991fd503d74c0d23c9dbaf7409dde42a | [
"Apache-2.0",
"BSD-3-Clause",
"Python-2.0",
"MIT",
"BSD-2-Clause"
] | permissive | bgeng777/flink-ai-extended | 742a1bb80d07c090c3ecb960394422896b5899d7 | f83b5d661240c45c767002767c0cbddc847fff81 | refs/heads/master | 2023-08-15T00:32:40.260537 | 2021-07-27T04:20:53 | 2021-07-27T04:20:53 | 349,360,984 | 1 | 2 | Apache-2.0 | 2021-05-20T03:05:56 | 2021-03-19T09:03:50 | Python | UTF-8 | Python | false | false | 9,757 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import tempfile
from typing import Any, Dict, Generator, Optional, Tuple, Union
import yaml
from cached_property import cached_property
from kubernetes import client, config, watch
from airflow.exceptions import AirflowException
from airflow.hooks.base import BaseHook
def _load_body_to_dict(body):
    """Parse a YAML resource definition into a dict, wrapping YAML parse
    errors in an AirflowException."""
    try:
        return yaml.safe_load(body)
    except yaml.YAMLError as e:
        raise AirflowException("Exception when loading resource definition: %s\n" % e)
class KubernetesHook(BaseHook):
    """
    Creates Kubernetes API connection.

    - use in-cluster configuration by setting ``extra__kubernetes__in_cluster`` in the connection
    - use a custom config file by providing its path in ``extra__kubernetes__kube_config_path``
    - use a custom configuration by providing the content of a kubeconfig file via
      ``extra__kubernetes__kube_config`` in the connection
    - use the default kubeconfig by providing no extras

    This hook checks for the configuration options in the above order. The
    options are mutually exclusive; providing more than one raises.

    .. seealso::
        For more information about Kubernetes connection:
        :doc:`/connections/kubernetes`

    :param conn_id: the connection to Kubernetes cluster
    :type conn_id: str
    """

    # Attributes the Airflow connection framework uses to register this hook.
    conn_name_attr = 'kubernetes_conn_id'
    default_conn_name = 'kubernetes_default'
    conn_type = 'kubernetes'
    hook_name = 'Kubernetes Cluster Connection'
    @staticmethod
    def get_connection_form_widgets() -> Dict[str, Any]:
        """Returns connection widgets to add to connection form"""
        # Imported lazily: the webserver-only dependencies are not needed
        # when the hook runs inside a task.
        from flask_appbuilder.fieldwidgets import BS3TextFieldWidget
        from flask_babel import lazy_gettext
        from wtforms import BooleanField, StringField
        return {
            "extra__kubernetes__in_cluster": BooleanField(lazy_gettext('In cluster configuration')),
            "extra__kubernetes__kube_config_path": StringField(
                lazy_gettext('Kube config path'), widget=BS3TextFieldWidget()
            ),
            "extra__kubernetes__kube_config": StringField(
                lazy_gettext('Kube config (JSON format)'), widget=BS3TextFieldWidget()
            ),
            "extra__kubernetes__namespace": StringField(
                lazy_gettext('Namespace'), widget=BS3TextFieldWidget()
            ),
        }
    @staticmethod
    def get_ui_field_behaviour() -> Dict:
        """Returns custom field behaviour"""
        return {
            "hidden_fields": ['host', 'schema', 'login', 'password', 'port', 'extra'],
            "relabeling": {},
        }
    def __init__(
        self, conn_id: str = default_conn_name, client_configuration: Optional[client.Configuration] = None
    ) -> None:
        super().__init__()
        self.conn_id = conn_id
        self.client_configuration = client_configuration
    def get_conn(self) -> Any:
        """Returns kubernetes api session for use with requests"""
        connection = self.get_connection(self.conn_id)
        extras = connection.extra_dejson
        in_cluster = extras.get("extra__kubernetes__in_cluster")
        kubeconfig_path = extras.get("extra__kubernetes__kube_config_path")
        kubeconfig = extras.get("extra__kubernetes__kube_config")
        num_selected_configuration = len([o for o in [in_cluster, kubeconfig, kubeconfig_path] if o])
        if num_selected_configuration > 1:
            raise AirflowException(
                "Invalid connection configuration. Options extra__kubernetes__kube_config_path, "
                "extra__kubernetes__kube_config, extra__kubernetes__in_cluster are mutually exclusive. "
                "You can only use one option at a time."
            )
        if in_cluster:
            self.log.debug("loading kube_config from: in_cluster configuration")
            config.load_incluster_config()
            return client.ApiClient()
        if kubeconfig_path is not None:
            self.log.debug("loading kube_config from: %s", kubeconfig_path)
            config.load_kube_config(
                config_file=kubeconfig_path, client_configuration=self.client_configuration
            )
            return client.ApiClient()
        if kubeconfig is not None:
            # The kubeconfig content is written to a temp file because the
            # kubernetes client only loads configuration from a file path.
            with tempfile.NamedTemporaryFile() as temp_config:
                self.log.debug("loading kube_config from: connection kube_config")
                temp_config.write(kubeconfig.encode())
                temp_config.flush()
                config.load_kube_config(
                    config_file=temp_config.name, client_configuration=self.client_configuration
                )
            return client.ApiClient()
        # Fall back to the default kubeconfig resolution (~/.kube/config etc).
        self.log.debug("loading kube_config from: default file")
        config.load_kube_config(client_configuration=self.client_configuration)
        return client.ApiClient()
    @cached_property
    def api_client(self) -> Any:
        """Cached Kubernetes API client"""
        return self.get_conn()
    def create_custom_object(
        self, group: str, version: str, plural: str, body: Union[str, dict], namespace: Optional[str] = None
    ):
        """
        Creates a custom resource definition object in Kubernetes.

        :param group: api group
        :type group: str
        :param version: api version
        :type version: str
        :param plural: api plural
        :type plural: str
        :param body: crd object definition, as a dict or a YAML string
        :type body: Union[str, dict]
        :param namespace: kubernetes namespace (defaults to the connection's)
        :type namespace: str
        """
        api = client.CustomObjectsApi(self.api_client)
        if namespace is None:
            namespace = self.get_namespace()
        if isinstance(body, str):
            body = _load_body_to_dict(body)
        try:
            response = api.create_namespaced_custom_object(
                group=group, version=version, namespace=namespace, plural=plural, body=body
            )
            self.log.debug("Response: %s", response)
            return response
        except client.rest.ApiException as e:
            raise AirflowException("Exception when calling -> create_custom_object: %s\n" % e)
    def get_custom_object(
        self, group: str, version: str, plural: str, name: str, namespace: Optional[str] = None
    ):
        """
        Get a custom resource definition object from Kubernetes.

        :param group: api group
        :type group: str
        :param version: api version
        :type version: str
        :param plural: api plural
        :type plural: str
        :param name: crd object name
        :type name: str
        :param namespace: kubernetes namespace (defaults to the connection's)
        :type namespace: str
        """
        api = client.CustomObjectsApi(self.api_client)
        if namespace is None:
            namespace = self.get_namespace()
        try:
            response = api.get_namespaced_custom_object(
                group=group, version=version, namespace=namespace, plural=plural, name=name
            )
            return response
        except client.rest.ApiException as e:
            raise AirflowException("Exception when calling -> get_custom_object: %s\n" % e)
    def get_namespace(self) -> str:
        """Returns the namespace defined in the connection (default: "default")."""
        connection = self.get_connection(self.conn_id)
        extras = connection.extra_dejson
        namespace = extras.get("extra__kubernetes__namespace", "default")
        return namespace
    def get_pod_log_stream(
        self,
        pod_name: str,
        container: Optional[str] = "",
        namespace: Optional[str] = None,
    ) -> Tuple[watch.Watch, Generator[str, None, None]]:
        """
        Retrieves a log stream for a container in a kubernetes pod.

        :param pod_name: pod name
        :type pod_name: str
        :param container: container name
        :type container: str
        :param namespace: kubernetes namespace (defaults to the connection's)
        :type namespace: str
        :return: the watcher (so callers can stop it) and the line generator
        """
        api = client.CoreV1Api(self.api_client)
        watcher = watch.Watch()
        return (
            watcher,
            watcher.stream(
                api.read_namespaced_pod_log,
                name=pod_name,
                container=container,
                namespace=namespace if namespace else self.get_namespace(),
            ),
        )
    def get_pod_logs(
        self,
        pod_name: str,
        container: Optional[str] = "",
        namespace: Optional[str] = None,
    ):
        """
        Retrieves a container's log from the specified pod.

        :param pod_name: pod name
        :type pod_name: str
        :param container: container name
        :type container: str
        :param namespace: kubernetes namespace (defaults to the connection's)
        :type namespace: str
        """
        api = client.CoreV1Api(self.api_client)
        # _preload_content=False returns the raw response so callers can
        # stream/inspect the body themselves.
        return api.read_namespaced_pod_log(
            name=pod_name,
            container=container,
            _preload_content=False,
            namespace=namespace if namespace else self.get_namespace(),
        )
| [
"jiangxin.jiang@alibaba-inc.com"
] | jiangxin.jiang@alibaba-inc.com |
b99beec62cc60a55e6c46768b861059a9b5a6843 | 147648c6b25ecc33e82a36b36de6623df9340e62 | /examples/hacker_news_assets/hacker_news_assets_tests/test_sensors/test_slack_on_pipeline_failure_sensor.py | 285174018e5397b10ae037b9f992ed283ee06718 | [
"Apache-2.0"
] | permissive | asdlei99/dagster | be81009ff00dbad02f7cec974650388a5cc2af59 | bbfd1a22e85a10881d7dbbcc888957a487f0c3e5 | refs/heads/master | 2023-08-28T07:18:23.838943 | 2021-11-08T23:09:07 | 2021-11-08T23:09:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 790 | py | from dagster import repository
from hacker_news_assets.sensors.slack_on_pipeline_failure_sensor import make_pipeline_failure_sensor
def test_slack_on_pipeline_failure_def():
@repository
def my_repo_local():
return [
make_pipeline_failure_sensor("localhost"),
]
@repository
def my_repo_staging():
return [
make_pipeline_failure_sensor("https://dev.something.com"),
]
@repository
def my_repo_prod():
return [
make_pipeline_failure_sensor("https://prod.something.com"),
]
assert my_repo_local.has_sensor_def("slack_on_pipeline_failure")
assert my_repo_staging.has_sensor_def("slack_on_pipeline_failure")
assert my_repo_prod.has_sensor_def("slack_on_pipeline_failure")
| [
"noreply@github.com"
] | asdlei99.noreply@github.com |
c4c5dc4c7c52ced3d2aac3855ae1ca3b733e6aec | 130a98632d2ab4c171503b79e455b7aa27a1dda4 | /models/research/delf/delf/python/feature_extractor.py | 9545337f18724520e260af4e36ffa6ee35bce4c6 | [
"MIT",
"Apache-2.0"
] | permissive | aboerzel/German_License_Plate_Recognition | d7fc0314295f5cf0c9d7ae9c93a795e3ef1c5787 | 6fc53292b1d3ce3c0340ce724c2c11c77e663d27 | refs/heads/master | 2023-01-30T18:08:37.339542 | 2023-01-07T07:41:36 | 2023-01-07T07:41:36 | 245,586,430 | 34 | 12 | MIT | 2023-01-07T07:41:37 | 2020-03-07T07:16:51 | Python | UTF-8 | Python | false | false | 6,181 | py | # Copyright 2017 The TensorFlow Authors All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""DELF feature extractor."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
def NormalizePixelValues(image,
pixel_value_offset=128.0,
pixel_value_scale=128.0):
"""Normalize image pixel values.
Args:
image: a uint8 tensor.
pixel_value_offset: a Python float, offset for normalizing pixel values.
pixel_value_scale: a Python float, scale for normalizing pixel values.
Returns:
image: a float32 tensor of the same shape as the input image.
"""
image = tf.cast(image, dtype=tf.float32)
image = tf.truediv(tf.subtract(image, pixel_value_offset), pixel_value_scale)
return image
def CalculateReceptiveBoxes(height, width, rf, stride, padding):
"""Calculate receptive boxes for each feature point.
Args:
height: The height of feature map.
width: The width of feature map.
rf: The receptive field size.
stride: The effective stride between two adjacent feature points.
padding: The effective padding size.
Returns:
rf_boxes: [N, 4] receptive boxes tensor. Here N equals to height x width.
Each box is represented by [ymin, xmin, ymax, xmax].
"""
x, y = tf.meshgrid(tf.range(width), tf.range(height))
coordinates = tf.reshape(tf.stack([y, x], axis=2), [-1, 2])
# [y,x,y,x]
point_boxes = tf.cast(
tf.concat([coordinates, coordinates], 1), dtype=tf.float32)
bias = [-padding, -padding, -padding + rf - 1, -padding + rf - 1]
rf_boxes = stride * point_boxes + bias
return rf_boxes
def CalculateKeypointCenters(boxes):
"""Helper function to compute feature centers, from RF boxes.
Args:
boxes: [N, 4] float tensor.
Returns:
centers: [N, 2] float tensor.
"""
return tf.divide(
tf.add(
tf.gather(boxes, [0, 1], axis=1), tf.gather(boxes, [2, 3], axis=1)),
2.0)
def ApplyPcaAndWhitening(data,
pca_matrix,
pca_mean,
output_dim,
use_whitening=False,
pca_variances=None):
"""Applies PCA/whitening to data.
Args:
data: [N, dim] float tensor containing data which undergoes PCA/whitening.
pca_matrix: [dim, dim] float tensor PCA matrix, row-major.
pca_mean: [dim] float tensor, mean to subtract before projection.
output_dim: Number of dimensions to use in output data, of type int.
use_whitening: Whether whitening is to be used.
pca_variances: [dim] float tensor containing PCA variances. Only used if
use_whitening is True.
Returns:
output: [N, output_dim] float tensor with output of PCA/whitening operation.
"""
output = tf.matmul(
tf.subtract(data, pca_mean),
tf.slice(pca_matrix, [0, 0], [output_dim, -1]),
transpose_b=True,
name='pca_matmul')
# Apply whitening if desired.
if use_whitening:
output = tf.divide(
output,
tf.sqrt(tf.slice(pca_variances, [0], [output_dim])),
name='whitening')
return output
def PostProcessDescriptors(descriptors, use_pca, pca_parameters=None):
"""Post-process descriptors.
Args:
descriptors: [N, input_dim] float tensor.
use_pca: Whether to use PCA.
pca_parameters: Only used if `use_pca` is True. Dict containing PCA
parameter tensors, with keys 'mean', 'matrix', 'dim', 'use_whitening',
'variances'.
Returns:
final_descriptors: [N, output_dim] float tensor with descriptors after
normalization and (possibly) PCA/whitening.
"""
# L2-normalize, and if desired apply PCA (followed by L2-normalization).
final_descriptors = tf.nn.l2_normalize(
descriptors, axis=1, name='l2_normalization')
if use_pca:
# Apply PCA, and whitening if desired.
final_descriptors = ApplyPcaAndWhitening(final_descriptors,
pca_parameters['matrix'],
pca_parameters['mean'],
pca_parameters['dim'],
pca_parameters['use_whitening'],
pca_parameters['variances'])
# Re-normalize.
final_descriptors = tf.nn.l2_normalize(
final_descriptors, axis=1, name='pca_l2_normalization')
return final_descriptors
def DelfFeaturePostProcessing(boxes, descriptors, use_pca, pca_parameters=None):
"""Extract DELF features from input image.
Args:
boxes: [N, 4] float tensor which denotes the selected receptive box. N is
the number of final feature points which pass through keypoint selection
and NMS steps.
descriptors: [N, input_dim] float tensor.
use_pca: Whether to use PCA.
pca_parameters: Only used if `use_pca` is True. Dict containing PCA
parameter tensors, with keys 'mean', 'matrix', 'dim', 'use_whitening',
'variances'.
Returns:
locations: [N, 2] float tensor which denotes the selected keypoint
locations.
final_descriptors: [N, output_dim] float tensor with DELF descriptors after
normalization and (possibly) PCA/whitening.
"""
# Get center of descriptor boxes, corresponding to feature locations.
locations = CalculateKeypointCenters(boxes)
final_descriptors = PostProcessDescriptors(descriptors, use_pca,
pca_parameters)
return locations, final_descriptors
| [
"andreas.boerzel@gmx.de"
] | andreas.boerzel@gmx.de |
c49aed0557e05e299c3be8527df443644b2e2241 | a8a2491a21ee53066f42ed3cd8c1d5169858790b | /pizzaria/pizzaria/entrega/fixtures/db-update.py | 6bfbb3d7608012395bcf9d9712af49c49baaeb69 | [] | no_license | huogerac/acpy_pizzaria | 80c7236f7c9d8bf58bcf4a9f45e2bfeacd8e4d38 | eec5f88478a424ed3b193c0a6ed4f31d88b1d0e5 | refs/heads/master | 2016-09-05T18:07:14.555495 | 2012-04-13T02:04:53 | 2012-04-13T02:04:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,371 | py | #!/usr/bin/env python
# coding: utf-8
import json
import argparse
import os
"""
atualiza estrutura de um arquivo json
>>> json = [{'pk': 1, 'model': 'entrega.cliente', 'fields': { 'nome': 'Juca' } }]
>>> updater = JsonUpdate( json )
>>> updater.json()
[{'pk': 1, 'model': 'entrega.cliente', 'fields': {'nome': 'Juca'}}]
>>> updater.addNewField(('ramal', ''))
>>> updater.updateJson()
>>> updater.json()
[{'pk': 1, 'model': 'entrega.cliente', 'fields': {'ramal': '', 'nome': 'Juca'}}]
>>> updater.addNewField(('logradouro', ''))
>>> updater.addNewField(('numero', 0))
>>> updater.addNewField(('complemento', ''))
>>> updater.addNewField(('obs', ''))
>>> updater.updateJson()
>>> updater.json()
[{'pk': 1, 'model': 'entrega.cliente', 'fields': {'ramal': '', 'complemento': '', 'nome': 'Juca', 'logradouro': '', 'numero': 0, 'obs': ''}}]
> >> updater.save_newfile('clientes_new.json')
"""
class DbUpdate(object):
def __init__(self, filename):
self.filename = filename
def read_file(self):
with open(os.path.join(os.path.dirname(__file__), self.filename)) as f:
self.clientes = json.load(f)
def addFields(self, newfield):
self.fields.append(newfield)
def update_content(self):
for cliente in self.clientes:
fields = cliente["fields"]
for newfield, value in self.fields:
fields[newfield] = value
def show_content(self):
print self.clientes
def save_newfile(self, newfile):
newcontent = json.dumps(self.clientes, sort_keys=False, indent=4)
jsonfile = open(newfile, "w")
jsonfile.write(newcontent)
jsonfile.close
class JsonUpdate(object):
def __init__(self, json):
self._json = json
self._fields = []
def addNewField(self, newfield):
self._fields.append(newfield)
def fields(self, list_fields):
self._fields = list_fields
def updateJson(self):
for item in self._json:
fields = item["fields"]
for newfield, value in self._fields:
fields[newfield] = value
def json(self):
return self._json
def update_json(filename, fields_filename):
print '-'*60
print 'atualizando ', filename
print 'atualizando ', fields_filename
json_content = []
with open(os.path.join(os.path.dirname(__file__), filename)) as f1:
json_content = json.load(f1)
json_fields = []
with open(os.path.join(os.path.dirname(__file__), fields_filename)) as f2:
json_fields = json.load(f2)
print json_content
print '------------'
print json_fields
parser = argparse.ArgumentParser(description='json update')
parser.add_argument('filename', help='file name ex: customer.json')
parser.add_argument('fields', help='file name with new fields content like: ["campo1": "valor", "campo2": "valor"]')
args = parser.parse_args()
update_json(args.filename, args.fields)
converter dict para list:
dict = {}
dict['Capital']="London"
dict['Food']="Fish&Chips"
dict['2012']="Olympics"
#lists
temp = []
dictList = []
#My attempt:
for key, value in dict.iteritems():
temp = [key,value]
dictlist.append(temp)
| [
"huogerac@gmail.com"
] | huogerac@gmail.com |
9aa34dbf85f69d40cc103e7934a8361b0e268c80 | 39e1e256acae3fe9be4434024d42b9bb47bdd02f | /analysis/submissions/34b1ef17e6625ba2350f6f1c169591a1_task7-2_1595553510/task7-2/main.py | 57e71b6a83e9ecb3955d6e56748da540dc53cbbf | [] | no_license | neulab/tranx-study | 9fb67b9a2181f0b362e4f97316c502eee4539b19 | e2a7089689f7f95e773e19c8f19513abe4fb8b9b | refs/heads/master | 2023-06-14T04:46:01.010892 | 2021-07-08T09:29:05 | 2021-07-08T09:29:05 | 250,357,553 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,872 | py | # Example code, write your program here
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib
import numpy as np
def autolabel(rects, i):
for rect in rects:
h = rect.get_height()
axs[i].text(rect.get_x()+rect.get_width()/2., h+1, '%.2f' % h, ha='center', va='bottom')
df = pd.read_csv("StudentsPerformance.csv")
fig, axs = plt.subplots(1, 3, figsize=(20, 6))
ind = np.arange(5)
width = 0.35
male = df.loc[df['gender'] == 'male'].groupby("race/ethnicity")
female = df.loc[df['gender'] == 'female'].groupby("race/ethnicity")
rects1 = axs[0].bar(ind - width/2, male['math score'].mean(), width, label='Male')
rects2 = axs[0].bar(ind + width/2, female['math score'].mean(), width, label='Female')
axs[0].set_xlabel('Race/Ethnicity')
axs[0].set_ylabel('Average Scores')
axs[0].set_title('Math')
axs[0].set_xticks(ind)
axs[0].set_xticklabels(('A', 'B', 'C', 'D', 'E'))
axs[0].legend()
autolabel(rects1, 0)
autolabel(rects2, 0)
rects1 = axs[1].bar(ind - width/2, male['reading score'].mean(), width, label='Male')
rects2 = axs[1].bar(ind + width/2, female['reading score'].mean(), width, label='Female')
axs[1].set_xlabel('Race/Ethnicity')
axs[1].set_ylabel('Average Scores')
axs[1].set_title('Reading')
axs[1].set_xticks(ind)
axs[1].set_xticklabels(('A', 'B', 'C', 'D', 'E'))
axs[1].legend()
autolabel(rects1, 1)
autolabel(rects2, 1)
rects1 = axs[2].bar(ind - width/2, male['writing score'].mean(), width, label='Male')
rects2 = axs[2].bar(ind + width/2, female['writing score'].mean(), width, label='Female')
axs[2].set_xlabel('Race/Ethnicity')
axs[2].set_ylabel('Average Scores')
axs[2].set_title('Writing')
axs[2].set_xticks(ind)
axs[2].set_xticklabels(('A', 'B', 'C', 'D', 'E'))
axs[2].legend()
autolabel(rects1, 2)
autolabel(rects2, 2)
fig.suptitle("Scores by race/ethnicity and gender")
plt.savefig("output/grouped_scores.png")
| [
"frankxu2004@gmail.com"
] | frankxu2004@gmail.com |
496b5d2ed0eedb526dbcbefe24d68668eb530bb8 | 757132ddc333fdabf4b183ac59a232228f9d3f44 | /dict_os_play.py | a9fbc765161e07770a85ba4a0d51353d5fd57eca | [] | no_license | rjcmarkelz/RNAseq_rename_script | d9f6ebebbf80c6b36dd554d2bc5a7e4d731c586e | ebb40f61cbdcefc321aae9f7b2f4ffa489e03705 | refs/heads/master | 2021-01-19T22:13:50.149084 | 2013-04-27T05:57:02 | 2013-04-27T05:57:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 690 | py | #!/usr/bin/python
import os
import re
RN_Dict = {
'RIL_360' :'RIL_1.rn',
'RIL_73' :'RIL_1.rn',
'RIL_259' :'RIL_103.rn',
'RIL_251' :'RIL_104.rn',
'RIL_113' :'RIL_113.rn',
'RIL_265' :'RIL_113.rn',
}
print RN_Dict
# Open a file
#cody_2 path
#path = "/Users/Cody_2/git.repos/RILS/Block1/project.maloof/"
#cody_1 path
path = /Users/Cody/Documents/Maloof Lab/My Brassica/Block2/project.maloof/
pathfiles = os.listdir(path)
for file in pathfiles:
source_name = file
for key in RN_Dict:
link_name = file.replace(key, RN_Dict[key])
print link_name
#os.symlink(source_name, link_name)
#print file
#print source_name
#print link_name
| [
"rjmarkelz@ucdavis.edu"
] | rjmarkelz@ucdavis.edu |
be3a8630c86bf4562819caacf591832785f84592 | fa93e53a9eee6cb476b8998d62067fce2fbcea13 | /devel/.private/pal_navigation_msgs/lib/python2.7/dist-packages/pal_navigation_msgs/msg/__init__.py | d75a255b3ed14cbd6bca1f6eef3c4d8666bc8d6a | [] | no_license | oyetripathi/ROS_conclusion_project | 2947ee2f575ddf05480dabc69cf8af3c2df53f73 | 01e71350437d57d8112b6cec298f89fc8291fb5f | refs/heads/master | 2023-06-30T00:38:29.711137 | 2021-08-05T09:17:54 | 2021-08-05T09:17:54 | 392,716,311 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,644 | py | from ._AvailableMaps import *
from ._Emergency import *
from ._EulerAngles import *
from ._EulerAnglesStamped import *
from ._GoToAction import *
from ._GoToActionFeedback import *
from ._GoToActionGoal import *
from ._GoToActionResult import *
from ._GoToFeedback import *
from ._GoToGoal import *
from ._GoToPOIAction import *
from ._GoToPOIActionFeedback import *
from ._GoToPOIActionGoal import *
from ._GoToPOIActionResult import *
from ._GoToPOIFeedback import *
from ._GoToPOIGoal import *
from ._GoToPOIResult import *
from ._GoToResult import *
from ._Highways import *
from ._JoyPriorityAction import *
from ._JoyPriorityActionFeedback import *
from ._JoyPriorityActionGoal import *
from ._JoyPriorityActionResult import *
from ._JoyPriorityFeedback import *
from ._JoyPriorityGoal import *
from ._JoyPriorityResult import *
from ._JoyTurboAction import *
from ._JoyTurboActionFeedback import *
from ._JoyTurboActionGoal import *
from ._JoyTurboActionResult import *
from ._JoyTurboFeedback import *
from ._JoyTurboGoal import *
from ._JoyTurboResult import *
from ._LaserImage import *
from ._MapConfiguration import *
from ._NavigationStatus import *
from ._NiceMapTransformation import *
from ._POI import *
from ._PolarReading import *
from ._PolarReadingScan import *
from ._ServiceStatus import *
from ._TabletPOI import *
from ._VisualLocDB import *
from ._VisualTrainingAction import *
from ._VisualTrainingActionFeedback import *
from ._VisualTrainingActionGoal import *
from ._VisualTrainingActionResult import *
from ._VisualTrainingFeedback import *
from ._VisualTrainingGoal import *
from ._VisualTrainingResult import *
| [
"sandeepan.ghosh.ece20@itbhu.ac.in"
] | sandeepan.ghosh.ece20@itbhu.ac.in |
9da351c36861948347a97f559f18e3c7d0507905 | 3292017df3ff6c7190d5c5a60ecf5f8936cb7b90 | /checkio/Elementary/Index Power/index_power.py | e293ee43d262aad903dab773ab170bca99681aeb | [
"MIT"
] | permissive | KenMercusLai/checkio | 1e9cdfe70ccaf5315db36391c4710533d99cf9aa | 5082ab0c6a7ae2d97963568a6f41589332e88029 | refs/heads/master | 2022-05-12T18:22:22.604531 | 2022-05-11T09:00:28 | 2022-05-11T09:00:28 | 22,260,056 | 39 | 22 | NOASSERTION | 2022-05-11T08:42:05 | 2014-07-25T14:40:06 | Python | UTF-8 | Python | false | false | 528 | py | def index_power(array, n):
# Find Nth power of the element with index N.
if n > len(array) - 1:
return -1
else:
return array[n] ** n
if __name__ == '__main__': # pragma: no cover
# These "asserts" using only for self-checking and not necessary for
# auto-testing
assert index_power([1, 2, 3, 4], 2) == 9, "Square"
assert index_power([1, 3, 10, 100], 3) == 1_000_000, "Cube"
assert index_power([0, 1], 0) == 1, "Zero power"
assert index_power([1, 2], 3) == -1, "IndexError"
| [
"ken.mercus.lai@gmail.com"
] | ken.mercus.lai@gmail.com |
93f43ffd58185a4000535c395d163b5c91f6ebdd | d554b1aa8b70fddf81da8988b4aaa43788fede88 | /5 - Notebooks e Data/1 - Análises numéricas/Arquivos David/Atualizados/logDicas-master/data/2019-1/223/users/1330/codes/1595_1446.py | ae1ff47705f5df6b8f4503beaa1b154e0044a419 | [] | no_license | JosephLevinthal/Research-projects | a3bc3ca3b09faad16f5cce5949a2279cf14742ba | 60d5fd6eb864a5181f4321e7a992812f3c2139f9 | refs/heads/master | 2022-07-31T06:43:02.686109 | 2020-05-23T00:24:26 | 2020-05-23T00:24:26 | 266,199,309 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 229 | py | # Teste seu codigo aos poucos.
# Nao teste tudo no final, pois fica mais dificil de identificar erros.
# Nao se intimide com as mensagens de erro. Elas ajudam a corrigir seu codigo.
q = float(input())
f = q/3
print(round(f, 3)) | [
"jvlo@icomp.ufam.edu.br"
] | jvlo@icomp.ufam.edu.br |
a7062d038bf0fc26a95efe6d963427056ee70d87 | cd18e8900018afb643c44286a8d4db5d8d1c87c1 | /likelihood.py | 6bfaac5b749d06bd419e94daf5f6ed584a395940 | [] | no_license | pbizimis/score_sde_pytorch | 064a0bf9b2f116b2f0a2e9c7e129a8c38f7aa0b6 | cb0e019fc7f1262724877730d64f75eb16aab1b0 | refs/heads/main | 2023-04-18T21:15:31.753968 | 2021-03-21T22:48:36 | 2021-03-21T22:48:36 | 349,578,514 | 1 | 0 | null | 2021-03-19T23:18:21 | 2021-03-19T23:18:20 | null | UTF-8 | Python | false | false | 4,713 | py | # coding=utf-8
# Copyright 2020 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: skip-file
# pytype: skip-file
"""Various sampling methods."""
import torch
import numpy as np
from scipy import integrate
from models import utils as mutils
def get_div_fn(fn):
"""Create the divergence function of `fn` using the Hutchinson-Skilling trace estimator."""
def div_fn(x, t, eps):
with torch.enable_grad():
x.requires_grad_(True)
fn_eps = torch.sum(fn(x, t) * eps)
grad_fn_eps = torch.autograd.grad(fn_eps, x)[0]
x.requires_grad_(False)
return torch.sum(grad_fn_eps * eps, dim=tuple(range(1, len(x.shape))))
return div_fn
def get_likelihood_fn(sde, inverse_scaler, hutchinson_type='Rademacher',
rtol=1e-5, atol=1e-5, method='RK45', eps=1e-5):
"""Create a function to compute the unbiased log-likelihood estimate of a given data point.
Args:
sde: A `sde_lib.SDE` object that represents the forward SDE.
inverse_scaler: The inverse data normalizer.
hutchinson_type: "Rademacher" or "Gaussian". The type of noise for Hutchinson-Skilling trace estimator.
rtol: A `float` number. The relative tolerance level of the black-box ODE solver.
atol: A `float` number. The absolute tolerance level of the black-box ODE solver.
method: A `str`. The algorithm for the black-box ODE solver.
See documentation for `scipy.integrate.solve_ivp`.
eps: A `float` number. The probability flow ODE is integrated to `eps` for numerical stability.
Returns:
A function that a batch of data points and returns the log-likelihoods in bits/dim,
the latent code, and the number of function evaluations cost by computation.
"""
def drift_fn(model, x, t):
"""The drift function of the reverse-time SDE."""
score_fn = mutils.get_score_fn(sde, model, train=False, continuous=True)
# Probability flow ODE is a special case of Reverse SDE
rsde = sde.reverse(score_fn, probability_flow=True)
return rsde.sde(x, t)[0]
def div_fn(model, x, t, noise):
return get_div_fn(lambda xx, tt: drift_fn(model, xx, tt))(x, t, noise)
def likelihood_fn(model, data):
"""Compute an unbiased estimate to the log-likelihood in bits/dim.
Args:
model: A score model.
data: A PyTorch tensor.
Returns:
bpd: A PyTorch tensor of shape [batch size]. The log-likelihoods on `data` in bits/dim.
z: A PyTorch tensor of the same shape as `data`. The latent representation of `data` under the
probability flow ODE.
nfe: An integer. The number of function evaluations used for running the black-box ODE solver.
"""
with torch.no_grad():
shape = data.shape
if hutchinson_type == 'Gaussian':
epsilon = torch.randn_like(data)
elif hutchinson_type == 'Rademacher':
epsilon = torch.randint_like(data, low=0, high=2).float() * 2 - 1.
else:
raise NotImplementedError(f"Hutchinson type {hutchinson_type} unknown.")
def ode_func(t, x):
sample = mutils.from_flattened_numpy(x[:-shape[0]], shape).to(data.device).type(torch.float32)
vec_t = torch.ones(sample.shape[0], device=sample.device) * t
drift = mutils.to_flattened_numpy(drift_fn(model, sample, vec_t))
logp_grad = mutils.to_flattened_numpy(div_fn(model, sample, vec_t, epsilon))
return np.concatenate([drift, logp_grad], axis=0)
init = np.concatenate([mutils.to_flattened_numpy(data), np.zeros((shape[0],))], axis=0)
solution = integrate.solve_ivp(ode_func, (eps, sde.T), init, rtol=rtol, atol=atol, method=method)
nfe = solution.nfev
zp = solution.y[:, -1]
z = mutils.from_flattened_numpy(zp[:-shape[0]], shape).to(data.device).type(torch.float32)
delta_logp = mutils.from_flattened_numpy(zp[-shape[0]:], (shape[0],)).to(data.device).type(torch.float32)
prior_logp = sde.prior_logp(z)
bpd = -(prior_logp + delta_logp) / np.log(2)
N = np.prod(shape[1:])
bpd = bpd / N
# A hack to convert log-likelihoods to bits/dim
offset = 7. - inverse_scaler(-1.)
bpd = bpd + offset
return bpd, z, nfe
return likelihood_fn
| [
"yang-song@live.cn"
] | yang-song@live.cn |
e5ccbd6b3b4118250785a12ea65dc1d183f177cb | 0b6a2c91d458e2e58aa5d732624f9189863e3567 | /tests/test_array.py | c6f2bc565035bbd147efab113ed5afa57f331c40 | [
"MIT"
] | permissive | SethMMorton/fastnumbers | ef45c5d060fc293397fee682e24e3b026f6355a4 | 50dee124f232422a66bcc603b43216bf7a94e587 | refs/heads/main | 2023-08-08T20:21:20.805580 | 2023-04-19T04:55:58 | 2023-04-19T04:56:42 | 22,624,904 | 103 | 16 | MIT | 2023-07-25T21:10:29 | 2014-08-05T00:44:53 | C++ | UTF-8 | Python | false | false | 34,358 | py | from __future__ import annotations
import array
import ctypes
from typing import Any, Callable, Dict, Iterator, List, NoReturn, Tuple, Union
import numpy as np
import pytest
from hypothesis import given as hyp_given
from hypothesis.strategies import (
binary,
floats,
integers,
lists,
text,
)
from typing_extensions import TypedDict
import fastnumbers
from conftest import base_n
# Map supported data types to the Python array internal format designator
formats = {
"signed char": "b",
"unsigned char": "B",
"signed short": "h",
"unsigned short": "H",
"signed int": "i",
"unsigned int": "I",
"signed long": "l",
"unsigned long": "L",
"signed long long": "q",
"unsigned long long": "Q",
"float": "f",
"double": "d",
}
# Extreme values for signed integers of various byte widths
signed_size_extreme = {
1: (-128, 127),
2: (-32768, 32767),
4: (-2147483648, 2147483647),
8: (-9223372036854775808, 9223372036854775807),
}
# Extreme values for unsigned integers of various byte widths
unsigned_size_extreme = {
1: (0, 255),
2: (0, 65535),
4: (0, 4294967295),
8: (0, 18446744073709551615),
}
# Map all supported data types to the extreme values they support
extremes = {
"signed char": signed_size_extreme[ctypes.sizeof(ctypes.c_byte)],
"unsigned char": unsigned_size_extreme[ctypes.sizeof(ctypes.c_ubyte)],
"signed short": signed_size_extreme[ctypes.sizeof(ctypes.c_short)],
"unsigned short": unsigned_size_extreme[ctypes.sizeof(ctypes.c_ushort)],
"signed int": signed_size_extreme[ctypes.sizeof(ctypes.c_int)],
"unsigned int": unsigned_size_extreme[ctypes.sizeof(ctypes.c_uint)],
"signed long": signed_size_extreme[ctypes.sizeof(ctypes.c_long)],
"unsigned long": unsigned_size_extreme[ctypes.sizeof(ctypes.c_ulong)],
"signed long long": signed_size_extreme[ctypes.sizeof(ctypes.c_longlong)],
"unsigned long long": unsigned_size_extreme[ctypes.sizeof(ctypes.c_ulonglong)],
}
float_extremes = {
"float": (1.17549e-38, 3.40282e38),
"double": (2.22507e-308, 1.79769e308),
}
# Create shortcuts to collections of data types to test
signed_data_types = [
"signed char",
"signed short",
"signed int",
"signed long",
"signed long long",
]
unsigned_data_types = [
"unsigned char",
"unsigned short",
"unsigned int",
"unsigned long",
"unsigned long long",
]
int_data_types = signed_data_types + unsigned_data_types
float_data_types = ["float", "double"]
data_types = int_data_types + float_data_types
def test_invalid_argument_raises_type_error() -> None:
given = [0, 1]
with pytest.raises(TypeError, match="got an unexpected keyword argument 'invalid'"):
fastnumbers.try_array(given, invalid="dummy") # type: ignore
@pytest.mark.parametrize(
"selector",
[
fastnumbers.RAISE,
fastnumbers.INPUT,
fastnumbers.DISALLOWED,
fastnumbers.NUMBER_ONLY,
fastnumbers.STRING_ONLY,
],
)
def test_selectors_are_rejected_when_invalid_for_inf_and_nan(selector: object) -> None:
with pytest.raises(ValueError, match="values for 'inf' and 'nan'"):
fastnumbers.try_array(["5"], inf=selector)
with pytest.raises(ValueError, match="values for 'inf' and 'nan'"):
fastnumbers.try_array(["5"], nan=selector)
@pytest.mark.parametrize(
"selector",
[
fastnumbers.ALLOWED,
fastnumbers.INPUT,
fastnumbers.DISALLOWED,
fastnumbers.NUMBER_ONLY,
fastnumbers.STRING_ONLY,
],
)
def test_selectors_are_rejected_when_invalid_for_on_fail_and_friends(
selector: object,
) -> None:
msg = "values for 'on_fail', 'on_overflow', and 'on_type_error'"
with pytest.raises(ValueError, match=msg):
fastnumbers.try_array(["5"], on_fail=selector)
with pytest.raises(ValueError, match=msg):
fastnumbers.try_array(["5"], on_overflow=selector)
with pytest.raises(ValueError, match=msg):
fastnumbers.try_array(["5"], on_type_error=selector)
def test_invalid_input_type_gives_type_error() -> None:
"""Giving an invalid output type is rejected"""
given = [0, 1]
expected = "Only numpy ndarray and array.array types for output are "
expected += r"supported, not <class 'list'>"
with pytest.raises(TypeError, match=expected):
fastnumbers.try_array(given, []) # type: ignore
def test_require_output_if_numpy_is_not_installed() -> None:
"""A missing output value requires numpy to construct the output"""
given = [0, 1]
orig = fastnumbers.has_numpy
try:
fastnumbers.has_numpy = False
with pytest.raises(RuntimeError, match="To use fastnumbers"):
fastnumbers.try_array(given)
finally:
fastnumbers.has_numpy = orig
@pytest.mark.parametrize(
"output",
[np.array([[0, 0], [0, 0]]), np.array(0)],
)
def test_require_identically_one_ndarray_dimension(
output: np.ndarray[Any, np.dtype[np.int_]]
) -> None:
with pytest.raises(ValueError, match="Can only accept arrays of dimension 1"):
fastnumbers.try_array([0, 9], output)
def test_require_input_and_output_to_have_equal_size() -> None:
output = array.array("d", [0, 0, 0])
with pytest.raises(ValueError, match="input/output must be of equal size"):
fastnumbers.try_array([0, 9], output)
# Not all dtypes exist on all platforms. Add only the ones to test
# here that exist.
other_dtypes = [
getattr(np, x)
for x in ("float128", "complex128", "half", "bool_", "bytes_", "str_")
if hasattr(np, x)
]
@pytest.mark.parametrize("dtype", other_dtypes)
def test_invalid_numpy_dtypes_raises_correct_type_error(
dtype: np.dtype[Any],
) -> None:
"""Numpy arrays as output with invalid dtypes give an error"""
given = [0, 1]
output = np.array([0, 0], dtype=dtype)
expected = "The only supported numpy dtypes for output are: "
with pytest.raises(TypeError, match=expected):
fastnumbers.try_array(given, output)
class TestCPPProtections:
"""
These tests check internal C++ error handling that should not
be possible to encounter from the python interface
"""
def test_non_memorybuffer_type_raises_correct_type_error(self) -> None:
"""Ensure we only accept well-behaved memory views as input"""
with pytest.raises(TypeError, match="not 'list'"):
fastnumbers._array([0, 1], [0, 0]) # type: ignore
@pytest.mark.parametrize("dtype", other_dtypes)
def test_invalid_memorybuffer_type_raises_correct_type_error(
self, dtype: np.dtype[Any]
) -> None:
"""Ensure we only accept well-behaved memory views as input"""
given = [0, 1]
output = np.array([0, 0], dtype=dtype)
exception = r"Unknown buffer format '\S+' for object"
with pytest.raises(TypeError, match=exception):
fastnumbers._array(given, output) # type: ignore
kwargs = ["inf", "nan", "on_fail", "on_overflow", "on_type_error"]
KwargsType = TypedDict(
"KwargsType",
{"inf": int, "nan": int, "on_fail": int, "on_overflow": int, "on_type_error": int},
total=False,
)
class TestReplacements:
"""Test that the replacement mechanism works and raises intelligent exceptions"""
@pytest.mark.parametrize("data_type", data_types)
@pytest.mark.parametrize("kwarg", kwargs)
def test_string_replacement_type_gives_type_error(
self, data_type: str, kwarg: str
) -> None:
given = [0, 1]
result = array.array(formats[data_type], [0, 0])
expected = f"The default value of 'not ok' given to option '{kwarg}' "
expected += "has type 'str' which cannot be converted to a numeric value"
with pytest.raises(TypeError, match=expected):
fastnumbers.try_array(given, result, **{kwarg: "not ok"}) # type: ignore
@pytest.mark.parametrize("data_type", int_data_types)
@pytest.mark.parametrize("kwarg", kwargs)
def test_float_replacement_type_for_int_gives_value_error(
self, data_type: str, kwarg: str
) -> None:
given = [0, 1]
result = array.array(formats[data_type], [0, 0])
expected = rf"The default value of 1\.3 given to option '{kwarg}' "
expected += f"cannot be converted to C type '{data_type}'"
with pytest.raises(ValueError, match=expected):
fastnumbers.try_array(given, result, **{kwarg: 1.3}) # type: ignore
@pytest.mark.parametrize("data_type", float_data_types)
@pytest.mark.parametrize(
"kwargs",
[
KwargsType(inf=0),
KwargsType(nan=0),
KwargsType(on_fail=0),
KwargsType(on_overflow=0),
KwargsType(on_type_error=0),
],
)
def test_int_replacement_type_for_float_is_ok(
self, data_type: str, kwargs: KwargsType
) -> None:
given = [0, 1]
result = array.array(formats[data_type], [0, 0])
expected = array.array(formats[data_type], [0.0, 1.0])
fastnumbers.try_array(given, result, **kwargs)
assert result == expected
@pytest.mark.parametrize("data_type", int_data_types)
@pytest.mark.parametrize("kwarg", kwargs)
def test_extreme_replacement_type_for_int_gives_overflow_error(
self, data_type: str, kwarg: str
) -> None:
given = [0, 1]
result = array.array(formats[data_type], [0, 0])
expected = rf"The default value of \S+ given to option '{kwarg}' "
expected += f"cannot be converted to C type '{data_type}' without overflowing"
kwargs: KwargsType = {kwarg: extremes[data_type][0] - 1} # type: ignore[misc]
with pytest.raises(OverflowError, match=expected):
fastnumbers.try_array(given, result, **kwargs)
kwargs = {kwarg: extremes[data_type][1] + 1} # type: ignore[misc]
with pytest.raises(OverflowError, match=expected):
fastnumbers.try_array(given, result, **kwargs)
@pytest.mark.parametrize("data_type", data_types)
def test_replacement_callables_with_invalid_type_gives_type_error(
self, data_type: str
) -> None:
given = ["invalid"]
result = array.array(formats[data_type], [0])
expected = "Callable passed to 'on_fail' with input 'invalid' returned "
expected += "the value '3' that has type 'str' which cannot be converted "
expected += "to a numeric value"
with pytest.raises(TypeError, match=expected):
fastnumbers.try_array(given, result, on_fail=lambda _: "3")
@pytest.mark.parametrize("data_type", int_data_types)
def test_replacement_callables_with_bad_number_gives_value_error(
self, data_type: str
) -> None:
given = ["invalid"]
result = array.array(formats[data_type], [0])
expected = "Callable passed to 'on_fail' with input 'invalid' returned "
expected += f"the value 3.4 that cannot be converted to C type '{data_type}'"
with pytest.raises(ValueError, match=expected):
fastnumbers.try_array(given, result, on_fail=lambda _: 3.4)
@pytest.mark.parametrize("data_type", int_data_types)
def test_replacement_callables_with_extreme_gives_overflow_error(
self, data_type: str
) -> None:
given = ["invalid"]
result = array.array(formats[data_type], [0])
expected = "Callable passed to 'on_fail' with input 'invalid' returned "
expected += r"the value \S+ that cannot be converted to C type "
expected += f"'{data_type}' without overflowing"
with pytest.raises(OverflowError, match=expected):
fastnumbers.try_array(
given, result, on_fail=lambda _: extremes[data_type][1] + 1
)
@pytest.mark.parametrize("data_type", int_data_types)
def test_replacement_callables_some_python_error(self, data_type: str) -> None:
given = ["invalid"]
result = array.array(formats[data_type], [0])
expected = "bad operand type for abs()"
with pytest.raises(TypeError, match=expected):
fastnumbers.try_array(given, result, on_fail=lambda x: abs(x))
@pytest.mark.parametrize("data_type", int_data_types)
def test_replacements_trigger_new_values_for_int(self, data_type: str) -> None:
given = [
"invalid",
[2],
extremes[data_type][0] - 1,
extremes[data_type][1] + 1,
"7",
]
result = array.array(formats[data_type], [0, 0, 0, 0, 0])
expected = array.array(formats[data_type], [2, 3, 1, 1, 7])
fastnumbers.try_array(given, result, on_fail=2, on_overflow=1, on_type_error=3)
assert result == expected
@pytest.mark.parametrize("data_type", float_data_types)
def test_replacements_trigger_new_values_for_float(self, data_type: str) -> None:
given = ["invalid", [2], "inf", "nan", float("inf"), float("nan"), "7"]
result = array.array(formats[data_type], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0])
expected = array.array(formats[data_type], [2.0, 3.0, 5.0, 4.0, 5.0, 4.0, 7.0])
fastnumbers.try_array(given, result, inf=5, nan=4, on_fail=2, on_type_error=3)
assert result == expected
@pytest.mark.parametrize("data_type", int_data_types)
def test_replacement_callables_trigger_new_values_for_int(
self, data_type: str
) -> None:
given = [
"invalid",
[2],
extremes[data_type][0] - 1,
extremes[data_type][1] + 1,
"7",
]
result = array.array(formats[data_type], [0, 0, 0, 0, 0])
expected = array.array(formats[data_type], [2, 3, 1, 1, 7])
fastnumbers.try_array(
given,
result,
on_fail=lambda _: 2,
on_overflow=lambda _: 1,
on_type_error=lambda _: 3,
)
assert result == expected
@pytest.mark.parametrize("data_type", float_data_types)
def test_replacement_callables_trigger_new_values_for_float(
self, data_type: str
) -> None:
given = ["invalid", [2], "inf", "nan", float("inf"), float("nan"), "7"]
result = array.array(formats[data_type], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0])
expected = array.array(formats[data_type], [2.0, 3.0, 5.0, 4.0, 5.0, 4.0, 7.0])
fastnumbers.try_array(
given,
result,
inf=lambda _: 5,
nan=lambda _: 4,
on_fail=lambda _: 2,
on_type_error=lambda _: 3,
)
assert result == expected
class DumbFloatClass(object):
    """Helper object whose float conversion always fails."""

    def __float__(self) -> NoReturn:
        message = "something here might go wrong"
        raise ValueError(message)
class DumbIntClass(object):
    """Helper object whose int conversion always fails."""

    def __int__(self) -> NoReturn:
        message = "something here might go wrong"
        raise ValueError(message)
class TestErrors:
    """Test that intelligent exceptions are raised on error"""

    @pytest.mark.parametrize("dumb", [DumbFloatClass(), DumbIntClass()])
    @pytest.mark.parametrize("data_type", data_types)
    @pytest.mark.parametrize("style", [list, iter])
    def test_given_junk_float_type_raises_error(
        self, data_type: str, dumb: Any, style: Callable[[Any], Any]
    ) -> None:
        """Objects whose numeric conversion raises surface as ValueError."""
        given = style([dumb])
        result = array.array(formats[data_type], [0])
        expected = "Cannot convert"
        with pytest.raises(ValueError, match=expected):
            fastnumbers.try_array(given, result)

    @pytest.mark.parametrize("data_type", data_types)
    def test_given_invalid_string_raises_value_error(self, data_type: str) -> None:
        """Unparsable strings raise ValueError naming the offending input."""
        given = ["4", "78", "46", "invalid"]
        result = array.array(formats[data_type], [0, 0, 0, 0])
        expected = f"Cannot convert 'invalid' to C type '{data_type}'"
        with pytest.raises(ValueError, match=expected):
            fastnumbers.try_array(given, result)

    @pytest.mark.parametrize("data_type", int_data_types)
    def test_given_extreme_raises_overflow_error(self, data_type: str) -> None:
        """Values beyond the C type's range raise OverflowError by default."""
        given = ["4", "78", "46", extremes[data_type][1] + 1]
        result = array.array(formats[data_type], [0, 0, 0, 0])
        expected = rf"Cannot convert \S+ to C type '{data_type}' without overflowing"
        with pytest.raises(OverflowError, match=expected):
            fastnumbers.try_array(given, result)

    @pytest.mark.parametrize("data_type", data_types)
    def test_given_incorrect_type_raises_type_error(self, data_type: str) -> None:
        """Non-numeric, non-string inputs (e.g. a list) raise TypeError."""
        given = ["4", "78", "46", ["6"]]
        result = array.array(formats[data_type], [0, 0, 0, 0])
        expected = r"The value \['6'\] has type 'list' which cannot "
        expected += "be converted to a numeric value"
        with pytest.raises(TypeError, match=expected):
            fastnumbers.try_array(given, result)

    @pytest.mark.parametrize("data_type", data_types)
    def test_given_broken_generator_fails(self, data_type: str) -> None:
        """A generator's exception should be returned"""

        def broken() -> Iterator[str]:
            """Not a good generator"""
            yield "5"
            yield "6"
            raise ValueError("Fëanor")

        output = array.array(formats[data_type], [0, 0, 0, 0])
        with pytest.raises(ValueError, match="Fëanor"):
            fastnumbers.try_array(broken(), output)

    @pytest.mark.parametrize("data_type", data_types)
    def test_given_non_iterable_raises_type_error(self, data_type: str) -> None:
        """Non-iterable input raises the usual Python TypeError."""
        output = array.array(formats[data_type], [0, 0, 0, 0])
        with pytest.raises(TypeError, match="'int' object is not iterable"):
            fastnumbers.try_array(5, output)  # type: ignore

    @pytest.mark.parametrize("data_type", data_types)
    @pytest.mark.parametrize("style", [list, iter])
    def test_given_invalid_types_behave_as_expected(
        self, data_type: str, style: Callable[[Any], Any]
    ) -> None:
        """Tuples raise TypeError unless an on_type_error replacement is given."""
        msg = r"The value \('Fëanor',\) has type 'tuple' which cannot be "
        msg += "converted to a numeric value"
        with pytest.raises(TypeError, match=msg):
            fastnumbers.try_array(style([("Fëanor",)]))
        expected = array.array(formats[data_type], [5])
        result = array.array(formats[data_type], [0])
        fastnumbers.try_array(style([("Fëanor",)]), result, on_type_error=5)
        assert result == expected
class TestSuccess:
    """Test that the function does what it says on the tin"""

    @pytest.mark.parametrize("data_type", data_types)
    @pytest.mark.parametrize("style", [list, tuple, iter])
    def test_given_valid_values_returns_correct_results(
        self, data_type: str, style: Callable[[Any], Any]
    ) -> None:
        """Mixed str/int input (including unicode digits) converts correctly."""
        given = style(["4", "78", 46, "⑦"])
        result = array.array(formats[data_type], [0, 0, 0, 0])
        expected = array.array(formats[data_type], [4, 78, 46, 7])
        fastnumbers.try_array(given, result)
        assert result == expected

    @pytest.mark.parametrize("data_type", data_types)
    def test_given_range_returns_correct_results(self, data_type: str) -> None:
        """A range object is accepted as input."""
        given = range(4)
        result = array.array(formats[data_type], [0, 0, 0, 0])
        expected = array.array(formats[data_type], [0, 1, 2, 3])
        fastnumbers.try_array(given, result)
        assert result == expected

    @pytest.mark.parametrize("data_type", int_data_types)
    def test_integer_extremes(self, data_type: str) -> None:
        """Values at the type's limits convert; one past triggers on_overflow."""
        given = [
            extremes[data_type][0],
            str(extremes[data_type][0]),
            extremes[data_type][0] - 1,
            str(extremes[data_type][0] - 1),
            extremes[data_type][1],
            str(extremes[data_type][1]),
            extremes[data_type][1] + 1,
            str(extremes[data_type][1] + 1),
        ]
        result = array.array(formats[data_type], [0, 0, 0, 0, 0, 0, 0, 0])
        expected = array.array(
            formats[data_type],
            [
                extremes[data_type][0],
                extremes[data_type][0],
                123,
                123,
                extremes[data_type][1],
                extremes[data_type][1],
                123,
                123,
            ],
        )
        fastnumbers.try_array(given, result, on_overflow=123)
        assert result == expected

    @pytest.mark.parametrize("data_type", float_data_types)
    def test_float_extremes(self, data_type: str) -> None:
        """Float under/overflow becomes 0.0 or infinity rather than erroring."""
        given = [
            float_extremes[data_type][0],
            str(float_extremes[data_type][0]),
            float_extremes[data_type][0] * 1e-100,  # so small, becomes 0.0
            str(float_extremes[data_type][0] * 1e-100),
            float_extremes[data_type][1],
            str(float_extremes[data_type][1]),
            float_extremes[data_type][1] * 1e10,  # so big, becomes infinity
            str(float_extremes[data_type][1] * 1e10),
        ]
        result = array.array(formats[data_type], [0, 0, 0, 0, 0, 0, 0, 0])
        expected = array.array(
            formats[data_type],
            [
                float_extremes[data_type][0],
                float_extremes[data_type][0],
                0.0,
                0.0,
                float_extremes[data_type][1],
                float_extremes[data_type][1],
                float("inf"),
                float("inf"),
            ],
        )
        fastnumbers.try_array(given, result)
        assert result == expected

    @pytest.mark.parametrize("base", range(2, 37))
    @pytest.mark.parametrize("data_type", int_data_types)
    def test_integer_bases(self, base: int, data_type: str) -> None:
        """Conversion honors an explicit base for every base from 2 to 36."""
        given = map(
            lambda x: base_n(x, base),
            [
                extremes[data_type][0],
                extremes[data_type][0] - 1,
                extremes[data_type][1],
                extremes[data_type][1] + 1,
                0,
                100,
            ],
        )
        result = array.array(formats[data_type], [0, 0, 0, 0, 0, 0])
        expected = array.array(
            formats[data_type],
            [
                extremes[data_type][0],
                123,
                extremes[data_type][1],
                123,
                0,
                100,
            ],
        )
        fastnumbers.try_array(given, result, base=base, on_overflow=123)
        assert result == expected

    @pytest.mark.parametrize("data_type", data_types)
    def test_underscores(self, data_type: str) -> None:
        """Underscore digit separators are honored when allow_underscores=True."""
        given = ["1_0", "11_0"]
        result = array.array(formats[data_type], [0, 0])
        expected = array.array(formats[data_type], [10, 110])
        fastnumbers.try_array(given, result, allow_underscores=True)
        assert result == expected

    @pytest.mark.parametrize("data_type", int_data_types)
    def test_base_prefix(self, data_type: str) -> None:
        """base=0 auto-detects 0b/0o/0x prefixes."""
        given = [
            bin(extremes[data_type][0]),
            oct(extremes[data_type][0]),
            hex(extremes[data_type][0]),
        ]
        result = array.array(formats[data_type], [0, 0, 0])
        expected = array.array(formats[data_type], [extremes[data_type][0]] * 3)
        fastnumbers.try_array(given, result, base=0)
        assert result == expected

    @pytest.mark.parametrize("data_type", int_data_types)
    def test_base_prefix_and_underscores(self, data_type: str) -> None:
        """Prefix auto-detection and underscore separators work together."""
        given = ["0b10_01", "0x3_a", "0o5_7", "0b_0", "0x_f", "0o_5"]
        result = array.array(formats[data_type], [0, 0, 0, 0, 0, 0])
        expected = array.array(formats[data_type], [9, 58, 47, 0, 15, 5])
        fastnumbers.try_array(given, result, base=0, allow_underscores=True)
        assert result == expected
# Create shortcuts to collections of dtypes to test
signed_dtypes: List[Any] = [
    np.int8,
    np.int16,
    np.int32,
    np.int64,
]
unsigned_dtypes: List[Any] = [
    np.uint8,
    np.uint16,
    np.uint32,
    np.uint64,
]
int_dtypes = signed_dtypes + unsigned_dtypes
float_dtypes: List[Any] = [np.float32, np.float64]
dtypes = int_dtypes + float_dtypes

# (min, max) representable value for each integer dtype, keyed by numpy type.
dtype_extremes: Dict[Any, Tuple[int, int]] = {
    np.int8: signed_size_extreme[1],
    np.uint8: unsigned_size_extreme[1],
    np.int16: signed_size_extreme[2],
    np.uint16: unsigned_size_extreme[2],
    np.int32: signed_size_extreme[4],
    np.uint32: unsigned_size_extreme[4],
    np.int64: signed_size_extreme[8],
    np.uint64: unsigned_size_extreme[8],
}
# (smallest positive normal, largest finite) value for each float dtype.
dtype_float_extremes: Dict[Any, Tuple[float, float]] = {
    np.float32: (1.17549e-38, 3.40282e38),
    np.float64: (2.22507e-308, 1.79769e308),
}
class TestNumpy:
    """Ensure that try_array well supports numpy arrays"""

    def test_default_dtype_is_float64(self) -> None:
        """With no output array or dtype given, float64 is used."""
        given = [4, 4.5, "5", "5.6", "nan", "inf"]
        expected = np.array([4, 4.5, 5, 5.6, np.nan, np.inf], dtype=np.float64)
        result = fastnumbers.try_array(given)
        assert result.dtype == np.float64
        assert np.array_equal(result, expected, equal_nan=True)

    def test_accepts_iterable_as_input(self) -> None:
        """An iterator (length unknown in advance) is accepted as input."""
        given = iter([4, 4.5, "5", "5.6", "nan", "inf"])
        expected = np.array([4, 4.5, 5, 5.6, np.nan, np.inf], dtype=np.float64)
        result = fastnumbers.try_array(given)
        assert np.array_equal(result, expected, equal_nan=True)

    @pytest.mark.parametrize("dtype", dtypes)
    def test_supported_dtypes(
        self, dtype: Union[np.dtype[np.int_], np.dtype[np.float_]]
    ) -> None:
        """Every supported numpy dtype converts, including unicode digits."""
        given = [4, "5", "⑦"]
        expected = np.array([4, 5, 7], dtype=dtype)
        result = fastnumbers.try_array(given, dtype=dtype)
        assert result.dtype == dtype
        assert np.array_equal(result, expected)

    @pytest.mark.parametrize("dtype", int_dtypes)
    def test_integer_extremes(self, dtype: np.dtype[np.int_]) -> None:
        """Values at dtype limits convert; one past triggers on_overflow."""
        given = [
            dtype_extremes[dtype][0],
            str(dtype_extremes[dtype][0]),
            dtype_extremes[dtype][0] - 1,
            str(dtype_extremes[dtype][0] - 1),
            dtype_extremes[dtype][1],
            str(dtype_extremes[dtype][1]),
            dtype_extremes[dtype][1] + 1,
            str(dtype_extremes[dtype][1] + 1),
        ]
        expected = np.array(
            [
                dtype_extremes[dtype][0],
                dtype_extremes[dtype][0],
                123,
                123,
                dtype_extremes[dtype][1],
                dtype_extremes[dtype][1],
                123,
                123,
            ],
            dtype=dtype,
        )
        result = fastnumbers.try_array(given, dtype=dtype, on_overflow=123)
        assert np.array_equal(result, expected)

    @pytest.mark.parametrize("dtype", float_dtypes)
    def test_float_extremes(self, dtype: np.dtype[np.float_]) -> None:
        """Float under/overflow becomes 0.0 / infinity rather than erroring."""
        given = [
            dtype_float_extremes[dtype][0],
            str(dtype_float_extremes[dtype][0]),
            dtype_float_extremes[dtype][0] * 1e-100,  # so small, becomes 0.0
            str(dtype_float_extremes[dtype][0] * 1e-100),
            dtype_float_extremes[dtype][1],
            str(dtype_float_extremes[dtype][1]),
            dtype_float_extremes[dtype][1] * 1e10,  # so big, becomes infinity
            str(dtype_float_extremes[dtype][1] * 1e10),
        ]
        expected = np.array(
            [
                dtype_float_extremes[dtype][0],
                dtype_float_extremes[dtype][0],
                0.0,
                0.0,
                dtype_float_extremes[dtype][1],
                dtype_float_extremes[dtype][1],
                float("inf"),
                float("inf"),
            ],
            dtype=dtype,
        )
        result = fastnumbers.try_array(given, dtype=dtype)
        assert np.array_equal(result, expected)

    @pytest.mark.parametrize("dtype", dtypes)
    def test_accepts_output_array(
        self, dtype: Union[np.dtype[np.int_], np.dtype[np.float_]]
    ) -> None:
        """Results are written in place into a caller-provided ndarray."""
        given = [4, "5", "⑦"]
        result = np.array([0, 0, 0], dtype=dtype)
        expected = np.array([4, 5, 7], dtype=dtype)
        fastnumbers.try_array(given, result)
        assert np.array_equal(result, expected)

    def test_slice(self) -> None:
        """Writing into a slice touches only the sliced region."""
        given = [4, "5", "⑦"]
        result = np.array([0, 0, 0, 0, 0])
        expected = np.array([0, 4, 5, 7, 0])
        fastnumbers.try_array(given, result[1:4])
        assert np.array_equal(result, expected)

    def test_strides(self) -> None:
        """Strided views (every other element) are honored."""
        given = [4, "5", "⑦"]
        result = np.array([0, 0, 0, 0, 0, 0])
        expected = np.array([4, 0, 5, 0, 7, 0])
        fastnumbers.try_array(given, result[::2])
        assert np.array_equal(result, expected)

    def test_strides_offset(self) -> None:
        """Strided views with a start offset are honored."""
        given = [4, "5", "⑦"]
        result = np.array([0, 0, 0, 0, 0, 0])
        expected = np.array([0, 4, 0, 5, 0, 7])
        fastnumbers.try_array(given, result[1::2])
        assert np.array_equal(result, expected)

    def test_negative_strides(self) -> None:
        """Negative strides write in reverse order."""
        given = [4, "5", "⑦"]
        result = np.array([0, 0, 0, 0, 0, 0])
        expected = np.array([0, 7, 0, 5, 0, 4])
        fastnumbers.try_array(given, result[::-2])
        assert np.array_equal(result, expected)

    def test_slice_2d(self) -> None:
        """Writing into one row of a 2-D array works."""
        given = [4, "5", "⑦"]
        result = np.array([[0, 0, 0], [0, 0, 0], [0, 0, 0]])
        expected = np.array([[0, 0, 0], [4, 5, 7], [0, 0, 0]])
        fastnumbers.try_array(given, result[1, :])
        assert np.array_equal(result, expected)

    def test_slice_2d_column(self) -> None:
        """Writing into one column of a 2-D array works."""
        given = [4, "5", "⑦"]
        result = np.array([[0, 0, 0], [0, 0, 0], [0, 0, 0]])
        expected = np.array([[0, 4, 0], [0, 5, 0], [0, 7, 0]])
        fastnumbers.try_array(given, result[:, 1])
        assert np.array_equal(result, expected)

    def test_stride_2d(self) -> None:
        """A strided row view of a 2-D array works."""
        given = [4, "5", "⑦"]
        result = np.array([[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]])
        expected = np.array(
            [[0, 0, 0, 0, 0, 0], [4, 0, 5, 0, 7, 0], [0, 0, 0, 0, 0, 0]]
        )
        fastnumbers.try_array(given, result[1, ::2])
        assert np.array_equal(result, expected)

    def test_stride_2d_column(self) -> None:
        """A strided column view of a 2-D array works."""
        given = [4, "5", "⑦"]
        result = np.array(
            [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]]
        )
        expected = np.array(
            [[0, 0, 0], [0, 4, 0], [0, 0, 0], [0, 5, 0], [0, 0, 0], [0, 7, 0]]
        )
        fastnumbers.try_array(given, result[1::2, 1])
        assert np.array_equal(result, expected)
@hyp_given(
    lists(
        floats() | integers() | text() | binary() | lists(integers(), max_size=1),
        max_size=50,
    )
)
@pytest.mark.parametrize("dtype", int_dtypes)
def test_all_the_things_for_ints(dtype: np.dtype[np.int_], x: List[Any]) -> None:
    """Property test: try_array matches try_int plus manual overflow clamping."""
    # Using try_array should give the same results
    # as try_int with map=list then converted to an array.
    # Under-the-hood, the on_fail, etc. replacements use a different code path
    # so this test is not just wasting time.
    expected_pre = fastnumbers.try_int(
        x,
        on_fail=lambda x: 5 if isinstance(x, str) else 6,
        on_type_error=7,
        map=list,
    )
    # Mirror try_array's on_overflow=9 by replacing out-of-range values with 9.
    expected_pre = [9 if x < dtype_extremes[dtype][0] else x for x in expected_pre]
    expected_pre = [9 if x > dtype_extremes[dtype][1] else x for x in expected_pre]
    expected = np.array(expected_pre, dtype=dtype)
    result = fastnumbers.try_array(
        x,
        dtype=dtype,
        on_fail=lambda x: 5 if isinstance(x, str) else 6,
        on_type_error=7,
        on_overflow=9,
    )
    # Bug fix: the return value of np.array_equal was previously discarded,
    # so this test could never fail. Assert it, as the other tests do.
    assert np.array_equal(result, expected)
@hyp_given(
    lists(
        floats() | integers() | text() | binary() | lists(integers(), max_size=1),
        max_size=50,
    )
)
@pytest.mark.parametrize("dtype", float_dtypes)
@pytest.mark.filterwarnings("ignore:overflow encountered in cast")
def test_all_the_things_for_floats(dtype: np.dtype[np.float_], x: List[Any]) -> None:
    """Property test: try_array matches try_float converted to an array."""
    # Using try_array should give the same results
    # as try_float with map=list then converted to an array.
    # Under-the-hood, the on_fail, etc. replacements use a different code path
    # so this test is not just wasting time.
    expected_pre = fastnumbers.try_float(
        x,
        on_fail=lambda x: 5.0 if isinstance(x, str) else 6.0,
        on_type_error=7.0,
        map=list,
    )
    expected = np.array(expected_pre, dtype=dtype)
    result = fastnumbers.try_array(
        x,
        dtype=dtype,
        on_fail=lambda x: 5.0 if isinstance(x, str) else 6.0,
        on_type_error=7.0,
    )
    # Bug fix: the return value of np.array_equal was previously discarded,
    # so this test could never fail. Assert it (equal_nan because NaN inputs
    # are passed through unreplaced here).
    assert np.array_equal(result, expected, equal_nan=True)
@hyp_given(
    lists(
        floats() | integers() | text() | binary() | lists(integers(), max_size=1),
        max_size=50,
    )
)
@pytest.mark.parametrize("dtype", float_dtypes)
@pytest.mark.filterwarnings("ignore:overflow encountered in cast")
def test_all_the_things_for_floats_with_nan_inf_replacement(
    dtype: np.dtype[np.float_], x: List[Any]
) -> None:
    """Property test: try_array matches try_float when inf/nan are replaced."""
    # Using try_array should give the same results
    # as try_float with map=list then converted to an array.
    # Under-the-hood, the on_fail, etc. replacements use a different code path
    # so this test is not just wasting time.
    expected_pre = fastnumbers.try_float(
        x,
        inf=1.0,
        nan=3.0,
        on_fail=lambda x: 5.0 if isinstance(x, str) else 6.0,
        on_type_error=7.0,
        map=list,
    )
    expected = np.array(expected_pre, dtype=dtype)
    result = fastnumbers.try_array(
        x,
        dtype=dtype,
        inf=1.0,
        nan=3.0,
        on_fail=lambda x: 5.0 if isinstance(x, str) else 6.0,
        on_type_error=7.0,
    )
    # Bug fix: the return value of np.array_equal was previously discarded,
    # so this test could never fail. Assert it, as the other tests do.
    assert np.array_equal(result, expected)
def test_sanity() -> None:
    """Make sure explicitly passing dtypes makes mypy happy."""
    # One call per supported numpy dtype; this is a static-typing smoke test,
    # not a behavioral check.
    fastnumbers.try_array([0, 1], dtype=np.int8)
    fastnumbers.try_array([0, 1], dtype=np.int16)
    fastnumbers.try_array([0, 1], dtype=np.int32)
    fastnumbers.try_array([0, 1], dtype=np.int64)
    fastnumbers.try_array([0, 1], dtype=np.uint8)
    fastnumbers.try_array([0, 1], dtype=np.uint16)
    fastnumbers.try_array([0, 1], dtype=np.uint32)
    fastnumbers.try_array([0, 1], dtype=np.uint64)
    fastnumbers.try_array([0, 1], dtype=np.float32)
    fastnumbers.try_array([0, 1], dtype=np.float64)
| [
"seth.m.morton@gmail.com"
] | seth.m.morton@gmail.com |
2e21c92d54845d183d2aaebd0eee1a77a39208fc | f6bba50fccc6fb0dae2f046193434cfb4b9d32d5 | /m-solutions2020/c/main.py | 0bc2f50f93f9895ce3d48877d517d39eb5471a3e | [] | no_license | seven320/AtCoder | 4c26723d20004fe46ce118b882faabc05066841c | 45e301e330e817f1ace4be4088d3babe18588170 | refs/heads/master | 2021-11-22T22:57:32.290504 | 2021-10-24T09:15:12 | 2021-10-24T09:15:12 | 162,827,473 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 745 | py | #!/usr/bin/env python3
# encoding:utf-8
import copy
import random
import bisect  # bisect_left: binary search over a sorted list
import fractions  # least common multiple / gcd helpers live here
import math
import sys
import collections
from decimal import Decimal  # exact base-10 arithmetic when needed
mod = 10**9+7
sys.setrecursionlimit(mod)  # raise the recursion limit (default is 1000)
d = collections.deque()
def LI(): return list(map(int, sys.stdin.readline().split()))
# N, K = LI()
# A = LI()
# for i in range(N - K):
#     if A[i] < A[i + K]:
#         ans = "Yes"
#     else:
#         ans = "No"
#     print(ans)
# Compare each K-length window sum with the previous one; the shared terms
# cancel, so window i+1 > window i reduces to A[i + K] > A[i].
N, K = LI()
A = LI()
for i in range(N - K):
    if A[i] < A[i + K]:
        ans = "Yes"
    else:
        ans = "No"
    print(ans)
"yosyuaomenw@yahoo.co.jp"
] | yosyuaomenw@yahoo.co.jp |
01b21899501be1f686675a277d7c04a97b49c2a6 | 9c019fe81794fdecd027c9f3b6d57fb95dc646e7 | /waters/dev/pep3d_writer2.py | 6c67eadbcf283419d104844264604eb0845ffb4d | [] | no_license | MatteoLacki/waters | e29c3def911d494df5240ead114239082181f149 | 767d7d45fbe62acf0af75bbefc9bdeb4f65889e6 | refs/heads/master | 2021-07-07T11:47:25.758922 | 2020-09-20T11:24:36 | 2020-09-20T11:24:36 | 192,311,088 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,271 | py | %load_ext autoreload
%autoreload 2
from pathlib import Path
import pandas as pd
pd.set_option('display.max_columns', 100)
pd.set_option('display.max_rows', 5)
import numpy as np
import numpy as np
from platform import system
from waters.parsers import XMLparser, iaDBsXMLparser, Pep3Dparser, Apex3Dparser, df2text, col2format
if system() == 'Linux':
data_f = Path('~/Projects/WatersData/O190303_78').expanduser()
data_f = Path('/home/matteo/Projects/WatersData/O200114_03').expanduser()
else:
data_f = Path(r"Y:\TESTRES2\5P\S170317_04__v1")
pep3d = next(data_f.glob('*_Pep3D_Spectrum.xml'))
P3D = Pep3Dparser(pep3d)
P3D.get_all_tag_counts()
le = P3D.LE
he = P3D.HE
le['ADCResponse'] = 10000
P3D.LE = le
he['ADCResponse'] = 10000
P3D.HE = he
P3D.write(pep3d.parent/(pep3d.stem + "_ADCResponse10000.xml"))
# compare outputs: with check sums:
from syncFiles.syncFiles import check_sum
from waters.parsers import iaDBsXMLparser
ia_workflows = list(data_f.glob('*_IA_Workflow*.xml'))
for iw in ia_workflows:
print(check_sum(iw))
# check sums do differ: what about the data?
orig = ia_workflows[0]
mod = ia_workflows[1]
parsed = [iaDBsXMLparser(i) for i in ia_workflows]
prots = [i.proteins() for i in parsed]
prods = [i.products() for i in parsed]
| [
"matteo.lacki@gmail.com"
] | matteo.lacki@gmail.com |
ce1cbee1c8760bed1f92f6c835a5075202aaff8c | 6f8de50a5b51e938aca5ffe9b33bc312d68b2de0 | /tests/test_sleeping.py | 0014f207b373c6b2f459a14c16295cd8501ed1e6 | [
"MIT"
] | permissive | pawelkopka/kopf | 0621150d9cdf286a2763c1482082d4868293c6c3 | 51a3a70e09a17cf3baec2946b64b125a90595cf4 | refs/heads/master | 2021-02-27T09:42:19.108740 | 2020-02-20T16:05:16 | 2020-02-20T16:05:16 | 245,597,653 | 0 | 0 | MIT | 2020-03-07T08:46:33 | 2020-03-07T08:46:33 | null | UTF-8 | Python | false | false | 1,222 | py | import asyncio
from kopf.engines.sleeping import sleep_or_wait
async def test_sleep_or_wait_by_delay_reached(timer):
    """With the event never set, the full delay elapses and None is returned."""
    event = asyncio.Event()
    with timer:
        unslept = await asyncio.wait_for(sleep_or_wait(0.10, event), timeout=1.0)
    assert 0.10 <= timer.seconds < 0.11
    assert unslept is None
async def test_sleep_or_wait_by_event_set(timer):
    """Setting the event mid-sleep wakes early and reports the unslept time."""
    event = asyncio.Event()
    # Schedule the event to fire at ~0.07s, before the 0.10s delay elapses.
    asyncio.get_running_loop().call_later(0.07, event.set)
    with timer:
        unslept = await asyncio.wait_for(sleep_or_wait(0.10, event), timeout=1.0)
    assert 0.06 <= timer.seconds <= 0.08
    assert 0.02 <= unslept <= 0.04
async def test_sleep_or_wait_with_zero_time_and_event_cleared(timer):
    """A zero delay returns immediately with nothing left unslept."""
    event = asyncio.Event()
    event.clear()
    with timer:
        unslept = await asyncio.wait_for(sleep_or_wait(0, event), timeout=1.0)
    assert timer.seconds <= 0.01
    assert unslept is None
async def test_sleep_or_wait_with_zero_time_and_event_preset(timer):
    """A zero delay with the event already set also returns immediately."""
    event = asyncio.Event()
    event.set()
    with timer:
        unslept = await asyncio.wait_for(sleep_or_wait(0, event), timeout=1.0)
    assert timer.seconds <= 0.01
    assert not unslept  # 0/None; undefined for such case: both goals reached.
| [
"sergey.vasilyev@zalando.de"
] | sergey.vasilyev@zalando.de |
a0277aef821c0c0df03be709cad993832130f137 | f4b8c90c1349c8740c1805f7b6b0e15eb5db7f41 | /test/test_incident_clery_geography_item.py | a2e42aee295d69416c816a915582b860257990dd | [] | no_license | CalPolyResDev/StarRezAPI | 012fb8351159f96a81352d6c7bfa36cd2d7df13c | b184e1863c37ff4fcf7a05509ad8ea8ba825b367 | refs/heads/master | 2021-01-25T10:29:37.966602 | 2018-03-15T01:01:35 | 2018-03-15T01:01:35 | 123,355,501 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,134 | py | # coding: utf-8
"""
StarRez API
This is a way to connect with the StarRez API. We are not the developers of the StarRez API, we are just an organization that uses it and wanted a better way to connect to it. # noqa: E501
OpenAPI spec version: 1.0.0
Contact: resdev@calpoly.edu
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import starrez_client
from starrez_client.models.incident_clery_geography_item import IncidentCleryGeographyItem # noqa: E501
from starrez_client.rest import ApiException
class TestIncidentCleryGeographyItem(unittest.TestCase):
    """IncidentCleryGeographyItem unit test stubs"""

    def setUp(self):
        # No fixtures needed for the generated stub.
        pass

    def tearDown(self):
        # Nothing to clean up.
        pass

    def testIncidentCleryGeographyItem(self):
        """Test IncidentCleryGeographyItem"""
        # FIXME: construct object with mandatory attributes with example values
        # model = starrez_client.models.incident_clery_geography_item.IncidentCleryGeographyItem()  # noqa: E501
        pass
if __name__ == '__main__':
    # Allow running this generated test module directly.
    unittest.main()
| [
"fedorareis@gmail.com"
] | fedorareis@gmail.com |
bb558e12b38a611e6ef8ebb0b193f6cc9978081e | a74216bf9183c9124b900e2c49aed9c2df324fab | /conversion/hinter_merge_gt_poses.py | c37362ae75e4363c7bab06107b23815c702c6e5c | [
"MIT"
] | permissive | EricCousineau-TRI/sixd_toolkit | 1900bef231024ccc1b528202f01c40ac398b96ec | dd610c46ae75d09aa68d21e56505936de05d44bb | refs/heads/master | 2020-06-06T03:13:37.924124 | 2019-06-18T23:01:36 | 2019-06-18T23:01:36 | 192,622,561 | 0 | 0 | MIT | 2019-06-18T22:52:22 | 2019-06-18T22:52:22 | null | UTF-8 | Python | false | false | 1,255 | py | # Author: Tomas Hodan (hodantom@cmp.felk.cvut.cz)
# Center for Machine Perception, Czech Technical University in Prague
# Copies a selected part of a dataset.
import os
import sys
import yaml
# Make the parent package importable when run as a script.
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Hard-coded paths: two ground-truth pose files for scene 02 to merge,
# and the merged output location.
gt_poses_1_path = '/local/datasets/tlod/hinterstoisser/backup/scene_02_separated_gts/scene_gt.yml'
gt_poses_2_path = '/local/datasets/tlod/hinterstoisser/backup/scene_02_separated_gts/scene_gt_brachmann.yml'
gt_poses_out_path = '/local/datasets/tlod/hinterstoisser/test/02/scene_gt.yml'
# Load both ground-truth files (CLoader is the fast libyaml parser).
with open(gt_poses_1_path, 'r') as f:
    gt_poses_1 = yaml.load(f, Loader=yaml.CLoader)
with open(gt_poses_2_path, 'r') as f:
    gt_poses_2 = yaml.load(f, Loader=yaml.CLoader)
# Both files must cover exactly the same image IDs.
assert(sorted(gt_poses_1.keys()) == (sorted(gt_poses_2.keys())))
# Merge per image, ordering the ground-truth entries by object ID.
gt_poses_out = {}
for im_id in sorted(gt_poses_1.keys()):
    gt_poses_out[im_id] = sorted(gt_poses_1[im_id] + gt_poses_2[im_id], key=lambda x: x['obj_id'])
def float_representer(dumper, value):
    """Represent floats in YAML output with a fixed 8-decimal-place format."""
    formatted = '%.8f' % value
    return dumper.represent_scalar(u'tag:yaml.org,2002:float', formatted)
# Emit floats with fixed 8-decimal formatting in the output YAML.
yaml.add_representer(float, float_representer)
# Store metadata
with open(gt_poses_out_path, 'w') as f:
    yaml.dump(gt_poses_out, f, width=10000)
"tom.hodan@gmail.com"
] | tom.hodan@gmail.com |
2ede47e112d7ee53b28b6b8ed26381669043ab46 | dddd89637373f455a476431f4fcb7e17b4e9dd57 | /py/mymodule.py | 7ee8fd4f09f5ecb889c7703003fa5ef98e954cfa | [] | no_license | DhirManish/Python | 35304eb47dea61934426fb6fc5094e1a83517cf3 | 10df7245d0964340d6c8d14cf26a9cf8f93ecf5d | refs/heads/master | 2020-06-05T07:09:41.856780 | 2015-03-07T12:53:10 | 2015-03-07T12:53:10 | 20,372,496 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 918 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# mymodule.py
#
# Copyright 2014 Ajay Bhatia <ajay@dumb-box>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
def sayhello():
    """Print a short greeting identifying this module."""
    greeting = "Hi! This is mymodule"
    print(greeting)
__version__ = '0.1'  # module version string
| [
"prof.ajaybhatia@gmail.com"
] | prof.ajaybhatia@gmail.com |
c6db344a0c7800fb1e1c26ef0f5e8dde7f3c93bd | b2e37446c5ef3602f1f4029d16104c397f9c46ae | /troubleshoot_step3.py | 1d1549c1de8469684789cc1017feb34d4dfee661 | [] | no_license | imapex-training/mod_apic_em | 6c7344edd8bf7e816bdba7f1ee7401b9e9e0d1c0 | 0d53a1e9d4eac74d7958b38144b07e60a85b1055 | refs/heads/master | 2021-07-16T08:09:41.467208 | 2017-10-23T16:32:51 | 2017-10-23T16:32:51 | 108,008,896 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,885 | py | #! /usr/bin/env python
"""
Learning Series: Network Programmability Basics
Module: Network Controllers
Lesson: Program your own DNA with APIC-EM APIs
Author: Hank Preston <hapresto@cisco.com>
troubleshoot_full.py
Illustrate the following concepts:
- Automating common information gathering used in troubleshooting
- Replicating "runbook logic" in code
- Leveraging REST APIs in Python
- Using details from one request in the next
"""
__author__ = "Hank Preston"
__author_email__ = "hapresto@cisco.com"
__copyright__ = "Copyright (c) 2016 Cisco Systems, Inc."
__license__ = "MIT"
from device_info import apicem
from time import sleep
import json
import requests
import sys
import urllib3
# python2 compatibility
reload(sys)
sys.setdefaultencoding('utf8')
# Silence the insecure warning due to SSL Certificate
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
# Shared HTTP headers for every API call; 'x-auth-token' is populated with
# the service ticket inside each API helper after apic_login().
headers = {
    'content-type': "application/json",
    'x-auth-token': ""
}
def apic_login(apic, username, password):
    """
    Authenticate against an APIC-EM controller and return the ticket body.

    Sends the credentials to the /ticket endpoint and returns the parsed
    "response" portion of the JSON reply (which carries the service ticket).
    """
    login_url = "https://{}/api/v1/ticket".format(apic)
    credentials = json.dumps({"username": username, "password": password})
    # POST the credentials; certificate verification is disabled for lab gear.
    login_response = requests.request(
        "POST", login_url, data=credentials, headers=headers, verify=False
    )
    return login_response.json()["response"]
def host_list(apic, ticket, ip=None, mac=None, name=None):
    """
    Retrieve hosts from APIC-EM, optionally filtered.

    Any combination of IP address, MAC address, and hostname may be given;
    each supplied value becomes a query-string filter on the request.
    Returns the parsed "response" portion of the JSON reply.
    """
    url = "https://{}/api/v1/host".format(apic)
    headers["x-auth-token"] = ticket
    # Build query-string filters only for the criteria that were supplied.
    criteria = [
        ("hostIp", ip),
        ("hostMac", mac),
        ("hostName", name),
    ]
    filters = ["{}={}".format(key, value) for key, value in criteria if value]
    if filters:
        url += "?" + "&".join(filters)
    response = requests.request("GET", url, headers=headers, verify=False)
    return response.json()["response"]
def verify_single_host(host, ip):
    """
    Ensure exactly one host was returned for the given IP address.

    Prints an error and exits the program (status 1) when zero or multiple
    hosts were found; returns None silently when exactly one host is present.
    """
    host_count = len(host)
    if host_count == 0:
        print("Error: No host with IP address {} was found".format(ip))
        sys.exit(1)
    if host_count > 1:
        print("Error: Multiple hosts with IP address {} were found".format(ip))
        # Dump the ambiguous hosts so the operator can disambiguate.
        print(json.dumps(host, indent=2))
        sys.exit(1)
def print_host_details(host):
    """
    Print interesting details about a host dict returned from APIC-EM.

    Always printed: hostName (or "Unavailable" when missing), hostType,
    connectedNetworkDeviceIpAddress, vlanId, hostIp, hostMac and subType.
    Wired hosts additionally show the connected interface name; wireless
    hosts show the connected AP name.
    """
    # Optional field: substitute a placeholder when APIC-EM omits it.
    host.setdefault("hostName", "Unavailable")
    print("Host Name: {}".format(host["hostName"]))
    print("Network Type: {}".format(host["hostType"]))
    print("Connected Network Device: {}".format(host["connectedNetworkDeviceIpAddress"]))  # noqa: E501
    if host["hostType"] == "wired":
        print("Connected Interface Name: {}".format(host["connectedInterfaceName"]))  # noqa: E501
    if host["hostType"] == "wireless":
        print("Connected AP Name: {}".format(host["connectedAPName"]))
    # Remaining standard details share one format -- print them in a loop.
    for label, field in (("VLAN", "vlanId"),
                         ("Host IP", "hostIp"),
                         ("Host MAC", "hostMac"),
                         ("Host Sub Type", "subType")):
        print("{}: {}".format(label, host[field]))
    # Blank line at the end
    print("")
def network_device_list(apic, ticket, id=None):
    """
    Retrieve the list of network devices from APIC-EM.

    When a device id is given, only that device is requested; the result
    is always returned as a list for a consistent caller interface.
    """
    url = "https://{}/api/v1/network-device".format(apic)
    headers["x-auth-token"] = ticket
    # A single-device lookup uses the /<id> form of the endpoint.
    if id:
        url = "{}/{}".format(url, id)
    response = requests.request("GET", url, headers=headers, verify=False)
    body = response.json()["response"]
    # Wrap a single-device response so callers always receive a list.
    return [body] if id else body
def interface_details(apic, ticket, id):
    """
    Retrieve details about a single interface identified by its id.
    """
    headers["x-auth-token"] = ticket
    url = "https://{}/api/v1/interface/{}".format(apic, id)
    reply = requests.request("GET", url, headers=headers, verify=False)
    return reply.json()["response"]
def print_network_device_details(network_device):
    """
    Print interesting details about a network device dict from APIC-EM:
    hostname, management IP, location, type, platform id, role, serial
    number, software version, up time, reachability status, and the
    error code/description fields.
    """
    # (label, dict key) pairs, in display order.
    fields = (("Device Hostname", "hostname"),
              ("Management IP", "managementIpAddress"),
              ("Device Location", "locationName"),
              ("Device Type", "type"),
              ("Platform Id", "platformId"),
              ("Device Role", "role"),
              ("Serial Number", "serialNumber"),
              ("Software Version", "softwareVersion"),
              ("Up Time", "upTime"),
              ("Reachability Status", "reachabilityStatus"),
              ("Error Code", "errorCode"),
              ("Error Description", "errorDescription"))
    for label, field in fields:
        print("{}: {}".format(label, network_device[field]))
    # Blank line at the end
    print("")
def print_interface_details(interface):
    """
    Print interesting details about an interface dict from APIC-EM:
    port name, interface type, admin/operational status, media type,
    speed, duplex, port mode, interface VLAN and voice VLAN.
    """
    # (label, dict key) pairs, in display order.
    fields = (("Port Name", "portName"),
              ("Interface Type", "interfaceType"),
              ("Admin Status", "adminStatus"),
              ("Operational Status", "status"),
              ("Media Type", "mediaType"),
              ("Speed", "speed"),
              ("Duplex Setting", "duplex"),
              ("Port Mode", "portMode"),
              ("Interface VLAN", "vlanId"),
              ("Voice VLAN", "voiceVlan"))
    for label, field in fields:
        print("{}: {}".format(label, interface[field]))
    # Blank line at the end
    print("")
def run_flow_analysis(apic, ticket, source_ip, destination_ip):
    """
    Use the REST API to initiate a Flow Analysis (Path Trace) from a given
    source_ip to destination_ip. Function will wait for analysis to complete,
    and return the results.
    """
    base_url = "https://{}/api/v1/flow-analysis".format(apic)
    headers["x-auth-token"] = ticket
    # initiate flow analysis
    body = {"destIP": destination_ip, "sourceIP": source_ip}
    initiate_response = requests.post(base_url, headers=headers, verify=False,
                                      json=body)
    # Verify successfully initiated. If not error and exit
    # (202 Accepted is the expected status for this async task submission).
    if initiate_response.status_code != 202:
        print("Error: Flow Analysis Initiation Failed")
        print(initiate_response.text)
        sys.exit(1)
    # Check status of analysis and wait until completed
    # Poll the task every 5 seconds until it reports COMPLETED.
    # NOTE(review): a task that ends in a terminal non-COMPLETED state
    # (e.g. FAILED) would make this loop spin forever -- confirm whether
    # the API can return such a status.
    flowAnalysisId = initiate_response.json()["response"]["flowAnalysisId"]
    detail_url = base_url + "/{}".format(flowAnalysisId)
    detail_response = requests.get(detail_url, headers=headers, verify=False)
    while not detail_response.json()["response"]["request"]["status"] == "COMPLETED":  # noqa: E501
        print("Flow analysis not complete yet, waiting 5 seconds")
        sleep(5)
        detail_response = requests.get(detail_url, headers=headers,
                                       verify=False)
    # Return the flow analysis details
    return detail_response.json()["response"]
def print_flow_analysis_details(flow_analysis):
    """
    Print interesting details about a flow analysis dict from APIC-EM:
    the hop count followed by per-hop device name, IP, role and (for
    non-AP devices) the ingress/egress interface names.
    """
    hops = flow_analysis["networkElementsInfo"]
    print("Number of Hops from Source to Destination: {}".format(len(hops)))
    print()
    print("Flow Details: ")
    # The first and last entries are the endpoints themselves; only the
    # network devices in between are displayed.
    last = len(hops) - 1
    for index, hop in enumerate(hops):
        if index in (0, last):
            continue
        print("*" * 40)
        print("Hop {}: Network Device {}".format(index, hop["name"]))
        # Nothing more to report for unidentified devices.
        if hop["name"] == "UNKNOWN":
            print()
            continue
        print("Device IP: {}".format(hop["ip"]))
        print("Device Role: {}".format(hop["role"]))
        # Access points carry no interface details.
        if hop["type"] == "Unified AP":
            continue
        print()
        print("Ingress Interface: {}".format(hop["ingressInterface"]["physicalInterface"]["name"]))  # noqa: E501
        print("Egress Interface: {}".format(hop["egressInterface"]["physicalInterface"]["name"]))  # noqa: E501
    # Print blank line at end
    print("")
# Entry point for program
# Usage: python <script> SOURCE_IP DESTINATION_IP
# Three-step troubleshooting flow against an APIC-EM controller:
#   1) identify both hosts, 2) describe how each attaches to the network,
#   3) run a Path Trace (flow analysis) between them.
if __name__ == '__main__':
    # Setup Arg Parse for Command Line parameters
    import argparse
    parser = argparse.ArgumentParser()
    # Command Line Parameters for Source and Destination IP
    parser.add_argument("source_ip", help = "Source IP Address")
    parser.add_argument("destination_ip", help = "Destination IP Address")
    args = parser.parse_args()
    # Get Source and Destination IPs from Command Line
    source_ip = args.source_ip
    destination_ip = args.destination_ip
    # Print Starting message
    print("Running Troubleshooting Script for ")
    print("      Source IP:      {} ".format(source_ip))
    print("      Destination IP: {}".format(destination_ip))
    print("")
    # Log into the APIC-EM Controller to get Ticket
    # (controller address and credentials come from device_info.apicem)
    login = apic_login(apicem["host"], apicem["username"], apicem["password"])
    # Step 1: Identify involved hosts
    # Retrieve Host Details from APIC-EM
    source_host = host_list(apicem["host"], login["serviceTicket"],
                            ip=source_ip)
    destination_host = host_list(apicem["host"], login["serviceTicket"],
                                 ip=destination_ip)
    # Verify single host found for each IP (exits the script otherwise)
    verify_single_host(source_host, source_ip)
    verify_single_host(destination_host, destination_ip)
    # Print Out Host details
    print("Source Host Details:")
    print("-" * 25)
    print_host_details(source_host[0])
    print("Destination Host Details:")
    print("-" * 25)
    print_host_details(destination_host[0])
    # Step 2: Where are they in the network?
    # Retrieve and Print Source Device Details from APIC-EM
    source_host_net_device = network_device_list(apicem["host"],
                                                 login["serviceTicket"],
                                                 id=source_host[0]["connectedNetworkDeviceId"])  # noqa: E501
    print("Source Host Network Connection Details:")
    print("-" * 45)
    print_network_device_details(source_host_net_device[0])
    # If Host is wired, collect interface details
    if source_host[0]["hostType"] == "wired":
        source_host_interface = interface_details(apicem["host"],
                                                  login["serviceTicket"],
                                                  id=source_host[0]["connectedInterfaceId"])  # noqa: E501
        print("Attached Interface:")
        print("-" * 20)
        print_interface_details(source_host_interface)
    destination_host_net_device = network_device_list(apicem["host"],
                                                      login["serviceTicket"],
                                                      id=destination_host[0]["connectedNetworkDeviceId"])  # noqa: E501
    print("Destination Host Network Connection Details:")
    print("-" * 45)
    print_network_device_details(destination_host_net_device[0])
    # If Host is wired, collect interface details
    if destination_host[0]["hostType"] == "wired":
        destination_host_interface = interface_details(apicem["host"],
                                                       login["serviceTicket"],
                                                       id=destination_host[0]["connectedInterfaceId"])  # noqa: E501
        print("Attached Interface:")
        print("-" * 20)
        print_interface_details(destination_host_interface)
    # Step 3: What path does the traffic take?
    # Run a Flow Analysis for Source/Destionation
    # (blocks until the controller reports the trace COMPLETED)
    print("Running Flow Analysis from {} to {}".format(source_ip, destination_ip))  # noqa: E501
    print("-" * 55)
    flow_analysis = run_flow_analysis(apicem["host"], login["serviceTicket"],
                                      source_ip,
                                      destination_ip)
    # Print Out Details
    print_flow_analysis_details(flow_analysis)
| [
"kecorbin@cisco.com"
] | kecorbin@cisco.com |
a00358f8ab1fa0c2fdc688d9b88603e9d41fb568 | 95a1f698b1d8b7c2578d5306481f506751b0452e | /dino_history/heritage/views.py | d89f40459cc1c9e5951c7eacf0e1b85138f2b91b | [] | no_license | hyesungoh/dino_history | 45130bf8aa984282c90fa2b241401d0c038968e6 | e5f50a8b83ff445c627302af2e6ca893ef8a4af2 | refs/heads/master | 2023-01-07T14:43:28.643160 | 2020-10-26T12:48:01 | 2020-10-26T12:48:01 | 284,934,718 | 0 | 0 | null | 2020-08-24T12:10:33 | 2020-08-04T09:26:13 | Python | UTF-8 | Python | false | false | 4,231 | py | from django.shortcuts import render
from .models import Heritage
from user.models import Student
# from django.contrib.staticfiles.templatetags.staticfiles import static
# from django.contrib.staticfiles.storage import staticfiles_storage
import os
# Create your views here.
def main(request):
    # Landing page: authenticated users get their dino avatar and ranking
    # summary; anonymous visitors get the signed-out template.
    if request.user.is_authenticated:
        user_now = request.user
        # Avatar image key derived from the user's dino level/class
        # (see dino_img below).
        dino_url = dino_img(user_now.dino_level, user_now.dino_class)
        # Korean keys: '총' = overall, '근현대' = modern era,
        # '조선시대' = Joseon era (see return_my_ranking).
        rank_dict = return_my_ranking(user_now)
        return render(request, 'heritage/main.html', {'user_now': user_now,
                                                      'dino_url': dino_url,
                                                      'total': rank_dict['총'],
                                                      'gh': rank_dict['근현대'],
                                                      'chs': rank_dict['조선시대']
                                                      })
    else:
        return render(request, 'heritage/main_nosigned.html')
def return_my_ranking(current_user):
    """Return the user's 1-based rank for each score category.

    Keys are Korean category labels: '총' (overall), '근현대' (modern era),
    '조선시대' (Joseon era), '삼국시대' (Three Kingdoms era) and
    '선사시대' (prehistoric era).
    """
    categories = (('총', 'cor_num'),
                  ('근현대', 'gh_num'),
                  ('조선시대', 'chs_num'),
                  ('삼국시대', 'sg_num'),
                  ('선사시대', 'ss_num'))
    rank_dict = {}
    for label, field in categories:
        # Rank = the user's position in the descending ordering by field.
        ordered = list(Student.objects.all().order_by('-' + field))
        rank_dict[label] = ordered.index(current_user) + 1
    return rank_dict
def map(request):
    # Renders the static heritage map page.
    # NOTE: shadows the builtin map(); kept because the URLconf refers to it.
    return render(request, 'heritage/map.html')
def result(request):
    """Search heritage items by (partial) name and render up to 7 matches."""
    name = request.GET["name"]
    if not name:
        # Empty query: just show the first few items.
        heritages = Heritage.objects.all()[0:7]
        return render(request, 'heritage/result.html',
                      {'name': name, 'heritages': heritages})
    # Substring match against the heritage name.
    heritages = Heritage.objects.filter(name__contains=name)[0:7]
    if len(heritages) < 1:
        # No matches: show the error page with a user-facing message.
        msg = str(name) + "이/가 들어간 문화재가 없어용 ㅜ"
        return error(request, msg)
    return render(request, 'heritage/result.html',
                  {'name': name, 'heritages': heritages})
def error(request, error_msg):
    # Generic error page; error_msg is displayed verbatim to the user.
    return render(request, 'user/error.html', {'error_msg': error_msg})
def map_result(request):
    """List up to 7 heritage items whose location contains the query string."""
    location = request.GET["location"]
    if location:
        queryset = Heritage.objects.filter(location__contains=location)
    else:
        # Empty query: fall back to the first few items.
        queryset = Heritage.objects.all()
    heritages = queryset[0:7]
    return render(request, 'heritage/map_result.html',
                  {'location': location, 'heritages': heritages})
def save_heritage(request):
    """One-off loader: populate the Heritage table from heritage.txt.

    Each line of heritage.txt is a Python dict literal with Korean keys
    (name, location parts, image, content, dynasty, longitude, latitude).
    Loading is skipped once the table already holds more than 10 rows.
    """
    # count() issues a COUNT query instead of materialising every row
    # (the original called len(Heritage.objects.all())).
    if Heritage.objects.count() > 10:
        pass
    else:
        module_dir = os.path.dirname(__file__)
        file_path = os.path.join(module_dir, 'heritage.txt')
        # "with" guarantees the handle is closed; the original leaked it.
        with open(file_path, 'r') as heritage_txt:
            for line in heritage_txt:
                # SECURITY: eval() executes arbitrary code.  This is only
                # acceptable because heritage.txt ships with the app;
                # switch to ast.literal_eval if the file ever becomes
                # user-supplied.
                this_heritage = eval(line)
                temp_heritage = Heritage()
                temp_heritage.name = this_heritage['문화재명1']
                temp_heritage.location = this_heritage['위치_도'] + this_heritage['위치_시']
                temp_heritage.dynasty = this_heritage['시대']
                temp_heritage.img_url = this_heritage['이미지']
                temp_heritage.content = this_heritage['내용']
                temp_heritage.longitude = this_heritage['경도']
                temp_heritage.latitude = this_heritage['위도']
                temp_heritage.save()
    return render(request, 'heritage/save_test.html')
def dino_img(level, cls):
    """Return the avatar image key.

    Levels 0 and 1 map to the bare level value; any other level maps to
    the string "<level>_<class>".
    """
    if level in (0, 1):
        return level
    return '{}_{}'.format(level, cls)
| [
"haesungoh414@gmail.com"
] | haesungoh414@gmail.com |
f8bbfce20a767ccc811dab546f25a1739002b4c9 | 8d4c6f902880f5f6b792c12a78d09afcb3cdd3d7 | /KafkaProducer.py | 2aea02ccb7d84c882c0fcad279c58b6fb77f3237 | [] | no_license | DataMarvel/DataFury | e1ae14a66253a2b67b60de8babf4e6d3fc08ea2d | 553023a8bb0a5f1f01f32d463563a39425cff21d | refs/heads/master | 2020-04-06T17:30:22.752209 | 2018-11-26T10:18:57 | 2018-11-26T10:18:57 | 157,661,805 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,550 | py | # -*- coding: UTF-8 -*-
"""
Created on 2018年11月20日
@author: Leo
@file: KafkaProducer
"""
# Python内部库
import json
from collections import OrderedDict
# Python第三方库
import pykafka
# 项目内部库
from DataVision.LoggerHandler.logger import VisionLogger
from DataFury.MessagePipe.KafkaPipe import KafkaMessageClient
# 日志路径
LOGGER_PATH = '../DataVision/LoggerConfig/logger_config.yaml'
# 日志
logger = VisionLogger(LOGGER_PATH)
class KafkaProducer(object):
    """Thin wrapper around a pykafka client for producing to one topic."""

    def __init__(self,
                 topic_name: str,
                 json_config: bool = False,
                 **kafka_config):
        """Kafka producer.

        :param topic_name: topic name (must be non-empty unless the json
            configuration supplies one)
        :param json_config: read the configuration from a json file
        :param kafka_config: kafka options (see KafkaMessageClient)
        :raises ValueError: for an empty topic name.  The original logged
            and returned, leaving a half-constructed object that failed
            later with AttributeError.
        """
        if topic_name == "":
            logger.vision_logger(level="ERROR", log_msg="Kafka Topic不能为空!")
            raise ValueError("empty Kafka topic name")
        self._topic_name = topic_name.encode("UTF-8")
        # Build the message client from json or keyword configuration.
        kafka_message_client = KafkaMessageClient(json_config=json_config, **kafka_config)
        if json_config:
            # The json configuration overrides the topic name.
            self._topic_name = kafka_message_client.get_topic().encode("UTF-8")
        self._client = kafka_message_client.get_client()
        self._topic = self._create_topic()

    def _create_topic(self) -> pykafka.Topic:
        """Look up (or create) the topic object; returns None on error."""
        try:
            return self._client.topics[self._topic_name]
        except Exception as err:
            logger.vision_logger(level="ERROR", log_msg=str(err))

    def get_producer(self, producer_type: str = 'sync', **producer_config) -> pykafka.Producer:
        """Create a producer for the topic.

        :param producer_type: 'common' (asynchronous) or 'sync'
        :return: the producer, or None when creation fails (logged)
        """
        if self._topic is None:
            logger.vision_logger(level="ERROR", log_msg="创建Producer失败")
        elif producer_type == "common":
            return self._topic.get_producer(**producer_config)
        elif producer_type == "sync":
            return self._topic.get_sync_producer(**producer_config)
        else:
            logger.vision_logger(level="ERROR", log_msg="创建Producer失败, Producer类型错误")

    def produce(self, producer: pykafka.Producer, data):
        """Serialize *data* to bytes and publish it via *producer*.

        dicts (incl. OrderedDict) are JSON-encoded, str is UTF-8 encoded,
        int/float are sent as their decimal string representation.
        Unsupported types are logged and NOT sent.
        """
        if isinstance(data, dict):
            # OrderedDict is a dict subclass, so one check covers both.
            try:
                data = json.dumps(data).encode("UTF-8")
            except Exception as err:
                logger.vision_logger(level="ERROR", log_msg=str(err))
        elif isinstance(data, str):
            data = data.encode("UTF-8")
        elif isinstance(data, (int, float)):
            # BUG FIX: bytes(n) creates n zero-filled bytes (and raises
            # TypeError for floats); encode the textual form instead.
            data = str(data).encode("UTF-8")
        else:
            # BUG FIX: the original logged the error but still attempted
            # to send the raw, unencoded object.
            logger.vision_logger(level="ERROR", log_msg="暂时不支持此类型的数据进行发送!")
            return
        # 发送数据 (send the payload)
        with producer as pd:
            pd.produce(data)
if __name__ == '__main__':
    # Smoke test against a hard-coded test broker; note the topic name is
    # (mis)used to carry an IP address here.
    p = KafkaProducer(topic_name="192.168.30.243", host_port="120.77.209.23:19001")
    p.produce(producer=p.get_producer(), data="123")
| [
"379978424@qq.com"
] | 379978424@qq.com |
865791be0b87e07cdc5e56f2b20f7a15a28bdab7 | 747f759311d404af31c0f80029e88098193f6269 | /addons/newsletter_base_contact/partner.py | 8b586562e8642bd853fcf8d68e44717197864037 | [] | no_license | sgeerish/sirr_production | 9b0d0f7804a928c0c582ddb4ccb7fcc084469a18 | 1081f3a5ff8864a31b2dcd89406fac076a908e78 | refs/heads/master | 2020-05-19T07:21:37.047958 | 2013-09-15T13:03:36 | 2013-09-15T13:03:36 | 9,648,444 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 72 | py | /home/openerp/production/extra-addons/newsletter_base_contact/partner.py | [
"geerish@omerp.net"
] | geerish@omerp.net |
a06c079bd1cdb8fbb900392ff3e8152c2958d9e3 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03549/s890781358.py | 607888d01b59816fdc141f2257c87d505e430154 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 53 | py | n,m=map(int,input().split());print((n+m*18)*100*2**m) | [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
a4225989c567f4133e52a90ae410c30dfee20c2e | 1bd2e09ce498c6bbb63e1854471941b777096bdb | /paleomix/tools/zonkey/parts/admixture.py | 8df7bd212c985e38798e8df5f83a3e60dfc8a063 | [] | no_license | muslih14/paleomix | 3f1e6f43178a132b05525b5eb296d8cf1c5d72c4 | 254fb1c4151e03217c19715a6e989dc79b1edcc5 | refs/heads/master | 2021-01-21T08:33:04.995061 | 2016-04-05T14:32:21 | 2016-04-05T14:32:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,712 | py | #!/usr/bin/python
#
# Copyright (c) 2016 Mikkel Schubert <MSchubert@snm.ku.dk>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
"""
Parsing and validation of admixture results.
"""
import collections
CUTOFF = 0.001
class AdmixtureError(RuntimeError):
    # Raised for missing, malformed, or internally inconsistent
    # ADMIXTURE output.
    pass
def read_admixture_results(filename, data, k_groups, cutoff=CUTOFF):
    """Parse an ADMIXTURE .Q table and return one entry per ancestral
    component: a [set of reference-group names with membership >= cutoff,
    proportion assigned to the focal sample '-'] pair.
    """
    key = "Group(%i)" % (k_groups,)
    # Rows follow the reference samples, with the focal sample ('-') last.
    names = tuple(data.sample_order) + ("-",)
    table = _admixture_read_results(filename, names)
    # Aborts (raises) if any reference group spans several components.
    _admixture_validate_ancestral_groups(data, table, k_groups, cutoff)
    # One [group-name set, focal-sample proportion] pair per component.
    ancestral_groups = [[set(), value] for value in table["-"]]
    for sample, row in table.iteritems():
        if sample == '-':
            continue
        group = data.samples[sample][key]
        for index, value in enumerate(row):
            if value >= cutoff:
                ancestral_groups[index][0].add(group)
    return ancestral_groups
def get_percentiles(data, sample1, sample2, nreads, k_groups, has_ts, value):
    """Bracket *value* between simulated percentile ranges.

    Looks up the simulated distributions for the sample pair at the
    closest simulated read-counts at or below ('Lower') and at or above
    ('Upper') *nreads*, and records for each the percentile interval
    containing *value*.
    """
    results = {'Sample1': sample1,
               'Sample2': sample2}

    observed = [row['NReads'] for row in data.simulations]
    at_or_below = [count for count in observed if count <= nreads]
    at_or_above = [count for count in observed if count >= nreads]

    if at_or_below:
        closest = max(at_or_below)
        rows = _select_simulations(data=data,
                                   sample1=sample1,
                                   sample2=sample2,
                                   nreads=closest,
                                   k_groups=k_groups,
                                   has_ts=has_ts)
        lower, upper = _get_percentile_range(rows, value)
        results['Lower'] = {'NReads': closest,
                            'Lower': lower,
                            'Upper': upper}

    if at_or_above:
        closest = min(at_or_above)
        rows = _select_simulations(data=data,
                                   sample1=sample1,
                                   sample2=sample2,
                                   nreads=closest,
                                   k_groups=k_groups,
                                   has_ts=has_ts)
        lower, upper = _get_percentile_range(rows, value)
        results['Upper'] = {'NReads': closest,
                            'Lower': lower,
                            'Upper': upper}

    return results
def _select_simulations(data, sample1, sample2, nreads, k_groups, has_ts):
selection = []
samples = frozenset((sample1, sample2))
for row in data.simulations:
if row['K'] != k_groups or row['HasTS'] != has_ts:
continue
elif row['NReads'] != nreads:
continue
elif frozenset((row['Sample1'], row['Sample2'])) != samples:
continue
selection.append(row)
return selection
def _get_percentile_range(selection, value):
selection = [(row['Percentile'], row['Value'])
for row in selection]
selection.sort()
lower_bound = 0.0
upper_bound = 1.0
for cur_pct, cur_value in selection:
if cur_value > value:
break
lower_bound = cur_pct
for cur_pct, cur_value in reversed(selection):
if cur_value < value:
break
upper_bound = cur_pct
return lower_bound, upper_bound
def _admixture_read_results(filename, samples):
with open(filename) as handle:
lines = handle.readlines()
if len(samples) != len(lines):
raise AdmixtureError("TODO")
result = {}
for name, line in zip(samples, lines):
result[name] = [float(value) for value in line.split()]
return result
def _admixture_validate_ancestral_groups(data, table, k_groups, cutoff):
    """Ensure each reference group maps to a single ancestral component.

    Raises AdmixtureError when members of one reference group have
    membership >= cutoff in more than one ancestral population, since
    ancestry cannot then be determined unambiguously.
    """
    key = "Group(%i)" % (k_groups,)
    # group name -> {component index: True} for components with membership
    groups = collections.defaultdict(dict)
    for sample, row in table.iteritems():
        # Skip the focal sample ('-') and anything not in the panel.
        if sample not in data.samples:
            continue
        group = data.samples[sample][key]
        for index, value in enumerate(row):
            if value >= cutoff:
                groups[group][index] = True
    mixed_groups = []
    for group, memberships in sorted(groups.iteritems()):
        count = len(memberships)
        if count > 1:
            mixed_groups.append("member(s) of reference group %s assigned to "
                                "%i ancestral populations" % (group, count))
    if mixed_groups:
        raise AdmixtureError("Inconsistent ADMIXTURE results: %s; "
                             "cannot determine ancestry!"
                             % ("; ".join(mixed_groups)))
| [
"MikkelSch@gmail.com"
] | MikkelSch@gmail.com |
4a095744e7e81eb74d3e2a32b8d9364eb64bc23f | 41608dcb12ce6b16ad70d55bf0f155c1877de3a5 | /src/Utils/Valuator/BSM.py | ae17d6a56f9da1e637fd97e0c594cc5dadf048f8 | [] | no_license | frankma/Finance | eb68567e827e9045e1f4f3baaead6757aefb5168 | c6fe293895e1c295b7625f051625ba0b64efada1 | refs/heads/master | 2021-01-17T03:21:14.146946 | 2017-11-25T16:49:39 | 2017-11-25T16:49:39 | 33,056,941 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,669 | py | import logging
from math import log, exp, sqrt
import numpy as np
from scipy.stats import norm
from src.Utils.Solver.Brent import Brent
from src.Utils.Solver.IVariateFunction import IUnivariateFunction
from src.Utils.Solver.NewtonRaphson import NewtonRaphson
from src.Utils.Types import OptionType
__author__ = 'frank.ma'
logger = logging.getLogger(__name__)
class BSM(object):
    """Scalar Black-Scholes-Merton European option pricer and Greeks.

    Conventions: s = spot, k = strike, tau = time to expiry in years,
    r = risk-free rate, q = continuous dividend yield, sig = volatility;
    opt_type.value is +1 for calls and -1 for puts.
    """

    @staticmethod
    def calc_d1(s: float, k: float, tau: float, r: float, q: float, sig: float):
        # d1 auxiliary term of the Black-Scholes formula.
        return (log(s / k) + (r - q + 0.5 * sig ** 2) * tau) / (sig * sqrt(tau))

    @staticmethod
    def calc_d2(s: float, k: float, tau: float, r: float, q: float, sig: float):
        # d2 auxiliary term; equals d1 - sig * sqrt(tau).
        return (log(s / k) + (r - q - 0.5 * sig ** 2) * tau) / (sig * sqrt(tau))

    @staticmethod
    def price(s: float, k: float, tau: float, r: float, q: float, sig: float, opt_type: OptionType):
        """Discounted Black-Scholes price of a European call or put."""
        eta = opt_type.value
        d1 = BSM.calc_d1(s, k, tau, r, q, sig)
        d2 = BSM.calc_d2(s, k, tau, r, q, sig)
        return eta * (exp(-q * tau) * s * norm.cdf(eta * d1) - exp(-r * tau) * k * norm.cdf(eta * d2))

    @staticmethod
    def imp_vol(s: float, k: float, tau: float, r: float, q: float, price: float, opt_type: OptionType, method='Brent'):
        """Invert the pricing formula for the implied volatility.

        method is 'Brent' (bracketing, robust) or 'Newton-Raphson'
        (vega-based, faster but needs a reasonable starting point).
        """

        class PriceFunction(IUnivariateFunction):
            # Root function: model price minus target price.
            def evaluate(self, x):
                return BSM.price(s, k, tau, r, q, x, opt_type) - price

        class VegaFunction(IUnivariateFunction):
            # Derivative of the root function w.r.t. volatility.
            def evaluate(self, x):
                return BSM.vega(s, k, tau, r, q, x)

        pf = PriceFunction()
        vf = VegaFunction()

        if method == 'Brent':
            # Bracket the volatility between 0.01% and 1000%.
            bt = Brent(pf, 1e-4, 10.0)
            vol = bt.solve()
        elif method == 'Newton-Raphson':
            # 0.88 is an arbitrary but usually convergent starting vol.
            nr = NewtonRaphson(pf, vf, 0.88)
            vol = nr.solve()
        else:
            raise ValueError('Unrecognized optimization method %s.' % method)

        return vol

    @staticmethod
    def delta(s: float, k: float, tau: float, r: float, q: float, sig: float, opt_type: OptionType):
        # dPrice/dSpot.
        eta = opt_type.value
        d1 = BSM.calc_d1(s, k, tau, r, q, sig)
        return eta * exp(-q * tau) * norm.cdf(eta * d1)

    @staticmethod
    def delta_k(s: float, k: float, tau: float, r: float, q: float, sig: float, opt_type: OptionType):
        # dPrice/dStrike.
        eta = opt_type.value
        d2 = BSM.calc_d2(s, k, tau, r, q, sig)
        return -eta * exp(-r * tau) * norm.cdf(eta * d2)

    @staticmethod
    def gamma(s: float, k: float, tau: float, r: float, q: float, sig: float):
        # d2Price/dSpot2 (identical for calls and puts).
        d1 = BSM.calc_d1(s, k, tau, r, q, sig)
        return exp(-q * tau) * norm.pdf(d1) / s / sig / sqrt(tau)

    @staticmethod
    def gamma_k(s: float, k: float, tau: float, r: float, q: float, sig: float):
        # d2Price/dStrike2.
        d2 = BSM.calc_d2(s, k, tau, r, q, sig)
        return exp(-r * tau) * norm.pdf(d2) / k / sig / sqrt(tau)

    @staticmethod
    def vega(s: float, k: float, tau: float, r: float, q: float, sig: float):
        # dPrice/dVol (identical for calls and puts).
        d1 = BSM.calc_d1(s, k, tau, r, q, sig)
        return s * exp(-q * tau) * norm.pdf(d1) * sqrt(tau)

    @staticmethod
    def theta(s: float, k: float, tau: float, r: float, q: float, sig: float, opt_type: OptionType):
        # dPrice/dTime (calendar decay) with dividend and rate carry terms.
        eta = opt_type.value
        d1 = BSM.calc_d1(s, k, tau, r, q, sig)
        d2 = BSM.calc_d2(s, k, tau, r, q, sig)
        term1 = -exp(-q * tau) * s * norm.pdf(d1) * sig / 2.0 / sqrt(tau)
        term2 = eta * q * s * exp(-q * tau) * norm.cdf(eta * d1)
        term3 = - eta * r * k * exp(-r * tau) * norm.cdf(eta * d2)
        return term1 + term2 + term3

    @staticmethod
    def rho(s: float, k: float, tau: float, r: float, q: float, sig: float, opt_type: OptionType):
        # dPrice/dRate.
        # BUG FIX: the strike factor k was missing; the standard formula is
        # eta * k * tau * exp(-r*tau) * N(eta*d2), as already implemented
        # in the vectorized BSMVec.rho.
        eta = opt_type.value
        d2 = BSM.calc_d2(s, k, tau, r, q, sig)
        return eta * k * tau * exp(-r * tau) * norm.cdf(eta * d2)
class BSMVec(BSM):
    """Vectorized (numpy) Black-Scholes-Merton pricer and Greeks.

    Mirrors the scalar BSM class with s, k and sig allowed to be numpy
    arrays; tau, r and q remain scalars.
    """

    @staticmethod
    def calc_d1(s: np.array, k: np.array, tau: float, r: float, q: float, sig: np.array):
        # Elementwise d1 term.
        return (np.log(s / k) + (r - q + 0.5 * sig ** 2) * tau) / (sig * sqrt(tau))

    @staticmethod
    def calc_d2(s: np.array, k: np.array, tau: float, r: float, q: float, sig: np.array):
        # Elementwise d2 term; equals d1 - sig * sqrt(tau).
        return (np.log(s / k) + (r - q - 0.5 * sig ** 2) * tau) / (sig * sqrt(tau))

    @staticmethod
    def price(s: np.array, k: np.array, tau: float, r: float, q: float, sig: np.array, opt_type: OptionType):
        """Option prices; at/near expiry falls back to intrinsic payoff."""
        if tau < 1e-6:
            if tau < 0.0:
                logger.warning('negative tau %r is provided in pricing function, return payoff.' % tau)
            return BSMVec.payoff(s, k, opt_type)
        else:
            eta = opt_type.value
            d_1 = BSMVec.calc_d1(s, k, tau, r, q, sig)
            d_2 = BSMVec.calc_d2(s, k, tau, r, q, sig)
            return eta * (exp(-q * tau) * s * norm.cdf(eta * d_1) - exp(-r * tau) * k * norm.cdf(eta * d_2))

    @staticmethod
    def payoff(s: np.array, k: np.array, opt_type: OptionType):
        # Intrinsic value max(eta * (s - k), 0), elementwise.
        eta = opt_type.value
        return np.maximum(eta * (s - k), np.zeros(s.__len__()))

    @staticmethod
    def delta(s: np.array, k: np.array, tau: float, r: float, q: float, sig: np.array, opt_type: OptionType):
        """dPrice/dSpot; zeros at/after expiry."""
        if tau < 1e-6:
            if tau < 0.0:
                logger.warning('negative tau %r is provided in delta function, return zeros.' % tau)
            return np.zeros(s.__len__())
        else:
            eta = opt_type.value
            d_1 = BSMVec.calc_d1(s, k, tau, r, q, sig)
            return eta * exp(-q * tau) * norm.cdf(eta * d_1)

    @staticmethod
    def gamma(s: np.array, k: np.array, tau: float, r: float, q: float, sig: np.array):
        # d2Price/dSpot2 (no tau guard; caller must ensure tau > 0).
        d_1 = BSMVec.calc_d1(s, k, tau, r, q, sig)
        return exp(-q * tau) * norm.pdf(d_1) / (s * sig * sqrt(tau))

    @staticmethod
    def vega(s: np.array, k: np.array, tau: float, r: float, q: float, sig: np.array):
        # dPrice/dVol.
        d_1 = BSMVec.calc_d1(s, k, tau, r, q, sig)
        return s * exp(-q * tau) * norm.pdf(d_1) * sqrt(tau)

    @staticmethod
    def theta(s: np.array, k: np.array, tau: float, r: float, q: float, sig: np.array, opt_type: OptionType):
        """dPrice/dTime, now matching the scalar BSM.theta.

        BUG FIX: the previous version omitted sig in the first term,
        dropped the minus sign on the rate term, and multiplied the
        three terms together instead of summing them.
        """
        eta = opt_type.value
        d_1 = BSMVec.calc_d1(s, k, tau, r, q, sig)
        d_2 = BSMVec.calc_d2(s, k, tau, r, q, sig)
        term1 = -exp(-q * tau) * s * norm.pdf(d_1) * sig / (2.0 * sqrt(tau))
        term2 = eta * q * s * exp(-q * tau) * norm.cdf(eta * d_1)
        term3 = -eta * r * k * exp(-r * tau) * norm.cdf(eta * d_2)
        return term1 + term2 + term3

    @staticmethod
    def rho(s: np.array, k: np.array, tau: float, r: float, q: float, sig: np.array, opt_type: OptionType):
        # dPrice/dRate (per unit rate move).
        eta = opt_type.value
        d_2 = BSMVec.calc_d2(s, k, tau, r, q, sig)
        return eta * k * tau * exp(-r * tau) * norm.cdf(eta * d_2)
| [
"guang.y.ma@gmail.com"
] | guang.y.ma@gmail.com |
fd13356be0081170ae569dea23537c4688c513f0 | c77c10d8c6ef24f0dfa64a9824d584d723711f6c | /python/rest-client-samples/image/image_tagging_batch.py | eef3a11ce81454b6c0fc4dfdd5dcde8d206c2c14 | [
"Apache-2.0"
] | permissive | zhd/ais-sdk | daf2e3b3e61740e105cbdbd709a24a7ffd48ead8 | 9976a9595dd72d189b5f63e511e055251ab4e61f | refs/heads/master | 2020-04-02T01:59:44.053044 | 2018-10-20T08:47:24 | 2018-10-20T08:47:24 | 153,885,712 | 1 | 0 | Apache-2.0 | 2018-10-20T08:34:38 | 2018-10-20T08:34:37 | null | UTF-8 | Python | false | false | 2,112 | py | # -*- coding:utf-8 -*-
import sys
import urllib2
import json
import ssl
import base64
from urllib2 import HTTPError, URLError
from gettoken import get_token
reload(sys)
sys.setdefaultencoding('utf8')
def download_url_base64(url):
    # Fetch *url* and return its body base64-encoded, or "" on any
    # non-200 response (an error line is printed).
    try:
        r = urllib2.urlopen(url)
    except HTTPError, e:
        resp = e.read()
        status_code = e.code
    except URLError, e:
        # NOTE(review): URLError has no .read()/.code attributes, so this
        # branch would itself raise AttributeError if taken -- confirm.
        resp = e.read()
        status_code = e.code
    else:
        status_code = r.code
        resp = r.read()
    if status_code != 200:
        print "Error get url ", url, status_code
        return ""
    return base64.b64encode(resp)
def image_tagging(token, image_base64):
    # POST a base64-encoded image to the HUAWEI Cloud image-tagging
    # service and return the raw JSON response (or error body) as text.
    _url = 'https://ais.cn-north-1.myhuaweicloud.com/v1.0/image/tagging'
    # language/limit/threshold select the tag language, the maximum
    # number of tags, and the minimum confidence respectively.
    _data = {
        "image": image_base64,
        "language": "zh",
        "limit": 10,
        "threshold": 10.0
    }
    kreq = urllib2.Request( url = _url)
    kreq.add_header('Content-Type', 'application/json')
    # Authentication uses the IAM token obtained via get_token().
    kreq.add_header('X-Auth-Token', token )
    kreq.add_data(json.dumps(_data))
    resp = None
    status_code = None
    try:
        r = urllib2.urlopen(kreq)
    except HTTPError, e:
        resp = e.read()
        status_code = e.code
    except URLError, e:
        # NOTE(review): URLError has no .read()/.code attributes; this
        # branch would raise AttributeError if taken -- confirm.
        resp = e.read()
        status_code = e.code
    else:
        status_code = r.code
        resp = r.read()
    return resp
def url_image_tagging(token, url):
    # Download the image at *url*, tag it, and print "<url>\t<result>".
    image = download_url_base64(url)
    if len(image) == 0:
        # Download failed; emit a sentinel so batch output stays aligned.
        print "%s\t%s" %(url, "ERRORdownload")
        return
    resp = image_tagging(token, image)
    print "%s\t%s" %(url, resp)
if __name__ == "__main__":
    # Replace the XXX placeholders with real HUAWEI Cloud credentials.
    user_name = "XXX"
    password = "XXX"
    account_name = "XXX"
    # File with one image URL per line, given as the first CLI argument.
    url_file = sys.argv[1]
    # token expire in 24hour
    token = get_token(user_name, password, account_name)
    if len(token) == 0:
        print "Error username password"
        sys.exit(-1)
    # test urls in file
    # Results are printed to stdout, tab-separated, one line per URL.
    for line in open(url_file):
        url = line.strip()
        url_image_tagging(token, url)
    ## test a sigle url
    #url_image_tagging(token, 'http://www.example.com/example.jpg')
| [
"17091412@qq.com"
] | 17091412@qq.com |
66a0abc924d8b12ab7931ec064ca79c6f391ae16 | 795df757ef84073c3adaf552d5f4b79fcb111bad | /r8lib/r8_mod.py | 658d59050a45420aa01faa0175bd7e9e59f4f220 | [] | no_license | tnakaicode/jburkardt-python | 02cb2f9ba817abf158fc93203eb17bf1cb3a5008 | 1a63f7664e47d6b81c07f2261b44f472adc4274d | refs/heads/master | 2022-05-21T04:41:37.611658 | 2022-04-09T03:31:00 | 2022-04-09T03:31:00 | 243,854,197 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,486 | py | #! /usr/bin/env python3
#
def r8_mod ( x, y ):

#*****************************************************************************80
#
## R8_MOD returns the remainder of R8 division.
#
#  The remainder carries the sign of X, and abs(remainder) < abs(Y),
#  so that X == Y * ( ( X - remainder ) / Y ) + remainder.
#
#  Example: r8_mod(107, 50) == 7, r8_mod(-107, 50) == -7.
#
#  Licensing:
#
#    This code is distributed under the GNU LGPL license.
#
#  Parameters:
#
#    Input, real X, the number to be divided.
#
#    Input, real Y, the number that divides X.  Must be nonzero.
#
#    Output, real VALUE, the remainder when X is divided by Y.
#
  from sys import exit

  # Division by zero is a hard error, matching the library's convention
  # of printing a diagnostic and terminating.
  if ( y == 0.0 ):
    print ( '' )
    print ( 'R8_MOD - Fatal error!' )
    print ( '  R8_MOD ( X, Y ) called with Y = 0.' )
    exit ( 'R8_MOD - Fatal error!' )

  # Truncated division gives a remainder whose magnitude is below abs(y).
  remainder = x - int ( x / y ) * y

  # Force the remainder to share the sign of x when it does not already.
  if ( remainder != 0.0 and ( x < 0.0 ) != ( remainder < 0.0 ) ):
    if ( remainder < 0.0 ):
      remainder = remainder + abs ( y )
    else:
      remainder = remainder - abs ( y )

  return remainder
def r8_mod_test ( ):

#*****************************************************************************80
#
## R8_MOD_TEST tests R8_MOD.
#
#  Compares R8_MOD against Python's % operator on random values; the two
#  differ in sign convention when X and Y have opposite signs.
#
#  Licensing:
#
#    This code is distributed under the GNU LGPL license.
#
#  Author:
#
#    John Burkardt
#
  import platform
  from r8_uniform_ab import r8_uniform_ab

  test_num = 10
  x_lo = -10.0
  x_hi = +10.0
  seed = 123456789

  print ( '' )
  print ( 'R8_MOD_TEST' )
  print ( '  Python version: %s' % ( platform.python_version ( ) ) )
  print ( '  R8_MOD returns the remainder after division.' )
  print ( '' )
  print ( '  X Y (X%Y) R8_MOD(X,Y)' )
  print ( '' )

  # Draw random (x, y) pairs, threading the seed through each call.
  for _ in range ( 0, test_num ):
    x, seed = r8_uniform_ab ( x_lo, x_hi, seed )
    y, seed = r8_uniform_ab ( x_lo, x_hi, seed )
    builtin_mod = x % y
    library_mod = r8_mod ( x, y )
    print ( '  %12f %12f %12f %12f' % ( x, y, builtin_mod, library_mod ) )
#
#  Terminate.
#
  print ( '' )
  print ( 'R8_MOD_TEST' )
  print ( '  Normal end of execution.' )
  return
if ( __name__ == '__main__' ):
  from timestamp import timestamp
  # Bracket the demonstration run with timestamps.
  timestamp ( )
  r8_mod_test ( )
  timestamp ( )
| [
"tnakaicode@gmail.com"
] | tnakaicode@gmail.com |
1019622c1a9cba7665d3b14f47a713a3ead043b3 | e10a6d844a286db26ef56469e31dc8488a8c6f0e | /supcon/utils_test.py | e218abb01e8712e15a6b7250dd27baff5ee6dc80 | [
"Apache-2.0",
"CC-BY-4.0"
] | permissive | Jimmy-INL/google-research | 54ad5551f97977f01297abddbfc8a99a7900b791 | 5573d9c5822f4e866b6692769963ae819cb3f10d | refs/heads/master | 2023-04-07T19:43:54.483068 | 2023-03-24T16:27:28 | 2023-03-24T16:32:17 | 282,682,170 | 1 | 0 | Apache-2.0 | 2020-07-26T15:50:32 | 2020-07-26T15:50:31 | null | UTF-8 | Python | false | false | 27,833 | py | # coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for supcon.utils."""
from absl.testing import parameterized
import numpy as np
import tensorflow.compat.v1 as tf
from supcon import blocks
from supcon import enums
from supcon import utils
class CreateTrainOpTest(tf.test.TestCase):
  """Graph-mode tests for utils.create_train_op."""

  def setUp(self):
    super(CreateTrainOpTest, self).setUp()
    np.random.seed(0)
    # Create an easy training set:
    self._inputs = np.random.rand(16, 4).astype(np.float32)
    self._labels = np.random.randint(0, 2, size=(16, 1)).astype(np.float32)

  def batchnorm_classifier(self, inputs):
    # Minimal model containing batch norm, so the graph gets UPDATE_OPS.
    inputs = blocks.batch_norm()(inputs, True)
    return tf.keras.layers.Dense(1, activation=tf.nn.sigmoid)(inputs)

  def testTrainOpInCollection(self):
    """The returned op must be registered in the TRAIN_OP collection."""
    with tf.Graph().as_default():
      tf_inputs = tf.constant(self._inputs, dtype=tf.dtypes.float32)
      tf_labels = tf.constant(self._labels, dtype=tf.dtypes.float32)
      tf_predictions = self.batchnorm_classifier(tf_inputs)
      loss = tf.losses.log_loss(tf_labels, tf_predictions)
      optimizer = tf.train.GradientDescentOptimizer(learning_rate=1.0)
      train_op = utils.create_train_op(loss, optimizer)
      # Make sure the training op was recorded in the proper collection
      self.assertIn(train_op, tf.get_collection(tf.GraphKeys.TRAIN_OP))

  def testUseUpdateOps(self):
    """By default the train op also runs batch-norm UPDATE_OPS."""
    with tf.Graph().as_default():
      tf_inputs = tf.constant(self._inputs, dtype=tf.dtypes.float32)
      tf_labels = tf.constant(self._labels, dtype=tf.dtypes.float32)
      expected_mean = np.mean(self._inputs, axis=(0))
      expected_var = np.var(self._inputs, axis=(0))
      tf_predictions = self.batchnorm_classifier(tf_inputs)
      loss = tf.losses.log_loss(tf_labels, tf_predictions)
      optimizer = tf.train.GradientDescentOptimizer(learning_rate=1.0)
      train_op = utils.create_train_op(loss, optimizer)
      moving_mean = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES,
                                      '.*moving_mean:')[0]
      moving_variance = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES,
                                          '.*moving_variance:')[0]
      with self.cached_session() as session:
        # Initialize all variables
        session.run(tf.global_variables_initializer())
        mean, variance = session.run([moving_mean, moving_variance])
        # After initialization moving_mean == 0 and moving_variance == 1.
        self.assertAllClose(mean, [0] * 4)
        self.assertAllClose(variance, [1] * 4)
        for _ in range(200):
          session.run(train_op)
        mean = moving_mean.eval()
        variance = moving_variance.eval()
        # After 10 updates with decay 0.1 moving_mean == expected_mean and
        # moving_variance == expected_var.
        self.assertAllClose(mean, expected_mean)
        self.assertAllClose(variance, expected_var)

  def testEmptyUpdateOps(self):
    """Passing update_ops=[] must leave the moving statistics untouched."""
    with tf.Graph().as_default():
      tf_inputs = tf.constant(self._inputs, dtype=tf.dtypes.float32)
      tf_labels = tf.constant(self._labels, dtype=tf.dtypes.float32)
      tf_predictions = self.batchnorm_classifier(tf_inputs)
      loss = tf.losses.log_loss(tf_labels, tf_predictions)
      optimizer = tf.train.GradientDescentOptimizer(learning_rate=1.0)
      train_op = utils.create_train_op(loss, optimizer, update_ops=[])
      moving_mean = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES,
                                      '.*moving_mean:')[0]
      moving_variance = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES,
                                          '.*moving_variance:')[0]
      with self.cached_session() as session:
        # Initialize all variables
        session.run(tf.global_variables_initializer())
        mean, variance = session.run([moving_mean, moving_variance])
        # After initialization moving_mean == 0 and moving_variance == 1.
        self.assertAllClose(mean, [0] * 4)
        self.assertAllClose(variance, [1] * 4)
        for _ in range(10):
          session.run(train_op)
        mean = moving_mean.eval()
        variance = moving_variance.eval()
        # Since we skip update_ops the moving_vars are not updated.
        self.assertAllClose(mean, [0] * 4)
        self.assertAllClose(variance, [1] * 4)

  def testGlobalStepIsIncrementedByDefault(self):
    """Each train-op run increments the default global step by one."""
    with tf.Graph().as_default():
      tf_inputs = tf.constant(self._inputs, dtype=tf.dtypes.float32)
      tf_labels = tf.constant(self._labels, dtype=tf.dtypes.float32)
      tf_predictions = self.batchnorm_classifier(tf_inputs)
      loss = tf.losses.log_loss(tf_labels, tf_predictions)
      optimizer = tf.train.GradientDescentOptimizer(learning_rate=1.0)
      train_op = utils.create_train_op(loss, optimizer)
      global_step = tf.train.get_or_create_global_step()
      with self.cached_session() as session:
        # Initialize all variables
        session.run(tf.global_variables_initializer())
        for _ in range(10):
          session.run(train_op)
        # After 10 updates global_step should be 10.
        self.assertAllClose(global_step.eval(), 10)

  def testGlobalStepNotIncrementedWhenSetToNone(self):
    """global_step=None opts out of the global-step increment."""
    with tf.Graph().as_default():
      tf_inputs = tf.constant(self._inputs, dtype=tf.dtypes.float32)
      tf_labels = tf.constant(self._labels, dtype=tf.dtypes.float32)
      tf_predictions = self.batchnorm_classifier(tf_inputs)
      loss = tf.losses.log_loss(tf_labels, tf_predictions)
      optimizer = tf.train.GradientDescentOptimizer(learning_rate=1.0)
      train_op = utils.create_train_op(loss, optimizer, global_step=None)
      global_step = tf.train.get_or_create_global_step()
      with self.cached_session() as session:
        # Initialize all variables
        session.run(tf.global_variables_initializer())
        for _ in range(10):
          session.run(train_op)
        # Since train_op don't use global_step it shouldn't change.
        self.assertAllClose(global_step.eval(), 0)
def construct_tests_with_dtypes():
  """Build (test_name, dtype) pairs for parameterized.named_parameters."""
  dtypes_names = {
      tf.half: 'half',
      tf.float32: 'float32',
      tf.float64: 'float64',
  }
  # Swap each (dtype, name) item into the (name, dtype) order that
  # named_parameters expects; returns a lazy map object.
  return map(lambda x: (x[1], x[0]), dtypes_names.items())
class LARSOptimizerTest(tf.test.TestCase, parameterized.TestCase):
  """Tests for utils.LARSOptimizer across half/float32/float64 dtypes.

  LARS scales each layer's step by ||w|| / ||g||, which is where the
  sqrt terms in the expected values below come from.
  """

  @parameterized.named_parameters(*construct_tests_with_dtypes())
  def testBasic(self, dtype):
    with self.cached_session():
      var0 = tf.Variable([1.0, 2.0], dtype=dtype)
      var1 = tf.Variable([3.0, 4.0], dtype=dtype)
      grads0 = tf.constant([0.1, 0.1], dtype=dtype)
      grads1 = tf.constant([0.01, 0.01], dtype=dtype)
      optimizer = utils.LARSOptimizer(3.0)
      lars_op = optimizer.apply_gradients(
          zip([grads0, grads1], [var0, var1]))
      tf.global_variables_initializer().run()
      # Fetch params to validate initial values
      self.assertAllCloseAccordingToType([1.0, 2.0], self.evaluate(var0))
      self.assertAllCloseAccordingToType([3.0, 4.0], self.evaluate(var1))
      # Run 1 step of sgd
      lars_op.run()
      # Validate updated params
      self.assertAllCloseAccordingToType(
          [1.0 - 3.0 * (0.001 * (np.sqrt(5.) / np.sqrt(2.))),
           2.0 - 3.0 * (0.001 * (np.sqrt(5.) / np.sqrt(2.)))],
          self.evaluate(var0))
      self.assertAllCloseAccordingToType(
          [3.0 - 3.0 * (0.001 * (5. / np.sqrt(2.))),
           4.0 - 3.0 * (0.001 * (5. / np.sqrt(2.)))],
          self.evaluate(var1))
      # LARS should create no extra slot variables.
      self.assertEmpty(list(optimizer.variables()))

  @parameterized.named_parameters(*construct_tests_with_dtypes())
  def testBasicCallableParams(self, dtype):
    # The learning rate may be supplied as a zero-argument callable.
    with self.cached_session():
      var0 = tf.Variable([1.0, 2.0], dtype=dtype)
      var1 = tf.Variable([3.0, 4.0], dtype=dtype)
      grads0 = tf.constant([0.1, 0.1], dtype=dtype)
      grads1 = tf.constant([0.01, 0.01], dtype=dtype)
      lr = lambda: 3.0
      lars_op = utils.LARSOptimizer(lr).apply_gradients(
          zip([grads0, grads1], [var0, var1]))
      tf.global_variables_initializer().run()
      # Fetch params to validate initial values
      self.assertAllCloseAccordingToType([1.0, 2.0], self.evaluate(var0))
      self.assertAllCloseAccordingToType([3.0, 4.0], self.evaluate(var1))
      # Run 1 step of sgd
      lars_op.run()
      # Validate updated params
      self.assertAllCloseAccordingToType(
          [1.0 - 3.0 * (0.001 * (np.sqrt(5.) / np.sqrt(2.))),
           2.0 - 3.0 * (0.001 * (np.sqrt(5.) / np.sqrt(2.)))],
          self.evaluate(var0))
      self.assertAllCloseAccordingToType(
          [3.0 - 3.0 * (0.001 * (5. / np.sqrt(2.))),
           4.0 - 3.0 * (0.001 * (5. / np.sqrt(2.)))],
          self.evaluate(var1))

  @parameterized.named_parameters(*construct_tests_with_dtypes())
  def testTensorLearningRate(self, dtype):
    # The learning rate may also be supplied as a tensor.
    with self.cached_session():
      var0 = tf.Variable([1.0, 2.0], dtype=dtype)
      var1 = tf.Variable([3.0, 4.0], dtype=dtype)
      grads0 = tf.constant([0.1, 0.1], dtype=dtype)
      grads1 = tf.constant([0.01, 0.01], dtype=dtype)
      lrate = tf.constant(3.0)
      lars_op = utils.LARSOptimizer(
          lrate).apply_gradients(zip([grads0, grads1], [var0, var1]))
      tf.global_variables_initializer().run()
      # Fetch params to validate initial values
      self.assertAllCloseAccordingToType([1.0, 2.0], self.evaluate(var0))
      self.assertAllCloseAccordingToType([3.0, 4.0], self.evaluate(var1))
      # Run 1 step of sgd
      lars_op.run()
      # Validate updated params
      self.assertAllCloseAccordingToType(
          [1.0 - 3.0 * (0.001 * (np.sqrt(5.) / np.sqrt(2.))),
           2.0 - 3.0 * (0.001 * (np.sqrt(5.) / np.sqrt(2.)))],
          self.evaluate(var0))
      self.assertAllCloseAccordingToType(
          [3.0 - 3.0 * (0.001 * (5. / np.sqrt(2.))),
           4.0 - 3.0 * (0.001 * (5. / np.sqrt(2.)))],
          self.evaluate(var1))

  @parameterized.named_parameters(*construct_tests_with_dtypes())
  def testGradWrtRef(self, dtype):
    # Gradients of a plain sum w.r.t. each variable are all ones.
    with self.cached_session():
      opt = utils.LARSOptimizer(3.0)
      values = [1.0, 3.0]
      vars_ = [tf.Variable([v], dtype=dtype) for v in values]
      grads_and_vars = opt.compute_gradients(vars_[0] + vars_[1], vars_)
      tf.global_variables_initializer().run()
      for grad, _ in grads_and_vars:
        self.assertAllCloseAccordingToType([1.0], self.evaluate(grad))

  @parameterized.named_parameters(*construct_tests_with_dtypes())
  def testWithGlobalStep(self, dtype):
    # apply_gradients with a global_step must increment it once per step.
    with self.cached_session():
      global_step = tf.Variable(0, trainable=False)
      var0 = tf.Variable([1.0, 2.0], dtype=dtype)
      var1 = tf.Variable([3.0, 4.0], dtype=dtype)
      grads0 = tf.constant([0.1, 0.1], dtype=dtype)
      grads1 = tf.constant([0.01, 0.01], dtype=dtype)
      lars_op = utils.LARSOptimizer(3.0).apply_gradients(
          zip([grads0, grads1], [var0, var1]), global_step=global_step)
      tf.global_variables_initializer().run()
      # Fetch params to validate initial values
      self.assertAllCloseAccordingToType([1.0, 2.0], self.evaluate(var0))
      self.assertAllCloseAccordingToType([3.0, 4.0], self.evaluate(var1))
      # Run 1 step of sgd
      lars_op.run()
      # Validate updated params and global_step
      self.assertAllCloseAccordingToType(
          [1.0 - 3.0 * (0.001 * (np.sqrt(5.) / np.sqrt(2.))),
           2.0 - 3.0 * (0.001 * (np.sqrt(5.) / np.sqrt(2.)))],
          self.evaluate(var0))
      self.assertAllCloseAccordingToType(
          [3.0 - 3.0 * (0.001 * (5. / np.sqrt(2.))),
           4.0 - 3.0 * (0.001 * (5. / np.sqrt(2.)))],
          self.evaluate(var1))
      self.assertAllCloseAccordingToType(1, self.evaluate(global_step))
class CrossReplicaConcatTest(tf.test.TestCase, parameterized.TestCase):
  """TPU tests for utils.cross_replica_concat on a two-core topology."""

  def testCrossReplicaConcat(self):
    # Test takes tensor_to_communicate,
    # splits it amongst the two cores (1x1 topology),
    # then asserts each core returns the complete combined tensor.
    total_input_length = 8
    num_cores = 2
    # Initialize variables
    numpy_tensor_to_communicate = np.arange(total_input_length, dtype=np.int32)
    tensor_to_communicate = tf.convert_to_tensor(numpy_tensor_to_communicate)
    replica0_input, replica1_input = tf.split(
        tensor_to_communicate, num_cores, 0)
    # Create TPU operations
    tpu_compute_function = tf.tpu.replicate(
        utils.cross_replica_concat,
        [[replica0_input], [replica1_input]])
    with self.cached_session() as session:
      session.run(tf.tpu.initialize_system())
      concat_result = session.run(tpu_compute_function)
      # Concatenation result has shape [num_cores, 1, total_input_length]
      self.assertAllEqual(concat_result[0][0], tensor_to_communicate)
      self.assertAllEqual(concat_result[1][0], tensor_to_communicate)
      session.run(tf.tpu.shutdown_system())

  @parameterized.named_parameters(
      ('Scalar', tuple()),
      ('1D', (8, 4)),
      ('2D', (6, 6)),
      ('3D', (8, 8, 3)))
  def testCrossReplicaConcatBatchedTensor(self, tensor_shape):
    # Test takes tensor_to_communicate,
    # splits it amongst the two cores (1x1 topology),
    # then asserts each core returns the complete combined tensor.
    batch_size_per_core = 4
    num_cores = 2
    # Initialize variables
    numpy_tensor_to_communicate = np.zeros(
        [batch_size_per_core * num_cores] + list(tensor_shape),
        dtype=np.int32)
    tensor_to_communicate = tf.convert_to_tensor(numpy_tensor_to_communicate)
    replica0_input, replica1_input = tf.split(
        tensor_to_communicate, num_cores, 0)
    # Create TPU operations
    tpu_compute_function = tf.tpu.replicate(
        utils.cross_replica_concat,
        [[replica0_input], [replica1_input]])
    with self.cached_session() as session:
      session.run(tf.tpu.initialize_system())
      concat_result = session.run(tpu_compute_function)
      # Concatenation result has shape [num_cores, 1] +
      # [batch_size * num_cores] + `tensor_shape`
      self.assertAllEqual(concat_result[0][0], tensor_to_communicate)
      self.assertAllEqual(concat_result[1][0], tensor_to_communicate)
      session.run(tf.tpu.shutdown_system())
class BuildLearningRateScheduleTest(tf.test.TestCase, parameterized.TestCase):
  """Tests for utils.build_learning_rate_schedule for each DecayType."""

  def testZeroEpochSchedule(self):
    # A schedule whose warmup/decay window is empty is the constant LR.
    global_step = tf.placeholder_with_default(0, [], 'global_step')
    learning_rate = utils.build_learning_rate_schedule(
        learning_rate=1.,
        decay_type=enums.DecayType.EXPONENTIAL,
        warmup_start_epoch=24,
        max_learning_rate_epoch=24,
        decay_end_epoch=24,
        global_step=global_step,
        steps_per_epoch=100)
    self.assertEqual(1., learning_rate)

  def testZeroEpochScheduleWithWarmup(self):
    # Warmup inside a zero-length schedule is contradictory and must raise.
    global_step = tf.placeholder_with_default(0, [], 'global_step')
    with self.assertRaises(ValueError):
      utils.build_learning_rate_schedule(
          learning_rate=1.,
          decay_type=enums.DecayType.EXPONENTIAL,
          warmup_start_epoch=12,
          max_learning_rate_epoch=24,
          decay_end_epoch=24,
          global_step=global_step,
          steps_per_epoch=100)

  # Defaults baked into the exponential schedule under test.
  DEFAULT_EPOCHS_PER_DECAY = 2.4
  DEFAULT_DECAY_FACTOR = 0.97

  @parameterized.parameters(range(1, 10))
  def testExponentialDecayWithoutWarmup(self, decay_index):
    """LR drops by DEFAULT_DECAY_FACTOR at each staircase boundary."""
    global_step = tf.placeholder_with_default(0, [], 'global_step')
    learning_rate = utils.build_learning_rate_schedule(
        learning_rate=1.,
        decay_type=enums.DecayType.EXPONENTIAL,
        warmup_start_epoch=0,
        max_learning_rate_epoch=0,
        decay_end_epoch=24,
        global_step=global_step,
        steps_per_epoch=100)
    with self.cached_session() as sess:
      initial_learning_rate = sess.run(
          learning_rate, feed_dict={global_step: 0})
      self.assertAlmostEqual(1., initial_learning_rate)
      # Probe the LR immediately before and after the decay_index-th drop.
      staircase_drop_step = 100 * decay_index * self.DEFAULT_EPOCHS_PER_DECAY
      before_staircase_drop_learning_rate = sess.run(
          learning_rate, feed_dict={global_step: staircase_drop_step})
      after_staircase_drop_learning_rate = sess.run(
          learning_rate, feed_dict={global_step: staircase_drop_step + 1})
      self.assertAlmostEqual(
          self.DEFAULT_DECAY_FACTOR**(
              np.floor(staircase_drop_step /
                       (self.DEFAULT_EPOCHS_PER_DECAY * 100)) - 1),
          before_staircase_drop_learning_rate,
          places=5)
      self.assertAlmostEqual(
          self.DEFAULT_DECAY_FACTOR**(np.floor(
              staircase_drop_step / (self.DEFAULT_EPOCHS_PER_DECAY * 100))),
          after_staircase_drop_learning_rate,
          places=5)
      final_learning_rate = sess.run(
          learning_rate, feed_dict={global_step: 2400})
      self.assertAlmostEqual(
          self.DEFAULT_DECAY_FACTOR**(np.floor(
              2400 / (self.DEFAULT_EPOCHS_PER_DECAY * 100)) - 1.),
          final_learning_rate,
          places=5)

  def testExponentialDecayWithWarmup(self):
    """Linear warmup for 5 epochs, then exponential decay to epoch 10."""
    global_step = tf.placeholder_with_default(0, [], 'global_step')
    learning_rate = utils.build_learning_rate_schedule(
        learning_rate=1.,
        decay_type=enums.DecayType.EXPONENTIAL,
        warmup_start_epoch=0,
        max_learning_rate_epoch=5,
        decay_end_epoch=10,
        global_step=global_step,
        steps_per_epoch=100)
    with self.cached_session() as sess:
      initial_learning_rate = sess.run(
          learning_rate, feed_dict={global_step: 0})
      self.assertAlmostEqual(0., initial_learning_rate)
      max_learning_rate = sess.run(
          learning_rate, feed_dict={global_step: 500})
      self.assertAlmostEqual(1., max_learning_rate)
      # Warmup is linear, so the midpoint is half the peak LR.
      intermediate_warmup_learning_rate = sess.run(
          learning_rate, feed_dict={global_step: 250})
      self.assertAlmostEqual(0.5, intermediate_warmup_learning_rate)
      final_learning_rate = sess.run(
          learning_rate, feed_dict={global_step: 1000})
      self.assertAlmostEqual(
          self.DEFAULT_DECAY_FACTOR**np.floor(
              500 / (self.DEFAULT_EPOCHS_PER_DECAY * 100)),
          final_learning_rate,
          places=5)

  def testExponentialDecayWithEpochsPerDecay(self):
    """epochs_per_decay overrides the default staircase width."""
    global_step = tf.placeholder_with_default(0, [], 'global_step')
    learning_rate = utils.build_learning_rate_schedule(
        learning_rate=1.,
        decay_type=enums.DecayType.EXPONENTIAL,
        warmup_start_epoch=0,
        max_learning_rate_epoch=5,
        decay_end_epoch=10,
        global_step=global_step,
        steps_per_epoch=100,
        epochs_per_decay=3.4)
    with self.cached_session() as sess:
      initial_learning_rate = sess.run(
          learning_rate, feed_dict={global_step: 0})
      self.assertAlmostEqual(0., initial_learning_rate)
      max_learning_rate = sess.run(
          learning_rate, feed_dict={global_step: 500})
      self.assertAlmostEqual(1., max_learning_rate)
      final_learning_rate = sess.run(
          learning_rate, feed_dict={global_step: 1000})
      self.assertAlmostEqual((0.97)**np.floor((500) / (3.4 * 100)),
                             final_learning_rate,
                             places=5)

  def testExponentialDecayWithDecayRate(self):
    """decay_rate overrides the default per-staircase decay factor."""
    global_step = tf.placeholder_with_default(0, [], 'global_step')
    learning_rate = utils.build_learning_rate_schedule(
        learning_rate=1.,
        decay_type=enums.DecayType.EXPONENTIAL,
        warmup_start_epoch=0,
        max_learning_rate_epoch=5,
        decay_end_epoch=10,
        global_step=global_step,
        steps_per_epoch=100,
        decay_rate=0.99)
    with self.cached_session() as sess:
      initial_learning_rate = sess.run(
          learning_rate, feed_dict={global_step: 0})
      self.assertAlmostEqual(0., initial_learning_rate)
      max_learning_rate = sess.run(
          learning_rate, feed_dict={global_step: 500})
      self.assertAlmostEqual(1., max_learning_rate)
      final_learning_rate = sess.run(
          learning_rate, feed_dict={global_step: 1000})
      self.assertAlmostEqual((0.99)**np.floor((500) / (2.4 * 100)),
                             final_learning_rate,
                             places=5)

  def testCosineDecayWithoutWarmup(self):
    """Cosine decay: 1 at step 0, 0.5 halfway, 0 at the end."""
    global_step = tf.placeholder_with_default(0, [], 'global_step')
    learning_rate = utils.build_learning_rate_schedule(
        learning_rate=1.,
        decay_type=enums.DecayType.COSINE,
        warmup_start_epoch=0,
        max_learning_rate_epoch=0,
        decay_end_epoch=10,
        global_step=global_step,
        steps_per_epoch=100)
    with self.cached_session() as sess:
      initial_learning_rate = sess.run(
          learning_rate, feed_dict={global_step: 0})
      self.assertAlmostEqual(1., initial_learning_rate)
      quarter_learning_rate = sess.run(
          learning_rate, feed_dict={global_step: 250})
      self.assertAlmostEqual((1. + np.cos(np.pi / 4.)) / 2.,
                             quarter_learning_rate)
      halfway_learning_rate = sess.run(
          learning_rate, feed_dict={global_step: 500})
      self.assertAlmostEqual(0.5, halfway_learning_rate)
      final_learning_rate = sess.run(
          learning_rate, feed_dict={global_step: 1000})
      self.assertAlmostEqual(0., final_learning_rate)

  def testCosineDecayWithWarmup(self):
    """Linear warmup to the peak, then cosine decay down to zero."""
    global_step = tf.placeholder_with_default(0, [], 'global_step')
    learning_rate = utils.build_learning_rate_schedule(
        learning_rate=1.,
        decay_type=enums.DecayType.COSINE,
        warmup_start_epoch=0,
        max_learning_rate_epoch=5,
        decay_end_epoch=10,
        global_step=global_step,
        steps_per_epoch=100)
    with self.cached_session() as sess:
      initial_learning_rate = sess.run(
          learning_rate, feed_dict={global_step: 0})
      self.assertAlmostEqual(0., initial_learning_rate)
      intermediate_warmup_learning_rate = sess.run(
          learning_rate, feed_dict={global_step: 250})
      self.assertAlmostEqual(0.5, intermediate_warmup_learning_rate)
      max_learning_rate = sess.run(
          learning_rate, feed_dict={global_step: 500})
      self.assertAlmostEqual(1., max_learning_rate)
      final_learning_rate = sess.run(
          learning_rate, feed_dict={global_step: 1000})
      self.assertAlmostEqual(0., final_learning_rate)

  # Default boundaries/rates of the piecewise-linear schedule under test.
  DEFAULT_BOUNDARY_EPOCHS = [30, 60, 80, 90]
  DEFAULT_DECAY_RATES = [1, 0.1, 0.01, 0.001, 1e-4]

  def testPiecewiseLinearDecayWithoutWarmup(self):
    """LR steps down by 10x at each default boundary epoch."""
    global_step = tf.placeholder_with_default(0, [], 'global_step')
    steps_per_epoch = 100
    learning_rate = utils.build_learning_rate_schedule(
        learning_rate=1.,
        decay_type=enums.DecayType.PIECEWISE_LINEAR,
        warmup_start_epoch=0,
        max_learning_rate_epoch=0,
        decay_end_epoch=1000,
        global_step=global_step,
        steps_per_epoch=steps_per_epoch)
    with self.cached_session() as sess:
      initial_learning_rate = sess.run(
          learning_rate, feed_dict={global_step: 1})
      self.assertAlmostEqual(1., initial_learning_rate)
      for i, epoch in enumerate(self.DEFAULT_BOUNDARY_EPOCHS):
        # Sample just before and just after each boundary.
        before_decay_learning_rate = sess.run(
            learning_rate, feed_dict={global_step: epoch * steps_per_epoch})
        after_decay_learning_rate = sess.run(
            learning_rate, feed_dict={global_step: epoch * steps_per_epoch + 1})
        self.assertAlmostEqual(before_decay_learning_rate,
                               self.DEFAULT_DECAY_RATES[i])
        self.assertAlmostEqual(after_decay_learning_rate,
                               self.DEFAULT_DECAY_RATES[i + 1])
      final_learning_rate = sess.run(
          learning_rate, feed_dict={global_step: 1000 * steps_per_epoch})
      self.assertAlmostEqual(1e-4, final_learning_rate, places=7)

  def testPiecewiseLinearDecayWithWarmup(self):
    """Warmup shifts every boundary by max_learning_rate_epoch epochs."""
    global_step = tf.placeholder_with_default(0, [], 'global_step')
    max_learning_rate_epoch = 5
    steps_per_epoch = 100
    learning_rate = utils.build_learning_rate_schedule(
        learning_rate=1.,
        decay_type=enums.DecayType.PIECEWISE_LINEAR,
        warmup_start_epoch=0,
        max_learning_rate_epoch=max_learning_rate_epoch,
        decay_end_epoch=1000,
        global_step=global_step,
        steps_per_epoch=steps_per_epoch)
    with self.cached_session() as sess:
      initial_learning_rate = sess.run(
          learning_rate, feed_dict={global_step: 0})
      self.assertAlmostEqual(0., initial_learning_rate)
      max_learning_rate = sess.run(
          learning_rate, feed_dict={global_step: 500})
      self.assertAlmostEqual(1., max_learning_rate)
      for i, epoch in enumerate(self.DEFAULT_BOUNDARY_EPOCHS):
        before_decay_learning_rate = sess.run(
            learning_rate,
            feed_dict={
                global_step: (epoch + max_learning_rate_epoch) * steps_per_epoch
            })
        after_decay_learning_rate = sess.run(
            learning_rate,
            feed_dict={
                global_step:
                    ((epoch + max_learning_rate_epoch) * steps_per_epoch) + 1
            })
        self.assertAlmostEqual(before_decay_learning_rate,
                               self.DEFAULT_DECAY_RATES[i])
        self.assertAlmostEqual(after_decay_learning_rate,
                               self.DEFAULT_DECAY_RATES[i + 1])
      final_learning_rate = sess.run(
          learning_rate, feed_dict={global_step: 1000 * steps_per_epoch})
      self.assertAlmostEqual(1e-4, final_learning_rate, places=7)

  def testPiecewiseLinearDecayWithBoundaries(self):
    """boundary_epochs and decay_rate override the defaults."""
    global_step = tf.placeholder_with_default(0, [], 'global_step')
    boundary_epochs = [1, 2, 3, 4]
    decay_rate = 1e-1
    max_learning_rate_epoch = 5
    steps_per_epoch = 100
    learning_rate = utils.build_learning_rate_schedule(
        learning_rate=1.,
        decay_type=enums.DecayType.PIECEWISE_LINEAR,
        warmup_start_epoch=0,
        max_learning_rate_epoch=max_learning_rate_epoch,
        decay_end_epoch=1000,
        global_step=global_step,
        steps_per_epoch=steps_per_epoch,
        boundary_epochs=boundary_epochs,
        decay_rate=decay_rate)
    with self.cached_session() as sess:
      initial_learning_rate = sess.run(
          learning_rate, feed_dict={global_step: 0})
      self.assertAlmostEqual(0., initial_learning_rate)
      max_learning_rate = sess.run(
          learning_rate, feed_dict={global_step: 500})
      self.assertAlmostEqual(1., max_learning_rate)
      for i, epoch in enumerate(boundary_epochs):
        before_decay_learning_rate = sess.run(
            learning_rate,
            feed_dict={
                global_step: (epoch + max_learning_rate_epoch) * steps_per_epoch
            })
        after_decay_learning_rate = sess.run(
            learning_rate,
            feed_dict={
                global_step:
                    ((epoch + max_learning_rate_epoch) * steps_per_epoch) + 1
            })
        self.assertAlmostEqual(before_decay_learning_rate, decay_rate**i)
        self.assertAlmostEqual(after_decay_learning_rate, decay_rate**(i + 1))
      final_learning_rate = sess.run(
          learning_rate, feed_dict={global_step: 1000})
      self.assertAlmostEqual(1e-4,
                             final_learning_rate,
                             places=5)
if __name__ == '__main__':
  # Standard TensorFlow test entry point.
  tf.test.main()
| [
"copybara-worker@google.com"
] | copybara-worker@google.com |
c45a9cdc1199bbada87416704b20196c0de40f81 | bf534da18426b49dbee0a0b1870f5f3a85922855 | /ex049tabparaqlqrnumero.py | 7a82f9bdfefab90ed265b895293c51a526aed53e | [] | no_license | kcpedrosa/Python-exercises | 0d20a72e7e68d9fc9714e3aabf4850fdbeb7d1f8 | ae35dfad869ceb3aac186fce5161cef8a77a7579 | refs/heads/master | 2021-05-20T08:46:29.318242 | 2020-04-01T15:44:36 | 2020-04-01T15:44:36 | 252,205,326 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 342 | py | tabuada = int(input('Digite o numero da tabuada: '))
n = int(input('Digite o inicio da tabuada: '))
print ("Tabuada de %d" %tabuada)
if n <= 10:
while n <= 10:
print ("%d x %d = %d" %(tabuada, n, tabuada * n))
n = n + 1
else : print ('Você está de brincadeira. Digite um numero menor ou igual a 10') | [
"kattine.costa@gmail.com"
] | kattine.costa@gmail.com |
32f32044f17e55e730595c1e880cbc6ad0dc5ede | c9f9a4efae174ef89071f4e3cdbb4f4bba3b2eac | /test7/test7/settings.py | 9859d2eaaa1e565ac1b6b019161a100bc6717fdf | [] | no_license | rj8928/projects | 8618bf40392a140bdf6b6264fd86cae3c6321ede | 5fe78f839b918a5cf1d0db52c9b6237ffdae08c9 | refs/heads/master | 2021-01-01T18:40:12.316231 | 2017-08-06T02:24:08 | 2017-08-06T02:24:08 | 98,400,133 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,864 | py | """
Django settings for test7 project.
Generated by 'django-admin startproject' using Django 1.8.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 't(%^u!#=-oy^s5=@%cq+enhs*3v@caddry0=m3-fh*hc8w9#^v'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
ALLOWED_HOSTS = ['*',]
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'booktest',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'test7.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR,'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'test7.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME':'test3',
'USER':'root',
'PASSWORD':'jxust8928',
'HOST':'localhost',
'PORT':'3306',
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR,'static'),
]
STATIC_ROOT = '/var/www/test7/static/'
| [
"rj8928@gmail.com"
] | rj8928@gmail.com |
4087256ac94244ef67a6445e2c928565d99a813d | 368be25e37bafa8cc795f7c9f34e4585e017091f | /.history/app_fav_books/views_20201113172600.py | 40f5096e93ababd00b378c44b61e32e0bdd41f54 | [] | no_license | steven-halla/fav_books_proj | ebcfbfda0e7f3cdc49d592c86c633b1d331da513 | 512005deb84ac906c9f24d4ab0939bd0db096716 | refs/heads/master | 2023-03-30T09:37:38.016063 | 2021-04-02T20:27:22 | 2021-04-02T20:27:22 | 354,125,658 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 454 | py | from django.shortcuts import render, redirect
from .models import *
from django.contrib import messages
def index(request):
    """Render the site landing page."""
    template_name = "index.html"
    return render(request, template_name)
def register_New_User(request):
    """Validate registration POST data; flash errors and bounce to '/'.

    NOTE(review): the success branch is unfinished -- it only aliases
    request.POST and never creates a User or returns an HttpResponse,
    so a valid submission currently falls through returning None.
    """
    errors = User.objects.basic_validator(request.POST)
    if len(errors) > 0:
        # Surface every validation message via the messages framework.
        for key, value in errors.items():
            messages.error(request, value)
        return redirect("/")
    else:
        first_name_from_post = request.POST
| [
"69405488+steven-halla@users.noreply.github.com"
] | 69405488+steven-halla@users.noreply.github.com |
5031fbd2b72ba7e0b98fd3e398eeba981cd8543b | 8fcc27160f8700be46296568260fa0017a0b3004 | /client/yamlext/bluepy.py | 6741b4f1dd254b01051e099a6668754eb6d323ab | [] | no_license | connoryang/dec-eve-serenity | 5d867f4eedfa896a4ef60f92556356cafd632c96 | b670aec7c8b4514fc47cd52e186d7ccf3aabb69e | refs/heads/master | 2021-01-22T06:33:16.303760 | 2016-03-16T15:15:32 | 2016-03-16T15:15:32 | 56,389,750 | 1 | 0 | null | 2016-04-16T15:05:24 | 2016-04-16T15:05:24 | null | UTF-8 | Python | false | false | 4,483 | py | #Embedded file name: e:\jenkins\workspace\client_SERENITY\branches\release\SERENITY\packages\yamlext\bluepy.py
from collections import OrderedDict
import yaml
from . import PyIO
cyaml_supported = hasattr(yaml, 'CLoader')
def isNumber(string):
try:
int(string)
return True
except ValueError:
try:
float(string)
return True
except ValueError:
pass
return False
class BlueRepresenter(yaml.representer.Representer):
def __init__(self, default_style = None, default_flow_style = None):
yaml.representer.Representer.__init__(self, default_style, default_flow_style)
def represent_sequence(self, tag, sequence, flow_style = None):
node = yaml.representer.Representer.represent_sequence(self, tag, sequence, flow_style)
firstElement = sequence[0]
if not isinstance(firstElement, (dict, OrderedDict, list)):
node.flow_style = True
for listItem in node.value:
if isinstance(listItem.value, (str, unicode)) and not isNumber(listItem.value):
listItem.style = '"'
return node
def represent_mapping(self, tag, mapping, flow_style = None):
node = yaml.representer.Representer.represent_mapping(self, tag, mapping, flow_style)
for nodeKey, nodeValue in node.value:
keyValue = nodeKey.value
valueValue = nodeValue.value
if keyValue != 'type' and isinstance(valueValue, (str, unicode)) and not isNumber(valueValue):
nodeValue.style = '"'
return node
if cyaml_supported:
from _yaml import CEmitter
class BlueDumper(CEmitter, yaml.serializer.Serializer, BlueRepresenter, yaml.resolver.Resolver):
def __init__(self, stream, default_style = None, default_flow_style = None, canonical = None, indent = None, width = None, allow_unicode = None, line_break = None, encoding = None, explicit_start = None, explicit_end = None, version = None, tags = None):
CEmitter.__init__(self, stream, canonical=canonical, indent=indent, width=width, encoding=encoding, allow_unicode=allow_unicode, line_break=line_break, explicit_start=explicit_start, explicit_end=explicit_end, version=version, tags=tags)
BlueRepresenter.__init__(self, default_style=default_style, default_flow_style=False)
yaml.resolver.Resolver.__init__(self)
class BlueLoader(yaml.CLoader):
pass
else:
class BlueDumper(yaml.emitter.Emitter, yaml.serializer.Serializer, BlueRepresenter, yaml.resolver.Resolver):
def __init__(self, stream, default_style = None, default_flow_style = None, canonical = None, indent = None, width = None, allow_unicode = None, line_break = None, encoding = None, explicit_start = None, explicit_end = None, version = None, tags = None):
yaml.emitter.Emitter.__init__(self, stream, canonical=canonical, indent=indent, width=width, allow_unicode=allow_unicode, line_break=line_break)
yaml.serializer.Serializer.__init__(self, encoding=encoding, explicit_start=explicit_start, explicit_end=explicit_end, version=version, tags=tags)
BlueRepresenter.__init__(self, default_style=default_style, default_flow_style=True)
yaml.resolver.Resolver.__init__(self)
class BlueLoader(yaml.Loader):
pass
def _construct_mapping(loader, node):
loader.flatten_mapping(node)
return OrderedDict(loader.construct_pairs(node))
def _dict_representer(dumper, d):
return dumper.represent_mapping(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, d.items())
BlueLoader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, _construct_mapping)
BlueDumper.add_representer(OrderedDict, _dict_representer)
class _BlueIO(PyIO):
def __init__(self):
PyIO.__init__(self)
self._loader = self._dumper = None
self._loader = BlueLoader
self._dumper = BlueDumper
def loads(s):
return _BlueIO().loads(s)
def loadfile(path):
return _BlueIO().loadfile(path)
def load(stream):
return _BlueIO().load(stream)
def dumps(obj, **kwargs):
return _BlueIO().dumps(obj, **kwargs)
def dump(obj, stream, **kwargs):
return _BlueIO().dump(obj, stream, **kwargs)
def dumpfile(obj, path, **kwargs):
return _BlueIO().dumpfile(obj, path, **kwargs)
| [
"masaho.shiro@gmail.com"
] | masaho.shiro@gmail.com |
dd1efcc51937fbd3a8a07281534017d5801039fb | 38efe804f2a070737984b5f4306ef9f09cbdf9c2 | /clusterDist.py | 1b14885ba62ef3ddc5e7485a39c7ad778ea9cb43 | [] | no_license | krm9c/HierarchicalDimensionReduction | 0bd59248035159c3d9fb8c9c407a3cecba7635d8 | 6af1455be32abf2e06667e7c264f52c76329ffee | refs/heads/master | 2020-05-27T21:10:03.027472 | 2018-02-04T22:49:25 | 2018-02-04T22:49:25 | 83,605,507 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,605 | py | #!/usr/bin/env python
# kmeans.py using any of the 20-odd metrics in scipy.spatial.distance
# kmeanssample 2 pass, first sample sqrt(N)
from __future__ import division
import random
import numpy as np
from scipy.spatial.distance import cdist # $scipy/spatial/distance.py
# http://docs.scipy.org/doc/scipy/reference/spatial.html
from scipy.sparse import issparse # $scipy/sparse/csr.py
__date__ = "2011-11-17 Nov denis"
# X sparse, any cdist metric: real app ?
# centres get dense rapidly, metrics in high dim hit distance whiteout
# vs unsupervised / semi-supervised svm
#...............................................................................
def kmeans( X, centres, delta=.001, maxiter=10, metric="euclidean", p=2, verbose=1 ):
""" centres, Xtocentre, distances = kmeans( X, initial centres ... )
in:
X N x dim may be sparse
centres k x dim: initial centres, e.g. random.sample( X, k )
delta: relative error, iterate until the average distance to centres
is within delta of the previous average distance
maxiter
metric: any of the 20-odd in scipy.spatial.distance
"chebyshev" = max, "cityblock" = L1, "minkowski" with p=
or a function( Xvec, centrevec ), e.g. Lqmetric below
p: for minkowski metric -- local mod cdist for 0 < p < 1 too
verbose: 0 silent, 2 prints running distances
out:
centres, k x dim
Xtocentre: each X -> its nearest centre, ints N -> k
distances, N
see also: kmeanssample below, class Kmeans below.
"""
if not issparse(X):
X = np.asanyarray(X) # ?
centres = centres.todense() if issparse(centres) \
else centres.copy()
N, dim = X.shape
k, cdim = centres.shape
if dim != cdim:
raise ValueError( "kmeans: X %s and centres %s must have the same number of columns" % (
X.shape, centres.shape ))
if verbose:
print "kmeans: X %s centres %s delta=%.2g maxiter=%d metric=%s" % (
X.shape, centres.shape, delta, maxiter, metric)
allx = np.arange(N)
prevdist = 0
for jiter in range( 1, maxiter+1 ):
D = cdist_sparse( X, centres, metric=metric, p=p ) # |X| x |centres|
xtoc = D.argmin(axis=1) # X -> nearest centre
distances = D[allx,xtoc]
avdist = distances.mean() # median ?
if verbose >= 2:
print "kmeans: av |X - nearest centre| = %.4g" % avdist
if (1 - delta) * prevdist <= avdist <= prevdist \
or jiter == maxiter:
break
prevdist = avdist
for jc in range(k): # (1 pass in C)
c = np.where( xtoc == jc )[0]
if len(c) > 0:
centres[jc] = X[c].mean( axis=0 )
if verbose:
print "kmeans: %d iterations cluster sizes:" % jiter, np.bincount(xtoc)
if verbose >= 2:
r50 = np.zeros(k)
r90 = np.zeros(k)
for j in range(k):
dist = distances[ xtoc == j ]
if len(dist) > 0:
r50[j], r90[j] = np.percentile( dist, (50, 90) )
print "kmeans: cluster 50 % radius", r50.astype(int)
print "kmeans: cluster 90 % radius", r90.astype(int)
# scale L1 / dim, L2 / sqrt(dim) ?
return centres, xtoc, distances
#...............................................................................
def kmeanssample( X, k, nsample=0, **kwargs ):
""" 2-pass kmeans, fast for large N:
1) kmeans a random sample of nsample ~ sqrt(N) from X
2) full kmeans, starting from those centres
"""
# merge w kmeans ? mttiw
# v large N: sample N^1/2, N^1/2 of that
# seed like sklearn ?
N, dim = X.shape
if nsample == 0:
nsample = max( 2*np.sqrt(N), 10*k )
Xsample = randomsample( X, int(nsample) )
pass1centres = randomsample( X, int(k) )
samplecentres = kmeans( Xsample, pass1centres, **kwargs )[0]
return kmeans( X, samplecentres, **kwargs )
def cdist_sparse( X, Y, **kwargs ):
""" -> |X| x |Y| cdist array, any cdist metric
X or Y may be sparse -- best csr
"""
# todense row at a time, v slow if both v sparse
sxy = 2*issparse(X) + issparse(Y)
if sxy == 0:
return cdist( X, Y, **kwargs )
d = np.empty( (X.shape[0], Y.shape[0]), np.float64 )
if sxy == 2:
for j, x in enumerate(X):
d[j] = cdist( x.todense(), Y, **kwargs ) [0]
elif sxy == 1:
for k, y in enumerate(Y):
d[:,k] = cdist( X, y.todense(), **kwargs ) [0]
else:
for j, x in enumerate(X):
for k, y in enumerate(Y):
d[j,k] = cdist( x.todense(), y.todense(), **kwargs ) [0]
return d
def randomsample( X, n ):
""" random.sample of the rows of X
X may be sparse -- best csr
"""
sampleix = random.sample( xrange( X.shape[0] ), int(n) )
return X[sampleix]
def nearestcentres( X, centres, metric="euclidean", p=2 ):
""" each X -> nearest centre, any metric
euclidean2 (~ withinss) is more sensitive to outliers,
cityblock (manhattan, L1) less sensitive
"""
D = cdist( X, centres, metric=metric, p=p ) # |X| x |centres|
return D.argmin(axis=1)
def Lqmetric( x, y=None, q=.5 ):
# yes a metric, may increase weight of near matches; see ...
return (np.abs(x - y) ** q) .mean() if y is not None \
else (np.abs(x) ** q) .mean()
#...............................................................................
class Kmeans:
""" km = Kmeans( X, k= or centres=, ... )
in: either initial centres= for kmeans
or k= [nsample=] for kmeanssample
out: km.centres, km.Xtocentre, km.distances
iterator:
for jcentre, J in km:
clustercentre = centres[jcentre]
J indexes e.g. X[J], classes[J]
"""
def __init__( self, X, k=0, centres=None, nsample=0, **kwargs ):
self.X = X
if centres is None:
self.centres, self.Xtocentre, self.distances = kmeanssample(
X, k=k, nsample=nsample, **kwargs )
else:
self.centres, self.Xtocentre, self.distances = kmeans(
X, centres, **kwargs )
def __iter__(self):
for jc in range(len(self.centres)):
yield jc, (self.Xtocentre == jc)
#...............................................................................
if __name__ == "__main__":
import random
import sys
from time import time
N = 10000
dim = 10
ncluster = 10
kmsample = 100 # 0: random centres, > 0: kmeanssample
kmdelta = .001
kmiter = 10
metric = "cityblock" # "chebyshev" = max, "cityblock" L1, Lqmetric
seed = 1
exec( "\n".join( sys.argv[1:] )) # run this.py N= ...
np.set_printoptions( 1, threshold=200, edgeitems=5, suppress=True )
np.random.seed(seed)
random.seed(seed)
print "N %d dim %d ncluster %d kmsample %d metric %s" % (
N, dim, ncluster, kmsample, metric)
X = np.random.exponential( size=(N,dim) )
# cf scikits-learn datasets/
t0 = time()
if kmsample > 0:
centres, xtoc, dist = kmeanssample( X, ncluster, nsample=kmsample,
delta=kmdelta, maxiter=kmiter, metric=metric, verbose=2 )
else:
randomcentres = randomsample( X, ncluster )
centres, xtoc, dist = kmeans( X, randomcentres,
delta=kmdelta, maxiter=kmiter, metric=metric, verbose=2 )
print "%.0f msec" % ((time() - t0) * 1000)
# also ~/py/np/kmeans/test-kmeans.py
| [
"krm9c@mst.edu"
] | krm9c@mst.edu |
5a821ae3b4527b4c618c4a37daee6aa675cb2fde | 2ee8b831f228791ce5f5bb02298ce399b301e5f5 | /virtual/bin/markdown2 | 26877791374b6c569706f45e02b86bb59f0afe02 | [
"MIT"
] | permissive | amoskipz/Blog-App | 7865c7cc5e9fcafdbe9593b4a912a8f7663315c2 | 16c2ac60cd61a1240ec32a00cafb16491be4be3a | refs/heads/master | 2023-03-15T03:59:26.140437 | 2021-03-14T09:38:25 | 2021-03-14T09:38:25 | 346,115,985 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 510 | #!/home/moringa/moringa/Blog-app/virtual/bin/python3
import sys
from os.path import join, dirname, exists
# Use the local markdown2.py if we are in the source tree.
source_tree_markdown2 = join(dirname(__file__), "..", "lib", "markdown2.py")
if exists(source_tree_markdown2):
sys.path.insert(0, dirname(source_tree_markdown2))
try:
from markdown2 import main
finally:
del sys.path[0]
else:
from markdown2 import main
if __name__ == "__main__":
sys.exit( main(sys.argv) )
| [
"oduorthomas14@gmail.com"
] | oduorthomas14@gmail.com | |
17e755c681f2af7c0e9da66f57bca19b8ada049b | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_4898.py | f24d6c7ea8b779aa4a7eadbb0bddac6bd0b5b7e0 | [] | no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 119 | py | # Problems with regex matching
for tag in soup.findAll('a', href = re.compile('^/l[0-9]+/.*$')):
print tag['href']
| [
"ubuntu@ip-172-31-7-228.us-west-2.compute.internal"
] | ubuntu@ip-172-31-7-228.us-west-2.compute.internal |
4e6f9e02a2bc25af9974e264a2cb211510c04cb5 | 9d278285f2bc899ac93ec887b1c31880ed39bf56 | /ondoc/doctor/migrations/0219_hospitaltiming.py | 141550a13b01289578de2f6301c2d453fb57bf04 | [] | no_license | ronit29/docprime | 945c21f8787387b99e4916cb3ba1618bc2a85034 | 60d4caf6c52a8b70174a1f654bc792d825ba1054 | refs/heads/master | 2023-04-01T14:54:10.811765 | 2020-04-07T18:57:34 | 2020-04-07T18:57:34 | 353,953,576 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,663 | py | # Generated by Django 2.0.5 on 2019-03-11 10:54
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('doctor', '0218_hospitalimage_cover_image'),
]
operations = [
migrations.CreateModel(
name='HospitalTiming',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('day', models.PositiveSmallIntegerField(choices=[(0, 'Monday'), (1, 'Tuesday'), (2, 'Wednesday'), (3, 'Thursday'), (4, 'Friday'), (5, 'Saturday'), (6, 'Sunday')])),
('start', models.DecimalField(choices=[(5.0, '5 AM'), (5.5, '5:30 AM'), (6.0, '6 AM'), (6.5, '6:30 AM'), (7.0, '7:00 AM'), (7.5, '7:30 AM'), (8.0, '8:00 AM'), (8.5, '8:30 AM'), (9.0, '9:00 AM'), (9.5, '9:30 AM'), (10.0, '10:00 AM'), (10.5, '10:30 AM'), (11.0, '11:00 AM'), (11.5, '11:30 AM'), (12.0, '12:00 PM'), (12.5, '12:30 PM'), (13.0, '1:00 PM'), (13.5, '1:30 PM'), (14.0, '2:00 PM'), (14.5, '2:30 PM'), (15.0, '3:00 PM'), (15.5, '3:30 PM'), (16.0, '4:00 PM'), (16.5, '4:30 PM'), (17.0, '5:00 PM'), (17.5, '5:30 PM'), (18.0, '6:00 PM'), (18.5, '6:30 PM'), (19.0, '7:00 PM'), (19.5, '7:30 PM'), (20.0, '8:00 PM'), (20.5, '8:30 PM'), (21.0, '9:00 PM'), (21.5, '9:30 PM'), (22.0, '10:00 PM'), (22.5, '10:30 PM'), (23.0, '11 PM'), (23.5, '11:30 PM')], decimal_places=1, max_digits=3)),
('end', models.DecimalField(choices=[(5.0, '5 AM'), (5.5, '5:30 AM'), (6.0, '6 AM'), (6.5, '6:30 AM'), (7.0, '7:00 AM'), (7.5, '7:30 AM'), (8.0, '8:00 AM'), (8.5, '8:30 AM'), (9.0, '9:00 AM'), (9.5, '9:30 AM'), (10.0, '10:00 AM'), (10.5, '10:30 AM'), (11.0, '11:00 AM'), (11.5, '11:30 AM'), (12.0, '12:00 PM'), (12.5, '12:30 PM'), (13.0, '1:00 PM'), (13.5, '1:30 PM'), (14.0, '2:00 PM'), (14.5, '2:30 PM'), (15.0, '3:00 PM'), (15.5, '3:30 PM'), (16.0, '4:00 PM'), (16.5, '4:30 PM'), (17.0, '5:00 PM'), (17.5, '5:30 PM'), (18.0, '6:00 PM'), (18.5, '6:30 PM'), (19.0, '7:00 PM'), (19.5, '7:30 PM'), (20.0, '8:00 PM'), (20.5, '8:30 PM'), (21.0, '9:00 PM'), (21.5, '9:30 PM'), (22.0, '10:00 PM'), (22.5, '10:30 PM'), (23.0, '11 PM'), (23.5, '11:30 PM')], decimal_places=1, max_digits=3)),
('hospital', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='hosp_availability', to='doctor.Hospital')),
],
options={
'db_table': 'hospital_timing',
},
),
]
| [
"shashanks@policybazaar.com"
] | shashanks@policybazaar.com |
a01b01cf1ea7a55d51d37206222684274074d0c6 | 2b42b40ae2e84b438146003bf231532973f1081d | /spec/mgm4458702.3.spec | 57e62c125a77bac683f7d31539272f9a92d57009 | [] | no_license | MG-RAST/mtf | 0ea0ebd0c0eb18ec6711e30de7cc336bdae7215a | e2ddb3b145068f22808ef43e2bbbbaeec7abccff | refs/heads/master | 2020-05-20T15:32:04.334532 | 2012-03-05T09:51:49 | 2012-03-05T09:51:49 | 3,625,755 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 14,314 | spec | {
"id": "mgm4458702.3",
"metadata": {
"mgm4458702.3.metadata.json": {
"format": "json",
"provider": "metagenomics.anl.gov"
}
},
"providers": {
"metagenomics.anl.gov": {
"files": {
"100.preprocess.info": {
"compression": null,
"description": null,
"size": 736,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458702.3/file/100.preprocess.info"
},
"100.preprocess.passed.fna.gz": {
"compression": "gzip",
"description": null,
"size": 189115,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458702.3/file/100.preprocess.passed.fna.gz"
},
"100.preprocess.passed.fna.stats": {
"compression": null,
"description": null,
"size": 309,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458702.3/file/100.preprocess.passed.fna.stats"
},
"100.preprocess.removed.fna.gz": {
"compression": "gzip",
"description": null,
"size": 4498,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458702.3/file/100.preprocess.removed.fna.gz"
},
"100.preprocess.removed.fna.stats": {
"compression": null,
"description": null,
"size": 303,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458702.3/file/100.preprocess.removed.fna.stats"
},
"205.screen.h_sapiens_asm.info": {
"compression": null,
"description": null,
"size": 448,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458702.3/file/205.screen.h_sapiens_asm.info"
},
"299.screen.info": {
"compression": null,
"description": null,
"size": 410,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458702.3/file/299.screen.info"
},
"299.screen.passed.fna.gcs": {
"compression": null,
"description": null,
"size": 1706,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458702.3/file/299.screen.passed.fna.gcs"
},
"299.screen.passed.fna.gz": {
"compression": "gzip",
"description": null,
"size": 123398,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458702.3/file/299.screen.passed.fna.gz"
},
"299.screen.passed.fna.lens": {
"compression": null,
"description": null,
"size": 459,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458702.3/file/299.screen.passed.fna.lens"
},
"299.screen.passed.fna.stats": {
"compression": null,
"description": null,
"size": 309,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458702.3/file/299.screen.passed.fna.stats"
},
"440.cluster.rna97.fna.gz": {
"compression": "gzip",
"description": null,
"size": 18568,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458702.3/file/440.cluster.rna97.fna.gz"
},
"440.cluster.rna97.fna.stats": {
"compression": null,
"description": null,
"size": 306,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458702.3/file/440.cluster.rna97.fna.stats"
},
"440.cluster.rna97.info": {
"compression": null,
"description": null,
"size": 947,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458702.3/file/440.cluster.rna97.info"
},
"440.cluster.rna97.mapping": {
"compression": null,
"description": null,
"size": 224447,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458702.3/file/440.cluster.rna97.mapping"
},
"440.cluster.rna97.mapping.stats": {
"compression": null,
"description": null,
"size": 48,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458702.3/file/440.cluster.rna97.mapping.stats"
},
"450.rna.expand.lca.gz": {
"compression": "gzip",
"description": null,
"size": 150980,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458702.3/file/450.rna.expand.lca.gz"
},
"450.rna.expand.rna.gz": {
"compression": "gzip",
"description": null,
"size": 40271,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458702.3/file/450.rna.expand.rna.gz"
},
"450.rna.sims.filter.gz": {
"compression": "gzip",
"description": null,
"size": 25959,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458702.3/file/450.rna.sims.filter.gz"
},
"450.rna.sims.gz": {
"compression": "gzip",
"description": null,
"size": 265333,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458702.3/file/450.rna.sims.gz"
},
"900.abundance.function.gz": {
"compression": "gzip",
"description": null,
"size": 13877,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458702.3/file/900.abundance.function.gz"
},
"900.abundance.lca.gz": {
"compression": "gzip",
"description": null,
"size": 10018,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458702.3/file/900.abundance.lca.gz"
},
"900.abundance.md5.gz": {
"compression": "gzip",
"description": null,
"size": 19103,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458702.3/file/900.abundance.md5.gz"
},
"900.abundance.ontology.gz": {
"compression": "gzip",
"description": null,
"size": 43,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458702.3/file/900.abundance.ontology.gz"
},
"900.abundance.organism.gz": {
"compression": "gzip",
"description": null,
"size": 29066,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458702.3/file/900.abundance.organism.gz"
},
"900.loadDB.sims.filter.seq": {
"compression": null,
"description": null,
"size": 2201109,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458702.3/file/900.loadDB.sims.filter.seq"
},
"900.loadDB.source.stats": {
"compression": null,
"description": null,
"size": 121,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458702.3/file/900.loadDB.source.stats"
},
"999.done.COG.stats": {
"compression": null,
"description": null,
"size": 1,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458702.3/file/999.done.COG.stats"
},
"999.done.KO.stats": {
"compression": null,
"description": null,
"size": 1,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458702.3/file/999.done.KO.stats"
},
"999.done.NOG.stats": {
"compression": null,
"description": null,
"size": 1,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458702.3/file/999.done.NOG.stats"
},
"999.done.Subsystems.stats": {
"compression": null,
"description": null,
"size": 1,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458702.3/file/999.done.Subsystems.stats"
},
"999.done.class.stats": {
"compression": null,
"description": null,
"size": 723,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458702.3/file/999.done.class.stats"
},
"999.done.domain.stats": {
"compression": null,
"description": null,
"size": 38,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458702.3/file/999.done.domain.stats"
},
"999.done.family.stats": {
"compression": null,
"description": null,
"size": 2562,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458702.3/file/999.done.family.stats"
},
"999.done.genus.stats": {
"compression": null,
"description": null,
"size": 3365,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458702.3/file/999.done.genus.stats"
},
"999.done.order.stats": {
"compression": null,
"description": null,
"size": 1220,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458702.3/file/999.done.order.stats"
},
"999.done.phylum.stats": {
"compression": null,
"description": null,
"size": 281,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458702.3/file/999.done.phylum.stats"
},
"999.done.rarefaction.stats": {
"compression": null,
"description": null,
"size": 23213,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458702.3/file/999.done.rarefaction.stats"
},
"999.done.sims.stats": {
"compression": null,
"description": null,
"size": 79,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458702.3/file/999.done.sims.stats"
},
"999.done.species.stats": {
"compression": null,
"description": null,
"size": 10654,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458702.3/file/999.done.species.stats"
}
},
"id": "mgm4458702.3",
"provider": "metagenomics.anl.gov",
"providerId": "mgm4458702.3"
}
},
"raw": {
"mgm4458702.3.fna.gz": {
"compression": "gzip",
"format": "fasta",
"provider": "metagenomics.anl.gov",
"url": "http://api.metagenomics.anl.gov/reads/mgm4458702.3"
}
}
} | [
"jared.wilkening@gmail.com"
] | jared.wilkening@gmail.com |
b4c92b2d75d4f543d8df6161ea6c2627d29c7cb9 | 56fd2d92b8327cfb7d8f95b89c52e1700343b726 | /odin/utilities/odin_init.py | f1c8be660e7bc9f44f1ebd0eed2725426c4967c8 | [
"MIT"
] | permissive | stjordanis/Odin | fecb640ccf4f2e6eb139389d25cbe37da334cdb6 | e2e9d638c68947d24f1260d35a3527dd84c2523f | refs/heads/master | 2020-04-15T09:13:17.850126 | 2017-02-09T00:25:55 | 2017-02-09T00:25:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,520 | py | import os
from .params import IOFiles
def odin_init(sname):
"""This function creates a directory with the necessary substructure for
Odin to run a trading algorithm within it. Specifically, it creates a folder
with the desired strategy name within the current directory. It then creates
a subdirectory 'history' that contains relevant data on the portfolio; it
also creates a file 'main.py' that is executed in order to perform trading.
Usage
-----
This code can be used from the command line as follows:
python3 -c "from odin.utilities import odin_init ; odin_init('strat')"
Parameters
----------
sname: String.
A string giving an identifier to the directory that will house the
implementation of the strategy and dependency files.
"""
path = "./" + sname + "/"
main = path + IOFiles.main_file.value
handlers = path + IOFiles.handlers_file.value
settings = path + IOFiles.settings_file.value
strategy = path + IOFiles.strategy_file.value
fund = path + IOFiles.fund_file.value
# Create files and directories.
if not os.path.isdir(path):
os.mkdir(path)
if not os.path.isfile(main):
open(main, "a").close()
if not os.path.isfile(handlers):
open(handlers, "a").close()
if not os.path.isfile(settings):
open(settings, "a").close()
if not os.path.isfile(strategy):
open(strategy, "a").close()
if not os.path.isfile(fund):
open(fund, "a").close()
| [
"jamesbrofos@gmail.com"
] | jamesbrofos@gmail.com |
f42664fd88e759076b6aa04fde0d5154ec7fbb09 | b05761d771bb5a85d39d370c649567c1ff3eb089 | /venv/lib/python3.10/site-packages/jedi/third_party/django-stubs/django-stubs/conf/__init__.pyi | bcec01dd19e6ecc86911379e3ebc0292e61b94e6 | [] | no_license | JawshyJ/Coding_Practice | 88c49cab955eab04609ec1003b6b8c20f103fc06 | eb6b229d41aa49b1545af2120e6bee8e982adb41 | refs/heads/master | 2023-02-19T10:18:04.818542 | 2023-02-06T21:22:58 | 2023-02-06T21:22:58 | 247,788,631 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 96 | pyi | /home/runner/.cache/pip/pool/88/3f/d6/3448e831c659c15c3f3acbc8afea93d43f96032561c061d445a546f614 | [
"37465112+JawshyJ@users.noreply.github.com"
] | 37465112+JawshyJ@users.noreply.github.com |
1541f0cc0729432960694e2a393e12af4eeb2c20 | f83ef53177180ebfeb5a3e230aa29794f52ce1fc | /opencv/opencv-2.4.13.6/modules/python/test/test_houghcircles.py | 32b474fc855db5f85cd7a31f56d2dcec13488b95 | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | msrLi/portingSources | fe7528b3fd08eed4a1b41383c88ee5c09c2294ef | 57d561730ab27804a3172b33807f2bffbc9e52ae | refs/heads/master | 2021-07-08T01:22:29.604203 | 2019-07-10T13:07:06 | 2019-07-10T13:07:06 | 196,183,165 | 2 | 1 | Apache-2.0 | 2020-10-13T14:30:53 | 2019-07-10T10:16:46 | null | UTF-8 | Python | false | false | 2,165 | py | #!/usr/bin/python
'''
This example illustrates how to use cv2.HoughCircles() function.
'''
# Python 2/3 compatibility
from __future__ import print_function
import cv2
import numpy as np
import sys
from numpy import pi, sin, cos
from tests_common import NewOpenCVTests
def circleApproximation(circle):
nPoints = 30
phi = 0
dPhi = 2*pi / nPoints
contour = []
for i in range(nPoints):
contour.append(([circle[0] + circle[2]*cos(i*dPhi),
circle[1] + circle[2]*sin(i*dPhi)]))
return np.array(contour).astype(int)
def convContoursIntersectiponRate(c1, c2):
s1 = cv2.contourArea(c1)
s2 = cv2.contourArea(c2)
s, _ = cv2.intersectConvexConvex(c1, c2)
return 2*s/(s1+s2)
class houghcircles_test(NewOpenCVTests):
def test_houghcircles(self):
fn = "samples/cpp/board.jpg"
src = self.get_sample(fn, 1)
img = cv2.cvtColor(src, cv2.COLOR_BGR2GRAY)
img = cv2.medianBlur(img, 5)
circles = cv2.HoughCircles(img, cv2.cv.CV_HOUGH_GRADIENT, 1, 10, np.array([]), 100, 30, 1, 30)[0]
testCircles = [[38, 181, 17.6],
[99.7, 166, 13.12],
[142.7, 160, 13.52],
[223.6, 110, 8.62],
[79.1, 206.7, 8.62],
[47.5, 351.6, 11.64],
[189.5, 354.4, 11.64],
[189.8, 298.9, 10.64],
[189.5, 252.4, 14.62],
[252.5, 393.4, 15.62],
[602.9, 467.5, 11.42],
[222, 210.4, 9.12],
[263.1, 216.7, 9.12],
[359.8, 222.6, 9.12],
[518.9, 120.9, 9.12],
[413.8, 113.4, 9.12],
[489, 127.2, 9.12],
[448.4, 121.3, 9.12],
[384.6, 128.9, 8.62]]
matches_counter = 0
for i in range(len(testCircles)):
for j in range(len(circles)):
tstCircle = circleApproximation(testCircles[i])
circle = circleApproximation(circles[j])
if convContoursIntersectiponRate(tstCircle, circle) > 0.6:
matches_counter += 1
self.assertGreater(float(matches_counter) / len(testCircles), .5)
self.assertLess(float(len(circles) - matches_counter) / len(circles), .75) | [
"lihuibin705@163.com"
] | lihuibin705@163.com |
dbb633ad70ce2edee54c8cc9d8267977bf5cd330 | b23c6c02d9b54c987bca2e36c3506cf80fa28239 | /Python GUI samples progs/bind()_3.py | a45558950b7943b21048f81da502bed094207d39 | [] | no_license | nishikaverma/Python_progs | 21190c88460a79f5ce20bb25d1b35f732fadd642 | 78f0cadde80b85356b4cb7ba518313094715aaa5 | refs/heads/master | 2022-06-12T14:54:03.442837 | 2020-05-08T10:28:58 | 2020-05-08T10:28:58 | 262,293,571 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 386 | py | from tkinter import *
def fun(e):
key = e.char
if key=="r":
obj['bg']="red"
if key=="b":
obj['bg']="blue"
if key=="g":
obj['bg']="green"
obj=Tk()
obj.geometry("400x400")
obj.bind("<Key>",fun)
#obj.bind("r",lambda e:obj.config(bg="red"))
#obj.bind("b",lambda e:obj.config(bg="blue"))
#obj.bind("g",lambda e:obj.config(bg="green"))
obj.mainloop() | [
"nishika.verma@live.com"
] | nishika.verma@live.com |
fac0664e6843c613d99bfe54a34977ea1d1fb7f6 | 34599596e145555fde0d4264a1d222f951f49051 | /pcat2py/class/20cec02a-5cc5-11e4-af55-00155d01fe08.py | 43c152203e7fd04e7a2001b174788fa197d466ee | [
"MIT"
] | permissive | phnomcobra/PCAT2PY | dc2fcbee142ce442e53da08476bfe4e68619346d | 937c3b365cdc5ac69b78f59070be0a21bdb53db0 | refs/heads/master | 2021-01-11T02:23:30.669168 | 2018-02-13T17:04:03 | 2018-02-13T17:04:03 | 70,970,520 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 963 | py | #!/usr/bin/python
################################################################################
# 20cec02a-5cc5-11e4-af55-00155d01fe08
#
# Justin Dierking
# justindierking@hardbitsolutions.com
# phnomcobra@gmail.com
#
# 10/24/2014 Original Construction
################################################################################
class Finding:
def __init__(self):
self.output = []
self.is_compliant = False
self.uuid = "20cec02a-5cc5-11e4-af55-00155d01fe08"
def check(self, cli):
# Initialize Compliance
self.is_compliant = True
# Get Accounts
usernames = cli.get_secedit_account('SeServiceLogonRight')
# Output Lines
self.output = [("SeServiceLogonRight=")] + usernames
# Recommended MultiSZ
rec_usernames = ("")
for user in usernames:
if user.lower() not in rec_usernames.lower():
self.is_compliant = False
return self.is_compliant
| [
"phnomcobra@gmail.com"
] | phnomcobra@gmail.com |
ab812bb7efcb5ea4da28c1482e801a1d9950ffff | d32bc79eb8631d6bc4ab20498631ba516db4d5f7 | /654_constructMaximumBinaryTree.py | e748b488968487a50b8649fc0aaa22486be027d1 | [] | no_license | Anirban2404/LeetCodePractice | 059f382d17f71726ad2d734b9579f5bab2bba93c | 786075e0f9f61cf062703bc0b41cc3191d77f033 | refs/heads/master | 2021-10-08T04:41:36.163328 | 2021-09-28T02:16:47 | 2021-09-28T02:16:47 | 164,513,056 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,102 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat Apr 20 13:14:08 2019
@author: anirban-mac
"""
"""
654. Maximum Binary Tree
Given an integer array with no duplicates. A maximum tree building on this
array is defined as follow:
The root is the maximum number in the array.
The left subtree is the maximum tree constructed from left part subarray
divided by the maximum number.
The right subtree is the maximum tree constructed from right part subarray
divided by the maximum number.
Construct the maximum tree by the given array and output the root node of
this tree.
Example 1:
Input: [3,2,1,6,0,5]
Output: return the tree root node representing the following tree:
6
/ \
3 5
\ /
2 0
\
1
Note:
The size of the given array will be in the range [1,1000].
"""
# Definition for a binary tree node.
class TreeNode(object):
    """A single binary-tree node: a value plus optional left/right children."""

    def __init__(self, x):
        self.val = x        # payload stored at this node
        self.left = None    # left subtree (None when absent)
        self.right = None   # right subtree (None when absent)
class Solution:
    """Builds and renders the maximum binary tree (LeetCode 654)."""

    def constructMaximumBinaryTree(self, nums):
        """Recursively build the maximum binary tree for *nums*.

        The largest value becomes the root; the left/right subtrees are the
        maximum trees of the sub-arrays on either side of it.

        :type nums: List[int]
        :rtype: TreeNode
        """
        if not nums:
            return None
        if len(nums) == 1:
            return TreeNode(nums[0])
        # Index of the maximum value: it splits the array into two halves.
        pivot = nums.index(max(nums))
        node = TreeNode(int(nums[pivot]))
        node.left = self.constructMaximumBinaryTree(nums[:pivot])
        node.right = self.constructMaximumBinaryTree(nums[pivot + 1:])
        return node

    def prettyPrintTree(self, node, prefix="", isLeft=True):
        """Print an ASCII rendering of the tree rooted at *node*."""
        if not node:
            print("Empty Tree")
            return
        # Render the right subtree above the node, the left one below it.
        if node.right:
            self.prettyPrintTree(node.right, prefix + ("│   " if isLeft else "    "), False)
        print(prefix + ("└── " if isLeft else "┌── ") + str(node.val))
        if node.left:
            self.prettyPrintTree(node.left, prefix + ("    " if isLeft else "│   "), True)
# Demo input for LeetCode 654; the resulting tree is pretty-printed below.
treelist = [3, 2, 1, 6, 0, 5]
treeNode = Solution().constructMaximumBinaryTree(treelist)
Solution().prettyPrintTree(treeNode,"",True) | [
"anirban-mac@Anirbans-MacBook-Pro.local"
] | anirban-mac@Anirbans-MacBook-Pro.local |
821a27a6f20fe9d4a2cfabbae9f68ed0bca88dfd | b4972d81804f0095c72da5d08574afecdc4725f1 | /config/wsgi.py | 94ac6fdcac7ec32ff53473877f00d63276533ccf | [] | no_license | NLPDev/Solar_gentelella | edf2ae9c919ad91603e2661b0d251567d4a44486 | b9414990da148687b56fe4564ae933378c22ca5c | refs/heads/master | 2022-10-08T19:57:52.268335 | 2020-06-12T03:02:06 | 2020-06-12T03:02:06 | 271,695,336 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,678 | py | """
WSGI config for solar project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
import sys
from django.core.wsgi import get_wsgi_application
# This allows easy placement of apps within the interior
# solar directory.
# Make the interior "solar" application directory importable: the project
# root sits one level above the directory containing this file.
_config_dir = os.path.dirname(os.path.abspath(__file__))
app_path = os.path.abspath(os.path.join(_config_dir, os.pardir))
sys.path.append(os.path.join(app_path, "solar"))

# Honour an existing DJANGO_SETTINGS_MODULE; otherwise default to production.
# Running multiple sites in one mod_wsgi process breaks this deferral — use
# mod_wsgi daemon mode (one daemon per site) or set the variable explicitly.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production")

# The WSGI callable used by any WSGI server configured against this module,
# including Django's development server when WSGI_APPLICATION points here.
application = get_wsgi_application()

# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| [
"vasile123andronic@gmail.com"
] | vasile123andronic@gmail.com |
d2cdfeaa05389cfeef257de3aa9711d8a69f9f6f | d7da288db4fd9fc0bb1c60c5074f290b5f70c8ef | /Aulas Python/Conteúdo das Aulas/020/Exercícios/Exercício 2.py | 185ddecbe92dcd8251ec5a48a006df70f0350f8b | [] | no_license | luizdefranca/Curso-Python-IgnoranciaZero | dbf4cf342b3f3efea6fb3b8cf27bf39ed92927e9 | 9fbf2f25e3e6fce1f1582af0bd6bc7dbc5b9f588 | refs/heads/master | 2020-04-09T07:17:00.735378 | 2016-09-12T10:51:37 | 2016-09-12T10:51:37 | 67,999,169 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 802 | py | """
Faça um programa que calcule as raízes de uma equação do segundo grau,
na forma ax2 + bx + c. O programa deverá pedir os valores de a, b e c e fazer
as consistências, informando ao usuário nas seguintes situações:
a. Se o usuário informar o valor de A igual a zero, a equação não é do
segundo grau e o programa não deve fazer pedir os demais valores,
sendo encerrado;
b. Se o delta calculado for negativo, a equação não possui raizes reais.
Informe ao usuário e encerre o programa;
c. Se o delta calculado for igual a zero a equação possui apenas uma raiz
real; informe-a ao usuário;
d. Se o delta for positivo, a equação possui duas raiz reais; informe-as
ao usuário;
delta = b**2 - 4*a*c
raiz = (-b +ou-(delta**(1/2)))/(2*a)
"""
| [
"luizramospe@hotmail.com"
] | luizramospe@hotmail.com |
bb87629628ed14fb42b26c5249d5785feefed7b6 | 8d3fd439c6d5a52eda578847545234b2ebdc4f3b | /机器学习百科/pycode/ML-NLP/MachineLearning/Linear Regression/housing_price.py | 44536b78d7158b6ca2a4262ab10c88fedf26ab6e | [] | no_license | gm-p/practice_demo | d1530dcdb3de95832f1fa5b6e30c75e7ca6acc05 | 7eaa825fc634ad21aea48713133c0266a44ac54a | refs/heads/main | 2023-03-31T23:01:02.302579 | 2021-04-04T13:29:50 | 2021-04-04T13:29:50 | 354,193,496 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,723 | py | import os
import numpy as np
import pandas as pd
# Fix the NumPy RNG so any randomized steps are reproducible.
np.random.seed(36)
import matplotlib
import seaborn
import matplotlib.pyplot as plt
from sklearn import datasets

# Load the training data; column 1 holds the sale price (the target).
housing = pd.read_csv('kc_train.csv', header=None)
target = housing.iloc[:, 1]
# Drop the target column from the features and renumber the columns 0..n-1.
housing.drop([1], inplace=True, axis=1)
housing.columns = range(housing.shape[1])
print(housing.head())
print(target.head())
# Test-set features (same layout, no target column expected).
t = pd.read_csv('kc_test.csv', header=None)
# Data preprocessing: show dtypes / missing-value summary.
housing.info()
# Feature scaling: min-max scale the training features.
# NOTE(review): the test set is scaled with its own independently fitted
# scaler rather than the one fitted on the training set — confirm intended.
from sklearn.preprocessing import MinMaxScaler
minmax_scaler = MinMaxScaler()
minmax_scaler.fit(housing)
scaler_housing = minmax_scaler.transform(housing)
scaler_housing = pd.DataFrame(scaler_housing, columns=housing.columns)
mm = MinMaxScaler()
mm.fit(t)
scaler_t = mm.transform(t)
scaler_t = pd.DataFrame(scaler_t, columns=t.columns)
# Fit an ordinary least-squares linear regression model.
from sklearn.linear_model import LinearRegression
lr_reg = LinearRegression()
lr_reg.fit(scaler_housing, target)
# Evaluate the fit with mean squared error on the training data.
from sklearn.metrics import mean_squared_error
preds = lr_reg.predict(scaler_housing)  # predictions on the training set
mse = mean_squared_error(preds, target)  # inspect `mse` to judge the fit
# Plot predictions against the target for the first 100 samples.
plt.figure(figsize=(10, 7))  # canvas size
num = 100
x = np.arange(1, num+1)  # compare the first 100 points
plt.plot(x, target[:num], label='target')  # ground-truth prices
plt.plot(x, preds[:num], label='preds')  # predicted prices
plt.legend(loc='upper right')  # legend position
plt.show()
# Predict on the test set; the result is written to result.csv just below.
result = lr_reg.predict(scaler_t)
df_result = pd.DataFrame(result)
df_result.to_csv('result.csv', index=False, header=False) | [
"abc"
] | abc |
926bf1581f5a2e5ac346eaa2f9f98bdf3d9c79b8 | cd850c0544004a15ae1a59cbc4dc0f953db80cb3 | /ALLCools/integration/cca.py | fdee0c491d747243934639b7cb446b9e87835557 | [
"MIT"
] | permissive | lhqing/ALLCools | 94508537be4f3620c8a9f230a4f077c6a43b996b | 7c88363aec3e2010d6bb8619e015747a2ec552e8 | refs/heads/master | 2023-08-30T19:52:35.254611 | 2023-08-18T15:29:27 | 2023-08-18T15:29:27 | 189,878,706 | 22 | 14 | MIT | 2023-08-15T22:58:12 | 2019-06-02T18:02:35 | Python | UTF-8 | Python | false | false | 12,534 | py | import numpy as np
from scipy.sparse import issparse
from sklearn.decomposition import TruncatedSVD
from sklearn.preprocessing import StandardScaler
from sklearn.utils.extmath import safe_sparse_dot
from ..clustering.lsi import tf_idf
def top_features_idx(data, n_features):
    """
    Select top features with the highest importance in CCs.

    Parameters
    ----------
    data
        data.shape = (n_cc, total_features)
    n_features
        number of features to select

    Returns
    -------
    features_idx : np.array
    """
    n_cc = data.shape[0]
    # Candidate pool per component: ten times the per-component quota,
    # capped so argpartition stays within bounds.
    per_dim = min(n_features * 10 // n_cc, data.shape[1] - 1)
    rows = np.arange(n_cc)[:, None]

    magnitude = np.abs(data)
    # Unordered indices of the `per_dim` largest absolute loadings per CC...
    cand = np.argpartition(-magnitude, per_dim, axis=1)[:, :per_dim]
    # ...then ordered by decreasing loading within each component.
    cand = cand[rows, np.argsort(-magnitude[rows, cand], axis=1)]

    # Widen the per-component prefix until enough unique features appear.
    for width in range(n_features // n_cc + 1, per_dim):
        chosen = np.unique(cand[:, :width].flatten())
        if len(chosen) > n_features:
            return chosen
    # Not enough unique features even at full width: return all candidates.
    return np.unique(cand[:, :per_dim].flatten())
def cca(
    data1,
    data2,
    scale1=True,
    scale2=True,
    n_components=50,
    max_cc_cell=20000,
    chunk_size=50000,
    random_state=0,
    svd_algorithm="randomized",
    k_filter=None,
    n_features=200,
):
    """
    Canonical correlation analysis between two cell-by-feature matrices.

    Both datasets are (optionally) standard-scaled and down-sampled to at most
    ``max_cc_cell`` cells; a truncated SVD of the cross-product of the
    down-sampled matrices yields the canonical components, which are then
    back-projected onto every cell of the full datasets in chunks.

    Parameters
    ----------
    data1, data2
        Cell-by-feature matrices (dense or scipy sparse). They are assumed to
        share the feature axis so that ``tf_data1.dot(tf_data2.T)`` is
        meaningful — TODO confirm against callers.
    scale1, scale2
        Whether to standard-scale each dataset separately before CCA.
    n_components
        Number of canonical components to compute.
    max_cc_cell
        Maximum number of cells per dataset used to fit the decomposition.
    chunk_size
        Number of cells per chunk when projecting the full datasets.
    random_state
        Seed for the down-sampling and the SVD solver.
    svd_algorithm
        Algorithm name forwarded to ``TruncatedSVD``.
    k_filter
        When truthy, also select the top high-loading features.
    n_features
        Number of features to select when ``k_filter`` is truthy.

    Returns
    -------
    U, V, high_dim_feature
        Canonical coordinates of data1 cells, of data2 cells, and the selected
        feature indices (``None`` unless ``k_filter`` is truthy).
    """
    np.random.seed(random_state)
    # Scale and subsample both datasets; scaler1/scaler2 are reused below so
    # the remaining cells are scaled consistently during chunked projection.
    tf_data1, tf_data2, scaler1, scaler2 = downsample(
        data1=data1,
        data2=data2,
        todense=True,
        scale1=scale1,
        scale2=scale2,
        max_cc_cell=max_cc_cell,
        random_state=random_state,
    )
    # CCA decomposition: truncated SVD of the cross-product matrix.
    model = TruncatedSVD(n_components=n_components, algorithm=svd_algorithm, random_state=random_state)
    U = model.fit_transform(tf_data1.dot(tf_data2.T))
    # select dimensions with non-zero singular values
    sel_dim = model.singular_values_ != 0
    print("non zero dims", sel_dim.sum())
    V = model.components_[sel_dim].T
    U = U[:, sel_dim] / model.singular_values_[sel_dim]
    # compute CC feature loadings and keep the top-loading feature indices
    if k_filter:
        high_dim_feature = top_features_idx(
            np.concatenate([U, V], axis=0).T.dot(np.concatenate([tf_data1, tf_data2], axis=0)), n_features=n_features
        )
    else:
        high_dim_feature = None
    # transform CC: project ALL cells of data2 in chunks.
    # NOTE: this must run before U is recomputed below — it uses the
    # down-sampled U together with tf_data1.
    if data2.shape[0] > max_cc_cell:
        V = []
        for chunk_start in np.arange(0, data2.shape[0], chunk_size):
            if issparse(data2):
                tmp = data2[chunk_start : (chunk_start + chunk_size)].toarray()
            else:
                tmp = data2[chunk_start : (chunk_start + chunk_size)]
            if scale2:
                tmp = scaler2.transform(tmp)
            V.append(np.dot(np.dot(U.T, tf_data1), tmp.T).T)
        V = np.concatenate(V, axis=0)
        V = V / model.singular_values_[sel_dim]
    # Project ALL cells of data1 using the SVD components and tf_data2.
    if data1.shape[0] > max_cc_cell:
        U = []
        for chunk_start in np.arange(0, data1.shape[0], chunk_size):
            if issparse(data1):
                tmp = data1[chunk_start : (chunk_start + chunk_size)].toarray()
            else:
                tmp = data1[chunk_start : (chunk_start + chunk_size)]
            if scale1:
                tmp = scaler1.transform(tmp)
            U.append(np.dot(tmp, np.dot(model.components_[sel_dim], tf_data2).T))
        U = np.concatenate(U, axis=0)
        U = U / model.singular_values_[sel_dim]
    return U, V, high_dim_feature
def adata_cca(adata, group_col, separate_scale=True, n_components=50, random_state=42):
    """Run CCA between the two cell groups labelled by ``adata.obs[group_col]``.

    The canonical coordinates of both groups are stacked row-wise and stored
    in ``adata.obsm["X_cca"]``. Raises ValueError unless exactly two groups
    are present.
    """
    labels = adata.obs[group_col].unique()
    if len(labels) != 2:
        raise ValueError(
            f"CCA only handle 2 groups, adata.obs[{group_col}] has {len(labels)} different groups."
        )
    first, second = labels
    mat_a = adata[adata.obs[group_col] == first, :].X
    mat_b = adata[adata.obs[group_col] == second, :].X
    pc, loading, _ = cca(
        data1=mat_a,
        data2=mat_b,
        scale1=separate_scale,
        scale2=separate_scale,
        n_components=n_components,
        random_state=random_state,
    )
    # Rows of both groups stacked in the shared canonical-component space.
    adata.obsm["X_cca"] = np.concatenate([pc, loading], axis=0)
    return
# def incremental_cca(a, b, max_chunk_size=10000, random_state=0):
# """
# Perform Incremental CCA by chunk dot product and IncrementalPCA
#
# Parameters
# ----------
# a
# dask.Array of dataset a
# b
# dask.Array of dataset b
# max_chunk_size
# Chunk size for Incremental fit and transform, the larger the better as long as MEM is enough
# random_state
#
# Returns
# -------
# Top CCA components
# """
# raise NotImplementedError
# # TODO PC is wrong
# pca = dIPCA(n_components=50,
# whiten=False,
# copy=True,
# batch_size=None,
# svd_solver='auto',
# iterated_power=0,
# random_state=random_state)
#
# # partial fit
# n_sample = a.shape[0]
# n_chunks = n_sample // max_chunk_size + 1
# chunk_size = int(n_sample / n_chunks) + 1
# for chunk_start in range(0, n_sample, chunk_size):
# print(chunk_start)
# X_chunk = a[chunk_start:chunk_start + chunk_size, :].dot(b.T)
# pca.partial_fit(X_chunk)
#
# # transform
# pcs = []
# for chunk_start in range(0, n_sample, chunk_size):
# print(chunk_start)
# X_chunk = a[chunk_start:chunk_start + chunk_size, :].dot(b.T)
# pc_chunk = pca.transform(X_chunk).compute()
# pcs.append(pc_chunk)
# pcs = np.concatenate(pcs)
#
# # concatenate CCA
# total_cc = np.concatenate([pcs, pca.components_.T])
# return total_cc
def lsi_cca(
    data1,
    data2,
    scale_factor=100000,
    n_components=50,
    max_cc_cell=20000,
    chunk_size=50000,
    svd_algorithm="randomized",
    min_cov_filter=5,
    random_state=0,
):
    """
    Canonical correlation analysis on TF-IDF-transformed (LSI) count matrices.

    Both matrices are down-sampled to at most ``max_cc_cell`` cells,
    low-coverage feature bins are removed, TF-IDF is applied, and a truncated
    SVD of the cross-product yields the components, which are then
    back-projected onto all cells of the original datasets in chunks.

    Parameters
    ----------
    data1, data2
        Cell-by-bin count matrices sharing the bin axis; assumed to be scipy
        sparse (the ``.A1`` attribute is used on column sums) — TODO confirm.
    scale_factor
        Scale factor forwarded to ``tf_idf``.
    n_components
        Number of SVD components to compute.
    max_cc_cell
        Maximum number of cells per dataset used to fit the decomposition.
    chunk_size
        Number of cells per chunk when projecting the full datasets.
    svd_algorithm
        Algorithm name forwarded to ``TruncatedSVD``.
    min_cov_filter
        Bins must exceed this total coverage in BOTH datasets to be kept.
    random_state
        Seed for the down-sampling step.

    Returns
    -------
    U, V
        Reduced coordinates of all cells in data1 and data2 respectively.
    """
    np.random.seed(random_state)
    # down sample data1 and data2 to run tf_idf and CCA
    if max_cc_cell < data1.shape[0]:
        sel1 = np.sort(np.random.choice(np.arange(data1.shape[0]), max_cc_cell, False))
        tf_data1 = data1[sel1, :]
    else:
        tf_data1 = data1
    if max_cc_cell < data2.shape[0]:
        sel2 = np.sort(np.random.choice(np.arange(data2.shape[0]), max_cc_cell, False))
        tf_data2 = data2[sel2, :]
    else:
        tf_data2 = data2
    # filter bins so min_cov_filter is satisfied in both datasets
    # (.A1 flattens a sparse-matrix sum into a 1-D ndarray)
    col_sum1 = tf_data1.sum(axis=0).A1
    col_sum2 = tf_data2.sum(axis=0).A1
    # the same bin_filter will also be used
    # in the chunked transform below
    bin_filter = np.logical_and(col_sum1 > min_cov_filter, col_sum2 > min_cov_filter)
    tf1, idf1 = tf_idf(tf_data1[:, bin_filter], scale_factor=scale_factor)
    tf2, idf2 = tf_idf(tf_data2[:, bin_filter], scale_factor=scale_factor)
    # CCA part: truncated SVD of the TF-IDF cross-product.
    # NOTE(review): random_state is hard-coded to 0 here instead of using the
    # `random_state` parameter — confirm whether this is intentional.
    model = TruncatedSVD(n_components=n_components, algorithm=svd_algorithm, random_state=0)
    tf = tf1.dot(tf2.T)
    U = model.fit_transform(tf)
    # select non-zero singular values
    # transform the whole dataset 2 to get V
    sel_dim = model.singular_values_ != 0
    nnz_singular_values = model.singular_values_[sel_dim]
    nnz_components = model.components_[sel_dim]
    if max_cc_cell > data2.shape[0]:
        V = nnz_components.T
    else:
        # use the safe_sparse_dot to avoid memory error
        # safe_sparse_dot take both sparse and dense matrix,
        # for dense matrix, it just uses normal numpy dot product
        V = np.concatenate(
            [
                safe_sparse_dot(
                    safe_sparse_dot(U.T[sel_dim], tf1),
                    tf_idf(
                        data2[chunk_start : (chunk_start + chunk_size)][:, bin_filter],
                        scale_factor=scale_factor,
                        idf=idf2,
                    )[
                        0
                    ].T,  # [0] is the tf
                ).T
                for chunk_start in np.arange(0, data2.shape[0], chunk_size)
            ],
            axis=0,
        )
        V = V / np.square(nnz_singular_values)
    # transform the whole dataset 1 to get U
    if max_cc_cell > data1.shape[0]:
        U = U[:, sel_dim] / nnz_singular_values
    else:
        U = np.concatenate(
            [
                safe_sparse_dot(
                    tf_idf(
                        data1[chunk_start : (chunk_start + chunk_size)][:, bin_filter],
                        scale_factor=scale_factor,
                        idf=idf1,
                    )[
                        0
                    ],  # [0] is the tf
                    safe_sparse_dot(nnz_components, tf2).T,
                )
                for chunk_start in np.arange(0, data1.shape[0], chunk_size)
            ],
            axis=0,
        )
        U = U / nnz_singular_values
    return U, V
class LSI:
    """TF-IDF + truncated SVD (latent semantic indexing) reduction.

    Parameters
    ----------
    scale_factor
        Scale factor forwarded to :func:`tf_idf`.
    n_components
        Number of SVD components to keep.
    algorithm
        SVD solver name forwarded to ``TruncatedSVD``.
    random_state
        Random seed for the SVD solver.
    idf
        Pre-computed IDF vector; copied when provided.
    model
        Pre-fitted SVD model to reuse instead of constructing a new one.
    """

    def __init__(
        self,
        scale_factor=100000,
        n_components=100,
        algorithm="arpack",
        random_state=0,
        idf=None,
        model=None,
    ):
        self.scale_factor = scale_factor
        if idf is not None:
            self.idf = idf.copy()
        # Bug fix: this condition previously re-tested `idf`, so a supplied
        # `model` was silently ignored and constructing with only `idf`
        # left `self.model` set to None.
        if model is not None:
            self.model = model
        else:
            self.model = TruncatedSVD(n_components=n_components, algorithm=algorithm, random_state=random_state)

    def fit(self, data):
        """Fit IDF weights and the SVD model on `data`; returns self."""
        tf, idf = tf_idf(data, self.scale_factor)
        self.idf = idf.copy()
        n_rows, n_cols = tf.shape
        # TruncatedSVD requires n_components strictly below both dimensions.
        self.model.n_components = min(n_rows, n_cols, self.model.n_components)
        self.model.fit(tf)
        return self

    def fit_transform(self, data):
        """Fit on `data` and return components normalized by singular values."""
        tf, idf = tf_idf(data, self.scale_factor)
        self.idf = idf.copy()
        n_rows, n_cols = tf.shape
        self.model.n_components = min(n_rows, n_cols, self.model.n_components)
        tf_reduce = self.model.fit_transform(tf)
        return tf_reduce / self.model.singular_values_

    def transform(self, data, chunk_size=50000, scaler=None):
        """Project `data` into the fitted LSI space, `chunk_size` rows at a time.

        ``scaler`` is accepted for API parity with ``SVD.transform`` but is
        not used here.
        """
        tf_reduce = []
        for chunk_start in np.arange(0, data.shape[0], chunk_size):
            tf, _ = tf_idf(data[chunk_start : (chunk_start + chunk_size)], self.scale_factor, self.idf)
            tf_reduce.append(self.model.transform(tf))
        return np.concatenate(tf_reduce, axis=0) / self.model.singular_values_
class SVD:
    """Thin TruncatedSVD wrapper with a chunked, optionally scaled transform."""

    def __init__(
        self,
        n_components=100,
        algorithm="randomized",
        random_state=0,
    ):
        self.model = TruncatedSVD(
            n_components=n_components, algorithm=algorithm, random_state=random_state
        )

    def fit(self, data):
        """Fit the underlying SVD model; returns self for chaining."""
        self.model.fit(data)
        return self

    def fit_transform(self, data):
        """Fit on `data` and return the reduced representation."""
        return self.model.fit_transform(data)

    def transform(self, data, chunk_size=50000, scaler=None):
        """Project `data` chunk by chunk.

        Sparse chunks are densified first, and `scaler` (when given) is
        applied before the projection.
        """
        pieces = []
        for start in np.arange(0, data.shape[0], chunk_size):
            block = data[start : (start + chunk_size)]
            if issparse(block):
                block = block.toarray()
            if scaler:
                block = scaler.transform(block)
            pieces.append(self.model.transform(block))
        return np.concatenate(pieces, axis=0)
def downsample(data1, data2, scale1, scale2, todense, max_cc_cell=20000, random_state=0):
    """Randomly subsample both datasets to at most `max_cc_cell` rows.

    Sparse matrices are densified when `todense` is True, and each dataset is
    standard-scaled separately when its `scale*` flag is set. Returns
    ``(sampled1, sampled2, scaler1, scaler2)``; a scaler is None when the
    matching flag is False.
    """
    scaler1, scaler2 = None, None
    np.random.seed(random_state)

    def _sample(matrix):
        # Subsample rows only when the matrix exceeds the cell budget;
        # otherwise work on a copy so the caller's data stays untouched.
        if matrix.shape[0] > max_cc_cell:
            rows = np.random.choice(np.arange(matrix.shape[0]), min(max_cc_cell, matrix.shape[0]), False)
            picked = matrix[rows]
        else:
            picked = matrix.copy()
        if todense and issparse(picked):
            picked = picked.toarray()
        return picked

    # Order matters for RNG reproducibility: data1 is sampled first.
    tf1 = _sample(data1)
    tf2 = _sample(data2)

    if scale1:
        scaler1 = StandardScaler()
        tf1 = scaler1.fit_transform(tf1)
    if scale2:
        scaler2 = StandardScaler()
        tf2 = scaler2.fit_transform(tf2)
    return tf1, tf2, scaler1, scaler2
| [
"liuhq@ucsd.edu"
] | liuhq@ucsd.edu |
c2236f0a5c489fbc77733aa9bdf7e1b571321af5 | 7bead245354e233f76fff4608938bf956abb84cf | /cloudmersive_convert_api_client/api/zip_archive_api.py | 4e99bf138536d64956368d48ecf32da4f89c60be | [
"Apache-2.0"
] | permissive | Cloudmersive/Cloudmersive.APIClient.Python.Convert | 5ba499937b9664f37cb2700509a4ba93952e9d6c | dba2fe7257229ebdacd266531b3724552c651009 | refs/heads/master | 2021-10-28T23:12:42.698951 | 2021-10-18T03:44:49 | 2021-10-18T03:44:49 | 138,449,321 | 3 | 2 | null | null | null | null | UTF-8 | Python | false | false | 39,152 | py | # coding: utf-8
"""
convertapi
Convert API lets you effortlessly convert file formats and types. # noqa: E501
OpenAPI spec version: v1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from cloudmersive_convert_api_client.api_client import ApiClient
class ZipArchiveApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def zip_archive_zip_create(self, input_file1, **kwargs): # noqa: E501
"""Compress files to create a new zip archive # noqa: E501
Create a new zip archive by compressing input files. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.zip_archive_zip_create(input_file1, async_req=True)
>>> result = thread.get()
:param async_req bool
:param file input_file1: First input file to perform the operation on. (required)
:param file input_file2: Second input file to perform the operation on.
:param file input_file3: Third input file to perform the operation on.
:param file input_file4: Fourth input file to perform the operation on.
:param file input_file5: Fifth input file to perform the operation on.
:param file input_file6: Sixth input file to perform the operation on.
:param file input_file7: Seventh input file to perform the operation on.
:param file input_file8: Eighth input file to perform the operation on.
:param file input_file9: Ninth input file to perform the operation on.
:param file input_file10: Tenth input file to perform the operation on.
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.zip_archive_zip_create_with_http_info(input_file1, **kwargs) # noqa: E501
else:
(data) = self.zip_archive_zip_create_with_http_info(input_file1, **kwargs) # noqa: E501
return data
def zip_archive_zip_create_with_http_info(self, input_file1, **kwargs): # noqa: E501
"""Compress files to create a new zip archive # noqa: E501
Create a new zip archive by compressing input files. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.zip_archive_zip_create_with_http_info(input_file1, async_req=True)
>>> result = thread.get()
:param async_req bool
:param file input_file1: First input file to perform the operation on. (required)
:param file input_file2: Second input file to perform the operation on.
:param file input_file3: Third input file to perform the operation on.
:param file input_file4: Fourth input file to perform the operation on.
:param file input_file5: Fifth input file to perform the operation on.
:param file input_file6: Sixth input file to perform the operation on.
:param file input_file7: Seventh input file to perform the operation on.
:param file input_file8: Eighth input file to perform the operation on.
:param file input_file9: Ninth input file to perform the operation on.
:param file input_file10: Tenth input file to perform the operation on.
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['input_file1', 'input_file2', 'input_file3', 'input_file4', 'input_file5', 'input_file6', 'input_file7', 'input_file8', 'input_file9', 'input_file10'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method zip_archive_zip_create" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'input_file1' is set
if ('input_file1' not in params or
params['input_file1'] is None):
raise ValueError("Missing the required parameter `input_file1` when calling `zip_archive_zip_create`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'input_file1' in params:
local_var_files['inputFile1'] = params['input_file1'] # noqa: E501
if 'input_file2' in params:
local_var_files['inputFile2'] = params['input_file2'] # noqa: E501
if 'input_file3' in params:
local_var_files['inputFile3'] = params['input_file3'] # noqa: E501
if 'input_file4' in params:
local_var_files['inputFile4'] = params['input_file4'] # noqa: E501
if 'input_file5' in params:
local_var_files['inputFile5'] = params['input_file5'] # noqa: E501
if 'input_file6' in params:
local_var_files['inputFile6'] = params['input_file6'] # noqa: E501
if 'input_file7' in params:
local_var_files['inputFile7'] = params['input_file7'] # noqa: E501
if 'input_file8' in params:
local_var_files['inputFile8'] = params['input_file8'] # noqa: E501
if 'input_file9' in params:
local_var_files['inputFile9'] = params['input_file9'] # noqa: E501
if 'input_file10' in params:
local_var_files['inputFile10'] = params['input_file10'] # noqa: E501
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/octet-stream']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['multipart/form-data']) # noqa: E501
# Authentication setting
auth_settings = ['Apikey'] # noqa: E501
return self.api_client.call_api(
'/convert/archive/zip/create', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def zip_archive_zip_create_advanced(self, request, **kwargs): # noqa: E501
"""Compress files and folders to create a new zip archive with advanced options # noqa: E501
Create a new zip archive by compressing input files, folders and leverage advanced options to control the structure of the resulting zip archive. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.zip_archive_zip_create_advanced(request, async_req=True)
>>> result = thread.get()
:param async_req bool
:param CreateZipArchiveRequest request: Input request (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.zip_archive_zip_create_advanced_with_http_info(request, **kwargs) # noqa: E501
else:
(data) = self.zip_archive_zip_create_advanced_with_http_info(request, **kwargs) # noqa: E501
return data
    def zip_archive_zip_create_advanced_with_http_info(self, request, **kwargs):  # noqa: E501
        """Compress files and folders to create a new zip archive with advanced options  # noqa: E501

        Create a new zip archive by compressing input files, folders and leverage advanced options to control the structure of the resulting zip archive.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.zip_archive_zip_create_advanced_with_http_info(request, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param CreateZipArchiveRequest request: Input request (required)
        :return: object
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Keyword arguments accepted besides the positional `request`.
        all_params = ['request']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # NOTE: locals() is snapshotted here, so `params` holds exactly self,
        # request, kwargs and all_params; no new local may be created above.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method zip_archive_zip_create_advanced" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'request' is set
        if ('request' not in params or
                params['request'] is None):
            raise ValueError("Missing the required parameter `request` when calling `zip_archive_zip_create_advanced`")  # noqa: E501

        # No path/query/form parameters for this endpoint; the request object
        # travels in the HTTP body instead.
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}

        body_params = None
        if 'request' in params:
            body_params = params['request']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/octet-stream'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json', 'text/json', 'application/xml', 'text/xml', 'application/x-www-form-urlencoded'])  # noqa: E501
        # Authentication setting
        auth_settings = ['Apikey']  # noqa: E501

        return self.api_client.call_api(
            '/convert/archive/zip/create/advanced', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='object',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def zip_archive_zip_create_encrypted(self, password, input_file1, **kwargs): # noqa: E501
"""Compress files to create a new, encrypted and password-protected zip archive # noqa: E501
Create a new zip archive by compressing input files, and also applies encryption and password protection to the zip. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.zip_archive_zip_create_encrypted(password, input_file1, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str password: Password to place on the Zip file; the longer the password, the more secure (required)
:param file input_file1: First input file to perform the operation on. (required)
:param str encryption_algorithm: Encryption algorithm to use; possible values are AES-256 (recommended), AES-128, and PK-Zip (not recommended; legacy, weak encryption algorithm). Default is AES-256.
:param file input_file2: Second input file to perform the operation on.
:param file input_file3: Third input file to perform the operation on.
:param file input_file4: Fourth input file to perform the operation on.
:param file input_file5: Fifth input file to perform the operation on.
:param file input_file6: Sixth input file to perform the operation on.
:param file input_file7: Seventh input file to perform the operation on.
:param file input_file8: Eighth input file to perform the operation on.
:param file input_file9: Ninth input file to perform the operation on.
:param file input_file10: Tenth input file to perform the operation on.
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.zip_archive_zip_create_encrypted_with_http_info(password, input_file1, **kwargs) # noqa: E501
else:
(data) = self.zip_archive_zip_create_encrypted_with_http_info(password, input_file1, **kwargs) # noqa: E501
return data
def zip_archive_zip_create_encrypted_with_http_info(self, password, input_file1, **kwargs):  # noqa: E501
    """Compress files to create a new, encrypted and password-protected zip archive  # noqa: E501

    Create a new zip archive by compressing input files, and also applies encryption and password protection to the zip.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.zip_archive_zip_create_encrypted_with_http_info(password, input_file1, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str password: Password to place on the Zip file; the longer the password, the more secure (required)
    :param file input_file1: First input file to perform the operation on. (required)
    :param str encryption_algorithm: Encryption algorithm to use; possible values are AES-256 (recommended), AES-128, and PK-Zip (not recommended; legacy, weak encryption algorithm). Default is AES-256.
    :param file input_file2: Second input file to perform the operation on.
    :param file input_file3: Third input file to perform the operation on.
    :param file input_file4: Fourth input file to perform the operation on.
    :param file input_file5: Fifth input file to perform the operation on.
    :param file input_file6: Sixth input file to perform the operation on.
    :param file input_file7: Seventh input file to perform the operation on.
    :param file input_file8: Eighth input file to perform the operation on.
    :param file input_file9: Ninth input file to perform the operation on.
    :param file input_file10: Tenth input file to perform the operation on.
    :return: str
        If the method is called asynchronously,
        returns the request thread.
    """
    # Complete set of keyword arguments this endpoint accepts; anything
    # else passed via **kwargs is rejected below.
    all_params = ['password', 'input_file1', 'encryption_algorithm', 'input_file2', 'input_file3', 'input_file4', 'input_file5', 'input_file6', 'input_file7', 'input_file8', 'input_file9', 'input_file10']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the local names so positional args and **kwargs can be
    # handled uniformly.  NOTE: this generated idiom depends on the exact
    # local variable names — do not rename locals in this method.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method zip_archive_zip_create_encrypted" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'password' is set
    if ('password' not in params or
            params['password'] is None):
        raise ValueError("Missing the required parameter `password` when calling `zip_archive_zip_create_encrypted`")  # noqa: E501
    # verify the required parameter 'input_file1' is set
    if ('input_file1' not in params or
            params['input_file1'] is None):
        raise ValueError("Missing the required parameter `input_file1` when calling `zip_archive_zip_create_encrypted`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    # Password and algorithm travel as HTTP headers (camelCase wire names).
    header_params = {}
    if 'password' in params:
        header_params['password'] = params['password']  # noqa: E501
    if 'encryption_algorithm' in params:
        header_params['encryptionAlgorithm'] = params['encryption_algorithm']  # noqa: E501

    # The files themselves are sent as multipart/form-data file parts.
    form_params = []
    local_var_files = {}
    if 'input_file1' in params:
        local_var_files['inputFile1'] = params['input_file1']  # noqa: E501
    if 'input_file2' in params:
        local_var_files['inputFile2'] = params['input_file2']  # noqa: E501
    if 'input_file3' in params:
        local_var_files['inputFile3'] = params['input_file3']  # noqa: E501
    if 'input_file4' in params:
        local_var_files['inputFile4'] = params['input_file4']  # noqa: E501
    if 'input_file5' in params:
        local_var_files['inputFile5'] = params['input_file5']  # noqa: E501
    if 'input_file6' in params:
        local_var_files['inputFile6'] = params['input_file6']  # noqa: E501
    if 'input_file7' in params:
        local_var_files['inputFile7'] = params['input_file7']  # noqa: E501
    if 'input_file8' in params:
        local_var_files['inputFile8'] = params['input_file8']  # noqa: E501
    if 'input_file9' in params:
        local_var_files['inputFile9'] = params['input_file9']  # noqa: E501
    if 'input_file10' in params:
        local_var_files['inputFile10'] = params['input_file10']  # noqa: E501

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/octet-stream'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['multipart/form-data'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Apikey']  # noqa: E501

    return self.api_client.call_api(
        '/convert/archive/zip/create/encrypted', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='str',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def zip_archive_zip_create_quarantine(self, password, input_file1, **kwargs):  # noqa: E501
    """Create an encrypted zip file to quarantine a dangerous file  # noqa: E501

    Convenience wrapper around
    ``zip_archive_zip_create_quarantine_with_http_info`` that returns only
    the deserialized response body.  Synchronous by default; pass
    ``async_req=True`` to get back a thread whose ``.get()`` yields the
    result.

    :param async_req bool
    :param str password: Password to place on the Zip file; the longer the password, the more secure (required)
    :param file input_file1: First input file to perform the operation on. (required)
    :param str encryption_algorithm: Encryption algorithm to use; possible values are AES-256 (recommended), AES-128, and PK-Zip (not recommended; legacy, weak encryption algorithm). Default is AES-256.
    :return: object
        If the method is called asynchronously,
        returns the request thread.
    """
    # Ask for just the payload, not the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Async and sync paths both simply forward the delegate's result.
    return self.zip_archive_zip_create_quarantine_with_http_info(
        password, input_file1, **kwargs)  # noqa: E501
def zip_archive_zip_create_quarantine_with_http_info(self, password, input_file1, **kwargs):  # noqa: E501
    """Create an encrypted zip file to quarantine a dangerous file  # noqa: E501

    Create a new zip archive by compressing input files, and also applies encryption and password protection to the zip, for the purposes of quarantining the underlyikng file.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.zip_archive_zip_create_quarantine_with_http_info(password, input_file1, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str password: Password to place on the Zip file; the longer the password, the more secure (required)
    :param file input_file1: First input file to perform the operation on. (required)
    :param str encryption_algorithm: Encryption algorithm to use; possible values are AES-256 (recommended), AES-128, and PK-Zip (not recommended; legacy, weak encryption algorithm). Default is AES-256.
    :return: object
        If the method is called asynchronously,
        returns the request thread.
    """
    # Accepted keyword arguments; anything else in **kwargs is rejected.
    all_params = ['password', 'input_file1', 'encryption_algorithm']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot of local names; this generated idiom depends on the exact
    # local variable names — do not rename locals in this method.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method zip_archive_zip_create_quarantine" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'password' is set
    if ('password' not in params or
            params['password'] is None):
        raise ValueError("Missing the required parameter `password` when calling `zip_archive_zip_create_quarantine`")  # noqa: E501
    # verify the required parameter 'input_file1' is set
    if ('input_file1' not in params or
            params['input_file1'] is None):
        raise ValueError("Missing the required parameter `input_file1` when calling `zip_archive_zip_create_quarantine`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    # Password and algorithm travel as HTTP headers (camelCase wire names).
    header_params = {}
    if 'password' in params:
        header_params['password'] = params['password']  # noqa: E501
    if 'encryption_algorithm' in params:
        header_params['encryptionAlgorithm'] = params['encryption_algorithm']  # noqa: E501

    # The quarantined file is sent as a multipart/form-data file part.
    form_params = []
    local_var_files = {}
    if 'input_file1' in params:
        local_var_files['inputFile1'] = params['input_file1']  # noqa: E501

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/octet-stream'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Apikey']  # noqa: E501

    return self.api_client.call_api(
        '/convert/archive/zip/create/quarantine', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def zip_archive_zip_decrypt(self, input_file, zip_password, **kwargs):  # noqa: E501
    """Decrypt and remove password protection on a zip file  # noqa: E501

    Convenience wrapper around ``zip_archive_zip_decrypt_with_http_info``
    that returns only the deserialized response body.  Synchronous by
    default; pass ``async_req=True`` to get back a thread whose ``.get()``
    yields the result.

    :param async_req bool
    :param file input_file: Input file to perform the operation on. (required)
    :param str zip_password: Required; Password for the input archive (required)
    :return: object
        If the method is called asynchronously,
        returns the request thread.
    """
    # Ask for just the payload, not the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Async and sync paths both simply forward the delegate's result.
    return self.zip_archive_zip_decrypt_with_http_info(
        input_file, zip_password, **kwargs)  # noqa: E501
def zip_archive_zip_decrypt_with_http_info(self, input_file, zip_password, **kwargs):  # noqa: E501
    """Decrypt and remove password protection on a zip file  # noqa: E501

    Decrypts and removes password protection from an encrypted zip file with the specified password  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.zip_archive_zip_decrypt_with_http_info(input_file, zip_password, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param file input_file: Input file to perform the operation on. (required)
    :param str zip_password: Required; Password for the input archive (required)
    :return: object
        If the method is called asynchronously,
        returns the request thread.
    """
    # Accepted keyword arguments; anything else in **kwargs is rejected.
    all_params = ['input_file', 'zip_password']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot of local names; this generated idiom depends on the exact
    # local variable names — do not rename locals in this method.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method zip_archive_zip_decrypt" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'input_file' is set
    if ('input_file' not in params or
            params['input_file'] is None):
        raise ValueError("Missing the required parameter `input_file` when calling `zip_archive_zip_decrypt`")  # noqa: E501
    # verify the required parameter 'zip_password' is set
    if ('zip_password' not in params or
            params['zip_password'] is None):
        raise ValueError("Missing the required parameter `zip_password` when calling `zip_archive_zip_decrypt`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    # The archive password travels as an HTTP header (camelCase wire name).
    header_params = {}
    if 'zip_password' in params:
        header_params['zipPassword'] = params['zip_password']  # noqa: E501

    # The archive itself is sent as a multipart/form-data file part.
    form_params = []
    local_var_files = {}
    if 'input_file' in params:
        local_var_files['inputFile'] = params['input_file']  # noqa: E501

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'text/json', 'application/xml', 'text/xml'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['multipart/form-data'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Apikey']  # noqa: E501

    return self.api_client.call_api(
        '/convert/archive/zip/decrypt', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def zip_archive_zip_encrypt_advanced(self, encryption_request, **kwargs):  # noqa: E501
    """Encrypt and password protect a zip file  # noqa: E501

    Convenience wrapper around
    ``zip_archive_zip_encrypt_advanced_with_http_info`` that returns only
    the deserialized response body.  Synchronous by default; pass
    ``async_req=True`` to get back a thread whose ``.get()`` yields the
    result.

    :param async_req bool
    :param ZipEncryptionAdvancedRequest encryption_request: Encryption request (required)
    :return: object
        If the method is called asynchronously,
        returns the request thread.
    """
    # Ask for just the payload, not the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Async and sync paths both simply forward the delegate's result.
    return self.zip_archive_zip_encrypt_advanced_with_http_info(
        encryption_request, **kwargs)  # noqa: E501
def zip_archive_zip_encrypt_advanced_with_http_info(self, encryption_request, **kwargs):  # noqa: E501
    """Encrypt and password protect a zip file  # noqa: E501

    Encrypts and password protects an existing zip file with the specified password and encryption algorithm  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.zip_archive_zip_encrypt_advanced_with_http_info(encryption_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param ZipEncryptionAdvancedRequest encryption_request: Encryption request (required)
    :return: object
        If the method is called asynchronously,
        returns the request thread.
    """
    # Accepted keyword arguments; anything else in **kwargs is rejected.
    all_params = ['encryption_request']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot of local names; this generated idiom depends on the exact
    # local variable names — do not rename locals in this method.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method zip_archive_zip_encrypt_advanced" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'encryption_request' is set
    if ('encryption_request' not in params or
            params['encryption_request'] is None):
        raise ValueError("Missing the required parameter `encryption_request` when calling `zip_archive_zip_encrypt_advanced`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # Unlike the multipart endpoints, this one sends the request model as
    # the serialized HTTP body.
    body_params = None
    if 'encryption_request' in params:
        body_params = params['encryption_request']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'text/json', 'application/xml', 'text/xml'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'text/json', 'application/xml', 'text/xml', 'application/x-www-form-urlencoded'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Apikey']  # noqa: E501

    return self.api_client.call_api(
        '/convert/archive/zip/encrypt/advanced', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def zip_archive_zip_extract(self, input_file, **kwargs):  # noqa: E501
    """Extract, decompress files and folders from a zip archive  # noqa: E501

    Convenience wrapper around ``zip_archive_zip_extract_with_http_info``
    that returns only the deserialized response body.  Synchronous by
    default; pass ``async_req=True`` to get back a thread whose ``.get()``
    yields the result.

    :param async_req bool
    :param file input_file: Input file to perform the operation on. (required)
    :return: ZipExtractResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Ask for just the payload, not the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Async and sync paths both simply forward the delegate's result.
    return self.zip_archive_zip_extract_with_http_info(input_file, **kwargs)  # noqa: E501
def zip_archive_zip_extract_with_http_info(self, input_file, **kwargs):  # noqa: E501
    """Extract, decompress files and folders from a zip archive  # noqa: E501

    Extracts a zip archive by decompressing files, and folders.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.zip_archive_zip_extract_with_http_info(input_file, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param file input_file: Input file to perform the operation on. (required)
    :return: ZipExtractResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Accepted keyword arguments; anything else in **kwargs is rejected.
    all_params = ['input_file']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot of local names; this generated idiom depends on the exact
    # local variable names — do not rename locals in this method.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method zip_archive_zip_extract" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'input_file' is set
    if ('input_file' not in params or
            params['input_file'] is None):
        raise ValueError("Missing the required parameter `input_file` when calling `zip_archive_zip_extract`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    # The archive is sent as a multipart/form-data file part.
    form_params = []
    local_var_files = {}
    if 'input_file' in params:
        local_var_files['inputFile'] = params['input_file']  # noqa: E501

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'text/json', 'application/xml', 'text/xml'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['multipart/form-data'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Apikey']  # noqa: E501

    return self.api_client.call_api(
        '/convert/archive/zip/extract', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ZipExtractResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
| [
"35204726+Cloudmersive@users.noreply.github.com"
] | 35204726+Cloudmersive@users.noreply.github.com |
f64ef5be1cb2296e37c32480044b9712039ba234 | 7bf82ee19c480e86b904c36dc96bf1fc907c4022 | /app/carpool/views.py | 6ef14c182e8deece303a53b476f55ff89d8e1f31 | [] | no_license | tangphillip/nomad | edf09beaa84d43c1064580f10d81ddfe3e1875e8 | 5757ee9499716fb5ab4844cb49d4fab6a49351f8 | refs/heads/master | 2021-05-16T17:59:16.690193 | 2017-09-11T01:44:24 | 2017-09-11T01:45:17 | 103,076,925 | 0 | 0 | null | 2017-09-11T01:44:13 | 2017-09-11T01:44:13 | null | UTF-8 | Python | false | false | 10,469 | py | import datetime
from flask import (
abort,
current_app,
flash,
jsonify,
redirect,
render_template,
request,
session,
url_for,
)
from flask_login import current_user, login_required
from flask_mail import Message
from geoalchemy2 import func
from geoalchemy2.shape import to_shape, from_shape
from shapely.geometry import mapping, Point
from . import pool_bp
from .forms import (
CancelCarpoolDriverForm,
DriverForm,
RiderForm,
)
from ..models import Carpool, Destination, RideRequest
from .. import db, mail
@pool_bp.route('/', methods=['GET', 'POST'])
def index():
    """Render the site landing page."""
    template_name = 'index.html'
    return render_template(template_name)
@pool_bp.route('/carpools/find')
def find():
    """Render the carpool search page."""
    template_name = 'carpools/find.html'
    return render_template(template_name)
@pool_bp.route('/carpools/starts.geojson')
def start_geojson():
    """Return carpool start points as a GeoJSON FeatureCollection.

    Recognized query parameters:
        ignore_prior      -- unless exactly 'false', exclude carpools whose
                             leave time is already in the past
        min_leave_date    -- MM/DD/YYYY; keep only carpools leaving on
                             that calendar day (400 on a bad format)
        near.lat/near.lon -- sort results by distance from this point
                             (400 on a bad float)
    """
    query = Carpool.query

    if request.args.get('ignore_prior') != 'false':
        query = query.filter(Carpool.leave_time >= datetime.datetime.utcnow())

    date_arg = request.args.get('min_leave_date')
    if date_arg:
        try:
            wanted_day = datetime.datetime.strptime(date_arg, '%m/%d/%Y')
        except ValueError:
            abort(400, "Invalid date format for min_leave_date")
        query = query.filter(func.date(Carpool.leave_time) == wanted_day)

    lat_arg = request.args.get('near.lat')
    lon_arg = request.args.get('near.lon')
    if lat_arg and lon_arg:
        try:
            lat = float(lat_arg)
            lon = float(lon_arg)
        except ValueError:
            abort(400, "Invalid lat/lon format")
        center = from_shape(Point(lon, lat), srid=4326)
        query = query.order_by(func.ST_Distance(Carpool.from_point, center))

    # Carpools without a geocoded start point can't be rendered on a map.
    features = [
        {
            'type': 'Feature',
            'geometry': mapping(to_shape(pool.from_point)),
            'id': url_for('carpool.details', carpool_id=pool.id),
            'properties': {
                'from_place': pool.from_place,
                'to_place': pool.to_place,
                'seats_available': pool.seats_available,
                'leave_time': pool.leave_time.isoformat(),
                'return_time': pool.return_time.isoformat(),
                'driver_gender': pool.driver.gender,
            },
        }
        for pool in query
        if pool.from_point is not None
    ]

    return jsonify({
        'type': 'FeatureCollection',
        'features': features,
    })
@pool_bp.route('/carpools/mine', methods=['GET', 'POST'])
@login_required
def mine():
    """List the carpools the logged-in user is driving."""
    return render_template(
        'carpools/mine.html',
        carpools=current_user.get_driving_carpools(),
    )
@pool_bp.route('/carpools/new', methods=['GET', 'POST'])
@login_required
def new():
    """Show the new-carpool form; create the carpool on a valid submit."""
    # A gender is required before driving; bounce to the profile page and
    # remember where to come back to.
    if not current_user.gender:
        flash("Please specify your gender before creating a carpool")
        session['next'] = url_for('carpool.new')
        return redirect(url_for('auth.profile'))

    form = DriverForm()

    # Build the destination drop-down: placeholder, visible destinations,
    # then a free-form "Other..." escape hatch.
    destinations = Destination.find_all_visible().all()
    choices = [(-1, "Select a Destination")]
    choices.extend((dest.id, dest.name) for dest in destinations)
    choices.append((-2, "Other..."))
    form.going_to_list.choices = choices

    if form.validate_on_submit():
        # Points are stored as EWKT (lon lat order, WGS84).
        origin_point = 'SRID=4326;POINT({} {})'.format(
            form.leaving_from_lon.data,
            form.leaving_from_lat.data)
        destination_point = 'SRID=4326;POINT({} {})'.format(
            form.going_to_lon.data,
            form.going_to_lat.data)

        carpool = Carpool(
            from_place=form.leaving_from.data,
            from_point=origin_point,
            to_place=form.going_to_text.data,
            to_point=destination_point,
            leave_time=form.depart_time.data,
            return_time=form.return_time.data,
            max_riders=form.car_size.data,
            driver_id=current_user.id,
        )
        db.session.add(carpool)
        db.session.commit()

        flash("Thanks for adding your carpool!", 'success')

        return redirect(url_for('carpool.details', carpool_id=carpool.id))

    return render_template(
        'carpools/add_driver.html',
        form=form,
        destinations=destinations,
    )
@pool_bp.route('/carpools/<int:carpool_id>', methods=['GET', 'POST'])
def details(carpool_id):
    """Show one carpool's detail page (404 if it doesn't exist)."""
    pool = Carpool.query.get_or_404(carpool_id)
    return render_template('carpools/show.html', pool=pool)
@pool_bp.route('/carpools/<int:carpool_id>/newrider', methods=['GET', 'POST'])
@login_required
def new_rider(carpool_id):
    """Let the current user request a seat on an existing carpool."""
    carpool = Carpool.query.get_or_404(carpool_id)

    details_url = url_for('carpool.details', carpool_id=carpool_id)

    # Drivers can't also ride in their own carpool.
    if carpool.current_user_is_driver:
        flash("You can't request a ride on a carpool you're driving in")
        return redirect(details_url)

    # A gender is required before requesting a seat; come back afterwards.
    if not current_user.gender:
        flash("Please specify your gender before creating a carpool request")
        session['next'] = url_for('carpool.new_rider', carpool_id=carpool_id)
        return redirect(url_for('auth.profile'))

    form = RiderForm()
    if form.validate_on_submit():
        if carpool.seats_available < 1:
            flash("There isn't enough space for you on "
                  "this ride. Try another one?", 'error')
            return redirect(details_url)

        # One pending/approved request per user per carpool.
        if carpool.get_current_user_ride_request():
            flash("You've already requested a seat on "
                  "this ride. Try another one or cancel your "
                  "existing request.", 'error')
            return redirect(details_url)

        seat_request = RideRequest(
            carpool_id=carpool.id,
            person_id=current_user.id,
            status='requested',
        )
        db.session.add(seat_request)
        db.session.commit()

        flash("You've been added to the list for this carpool!", 'success')

        return redirect(details_url)

    return render_template('carpools/add_rider.html', form=form)
@pool_bp.route('/carpools/<int:carpool_id>/request/<int:request_id>/<action>',
               methods=['GET', 'POST'])
@login_required
def modify_ride_request(carpool_id, request_id, action):
    """Approve, deny, or cancel a ride request on a carpool.

    ``action`` is 'approve', 'deny', or 'cancel'; which actions are honored
    depends on the request's current status (requested/denied/approved).
    Unrecognized combinations are silently ignored.  Always redirects back
    to the carpool's detail page.
    """
    # Renamed from ``request``: the old name shadowed the imported
    # ``flask.request`` proxy for the rest of this function.
    ride_request = RideRequest.query.get_or_404(request_id)

    # Technically the carpool arg isn't required here,
    # but it makes the URL prettier so there.
    if ride_request.carpool_id != carpool_id:
        return redirect(url_for('carpool.details', carpool_id=carpool_id))

    # TODO Check who can modify a ride request. Only:
    #      1) the driver modifying their carpool
    #      2) the rider modifying their request
    #      3) an admin?

    # TODO This big messy if block should be a state machine
    if ride_request.status == 'requested':
        if action == 'approve':
            ride_request.status = 'approved'
            db.session.add(ride_request)
            db.session.commit()
            flash("You approved their ride request.")
            # TODO Send email notification to rider
        elif action == 'deny':
            ride_request.status = 'denied'
            db.session.add(ride_request)
            db.session.commit()
            flash("You denied their ride request.")
            # TODO Send email notification to rider
        elif action == 'cancel':
            # The rider withdraws their own request entirely.
            db.session.delete(ride_request)
            db.session.commit()
            flash("You cancelled your ride request.")
    elif ride_request.status == 'denied':
        if action == 'approve':
            ride_request.status = 'approved'
            db.session.add(ride_request)
            db.session.commit()
            flash("You approved their ride request.")
            # TODO Send email notification to rider
    elif ride_request.status == 'approved':
        if action == 'deny':
            ride_request.status = 'denied'
            db.session.add(ride_request)
            db.session.commit()
            flash("You denied their ride request.")
            # TODO Send email notification to rider
    else:
        flash("You can't do that to the ride request.", "error")

    return redirect(url_for('carpool.details', carpool_id=carpool_id))
@pool_bp.route('/carpools/<int:carpool_id>/cancel', methods=['GET', 'POST'])
@login_required
def cancel(carpool_id):
    """Confirm and perform cancellation of a carpool."""
    carpool = Carpool.query.get_or_404(carpool_id)

    cancel_form = CancelCarpoolDriverForm()
    if not cancel_form.validate_on_submit():
        # First visit (or invalid submit): show the confirmation form.
        return render_template('carpools/cancel.html', form=cancel_form)

    if not cancel_form.submit.data:
        # The driver backed out of the confirmation dialog.
        return redirect(url_for('carpool.details', carpool_id=carpool_id))

    # Notify the riders (mail is only actually sent outside debug mode),
    # then remove the carpool.
    _email_carpool_cancelled(
        carpool,
        cancel_form.reason.data,
        not current_app.debug)
    db.session.delete(carpool)
    db.session.commit()

    flash("Your carpool was cancelled", 'success')

    return redirect(url_for('carpool.index'))
def _email_carpool_cancelled(carpool, reason, send=False):
    """Notify every rider of a carpool that the driver cancelled it.

    When ``send`` is False (e.g. in debug mode) nothing is emailed; each
    would-be message is logged instead.  No-op if the carpool has no riders.
    """
    driver = carpool.driver
    riders = carpool.riders
    if len(riders) == 0:
        return

    if not reason:
        reason = 'Reason not given!'

    subject = 'Carpool session on {} cancelled'.format(carpool.leave_time)

    # TODO: This should be an HTML template stored elsewhere
    body = '''
    Hello rider,

    Unfortunately, the carpool session for leaving from {} at {} has been
    cancelled.

    The reason given for the cancellation was: {}.

    Please reach out to {} in order to see if they're willing
    to reschedule.
    '''.format(
        carpool.from_place,
        carpool.leave_time,
        reason,
        driver.email)

    if send:
        # One SMTP connection reused for all rider messages.
        with mail.connect() as conn:
            for rider in riders:
                msg = Message(recipients=[rider.email],
                              body=body,
                              subject=subject)
                conn.send(msg)
    else:
        for rider in riders:
            current_app.logger.info(
                'Sent message to {} with subject {} and body {}'.format(
                    rider.email, subject, body))
| [
"ian.dees@gmail.com"
] | ian.dees@gmail.com |
e5a016a9d7842de12cf1230ba542c4a962e9c79d | 10b3f8b1bb2d43a053558e2974b1190ec5af9ab3 | /test/functional/rpc_bind.py | 5add6e40b9cff94c7002a194b59a6b71906076cc | [
"MIT"
] | permissive | Satoex/Sato | ff4683226c2cedb14203a86af68ae168e3c45400 | fda51ccc241ca426e838e1ba833c7eea26f1aedd | refs/heads/master | 2022-07-27T23:30:32.734477 | 2022-01-29T17:44:00 | 2022-01-29T17:44:00 | 346,001,467 | 6 | 8 | null | null | null | null | UTF-8 | Python | false | false | 5,058 | py | #!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Copyright (c) 2017-2020 The Sato Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test running satod with the -rpcbind and -rpcallowip options."""
import socket
import sys
from test_framework.test_framework import SatoTestFramework, SkipTest
from test_framework.util import assert_equal, get_rpc_proxy, rpc_url, get_datadir_path, rpc_port, assert_raises_rpc_error
from test_framework.netutil import addr_to_hex, get_bind_addrs, all_interfaces
class RPCBindTest(SatoTestFramework):
def set_test_params(self):
    """Configure a single-node test that starts from a fresh chain."""
    self.num_nodes = 1
    self.setup_clean_chain = True
def setup_network(self):
    """Create the node objects without starting them.

    Each subtest starts node 0 itself with its own bind/allowip flags.
    """
    self.add_nodes(self.num_nodes, None)
def run_bind_test(self, allow_ips, connect_to, addresses, expected):
    """
    Start a node with requested rpcallowip and rpcbind parameters,
    then try to connect, and check if the set of bound addresses
    matches the expected set.
    """
    self.log.info("Bind test for %s" % str(addresses))
    # Normalize expected (addr, port) pairs to the hex form reported by
    # the OS network-stats query.
    wanted = set((host, port) for (host, port) in
                 ((addr_to_hex(addr), port) for (addr, port) in expected))
    args = ['-disablewallet', '-nolisten']
    if allow_ips:
        args += ['-rpcallowip=' + ip for ip in allow_ips]
    args += ['-rpcbind=' + addr for addr in addresses]
    self.nodes[0].rpchost = connect_to
    self.start_node(0, args)
    pid = self.nodes[0].process.pid
    assert_equal(set(get_bind_addrs(pid)), wanted)
    self.stop_nodes()
def run_allowip_test(self, allow_ips, rpchost, rpcport):
    """
    Start a node with rpcallow IP, and request getnetworkinfo
    at a non-localhost IP.
    """
    self.log.info("Allow IP test for %s:%d" % (rpchost, rpcport))
    args = ['-disablewallet', '-nolisten']
    args += ['-rpcallowip=' + ip for ip in allow_ips]
    self.nodes[0].rpchost = None
    self.start_nodes([args])
    # connect to node through non-loopback interface
    url = rpc_url(get_datadir_path(self.options.tmpdir, 0), 0,
                  "%s:%d" % (rpchost, rpcport))
    node = get_rpc_proxy(url, 0, coverage_dir=self.options.coveragedir)
    node.getnetworkinfo()
    self.stop_nodes()
def run_test(self):
# due to OS-specific network stats queries, this test works only on Linux
if not sys.platform.startswith('linux'):
raise SkipTest("This test can only be run on linux.")
# find the first non-loopback interface for testing
non_loopback_ip = None
for _, ip in all_interfaces():
if ip != '127.0.0.1':
non_loopback_ip = ip
break
if non_loopback_ip is None:
raise SkipTest("This test requires at least one non-loopback IPv4 interface.")
try:
s = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM)
s.connect(("::1",1))
s.close()
except OSError:
raise SkipTest("This test requires IPv6 support.")
self.log.info("Using interface %s for testing" % non_loopback_ip)
default_port = rpc_port(0)
# check default without rpcallowip (IPv4 and IPv6 localhost)
self.run_bind_test(None, '127.0.0.1', [],
[('127.0.0.1', default_port), ('::1', default_port)])
# check default with rpcallowip (IPv6 any)
self.run_bind_test(['127.0.0.1'], '127.0.0.1', [],
[('::0', default_port)])
# check only IPv4 localhost (explicit)
self.run_bind_test(['127.0.0.1'], '127.0.0.1', ['127.0.0.1'],
[('127.0.0.1', default_port)])
# check only IPv4 localhost (explicit) with alternative port
self.run_bind_test(['127.0.0.1'], '127.0.0.1:32171', ['127.0.0.1:32171'],
[('127.0.0.1', 32171)])
# check only IPv4 localhost (explicit) with multiple alternative ports on same host
self.run_bind_test(['127.0.0.1'], '127.0.0.1:32171', ['127.0.0.1:32171', '127.0.0.1:32172'],
[('127.0.0.1', 32171), ('127.0.0.1', 32172)])
# check only IPv6 localhost (explicit)
self.run_bind_test(['[::1]'], '[::1]', ['[::1]'],
[('::1', default_port)])
# check both IPv4 and IPv6 localhost (explicit)
self.run_bind_test(['127.0.0.1'], '127.0.0.1', ['127.0.0.1', '[::1]'],
[('127.0.0.1', default_port), ('::1', default_port)])
# check only non-loopback interface
self.run_bind_test([non_loopback_ip], non_loopback_ip, [non_loopback_ip],
[(non_loopback_ip, default_port)])
# Check that with invalid rpcallowip, we are denied
self.run_allowip_test([non_loopback_ip], non_loopback_ip, default_port)
assert_raises_rpc_error(-342, "non-JSON HTTP response with '403 Forbidden' from server", self.run_allowip_test, ['1.1.1.1'], non_loopback_ip, default_port)
if __name__ == '__main__':
    # Entry point: run this functional test through the framework's main().
    RPCBindTest().main()
| [
"78755872+Satoex@users.noreply.github.com"
] | 78755872+Satoex@users.noreply.github.com |
990e8157dbaa6f55997fe8deb4a9f13dc4544538 | 0061e0c95b8068568a4cfe575748bb188296ccc1 | /backend/users/migrations/0002_auto_20210219_1532.py | e86be94450eb2a079af956aaa4949e1d756b8d34 | [] | no_license | crowdbotics-apps/dimelo-24619 | cd7f1fb3693a8f48ae09f3815478723853d34dab | e17433535325b344128c7dfec7f3b79ae44d9db6 | refs/heads/master | 2023-03-07T00:05:14.598992 | 2021-02-19T15:33:15 | 2021-02-19T15:33:15 | 340,406,342 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,275 | py | # Generated by Django 2.2.19 on 2021-02-19 15:32
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration for the custom User model: adds
    # created/updated audit timestamps and relaxes the identity fields
    # (name, email, first/last name) to be optional.

    dependencies = [
        ('users', '0001_initial'),
    ]

    operations = [
        # New audit timestamps; null=True lets existing rows migrate cleanly.
        migrations.AddField(
            model_name='user',
            name='last_updated',
            field=models.DateTimeField(auto_now=True, null=True),
        ),
        migrations.AddField(
            model_name='user',
            name='timestamp_created',
            field=models.DateTimeField(auto_now_add=True, null=True),
        ),
        # Make identity fields optional (blank=True, null=True) with a
        # uniform 255-character cap.
        migrations.AlterField(
            model_name='user',
            name='email',
            field=models.EmailField(blank=True, max_length=255, null=True),
        ),
        migrations.AlterField(
            model_name='user',
            name='first_name',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AlterField(
            model_name='user',
            name='last_name',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AlterField(
            model_name='user',
            name='name',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
    ]
| [
"team@crowdbotics.com"
] | team@crowdbotics.com |
708d0a15a49ca80a634e116056ef0ecf75446bac | 71012df2815a4666203a2d574f1c1745d5a9c6dd | /4 Django/solutions/blogproj/users/views.py | d1bf844a9039d17be4df416bcd6fbf344ced543f | [] | no_license | PdxCodeGuild/class_mouse | 6c3b85ccf5ed4d0c867aee70c46af1b22d20a9e8 | 40c229947260134a1f9da6fe3d7073bee3ebb3f7 | refs/heads/main | 2023-03-23T14:54:39.288754 | 2021-03-20T01:48:21 | 2021-03-20T01:48:21 | 321,429,925 | 1 | 7 | null | null | null | null | UTF-8 | Python | false | false | 1,900 | py | from django.contrib.auth.models import User
from django.contrib.auth import login, authenticate, logout
from django.http.response import HttpResponseRedirect
from django.shortcuts import render
from django.urls.base import reverse
from blog.models import BlogPost
# Create your views here.
def signup_user(request):
    """Register a new user account.

    GET renders the signup form; POST validates the submitted data,
    creates the user with a hashed password, logs them in and redirects
    to the profile page. On any validation failure the signup form is
    re-rendered.
    """
    if request.method == "POST":
        form = request.POST
        username = form['username']
        email = form['email']
        password = form['password']
        password2 = form['password2']
        # Bug fix: the confirmation password was read but never compared
        # against the first entry, so mismatched passwords were accepted.
        if password != password2:
            print("Passwords do not match.")
        elif User.objects.filter(username=username).exists():
            print("User exists already.")
        else:
            user = User()
            user.username = username
            user.email = email
            # set_password() stores a salted hash; never assign directly.
            user.set_password(password)
            user.save()
            login(request, user)
            return HttpResponseRedirect(reverse('users:profile'))
    return render(request, 'users/signup.html')
def login_user(request):
    """Authenticate a user.

    GET renders the login form; POST checks the credentials via Django's
    authenticate() and, on success, starts a session and redirects to the
    profile page. On failure the login form is re-rendered.
    """
    if request.method == "POST":
        form = request.POST
        username = form['username']
        password = form['password']
        user = authenticate(request, username=username, password=password)
        # Security fix: never echo the plaintext password to the console;
        # log only the username and the authentication outcome.
        print(username, user)
        if user is not None:
            print('logging in user')
            login(request, user)
            return HttpResponseRedirect(reverse('users:profile'))
    return render(request, 'users/login.html')
def logout_user(request):
    # End the current session and send the visitor back to the login page.
    logout(request)
    return HttpResponseRedirect(reverse('users:login'))
def profile(request):
    # Profile page is login-only: anonymous visitors are bounced to login.
    if not request.user.is_authenticated:
        return HttpResponseRedirect(reverse('users:login'))
    # Show only the posts authored by the logged-in user.
    blogs = BlogPost.objects.filter(user=request.user)
    context = {
        'blogs': blogs
    }
    return render(request, 'users/profile.html', context)
| [
"anthony@Anthonys-MBP.lan"
] | anthony@Anthonys-MBP.lan |
cde534676dd6318d6ae9af23e4e2b32ca28795e1 | fa93e53a9eee6cb476b8998d62067fce2fbcea13 | /devel/lib/python2.7/dist-packages/temperature_sensor_controller/msg/__init__.py | bb3e963efd2e84de669939e5e72a708b27f82edd | [] | no_license | oyetripathi/ROS_conclusion_project | 2947ee2f575ddf05480dabc69cf8af3c2df53f73 | 01e71350437d57d8112b6cec298f89fc8291fb5f | refs/heads/master | 2023-06-30T00:38:29.711137 | 2021-08-05T09:17:54 | 2021-08-05T09:17:54 | 392,716,311 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 150 | py | /home/sandeepan/tiago_public_ws/devel/.private/temperature_sensor_controller/lib/python2.7/dist-packages/temperature_sensor_controller/msg/__init__.py | [
"sandeepan.ghosh.ece20@itbhu.ac.in"
] | sandeepan.ghosh.ece20@itbhu.ac.in |
67c6b316084571aebd7e1c3631b369677521f80d | 070b8536813d977167dd6305a4707fb381ceb9f0 | /virtual/bin/pip3.7 | 278b3573aed2d6096f05c996cacee477cad21234 | [
"MIT"
] | permissive | Wakarende/NewsBits | 7b986191674a4d6efa4db21fcaa2b838e740ef98 | 27dc8110f4af73300e95d17afb4101d1ed30a49a | refs/heads/master | 2023-04-15T22:48:13.701347 | 2021-04-20T17:35:58 | 2021-04-20T17:35:58 | 358,610,779 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 262 | 7 | #!/Users/joykirii/Desktop/M-IPS/NewsBits/virtual/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"joykirii@gmail.com"
] | joykirii@gmail.com |
37088948ab97d7b4ad95cae9fe4ab71c0642f49b | 10d98fecb882d4c84595364f715f4e8b8309a66f | /hal/experiment_config/__init__.py | 7e2101a9f156beed9225c586e2942ebe94a987af | [
"CC-BY-4.0",
"Apache-2.0"
] | permissive | afcarl/google-research | 51c7b70d176c0d70a5ee31ea1d87590f3d6c6f42 | 320a49f768cea27200044c0d12f394aa6c795feb | refs/heads/master | 2021-12-02T18:36:03.760434 | 2021-09-30T20:59:01 | 2021-09-30T21:07:02 | 156,725,548 | 1 | 0 | Apache-2.0 | 2018-11-08T15:13:53 | 2018-11-08T15:13:52 | null | UTF-8 | Python | false | false | 985 | py | # coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Init module of experiment config."""
from .image_standard_setting import image_standard_setting
from .state_standard_setting import state_standard_setting
from hal.utils.config import Config
# Registry of known experiment settings, keyed by experiment name.
collection = {
    'state_standard': state_standard_setting,
    'image_standard': image_standard_setting,
}


def get_exp_config(exp_name):
  """Returns the Config for the named experiment setting.

  Args:
    exp_name: key into `collection`, e.g. 'state_standard'.

  Returns:
    A Config wrapping the setting produced by the registered factory.

  Raises:
    KeyError: if `exp_name` is not a registered experiment name.
  """
  try:
    setting_factory = collection[exp_name]
  except KeyError:
    # Improvement: surface the valid choices instead of a bare KeyError.
    raise KeyError('Unknown experiment config %r; available: %s' %
                   (exp_name, sorted(collection))) from None
  return Config(setting_factory())
| [
"copybara-worker@google.com"
] | copybara-worker@google.com |
e057bda25b33a7f25156d496ddf7aa29c3c7caf3 | 6782a4d4c4406982a66d98466d448b5b8ea2d366 | /tensorflow/compiler/mlir/quantization/tensorflow/python/representative_dataset.py | f7aceee1484d7f21224427847ada64eccd1b1b91 | [
"LicenseRef-scancode-generic-cla",
"Apache-2.0",
"BSD-2-Clause"
] | permissive | MaiKuraki/tensorflow | 7afc33f4bc505c794f51dd6cbbfc44a3233ea986 | 46c4751f554e5e7f249cbe6642f5f874ad7614a4 | refs/heads/master | 2022-07-31T20:48:39.704555 | 2022-06-29T18:01:02 | 2022-06-29T18:06:10 | 183,984,120 | 0 | 0 | Apache-2.0 | 2019-04-29T02:11:07 | 2019-04-29T02:11:07 | null | UTF-8 | Python | false | false | 1,261 | py | # Copyright 2022 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Defines types required for representative datasets for quantization."""
from typing import Iterable, Mapping, Tuple, Union
from tensorflow.python.types import core
# Type aliases consumed as calibration inputs during quantization (see the
# module docstring).
#
# A representative sample should be either:
# 1. (signature_key, {input_key -> input_value}) tuple, or
# 2. {input_key -> input_value} mappings.
RepresentativeSample = Union[Tuple[str, Mapping[str, core.TensorLike]],
                             Mapping[str, core.TensorLike]]

# A representative dataset is an iterable of representative samples.
RepresentativeDataset = Iterable[RepresentativeSample]
| [
"gardener@tensorflow.org"
] | gardener@tensorflow.org |
66f7a3f8de66cba4398c75fddb4dd631c1d28890 | 888f65551bb3fe1b8e84c205796b24678669a649 | /venv/lib/python3.7/site-packages/series/db.py | 19240d5a1c920be0fd897e64a552f1e28e07b4c9 | [] | no_license | chunharrison/NBA-Predictor | e6514c70f2cf26d6db4c14aee225cfbd9d5984a7 | 967951ba34debee012385af63f2bf8031dee51ca | refs/heads/master | 2022-05-04T22:02:03.374496 | 2019-05-15T05:55:34 | 2019-05-15T05:55:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,675 | py | import os
import threading
from pkg_resources import resource_filename
from sqlpharmacy.core import Database as SPDatabase
from sqlalchemy import Column, Integer, Boolean, String, Float
import alembic
from alembic.config import Config
from series.logging import Logging
from series.errors import InvalidDBError
class Database(SPDatabase, Logging):
    # sqlpharmacy Database extended with a reentrant lock, lazy
    # connect/disconnect handling, and alembic-based schema migrations
    # loaded from the owning package's resources.

    def __init__(self, root_module, connection_string='sqlite:///',
                 connect=True, auto_upgrade=True, **kw):
        # root_module: package that ships 'alembic.ini' and the 'alembic'
        # migration script directory as package resources.
        self._root_module = root_module
        self.url = connection_string
        self.lock = threading.RLock()
        Database.register()
        self._connect_args = dict(check_same_thread=False)
        self._db_args = kw
        self._setup_alembic()
        self._connected = False
        if connect:
            self.connect()
        # NOTE(review): nesting reconstructed — the auto-upgrade check is
        # assumed to run regardless of `connect` (safe either way, since
        # `_outdated` reconnects lazily); confirm against upstream.
        if auto_upgrade and self._outdated:
            self.upgrade('head')

    def _setup_alembic(self):
        # Locate the packaged alembic config and script directory, then
        # point them at the current connection URL.
        ini = resource_filename(self._root_module, 'alembic.ini')
        self._alembic_cfg = Config(ini)
        script = resource_filename(self._root_module, 'alembic')
        self._alembic_cfg.set_main_option('script_location', script)
        self._alembic_cfg.set_main_option('sqlalchemy.url', self.url)
        self._alembic_script = alembic.script.ScriptDirectory.from_config(
            self._alembic_cfg)

    def connect(self, create=True):
        # Establish the underlying sqlpharmacy engine/session.
        super().__init__(self.url, connect_args=self._connect_args,
                         **self._db_args)
        if create:
            self.create_tables()
        self._connected = True

    def disconnect(self):
        self._connected = False
        self.session.remove()

    def query(self, *a, **kw):
        # Lazily reconnect so queries keep working after disconnect().
        if not self._connected:
            self.connect()
        return self.session.query(*a, **kw)

    def add(self, data):
        self.session.add_then_commit(data)

    def delete(self, data):
        self.session.delete_then_commit(data)

    def commit(self):
        if self._connected:
            self.session.commit()
        else:
            self.log.error('Tried to commit while not connected!')

    def upgrade(self, revision):
        if not self._connected:
            self.connect()
        alembic.command.upgrade(self._alembic_cfg, revision)
        # Second pass with sql=True additionally emits the migration SQL.
        alembic.command.upgrade(self._alembic_cfg, revision, sql=True)

    @property
    def _outdated(self):
        # True when the database revision lags behind the newest script.
        return self._current_head != self._current_revision

    @property
    def _current_head(self):
        return self._alembic_script.get_current_head()

    @property
    def _current_revision(self):
        return self._migration_context.get_current_revision()

    @property
    def _migration_context(self):
        if not self._connected:
            self.connect()
        connection = self.session.connection()
        return alembic.migration.MigrationContext.configure(connection)

    def revision(self, message):
        ''' Autogenerate a migration file with upgrade/downgrade info by
        connecting to an outdated db without creating tables, applying
        all previous migrations (upgrading to 'head') and calling the
        alembic command 'revision'.
        '''
        self.connect(create=False)
        self.upgrade('head')
        alembic.command.revision(self._alembic_cfg, message=message,
                                 autogenerate=True)
class FileDatabase(Database):
    # Database backed by an on-disk SQLite file: creates missing parent
    # directories and tracks whether the file is freshly created, so that
    # migrations can be stamped instead of replayed on a brand-new db.

    def __init__(self, root_module, _path, **kw):
        self._path = _path
        self._check_path()
        connection_string = 'sqlite:///{}'.format(self._path)
        super().__init__(root_module, connection_string,
                         **kw)

    def _check_path(self):
        # Ensure the parent directory exists and the target is not itself
        # a directory; remember whether the db file is about to be created.
        _dir = self._path.parent
        if _dir and not _dir.is_dir():
            _dir.mkdir(parents=True, exist_ok=True)
        if self._path.is_dir():
            raise InvalidDBError('Is a directory!')
        self._new_db = not self._path.is_file()

    def upgrade(self, revision):
        ''' If a nonexisting file has been specified as db, alembic will
        not set the revision number on creation. Thus, a complete
        migration history will be attempted on a database with current
        head, and fail. Check here if the file had existed before, and
        if not, only write the requested revision to the db. Otherwise,
        do the upgrade.
        '''
        if self._new_db:
            alembic.command.stamp(self._alembic_cfg, revision)
            alembic.command.stamp(self._alembic_cfg, revision, sql=True)
        else:
            super().upgrade(revision)
__all__ = ('Database', 'Column', 'Integer', 'Boolean', 'FileDatabase', 'String', 'Float')
| [
"wjsdntjr@hotmail.com"
] | wjsdntjr@hotmail.com |
991b6836fb9e29cd210ef5db268f2febb4e34bbb | 41ede4fd3bfba1bff0166bca7aee80dcf21434c6 | /vedat/dist/python-egenix-mx-base/actions.py | c1faa95bac5817768dc6a725b7a2096051d0a330 | [] | no_license | pisilinux/playground | a7db4b42559a21cc72fd4c8649e0231ab6a3eb3c | e4e12fff8a847ba210befc8db7e2af8556c3adf7 | refs/heads/master | 2022-08-12T23:03:27.609506 | 2022-08-11T18:28:19 | 2022-08-11T18:28:19 | 8,429,459 | 16 | 22 | null | 2022-08-11T18:28:20 | 2013-02-26T09:37:11 | Python | UTF-8 | Python | false | false | 1,153 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Licensed under the GNU General Public License, version 3.
# See the file http://www.gnu.org/licenses/gpl.txt
from pisi.actionsapi import pisitools
from pisi.actionsapi import pythonmodules
from pisi.actionsapi import get
MX_DIR = "/usr/lib/%s/site-packages/mx" % get.curPYTHON()
EMPTY_DIRS = ["BeeBase/Doc", "DateTime/Doc", "Doc", "Queue/Doc", "Stack/Doc", "TextTools/Doc", "Tools/Doc", "UID/Doc", \
"URL/Doc", "DateTime/Examples", "TextTools/Examples"]
def install():
    # Build/install the python module, install license docs, then relocate
    # the bundled example scripts out of site-packages into the package
    # documentation tree.
    pythonmodules.install()
    pisitools.dodoc("mx/LICENSE", "mx/COPYRIGHT")
    pisitools.dodoc("LICENSE", "COPYRIGHT", "MANIFEST", "README")
    # Make dir under docs for examples
    pisitools.dodir("%s/%s/Examples/DateTime" % (get.docDIR(), get.srcNAME()))
    pisitools.dodir("%s/%s/Examples/TextTools" % (get.docDIR(), get.srcNAME()))
    # Move examples from /usr/lib
    pisitools.domove("%s/DateTime/Examples/*.py" % MX_DIR, "%s/%s/Examples/DateTime/" % (get.docDIR(), get.srcNAME()))
    pisitools.domove("%s/TextTools/Examples/*.py" % MX_DIR, "%s/%s/Examples/TextTools/" % (get.docDIR(), get.srcNAME()))
| [
"vedat@pisi_linux1.0"
] | vedat@pisi_linux1.0 |
05ac92fd75c1c83a14ea3cd112e50e0ddf78225e | 34111dffa8598cd1baf84ecad87248d3b2c62269 | /beartype_test/a00_unit/data/data_type.py | 151e16884dc565252e4bbf85291c1bbb2ee37886 | [
"MIT"
] | permissive | mbeacom/beartype | e6b7dc4490d19e446288a5bea2396204c67d51b1 | ad20c2c3bd2590331ee3f3de702371d86ec8cb55 | refs/heads/main | 2023-07-30T00:14:05.275537 | 2021-09-17T07:21:39 | 2021-09-17T07:21:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,428 | py | #!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2021 Beartype authors.
# See "LICENSE" for further details.
'''
Project-wide **generic types data** submodule.
This submodule predefines low-level class constants exercising known edge
cases on behalf of higher-level unit test submodules.
'''
# ....................{ IMPORTS }....................
import builtins
from beartype._data.mod.datamod import BUILTINS_MODULE_NAME
from contextlib import contextmanager
from sys import exc_info, implementation
from typing import Callable, ContextManager, Generator
# ....................{ CLASSES ~ hierarchy : 1 }....................
# Arbitrary class hierarchy.
class Class(object):
    '''
    Arbitrary pure-Python class defining an arbitrary method.
    '''

    def instance_method(self):
        '''
        Arbitrary pure-Python instance method (intentionally a no-op).
        '''

        pass


class Subclass(Class):
    '''
    Arbitrary pure-Python subclass of an arbitrary pure-Python superclass.
    '''

    pass


class SubclassSubclass(Subclass):
    '''
    Arbitrary pure-Python subclass of an arbitrary pure-Python subclass of an
    arbitrary pure-Python superclass.
    '''

    pass
# ....................{ CLASSES ~ hierarchy : 2 }....................
# Yet another arbitrary class hierarchy.
class OtherClass(object):
    '''
    Arbitrary pure-Python class defining an arbitrary method.
    '''

    def instance_method(self):
        '''
        Arbitrary pure-Python instance method (intentionally a no-op).
        '''

        pass


class OtherSubclass(OtherClass):
    '''
    Arbitrary pure-Python subclass of an arbitrary pure-Python superclass.
    '''

    pass


class OtherSubclassSubclass(OtherSubclass):
    '''
    Arbitrary pure-Python subclass of an arbitrary pure-Python subclass of an
    arbitrary pure-Python superclass.
    '''

    pass
# ....................{ CLASSES ~ isinstance }....................
class NonIsinstanceableMetaclass(type):
    '''
    Metaclass whose ``__instancecheck__()`` implementation always raises,
    so that any class created with this metaclass cannot legally appear as
    the second argument of the :func:`isinstance` builtin.
    '''

    def __instancecheck__(self, obj: object) -> bool:
        # Refuse every instance check, regardless of the object tested.
        message = f'{self} not passable as second parameter to isinstance().'
        raise TypeError(message)


class NonIsinstanceableClass(object, metaclass=NonIsinstanceableMetaclass):
    '''
    Class intentionally rendered unusable as the second argument of the
    :func:`isinstance` builtin, by way of its metaclass overriding the
    ``__instancecheck__()`` dunder method to unconditionally raise.
    '''

    pass
# ....................{ CLASSES ~ issubclass }....................
class NonIssubclassableMetaclass(type):
    '''
    Metaclass whose ``__subclasscheck__()`` implementation always raises,
    so that any class created with this metaclass cannot legally appear as
    the second argument of the :func:`issubclass` builtin.
    '''

    def __subclasscheck__(self, obj: object) -> bool:
        # Refuse every subclass check, regardless of the class tested.
        message = f'{self} not passable as second parameter to issubclass().'
        raise TypeError(message)


class NonIssubclassableClass(object, metaclass=NonIssubclassableMetaclass):
    '''
    Class intentionally rendered unusable as the second argument of the
    :func:`issubclass` builtin, by way of its metaclass overriding the
    ``__subclasscheck__()`` dunder method to unconditionally raise.
    '''

    pass
# ....................{ CALLABLES ~ async : factory }....................
# Note that we intentionally avoid declaring a factory function for deprecated
# generator-based coroutines decorated by either the types.coroutine() or
# asyncio.coroutine() decorators. CPython 3.10 removes support for these
# decorators and thus generator-based coroutines. See also:
# https://docs.python.org/3/library/asyncio-task.html#asyncio.coroutine
async def async_generator_factory():
    '''
    Arbitrary pure-Python asynchronous generator factory function.
    '''

    # A bare "yield" is what makes this coroutine an async *generator*.
    yield


async def coroutine_factory():
    '''
    Arbitrary pure-Python asynchronous non-generator coroutine factory
    function (intentionally a no-op).
    '''

    pass
# ....................{ CALLABLES ~ async : instance }....................
async_generator = async_generator_factory()
'''
Arbitrary pure-Python asynchronous generator.
'''
coroutine = coroutine_factory()
'''
Arbitrary pure-Python asynchronous non-generator coroutine.
'''
# Prevent Python from emitting "ResourceWarning" warnings.
coroutine.close()
# ....................{ CALLABLES ~ sync }....................
def function():
    '''
    Arbitrary pure-Python function (intentionally a no-op).
    '''

    pass


def decorator(func: Callable) -> Callable:
    '''
    **Identity decorator** (i.e., decorator returning the passed callable
    unmodified).

    This decorator enables logic elsewhere to exercise the
    :mod:`beartype.beartype` decorator with respect to nested callables
    decorated by both the :mod:`beartype.beartype` decorator and one or more
    decorators *not* the :mod:`beartype.beartype` decorator.
    '''

    return func


@contextmanager
def context_manager_factory(obj: object) -> ContextManager[object]:
    '''
    Create and return a new **identity context manager** (i.e., context
    manager trivially yielding the passed object).
    '''

    yield obj
# ....................{ CALLABLES ~ sync : generator }....................
def generator_factory() -> Generator[int, None, None]:
    '''
    Create and return a pure-Python generator yielding a single integer,
    accepting nothing, and returning nothing.
    '''

    yield 1
def generator_factory_yield_int_send_float_return_str() -> (
    Generator[int, float, str]):
    '''
    Create and return a pure-Python generator that rounds each truthy
    floating-point number sent to it (yielding the resulting integers) and
    returns a string constant once a falsy value is sent.

    See Also
    ----------
    https://www.python.org/dev/peps/pep-0484/#id39
        ``echo_round`` function strongly inspiring this implementation, copied
        verbatim from this subsection of :pep:`484`.
    '''

    # Prime the generator: the first yield produces None and receives the
    # first value externally sent by the caller.
    sent_float = yield

    # Echo the rounded value of each truthy float sent by the caller.
    while sent_float:
        sent_float = yield round(sent_float)

    # Terminate with a string constant (exposed as StopIteration.value).
    return 'Unmarred, scarred revanent remnants'
# ....................{ CALLABLES ~ closure }....................
def closure_factory():
    '''
    Arbitrary pure-Python closure factory function, returning a closure
    over one enclosing variable.
    '''

    # Enclosing variable captured by the closure defined below.
    captured_value = 42

    def closure():
        '''
        Arbitrary pure-Python closure (intentionally a no-op).
        '''

        nonlocal captured_value

    # Hand the freshly created closure back to the caller.
    return closure
def closure_cell_factory():
    '''
    Arbitrary pure-Python closure cell factory function, returning the
    single cell object of a freshly created closure.
    '''

    # Enclosing variable whose closure cell this factory returns.
    wrapped_value = 1337

    def closure():
        '''
        Arbitrary pure-Python closure (intentionally a no-op).
        '''

        nonlocal wrapped_value

    # Return this closure's first and only cell variable.
    return closure.__closure__[0]
# ....................{ CONSTANTS }....................
CALLABLE_CODE_OBJECT = function.__code__
'''
Arbitrary callable code object.
'''


# Initialized below.
TRACEBACK = None
'''
Arbitrary traceback object.
'''

# Define the "TRACEBACK" constant via EAFP.
try:
    raise TypeError
except TypeError:
    # exc_info() yields (type, value, traceback); keep only the traceback.
    TRACEBACK = exc_info()[2]

# ....................{ CONSTANTS ~ filenames             }....................
MODULE_FILENAME = __file__
'''
Absolute filename of the current submodule, declared purely for convenience.
'''
# ....................{ SETS ~ callable }....................
CALLABLES_PYTHON = frozenset((function, Class, Class.instance_method))
'''
Frozen set of pure-Python callables exercising edge cases.
'''


CALLABLES_C = frozenset((
    len,              # Built-in FunctionType
    ().count,         # Built-in Method Type
    object.__init__,  # Wrapper Descriptor Type
    object().__str__, # Method Wrapper Type
    str.join,         # Method Descriptor Type

    #FIXME: *UGH.* This probably should be callable under PyPy 3.6, but
    #it's not, which is why we've currently disabled this. That's clearly a
    #PyPy bug. Uncomment this *AFTER* we drop support for PyPy 3.6 (and any
    #newer PyPy versions also failing to implement this properly). We
    #should probably also consider filing an upstream issue with PyPy,
    #because this is non-ideal and non-orthogonal behaviour with CPython.
    #dict.__dict__['fromkeys'],
))
'''
Frozen set of C-based callables exercising edge cases.
'''


CALLABLES = CALLABLES_PYTHON | CALLABLES_C
'''
Frozen set of both pure-Python *and* C-based callables exercising edge cases.
'''


NON_CALLABLES = (
    CALLABLE_CODE_OBJECT,
    type.__dict__,           # Mapping Proxy Type
    implementation,          # Simple Namespace Type
    async_generator,
    closure_cell_factory(),  # Cell type
    coroutine,
    generator_factory(),
    TRACEBACK,
    TRACEBACK.tb_frame,
)
'''
Tuple of callable-like non-callable objects exercising edge cases,
intentionally defined as a tuple rather than frozen set due to the
unhashability of one or more members (e.g., ``TRACEBACK``).
'''
# ....................{ SETS ~ type : builtin }....................
TYPES_BUILTIN = frozenset((
bool,
complex,
dict,
float,
frozenset,
int,
list,
set,
str,
tuple,
))
'''
Frozen set of all **builtin types** i.e., globally accessible C-based type
requiring *no* explicit importation)(.
'''
# ....................{ SETS ~ type : non-builtin }....................
TYPES_BUILTIN_FAKE = frozenset((
# Type of this builtin.
builtin.__class__
# For each builtin (i.e., globally accessible object implicitly imported by
# the active Python interpreter into *EVERY* lexical context)...
for builtin in builtins.__dict__.values()
# If...
if (
# The type of this builtin also insists itself to be defined by the
# "builtins" module and thus also be a builtin *AND*...
builtin.__class__.__module__ == BUILTINS_MODULE_NAME and
# The "builtins" module contains *NO* globally-scoped attribute whose
# name is that of this type...
builtin.__class__.__name__ not in builtins.__dict__
# Then add this cheatin', lyin', no-good type to this set.
)
))
'''
Frozen set of all **fake builtin types** (i.e., types that are *not* builtin
but which nonetheless erroneously masquerade as being builtin).
See Also
----------
:data:`beartype._data.cls.datacls.TYPES_BUILTIN_FAKE`
Related runtime set. Whereas that runtime-specific set is efficiently
defined explicitly by listing all non-builtin builtin mimic types, this
test-specific set is inefficiently defined implicitly by introspecting the
:mod:`builtins` module. While less efficient, this test-specific set serves
as an essential sanity check on that runtime-specific set.
'''
TYPES_NONBUILTIN = frozenset((
# Arbitrary non-builtin type.
Class,
)) | TYPES_BUILTIN_FAKE
'''
Frozen set of arbitrary non-builtin types.
'''
| [
"leycec@gmail.com"
] | leycec@gmail.com |
8d69b25e44042226f07ebb909716a8ce394a4844 | 91e810849c5680311ce51c9e66289afb0a76336c | /docs/source/conf.py | 2241ae833fcd4f0e8d257f2c255164791026f69e | [
"MIT"
] | permissive | zhanglixixi/pytool | 307b0505dfa6312223239d34c0dc6a03904e1df3 | 35fb633149b663e6a0fe69c666e5883172223b45 | refs/heads/master | 2022-03-13T08:43:27.572958 | 2019-12-16T07:54:13 | 2019-12-16T07:54:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,947 | py | # -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/master/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('.'))
sys.path.insert(0, os.path.abspath('../..'))
#
# Added by Zhi Liu
# for using Read the Docs theme
import sphinx_rtd_theme
# -- Project information -----------------------------------------------------
project = 'PyTool'
copyright = '2018, Zhi Liu'
author = 'Zhi Liu'
# The short X.Y version
version = ''
# The full version, including alpha/beta/rc tags
release = '1.0'
# -- General configuration ---------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'

# Sphinx extension module names, as strings. They can be extensions coming
# with Sphinx (named 'sphinx.ext.*') or custom ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.doctest',
    'sphinx.ext.intersphinx',
    'sphinx.ext.napoleon',
    'sphinx.ext.todo',
    'sphinx.ext.coverage',
    'sphinx.ext.mathjax',
    'sphinx.ext.ifconfig',
    'sphinx.ext.githubpages',
]

# Paths that contain templates, relative to this directory.
templates_path = ['_templates']

# The suffix(es) of source filenames.
# You can specify multiple suffixes as a list of strings:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# The language for content autogenerated by Sphinx (also used for gettext
# content translation). FIX: ``language = None`` triggers "Invalid
# configuration value" on Sphinx >= 5.0; 'en' is the equivalent, supported
# spelling on every Sphinx version.
language = 'en'

# Patterns, relative to the source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = []

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = None
# -- Options for HTML output -------------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
# html_theme = 'alabaster'
html_theme = 'sphinx_rtd_theme'

# Theme-specific options to customize the look and feel; see each theme's
# documentation for what is available.
#
# html_theme_options = {}

# Custom static files (such as style sheets), relative to this directory.
# They are copied after the builtin static files, so a file named
# "default.css" here overrides the builtin "default.css".
html_static_path = ['_static']

# Custom sidebar templates, mapping document names to template names.
# The theme's defaults apply to documents that match no pattern; builtin
# themes use ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}


# -- Options for HTMLHelp output ---------------------------------------------

# Output file base name for the HTML help builder.
htmlhelp_basename = 'PyTooldoc'
# -- Options for LaTeX output ------------------------------------------------

# Every LaTeX knob ('papersize', 'pointsize', 'preamble', 'figure_align')
# is left at its Sphinx default.
latex_elements = {}

# Grouping the document tree into LaTeX files. One tuple per file:
# (source start file, target name, title, author,
#  documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'PyTool.tex', 'PyTool Documentation', 'Zhi Liu', 'manual'),
]
# -- Options for manual page output ------------------------------------------

# One entry per manual page:
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'pytool', 'PyTool Documentation', [author], 1),
]


# -- Options for Texinfo output ----------------------------------------------

# Grouping the document tree into Texinfo files. One tuple per file:
# (source start file, target name, title, author,
#  dir menu entry, description, category).
texinfo_documents = [
    (master_doc, 'PyTool', 'PyTool Documentation', author,
     'PyTool', 'One line description of project.', 'Miscellaneous'),
]
# -- Options for Epub output -------------------------------------------------

# Bibliographic Dublin Core info.
epub_title = project

# The unique identifier of the text (an ISBN number or the project
# homepage) and a unique identification for the text are deliberately
# left unset:
#
# epub_identifier = ''
# epub_uid = ''

# Files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# -- Extension configuration -------------------------------------------------

# -- Options for intersphinx extension ---------------------------------------

# Cross-reference the Python standard library docs. FIX: the bare-URI form
# ``{'https://docs.python.org/': None}`` is the deprecated pre-1.0 format;
# intersphinx expects a named mapping of project name -> (target URI,
# inventory location or None to use the default 'objects.inv').
intersphinx_mapping = {'python': ('https://docs.python.org/', None)}

# -- Options for todo extension ----------------------------------------------

# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
| [
"zhiliu.mind@gmail.com"
] | zhiliu.mind@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.